Merge to upstream 7c8181bedebe0edbd43c5d14c760f0011d8b3fcc.
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..af1b7ac
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,17 @@
+# Generated files
+*.pyc
+*~
+*.swp
+buildbot/cbuildbot.log
+buildbot/revisions.pfq
+.project
+.pydevproject
+buildbot/.completed_stages
+
+# source cross-reference files.
+tags
+.ctags
+cscope.out
+
+# Directory that might hold site config checkout.
+config/
diff --git a/AUTHORS b/AUTHORS
new file mode 100644
index 0000000..563c2ec
--- /dev/null
+++ b/AUTHORS
@@ -0,0 +1,6 @@
+# Names should be added to this file like so:
+# Name or Organization <email address>
+
+Google Inc. <opensource@google.com>
+Code Aurora Forum, Inc. <opensource@codeaurora.org>
+NVIDIA <chromium-os@nvidia.com>
diff --git a/COMMIT-QUEUE.ini b/COMMIT-QUEUE.ini
new file mode 100644
index 0000000..baecb5b
--- /dev/null
+++ b/COMMIT-QUEUE.ini
@@ -0,0 +1,11 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Per-project Commit Queue settings.
+# Documentation: http://goo.gl/5J7oND
+
+[GENERAL]
+
+# Run the default configs plus the binhost-pre-cq.
+pre-cq-configs: default binhost-pre-cq
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..0aa7fc9
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,27 @@
+// Copyright (c) 2006-2009 The Chromium OS Authors. All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//    * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//    * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//    * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..0875a84
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1,2 @@
+davidjames@chromium.org
+vapier@chromium.org
diff --git a/PRESUBMIT.cfg b/PRESUBMIT.cfg
new file mode 100644
index 0000000..a38497e
--- /dev/null
+++ b/PRESUBMIT.cfg
@@ -0,0 +1,3 @@
+[Hook Scripts]
+hook0=bin/cros lint ${PRESUBMIT_FILES}
+hook1=bin/preupload_dump_config
diff --git a/README.chromium b/README.chromium
new file mode 100644
index 0000000..7f2bad0
--- /dev/null
+++ b/README.chromium
@@ -0,0 +1,12 @@
+Name: chromite
+Short Name: chromite
+URL: https://chromium.googlesource.com/chromiumos/chromite
+Version: 0.0.2
+License: BSD
+License File: LICENSE
+Security Critical: no
+
+Description:
+This contains scripts used to build Chromium for Chromium OS
+('cros chrome-sdk'), as well as interact with the Chromium OS
+build system.
diff --git a/__init__.py b/__init__.py
new file mode 100644
index 0000000..324d57a
--- /dev/null
+++ b/__init__.py
@@ -0,0 +1,38 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import print_function
+
+import os
+import sys
+
+# Add the third_party/ dir to our search path so that we can find the
+# modules in there automatically.  This isn't normal, so don't replicate
+# this pattern elsewhere.
+_chromite_dir = os.path.normpath(os.path.dirname(os.path.realpath(__file__)))
+_containing_dir = os.path.dirname(_chromite_dir)
+_third_party_dirs = [os.path.join(_chromite_dir, 'third_party')]
+# If chromite is living inside the Chrome checkout under
+# <chrome_root>/src/third_party/chromite, its dependencies will be checked out
+# to <chrome_root>/src/third_party instead of the normal chromite/third_party
+# location due to git-submodule limitations (a submodule cannot be contained
+# inside another submodule's workspace), so we want to add that to the
+# search path.
+if os.path.basename(_containing_dir) == 'third_party':
+  _third_party_dirs.append(_containing_dir)
+
+# List of third_party packages that might need subpaths added to search.
+_paths = [
+    'dpkt',
+    os.path.join('gdata', 'src'),
+    'pyelftools',
+    'swarming.client',
+]
+
+for _path in _paths:
+  for _third_party in _third_party_dirs[:]:
+    _component = os.path.join(_third_party, _path)
+    if os.path.isdir(_component):
+      _third_party_dirs.append(_component)
+sys.path = _third_party_dirs + sys.path
diff --git a/appengine/.gitignore b/appengine/.gitignore
new file mode 100644
index 0000000..b65f7ad
--- /dev/null
+++ b/appengine/.gitignore
@@ -0,0 +1,2 @@
+/google_appengine
+/google_appengine_*
diff --git a/appengine/.testignore b/appengine/.testignore
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/.testignore
diff --git a/appengine/README b/appengine/README
new file mode 100644
index 0000000..38fbc14
--- /dev/null
+++ b/appengine/README
@@ -0,0 +1,21 @@
+AppEngine supports Python 2.5 and 2.7:
+  https://developers.google.com/appengine/docs/python/
+
+A utility script dev_appserver is in this directory to automatically
+download the sdk and call ./google_appengine/dev_appserver.py with
+arguments for serving at <yourhostname>:8080 (which can then be
+accessed from other machines on intranet).  See contents of script
+for details on what it does (it is very short).
+
+Examples based on project name chromiumos-build-stats.
+
+To start the dev server on <yourhostname>:8080 do this:
+%> ./dev_appserver chromiumos-build-stats/app.yaml
+
+To clear DB before starting:
+%> ./dev_appserver -c chromiumos-build-stats/app.yaml
+
+To update on chromiumos-build-stats.appspot.com (WHEN READY):
+%> ./google_appengine/appcfg.py update chromiumos-build-stats/
+
+See cq_stats/README to learn about how to work with that zapping good app!
diff --git a/appengine/ae_pylint b/appengine/ae_pylint
new file mode 100755
index 0000000..6557f26
--- /dev/null
+++ b/appengine/ae_pylint
@@ -0,0 +1,18 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a simple wrapper around pylint that allows imports to work.
+# The import path is unusual because these python files normally run
+# as an AppEngine instance, so the import paths are set up to find
+# libraries when run there.  They are not set up to find libraries
+# rooted at 'chromite'.
+
+# Run in appengine project directory.  Example:
+# cd chromiumos-build-stats ; ../ae_pylint main.py
+# Or run from this directory.  Example:
+# ./ae_pylint chromiumos-build-stats/main.py
+
+ROOT=$(realpath $(dirname $0))
+PYTHONPATH="${ROOT}"/google_appengine pylint --rcfile="${ROOT}"/../pylintrc $@
diff --git a/appengine/ae_shell b/appengine/ae_shell
new file mode 100755
index 0000000..24df18f
--- /dev/null
+++ b/appengine/ae_shell
@@ -0,0 +1,202 @@
+#!/bin/bash
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+APP_YAML="app.yaml"
+DEFAULT_SDK_MIRROR="https://storage.googleapis.com/appengine-sdks/featured/google_appengine_1.9.19.zip"
+# Apps can further modify the appengine sdk by providing this shell script in
+# their top level directory. This is needed because the turnaround time to
+# submitting patches upstream to the SDK is rather large.
+# WARNING: Remember that this only changes the local installation of the SDK.
+# So, this is only useful to fix bugs that make local development hard. AE
+# will use a non-patched version of the SDK.
+# The script will be run as:
+#   sdk_mod <absolute/path/to/sdk>
+APPENGINE_SDK_MOD_FILE="appengine_sdk_mod"
+
+PYTHONPATH_PREFIX=""
+PATH_PREFIX=""
+PS1_PREFIX=""
+
+usage() {
+  cat << EOF
+Usage: ${BASH_SOURCE} <app_dir>
+
+Use this script to enter an environment to develop an appengine app in.
+This script will:
+  - Download the requested version of SDK if it's not already available.
+  - Set up the environment in the new shell so that relevant SDK and project
+    tools are available, and PYTHONPATH is setup to use these tools.
+
+You can create some files under your toplevel directory to modify the
+behaviour of this script for your project:
+  - appengine_sdk_mod: A bash script that will be executed by this script as:
+        ./fancy_project/appengine_sdk_mod <absolute/path/to/AE/SDK>
+        This script can be used to modify the *local installation only* of the
+        SDK. This can, for example, fixup the SDK to ease local development.
+        For an example, see cq_stats/appengine_sdk_mod.
+EOF
+}
+
+enter_ae_shell() {
+  local rcfile="$(mktemp)"
+
+  cat >"${rcfile}" << EOF
+[[ -e ~/.bashrc ]] && source ~/.bashrc
+
+export PYTHONPATH="${PYTHONPATH_PREFIX}:\${PYTHONPATH}"
+export PATH="${PATH_PREFIX}:\${PATH}"
+export PS1="${PS1_PREFIX} \${PS1}"
+
+# Clear BASH_ENV so that if a subshell is launched, we don't
+# get sourced twice. (This file is going to disappear after the first time it's
+# sourced.)
+unset BASH_ENV
+rm -f "${rcfile}"
+EOF
+
+  info "Entering ae_shell for ${appname}..."
+  if [[ $# -eq 0 ]]; then
+    # Enter a shell that will survive successful completion of this script, and
+    # will have the new environment setup for the user.
+    exec bash --rcfile "${rcfile}" -i
+  else
+    # A command was given, run that command in the new shell.
+    # bash will ignore BASH_ENV if it detects that it's launched by sshd.
+    # Trick it!
+    unset SSH_CLIENT
+    unset SSH_CONNECTION
+    unset SSH_TTY
+    BASH_ENV=${rcfile} exec bash -c '"$@"' "$@"
+  fi
+}
+
+prepare_sdk() {
+  local -r appengine_dir="$1"
+  local -r ae_sdk_dir="$2"
+  local -r appname="$3"
+
+  if [[ ! -d "${ae_sdk_dir}" ]]; then
+    local temp_ae_sdk_dir="temp_ae_sdk_dir"
+
+    info "Using appegine SDK mirror ${DEFAULT_SDK_MIRROR}"
+
+    rm -rf "${temp_ae_sdk_dir}"
+    mkdir -p "${temp_ae_sdk_dir}"
+    info "Downloading appengine SDK"
+    local sdk_zip="${temp_ae_sdk_dir}/sdk.zip"
+    wget -c "${DEFAULT_SDK_MIRROR}" -O "${sdk_zip}"
+    if [[ $? -ne 0 ]]; then
+      error "Failed to download SDK from ${DEFAULT_SDK_MIRROR}"
+      rm -rf "${temp_ae_sdk_dir}"
+      return ${E_GENERAL}
+    fi
+
+    info "Unpacking..."
+    unzip -q "${sdk_zip}" -d "${temp_ae_sdk_dir}"
+    if [[ $? -ne 0 ]]; then
+      error "Failed to unzip ${sdk_zip}."
+      rm -rf "${temp_ae_sdk_dir}"
+      return ${E_GENERAL}
+    fi
+
+    mv "${temp_ae_sdk_dir}/google_appengine" "${ae_sdk_dir}"
+    rm -rf "${temp_ae_sdk_dir}"
+
+    if [[ -f "${appname}/${APPENGINE_SDK_MOD_FILE}" ]]; then
+      info "Running appengine sdk mod script from " \
+          "${appname}/${APPENGINE_SDK_MOD_FILE}"
+      if ! "./${appname}/${APPENGINE_SDK_MOD_FILE}" \
+          "${appengine_dir}/${ae_sdk_dir}"; then
+        return ${E_GENERAL}
+      fi
+    fi
+  fi
+
+  info "Using appengine SDK at ${ae_sdk_dir}"
+  return 0
+}
+
+setup_django_path() {
+  local -r appengine_dir="$1"
+  local -r ae_sdk_dir="$2"
+  local -r appname="$3"
+
+  if [[ ! -f "${appname}/${APP_YAML}" ]]; then
+    return ${E_GENERAL}
+  fi
+
+  local django_version
+  django_version="$(awk '$0 == "- name: django" { getline; print $NF }' \
+                    "${appname}/${APP_YAML}")"
+  if [[ -z "${django_version}" ]]; then
+    return ${E_GENERAL}
+  fi
+
+  info "Setting django version to ${django_version}"
+  django_dir="${ae_sdk_dir}/lib/django-${django_version}"
+  PYTHONPATH_PREFIX="${appengine_dir}/${django_dir}:${PYTHONPATH_PREFIX}"
+  PATH_PREFIX="${appengine_dir}/${django_dir}/django/bin:${PATH_PREFIX}"
+}
+
+# This sets up the chromite path so that chromite is available inside ae_shell.
+# Note that this is different from using chromite/scripts/wrapper.py because the
+# appengine apps that are launched / deployed inside the ae_shell run in an
+# environment controlled by the AE SDK's dev_appserver.py
+# This ensures that chromite is available inside that environment as well.
+setup_chromite_path() {
+  local -r appengine_dir="$1"
+  # Must go deeper.
+  local basedir
+  base_dir="$(dirname "$(dirname "${appengine_dir}")")"
+  PYTHONPATH_PREFIX="${base_dir}:${PYTHONPATH_PREFIX}"
+}
+
+main() {
+  local -r appengine_dir="$(readlink -e "$(dirname "${BASH_SOURCE}")")"
+  source "${appengine_dir}/common.sh"
+
+  # Argument parsing.
+  local -r appdir="$1"
+  shift
+
+  if [[ $# -gt 0 && "$1" != "--" ]]; then
+    error "Unexpected argument: $1"
+    usage
+    exit ${E_GENERAL}
+  fi
+  # End argument parsing.
+
+  local -r appname="$(basename "${appdir}")"
+  local -r ae_sdk_dir="google_appengine_${appname}"
+
+  local appname_shell="$(echo "${appname}" | tr '[:lower:]' '[:upper:]')"
+
+  if [[ ! -d "${appdir}" ]]; then
+    error "'${appdir}' is not an appengine app source directory!"
+    usage
+    exit ${E_GENERAL}
+  fi
+
+  info "Found appengine directory ${appengine_dir}"
+  info "Found appengine app ${appname} at ${appdir}"
+
+  pushd "${appengine_dir}" >/dev/null
+
+  if ! prepare_sdk "${appengine_dir}" "${ae_sdk_dir}" "${appname}"; then
+    exit ${E_GENERAL}
+  fi
+
+  setup_django_path "${appengine_dir}" "${ae_sdk_dir}" "${appname}"
+  setup_chromite_path "${appengine_dir}"
+  PYTHONPATH_PREFIX="${appengine_dir}/${ae_sdk_dir}:${PYTHONPATH_PREFIX}"
+  PYTHONPATH="${appengine_dir}/${appname}:${PYTHONPATH}"
+  PATH_PREFIX="${appengine_dir}/${ae_sdk_dir}:${appengine_dir}:${PATH_PREFIX}"
+  PS1_PREFIX="AE:${appname_shell}${PS1_PREFIX}"
+
+  popd >/dev/null
+  enter_ae_shell "$@"
+}
+
+main "$@"
diff --git a/appengine/chromiumos-build-stats/app.yaml b/appengine/chromiumos-build-stats/app.yaml
new file mode 100644
index 0000000..cadac75
--- /dev/null
+++ b/appengine/chromiumos-build-stats/app.yaml
@@ -0,0 +1,25 @@
+application: chromiumos-build-stats
+version: 6
+runtime: python27
+api_version: 1
+threadsafe: false
+
+handlers:
+- url: /stylesheets
+  static_dir: stylesheets
+  secure: always
+
+- url: /upload_command_stats
+  script: main.app
+  secure: always
+
+- url: /.*
+  script: main.app
+  secure: always
+  login: required
+
+libraries:
+- name: webapp2
+  version: latest
+- name: jinja2
+  version: latest
diff --git a/appengine/chromiumos-build-stats/index.html b/appengine/chromiumos-build-stats/index.html
new file mode 100644
index 0000000..17a6087
--- /dev/null
+++ b/appengine/chromiumos-build-stats/index.html
@@ -0,0 +1,87 @@
+<!DOCTYPE html>
+{% autoescape true %}
+<html>
+  <head>
+    <link type="text/css" rel="stylesheet" href="/stylesheets/main.css" />
+  </head>
+  <body>
+    <div align="center">
+      <h2>Build Command Statistics - Prototype</h2>
+      <p>
+        This is an admittedly primitive interface to the build command
+        statistics gathered every time a build command (currently just
+        build_packages) is run in golo.chromium.org or corp.google.com.
+      </p>
+      <p>
+        Create
+        <a href="https://developers.google.com/appengine/docs/python/datastore/gqlreference">GCL queries</a>
+        to search the statistics database.  GCL has no SELECT or FROM clauses.  This site
+        supports a SQL-like SELECT clause for column filtering, but no FROM clause is
+        needed/supported.  If no SELECT clause is given then default columns are selected
+        (see below).  A default LIMIT 30 is used unless otherwise specified.
+      </p>
+    </div>
+
+    <hr>
+    <h3>Prepare a new query</h3>
+    <form action="/stats" method="get">
+      <h5>Write your own query</h5>
+      <div><textarea name="query" rows="2" cols="100">{{ user_query }}</textarea></div>
+      <div>
+        display format:
+        <input type="radio" name="format" value="table" checked>table (default)
+        <input type="radio" name="format" value="json">json
+      </div>
+      <div><input type="submit" value="Submit Query"></div>
+    </form>
+
+    <form action="/stats" method="get">
+      <h5>Select an example query</h5>
+      <div>
+        <select name="query">
+          {% for example_query in example_queries %}
+            <option>{{ example_query }}</option>
+          {% endfor %}
+        </select>
+      </div>
+      <div>
+        display format:
+        <input type="radio" name="format" value="table" checked>table (default)
+        <input type="radio" name="format" value="json">json
+      </div>
+      <div><input type="submit" value="Submit Query"></div>
+    </form>
+
+    <hr>
+    <div><h3>Query results</h3></div>
+    <div>For query: <b>{{ user_query }}</b></div>
+    <p/>
+    {% if error_msg %}
+    <b><font color="red">{{ error_msg }}</font></b>
+    {% else %}
+    <table border="1">
+      {% for row in results_table %}
+      <tr>
+        {% for cell in row %}
+        <td>{{ cell }}</td>
+        {% endfor %}
+      </tr>
+      {% endfor %}
+    </table>
+    {% endif %}
+    <p/>
+
+    <hr>
+    <div><h3>Database columns available</h3></div>
+    <ul>
+    {% for col in column_list %}
+    <li>{{ col }}</li>
+    {% endfor %}
+    </ul>
+    <hr>
+    <div align="center">
+      Signed in as <b>{{ user_email }}</b>
+    </div>
+  </body>
+</html>
+{% endautoescape %}
diff --git a/appengine/chromiumos-build-stats/index.yaml b/appengine/chromiumos-build-stats/index.yaml
new file mode 100644
index 0000000..115e7f1
--- /dev/null
+++ b/appengine/chromiumos-build-stats/index.yaml
@@ -0,0 +1,132 @@
+indexes:
+
+# AUTOGENERATED
+
+# This index.yaml is automatically updated whenever the dev_appserver
+# detects that a new type of query is run.  If you want to manage the
+# index.yaml file manually, remove the above marker line (the line
+# saying "# AUTOGENERATED").  If you want to manage some indexes
+# manually, move them above the marker line.  The index.yaml file is
+# automatically uploaded to the admin console when you next deploy
+# your application using appcfg.py.
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: board
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: board
+  - name: end_date
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: board
+  - name: end_datetime
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: board
+  - name: end_time
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: cmd_base
+  - name: end_date
+  - name: run_time
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: cmd_base
+  - name: end_datetime
+    direction: desc
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_date
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_date
+  - name: end_time
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_date
+  - name: end_time
+    direction: desc
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_date
+  - name: run_time
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_date
+    direction: desc
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_datetime
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_datetime
+    direction: desc
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_time
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: end_time
+    direction: desc
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: run_time
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: run_time
+  - name: end_time
+    direction: desc
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: run_time
+    direction: desc
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: username
+  - name: end_date
+  - name: run_time
+
+- kind: Statistics
+  ancestor: yes
+  properties:
+  - name: username
+  - name: end_date
+    direction: desc
diff --git a/appengine/chromiumos-build-stats/main.py b/appengine/chromiumos-build-stats/main.py
new file mode 100644
index 0000000..6d27637
--- /dev/null
+++ b/appengine/chromiumos-build-stats/main.py
@@ -0,0 +1,15 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import webapp2
+
+import stats
+
+# Application configuration.
+URLS = [
+  ('/', stats.MainPage),
+  ('/stats', stats.MainPage),
+  ('/upload_command_stats', stats.PostPage),
+]
+app = webapp2.WSGIApplication(URLS, debug=True)
diff --git a/appengine/chromiumos-build-stats/model.py b/appengine/chromiumos-build-stats/model.py
new file mode 100644
index 0000000..bd1fca7
--- /dev/null
+++ b/appengine/chromiumos-build-stats/model.py
@@ -0,0 +1,27 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""All database model classes for this AppEngine instance."""
+
+from google.appengine.ext import db
+
+class Statistics(db.Model):
+  """Each entry holds stats for one build command run."""
+
+  # Properties common to all commands.
+  end_datetime = db.DateTimeProperty(auto_now_add=True)
+  end_date = db.DateProperty()
+  end_time = db.TimeProperty()
+  cmd_line = db.StringProperty()
+  cmd_base = db.StringProperty()
+  cmd_args = db.StringProperty()
+  run_time = db.IntegerProperty()
+  username = db.StringProperty()
+  board = db.StringProperty()
+  host = db.StringProperty()
+  cpu_count = db.StringProperty()
+  cpu_type = db.StringProperty()
+
+  # Properties for build_packages only.
+  package_count = db.IntegerProperty()
diff --git a/appengine/chromiumos-build-stats/stats.py b/appengine/chromiumos-build-stats/stats.py
new file mode 100644
index 0000000..68b6589
--- /dev/null
+++ b/appengine/chromiumos-build-stats/stats.py
@@ -0,0 +1,258 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import datetime
+import json
+import os
+import re
+
+from chromite.lib import cros_logging as logging
+
+from google.appengine.api import datastore_errors
+from google.appengine.ext import db
+from google.appengine.api import users
+
+import webapp2
+import jinja2
+
+import model
+
+# Could replace this with a function if there is ever any reason
+# to spread entries over multiple datastores.  Consistency is only
+# guaranteed within a datastore, but access should be limited to
+# about 1 per second.  That should not be a problem for us.
+DATASTORE_KEY = db.Key.from_path('Stats', 'default')
+
+JINJA_ENVIRONMENT = jinja2.Environment(
+    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
+    extensions=['jinja2.ext.autoescape'],
+    autoescape=True)
+
+
+class MainPage(webapp2.RequestHandler):
+  """Provide interface for interacting with DB."""
+
+  # Regex to peel SQL-like SELECT off front, if present, grabbing SELECT args.
+  # Example: "SELECT foo,bar WHERE blah blah"
+  #          ==> group(1)="foo,bar", group(2)="WHERE blah blah"
+  # Example: "SELECT foo , bar"
+  #          ==> group(1)="foo , bar", group(2)=""
+  # Example: "WHERE blah blah"
+  #          ==> No match
+  QUERY_SELECT_PREFIX_RE = re.compile(r'^\s*SELECT\s+'
+                                      r'([^\s,]+(?:\s*,\s*[^\s,]+)*)' # Group 1
+                                      r'(?:$|\s+)(.*)',               # Group 2
+                                      re.IGNORECASE | re.VERBOSE)
+
+  # Regex to determine if WHERE is present, and capture everything after it.
+  # Example: "WHERE foo=bar ORDER BY whatever"
+  #          ==> group(1)="foo=bar ORDER BY whatever"
+  # Example: "ORDER BY whatever"
+  #          ==> No match
+  QUERY_WHERE_PREFIX_RE = re.compile(r'^WHERE\s+(.+)$',
+                                     re.IGNORECASE | re.VERBOSE)
+
+  # Regex to discover ORDER BY columns in order to highlight them in results.
+  QUERY_ORDER_RE = re.compile(r'ORDER\s+BY\s+(\S+)', re.IGNORECASE)
+
+  # Regex to discover LIMIT value in query.
+  QUERY_LIMIT_RE = re.compile(r'LIMIT\s+(\d+)', re.IGNORECASE)
+
+  # Regex for separating tokens by commas, allowing spaces on either side.
+  COMMA_RE = re.compile(r'\s*,\s*')
+
+  # Default columns to show in results table if no SELECT given.
+  DEFAULT_COLUMNS = ['end_date', 'cmd_line', 'run_time', 'board',
+                     'package_count']
+
+  # All possible columns in Statistics model.
+  ALL_COLUMNS = sorted(model.Statistics.properties())
+
+  # Provide example queries in interface as a form of documentation.
+  EXAMPLE_QUERIES = [
+    ("ORDER BY end_date,run_time"
+     " LIMIT 30"),
+    ("WHERE username='mtennant'"
+     " ORDER BY end_date DESC"
+     " LIMIT 30"),
+    ("SELECT end_datetime,cmd_base,cmd_args,run_time,package_count"
+     " WHERE board='amd64-generic'"
+     " ORDER BY end_datetime"
+     " LIMIT 30"),
+    ("SELECT end_date,cmd_base,run_time,board,package_count"
+     " WHERE end_date=DATE('2012-03-28')"
+     " ORDER BY run_time"
+     " LIMIT 30"),
+    ("SELECT end_date,cmd_base,cmd_args,run_time,username"
+     " WHERE run_time>20"
+     " LIMIT 30"),
+    ]
+
+  def get(self):
+    """Support GET to stats page."""
+    # Note that google.com authorization is required to access this page, which
+    # is controlled in app.yaml and on appspot admin page.
+    orig_query = self.request.get('query')
+    logging.debug('Received raw query %r', orig_query)
+
+    # If no LIMIT was provided, default to a LIMIT of 30 for sanity.
+    if not self.QUERY_LIMIT_RE.search(orig_query):
+      orig_query += ' LIMIT 30'
+
+    query = orig_query
+
+    # Peel off "SELECT" clause from front of query.  GQL does not support SELECT
+    # filtering, but we will support it right here to select/filter columns.
+    query, columns = self._RemoveSelectFromQuery(query)
+    if query == orig_query and columns == self.DEFAULT_COLUMNS:
+      # This means there was no SELECT in query.  That is equivalent to
+      # SELECT of default columns, so show that to user.
+      orig_query = 'SELECT %s %s' % (','.join(columns), orig_query)
+
+    # All queries should have the "ancestor" WHERE clause in them, but that
+    # need not be exposed to interface.  Insert the clause intelligently.
+    query = self._AdjustWhereInQuery(query)
+
+    stat_entries = []
+    error_msg = None
+    try:
+      stat_entries = model.Statistics.gql(query, DATASTORE_KEY)
+    except datastore_errors.BadQueryError as ex:
+      error_msg = '<p>%s.</p><p>Actual GCL query used: "%s"</p>' % (ex, query)
+
+    if self.request.get('format') == 'json':
+      # Write output in the JSON format.
+      d = self._ResultsToDictionary(stat_entries, columns)
+
+      class CustomEncoder(json.JSONEncoder):
+        """Handles non-serializable classes by converting them to strings."""
+        def default(self, obj):
+          if (isinstance(obj, datetime.datetime) or
+              isinstance(obj, datetime.date) or
+              isinstance(obj, datetime.time)):
+            return obj.isoformat()
+
+          return json.JSONEncoder.default(self, obj)
+
+      self.response.content_type = 'application/json'
+      self.response.write(json.dumps(d, cls=CustomEncoder))
+    else:
+      # Write output to the HTML page.
+      results_table = self._PrepareResultsTable(stat_entries, columns)
+      template_values = {
+          'error_msg': error_msg,
+          'gcl_query': query,
+          'user_query': orig_query,
+          'user_email': users.get_current_user(),
+          'results_table': results_table,
+          'column_list': self.ALL_COLUMNS,
+          'example_queries': self.EXAMPLE_QUERIES,
+      }
+      template = JINJA_ENVIRONMENT.get_template('index.html')
+      self.response.write(template.render(template_values))
+
+  def _RemoveSelectFromQuery(self, query):
+    """Remove SELECT clause from |query|, return tuple (new_query, columns)."""
+    match = self.QUERY_SELECT_PREFIX_RE.search(query)
+    if match:
+      # A SELECT clause is present.  Remove it but save requested columns.
+      columns = self.COMMA_RE.split(match.group(1))
+      query = match.group(2)
+
+      if columns == ['*']:
+        columns = self.ALL_COLUMNS
+
+      logging.debug('Columns selected for viewing: %s', ', '.join(columns))
+      return query, columns
+    else:
+      logging.debug('Using default columns for viewing: %s',
+                    ', '.join(self.DEFAULT_COLUMNS))
+      return query, self.DEFAULT_COLUMNS
+
+  def _AdjustWhereInQuery(self, query):
+    """Insert WHERE ANCESTOR into |query| and return."""
+    match = self.QUERY_WHERE_PREFIX_RE.search(query)
+    if match:
+      return 'WHERE ANCESTOR IS :1 AND %s' % match.group(1)
+    else:
+      return 'WHERE ANCESTOR IS :1 %s' % query
+
+  def _PrepareResultsTable(self, stat_entries, columns):
+    """Prepare table for |stat_entries| using only |columns|."""
+    # One header blank for row numbers, then each column name.
+    table = [[c for c in [''] + columns]]
+    # Prepare list of table rows, one for each stat entry.
+    for stat_ix, stat_entry in enumerate(stat_entries):
+      row = [stat_ix + 1]
+      row += [getattr(stat_entry, col) for col in columns]
+      table.append(row)
+
+    return table
+
+  def _ResultsToDictionary(self, stat_entries, columns):
+    """Converts |stat_entries| to a dictionary with |columns| as keys.
+
+    Args:
+      stat_entries: A list of GqlQuery objects.
+      columns: A list of keys to use.
+
+    Returns:
+      A dictionary with |columns| as keys.
+    """
+    stats_dict = dict()
+    keys = [c for c in columns]
+    for stat_ix, stat_entry in enumerate(stat_entries):
+      stats_dict[stat_ix] = dict(
+          (col, getattr(stat_entry, col)) for col in columns)
+
+    return stats_dict
+
+
+class PostPage(webapp2.RequestHandler):
+  """Provides interface for uploading command stats to database."""
+
+  NO_VALUE = '__NO_VALUE_AT_ALL__'
+
+  def post(self):
+    """Support POST of command stats."""
+    logging.info('Stats POST received at %r', self.request.uri)
+
+    new_stat = model.Statistics(parent=DATASTORE_KEY)
+
+    # Check each supported DB property to see if it has a value set
+    # in the POST request.
+    for prop in model.Statistics.properties():
+      # Skip properties with auto_now or auto_now_add enabled.
+      model_prop = getattr(model.Statistics, prop)
+      if ((hasattr(model_prop, 'auto_now_add') and model_prop.auto_now_add) or
+          (hasattr(model_prop, 'auto_now') and model_prop.auto_now)):
+        continue
+
+      # Note that using hasattr with self.request does not work at all.
+      # It (almost) always says the attribute is not present, when getattr
+      # does actually return a value.  Also note that self.request.get is
+      # not returning None as the default value if no explicit default value
+      # is provided, contrary to the spec for dict.get.
+      value = self.request.get(prop, self.NO_VALUE)
+
+      if value is not self.NO_VALUE:
+        # String properties must be 500 characters or less (GQL requirement).
+        if isinstance(model_prop, db.StringProperty) and len(value) > 500:
+          logging.debug('  String property %r too long.  Cutting off at 500'
+                        ' characters.', prop)
+          value = value[:500]
+
+        # Integer properties require casting
+        if isinstance(model_prop, db.IntegerProperty):
+          value = int(value)
+
+        logging.debug('  Stats POST property %r ==> %r', prop, value)
+        setattr(new_stat, prop, value)
+
+    # Use automatically set end_datetime prop to set end_date and end_time.
+    new_stat.end_time = new_stat.end_datetime.time()
+    new_stat.end_date = new_stat.end_datetime.date()
+
+    # Save to model.
+    new_stat.put()
diff --git a/appengine/chromiumos-build-stats/stylesheets/main.css b/appengine/chromiumos-build-stats/stylesheets/main.css
new file mode 100644
index 0000000..a8666e2
--- /dev/null
+++ b/appengine/chromiumos-build-stats/stylesheets/main.css
@@ -0,0 +1,4 @@
+body {
+  font-family: Verdana, Helvetica, sans-serif;
+  background-color: #DDDDDD;
+}
diff --git a/appengine/common.sh b/appengine/common.sh
new file mode 100644
index 0000000..4cf5ba0
--- /dev/null
+++ b/appengine/common.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+readonly E_GENERAL=1
+
+error() {
+  (
+  # Red log line.
+  tput setaf 1
+  echo "ERROR: $1"
+  tput sgr0
+  ) >&2
+}
+
+warning() {
+  (
+  # Yellow warning line.
+  tput setaf 3
+  echo "WARNING: $1"
+  tput sgr0
+  ) >&2
+}
+
+info() {
+  echo "INFO: $1"
+}
diff --git a/appengine/cq_stats/.gitignore b/appengine/cq_stats/.gitignore
new file mode 100644
index 0000000..a55e0c2
--- /dev/null
+++ b/appengine/cq_stats/.gitignore
@@ -0,0 +1,2 @@
+/annotator_cidb_creds
+/cq_stats/static
diff --git a/appengine/cq_stats/README b/appengine/cq_stats/README
new file mode 100644
index 0000000..5a59a8a
--- /dev/null
+++ b/appengine/cq_stats/README
@@ -0,0 +1,51 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+This project contains the cros-cq-stats-sheet app.
+
+WARNING: A word about using django: In general, don't.
+We're still discussing whether django is a good choice for future apps. It's not
+the standard template engine used by other infra apps. Before you start writing
+a new app for infra, email chromeos-infra-discuss@.
+
+Local development
+=================
+- We require cidb credentials to be made available to the app in a very specific
+  way. Create a directory / symlink to your *annotator* user credentials for the
+  *debug-cidb* instance named "annotator_cidb_creds". See go/cros-cidb-admin to
+  obtain these credentials.
+- Then from chromite/appengine, you can launch the local dev_appserver using:
+    $ ./cq_stats/dev_appserver
+- There are two kinds of changes that will not be picked up by the dev_appserver
+  automatically:
+  - changes to chromite/ outside of the cq_stats app. (Yes, chromite/ is
+    available to the app, just like to any other code under chromite)
+  - changes to static files.
+
+Deploying the app.
+=============
+Simply use `deploy_app` provided in this directory.
+- You should first deploy the 'dbg' instance of the app, verify that
+  everything is as you expect by navigating to go/chromiumos-build-annotator-dbg
+- Only then should you deploy to 'prod'.
+
+The script requires you to obtain the secret key used to encrypt the cookies for
+pages served from:
+- debug: You can find the key in valentine under
+    Description/hostname: google.com:chromiumos-build-annotator-dbg
+    Purpose/username: build_annotations app secret_key
+- prod: You can find the key in valentine under
+    Description/hostname: google.com:chromiumos-build-annotator
+    Purpose/username: build_annotations app secret_key
+
+Deploy-Troubleshooting
+===============
+If deployment succeeds but the app fails for some reason, you'll get a very
+unhelpful page without any stack trace. This is by design. You should redeploy
+with DEBUG turned on. To do this, set DEBUG to True in the "DEPLOY OVERRIDES"
+section in cq_stats/settings.py
+Other settings autogenerated for deploy can also be overridden there.
+
+pylint-pro-tip: Enter the ae_shell (chromite/appengine/ae_shell cq_stats) before
+running pylint, so it can resolve all imports.
diff --git a/appengine/cq_stats/app.yaml b/appengine/cq_stats/app.yaml
new file mode 100644
index 0000000..417060d
--- /dev/null
+++ b/appengine/cq_stats/app.yaml
@@ -0,0 +1,21 @@
+application: google.com:chromiumos-build-annotator-dbg
+version: 1
+runtime: python27
+api_version: 1
+threadsafe: true
+
+libraries:
+- name: django
+  version: 1.5
+- name: MySQLdb
+  version: latest
+
+builtins:
+- django_wsgi: on
+
+env_variables:
+  DJANGO_SETTINGS_MODULE: 'cq_stats.settings'
+
+handlers:
+- url: /static
+  static_dir: cq_stats/static
diff --git a/appengine/cq_stats/appengine_sdk_mod b/appengine/cq_stats/appengine_sdk_mod
new file mode 100755
index 0000000..4f7f469
--- /dev/null
+++ b/appengine/cq_stats/appengine_sdk_mod
@@ -0,0 +1,27 @@
+#!/bin/bash
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+TOPLVL="$(readlink -e "$(dirname "$0")")"
+APPENGINE_SDK_DIR="$1"
+PATCH_DIR="${TOPLVL}/appengine_sdk_patches"
+SDK_PATCHES=(
+  "${PATCH_DIR}/fix-dbshell-to-use-user-password-from-settings-file.patch"
+  "${PATCH_DIR}/fix-mysql-backend-to-pass-the-ssl-options-through.patch"
+)
+
+source ${TOPLVL}/../common.sh
+
+if [[ ! -d "${APPENGINE_SDK_DIR}" ]]; then
+  error "No appengine SDK at ${APPENGINE_SDK_DIR}"
+  exit ${E_GENERAL}
+fi
+
+for sdk_patch in "${SDK_PATCHES[@]}"; do
+  echo "Applying ${sdk_patch} to ${APPENGINE_SDK_DIR}"
+  if ! patch -d "${APPENGINE_SDK_DIR}" -p 1 < "${sdk_patch}"; then
+    error "Failed to apply patch ${sdk_patch}. Bailing."
+    exit ${E_GENERAL}
+  fi
+done
diff --git a/appengine/cq_stats/appengine_sdk_patches/fix-dbshell-to-use-user-password-from-settings-file.patch b/appengine/cq_stats/appengine_sdk_patches/fix-dbshell-to-use-user-password-from-settings-file.patch
new file mode 100644
index 0000000..3ba4a9c
--- /dev/null
+++ b/appengine/cq_stats/appengine_sdk_patches/fix-dbshell-to-use-user-password-from-settings-file.patch
@@ -0,0 +1,76 @@
+From 7776458f4c723b1d1b4d796cc958fdb46cf5a03f Mon Sep 17 00:00:00 2001
+From: Prathmesh Prabhu <pprabhu@chromium.org>
+Date: Mon, 5 Jan 2015 13:44:26 +0530
+Subject: Fix dbshell to use user/password from settings file.
+
+---
+ google/storage/speckle/python/django/backend/client.py | 16 ++++++++++++++--
+ google/storage/speckle/python/tool/google_sql.py       |  7 ++++++-
+ 2 files changed, 20 insertions(+), 3 deletions(-)
+
+diff --git a/google/storage/speckle/python/django/backend/client.py b/google/storage/speckle/python/django/backend/client.py
+index 3c65897..cab320a 100644
+--- a/google/storage/speckle/python/django/backend/client.py
++++ b/google/storage/speckle/python/django/backend/client.py
+@@ -35,12 +35,24 @@ class DatabaseClient(backends.BaseDatabaseClient):
+   def runshell(self):
+     """Start an interactive database shell."""
+     settings_dict = self.connection.settings_dict
+-    args = [self.executable_name]
+-    args = ['', settings_dict.get('INSTANCE')]
++    #args = [self.executable_name]
++    args = ['']
++
++    user = settings_dict.get('USER')
++    if user:
++      args.append('--user')
++      args.append(user)
++    password = settings_dict.get('PASSWORD')
++    if password:
++      args.append('--password')
++      args.append(password)
++
++    args.append(settings_dict.get('INSTANCE'))
+     database = settings_dict.get('NAME')
+     if database:
+       args.append(database)
+
++    print('[xckd] Args for google_sql: (%s)' % args)
+
+
+
+diff --git a/google/storage/speckle/python/tool/google_sql.py b/google/storage/speckle/python/tool/google_sql.py
+index 3aa1288..91132f4 100644
+--- a/google/storage/speckle/python/tool/google_sql.py
++++ b/google/storage/speckle/python/tool/google_sql.py
+@@ -210,6 +210,10 @@ def main(argv):
+   parser.add_option('-e', '--output_encoding', dest='output_encoding',
+                     default=DEFAULT_ENCODING,
+                     help='Output encoding. Defaults to %s.' % DEFAULT_ENCODING)
++  parser.add_option('--user', dest='user',
++                    default=None, help=optparse.SUPPRESS_HELP)
++  parser.add_option('--password', dest='password',
++                    default=None, help=optparse.SUPPRESS_HELP)
+   parser.add_option('--oauth_credentials_path', dest='oauth_credentials_path',
+                     default=None, help=optparse.SUPPRESS_HELP)
+
+@@ -223,6 +227,7 @@ def main(argv):
+
+
+   instance_alias = instance.replace(':', '#')
++  print(instance_alias)
+   database = None
+   if len(args) == 2:
+     database = args[1]
+@@ -237,7 +242,7 @@ def main(argv):
+   db.add_driver(GoogleSqlDriver.NAME, GoogleSqlDriver)
+   sql_cmd_config = config.SQLCmdConfig(None)
+   sql_cmd_config.add('__googlesql__', instance_alias, None, None, database,
+-                     GoogleSqlDriver.NAME, None, None)
++                     GoogleSqlDriver.NAME, options.user, options.password)
+   sql_cmd = GoogleSqlCmd(sql_cmd_config)
+   sql_cmd.set_output_encoding(options.output_encoding)
+   sql_cmd.set_database(instance_alias)
+--
+2.2.0.rc0.207.ga3a616c
+
diff --git a/appengine/cq_stats/appengine_sdk_patches/fix-mysql-backend-to-pass-the-ssl-options-through.patch b/appengine/cq_stats/appengine_sdk_patches/fix-mysql-backend-to-pass-the-ssl-options-through.patch
new file mode 100644
index 0000000..60d20f2
--- /dev/null
+++ b/appengine/cq_stats/appengine_sdk_patches/fix-mysql-backend-to-pass-the-ssl-options-through.patch
@@ -0,0 +1,43 @@
+From fa8dc227f8b1804bb3ccae38db168f32c34cfc27 Mon Sep 17 00:00:00 2001
+From: Prathmesh Prabhu <pprabhu@chromium.org>
+Date: Tue, 6 Jan 2015 13:31:00 +0530
+Subject: Fix mysql backend to pass the ssl options through.
+
+---
+ lib/django-1.5/django/db/backends/mysql/client.py | 13 +++++++++++++
+ 1 file changed, 13 insertions(+)
+
+diff --git a/lib/django-1.5/django/db/backends/mysql/client.py b/lib/django-1.5/django/db/backends/mysql/client.py
+index 1cf8cee..f395564 100644
+--- a/lib/django-1.5/django/db/backends/mysql/client.py
++++ b/lib/django-1.5/django/db/backends/mysql/client.py
+@@ -15,6 +15,14 @@ class DatabaseClient(BaseDatabaseClient):
+         host = settings_dict['OPTIONS'].get('host', settings_dict['HOST'])
+         port = settings_dict['OPTIONS'].get('port', settings_dict['PORT'])
+         defaults_file = settings_dict['OPTIONS'].get('read_default_file')
++
++        # ssl options
++        ssl = settings_dict['OPTIONS'].get('ssl')
++        if ssl:
++          server_ca = ssl.get('ca', '')
++          client_cert = ssl.get('cert', '')
++          client_key = ssl.get('key', '')
++
+         # Seems to be no good way to set sql_mode with CLI.
+
+         if defaults_file:
+@@ -30,6 +38,11 @@ class DatabaseClient(BaseDatabaseClient):
+                 args += ["--host=%s" % host]
+         if port:
+             args += ["--port=%s" % port]
++        if ssl:
++            args += ["--ssl-ca=%s" % server_ca]
++            args += ["--ssl-cert=%s" % client_cert]
++            args += ["--ssl-key=%s" % client_key]
++
+         if db:
+             args += [db]
+
+--
+2.2.0.rc0.207.ga3a616c
+
diff --git a/appengine/cq_stats/build_annotations/__init__.py b/appengine/cq_stats/build_annotations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/__init__.py
diff --git a/appengine/cq_stats/build_annotations/build_row_controller.py b/appengine/cq_stats/build_annotations/build_row_controller.py
new file mode 100644
index 0000000..d628a59
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/build_row_controller.py
@@ -0,0 +1,289 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Controller for the build_annotations app.
+
+This controller sits between the django models for cidb tables and the views
+that power the app.
+Keep non-trivial logic to aggregate data / optimize db access here and test it.
+"""
+
+from __future__ import print_function
+
+import collections
+
+from django.db import models
+from django.db.models import query
+
+from build_annotations import models as ba_models
+
+# We need to fake out some system modules before importing chromite modules.
+from cq_stats import fake_system_modules  # pylint: disable=unused-import
+from chromite.lib import clactions
+
+
+class BuildRow(collections.MutableMapping):
+  """A database "view" that collects all relevant stats about a build."""
+
+  def __init__(self, build_entry, build_stage_entries,
+               cl_action_entries, failure_entries, annotations,
+               costly_annotations_qs):
+    """Initialize a BuildRow.
+
+    Do not use QuerySets as arguments. All query sets must have been evaluated
+    before creating this object. All data manipulation within this object is
+    pure python.
+
+    All non-trivial computation on this object should be lazy: Defer it to
+    property getters.
+    """
+    assert not isinstance(build_entry, query.QuerySet)
+    assert not isinstance(build_stage_entries, query.QuerySet)
+    assert not isinstance(cl_action_entries, query.QuerySet)
+    assert not isinstance(failure_entries, query.QuerySet)
+
+    self._data = {}
+
+    self.build_entry = build_entry
+    self._build_stage_entries = build_stage_entries
+    self._cl_action_entries = cl_action_entries
+    self._failure_entries = failure_entries
+
+    # The readonly data is accessible from this object as dict entries.
+    self['id'] = self.build_entry.id
+    self['build_number'] = self.build_entry.build_number
+    self['status'] = self.build_entry.status
+    self['summary'] = self.build_entry.summary
+    self['start_time'] = self.build_entry.start_time
+    if (self.build_entry.finish_time is not None and
+        self['start_time'] is not None):
+      self['run_time'] = self.build_entry.finish_time - self['start_time']
+    else:
+      self['run_time'] = None
+    if self['start_time'] is not None:
+      self['weekday'] = (self['start_time'].date().weekday() != 6)
+    else:
+      self['weekday'] = None
+    self['chromeos_version'] = self.build_entry.full_version
+    self['chrome_version'] = self.build_entry.chrome_version
+    self['waterfall'] = self.build_entry.waterfall
+    self['builder_name'] = self.build_entry.builder_name
+
+    failed_stages = [x.name for x in build_stage_entries if
+                     x.status == x.FAIL]
+    self['failed_stages'] = ', '.join(failed_stages)
+    self['picked_up_count'] = self._CountCLActions(
+        ba_models.ClActionTable.PICKED_UP)
+    self['submitted_count'] = self._CountCLActions(
+        ba_models.ClActionTable.SUBMITTED)
+    self['kicked_out_count'] = self._CountCLActions(
+        ba_models.ClActionTable.KICKED_OUT)
+    self['annotation_summary'] = self._SummaryAnnotations(annotations)
+    self._costly_annotations_qs = costly_annotations_qs
+
+  def GetAnnotationsQS(self):
+    """Return the queryset backing annotations.
+
+    Executing this queryset is costly because there is no way to optimize the
+    query execution.
+    Since this is a related_set queryset, that was further filtered, each item
+    in the queryset causes a db hit.
+    """
+    return self._costly_annotations_qs
+
+  def __getitem__(self, *args, **kwargs):
+    return self._data.__getitem__(*args, **kwargs)
+
+  def __iter__(self, *args, **kwargs):
+    return self._data.__iter__(*args, **kwargs)
+
+  def __len__(self, *args, **kwargs):
+    return self._data.__len__(*args, **kwargs)
+
+  def __setitem__(self, *args, **kwargs):
+    return self._data.__setitem__(*args, **kwargs)
+
+  def __delitem__(self, *args, **kwargs):
+    return self._data.__delitem__(*args, **kwargs)
+
+  def _CountCLActions(self, cl_action):
+    actions = [x for x in self._cl_action_entries if x.action == cl_action]
+    return len(actions)
+
+  def _SummaryAnnotations(self, annotations):
+    if not annotations:
+      return ''
+
+    result = '%d annotations: ' % len(annotations)
+    summaries = []
+    for annotation in annotations:
+      summary = annotation.failure_category
+      failure_message = annotation.failure_message
+      blame_url = annotation.blame_url
+      if failure_message:
+        summary += '(%s)' % failure_message[:30]
+      elif blame_url:
+        summary += '(%s)' % blame_url[:30]
+      summaries.append(summary)
+
+    result += '; '.join(summaries)
+    return result
+
+
+class BuildRowController(object):
+  """The 'controller' class that collates stats for builds.
+
+  More details here.
+  Unit-test this class please.
+  """
+
+  DEFAULT_NUM_BUILDS = 100
+
+  def __init__(self):
+    self._latest_build_id = 0
+    self._build_rows_map = {}
+
+
+  def GetStructuredBuilds(self, latest_build_id=None,
+                          num_builds=DEFAULT_NUM_BUILDS, extra_filter_q=None):
+    """The primary method to obtain stats for builds
+
+    Args:
+      latest_build_id: build_id of the latest build to query.
+      num_builds: Number of build to query.
+      extra_filter_q: An optional Q object to filter builds. Use GetQ* methods
+          provided in this class to form the filter.
+
+    Returns:
+      A list of BuildRow entries for the queried builds.
+    """
+    # If we're not given any latest_build_id, we fetch the latest builds
+    if latest_build_id is not None:
+      build_qs = ba_models.BuildTable.objects.filter(id__lte=latest_build_id)
+    else:
+      build_qs = ba_models.BuildTable.objects.all()
+
+    if extra_filter_q is not None:
+      build_qs = build_qs.filter(extra_filter_q)
+    build_qs = build_qs.order_by('-id')
+    build_qs = build_qs[:num_builds]
+
+    # Critical for performance: Prefetch all the join relations we'll need.
+    build_qs = build_qs.prefetch_related('buildstagetable_set')
+    build_qs = build_qs.prefetch_related('clactiontable_set')
+    build_qs = build_qs.prefetch_related(
+        'buildstagetable_set__failuretable_set')
+    build_qs = build_qs.prefetch_related('annotationstable_set')
+
+    # Now hit the database.
+    build_entries = [x for x in build_qs]
+
+    self._build_rows_map = {}
+    build_rows = []
+    for build_entry in build_entries:
+      build_stage_entries = [x for x in build_entry.buildstagetable_set.all()]
+      cl_action_entries = [x for x in build_entry.clactiontable_set.all()]
+      failure_entries = []
+      for entry in build_stage_entries:
+        failure_entries += [x for x in entry.failuretable_set.all()]
+      # Filter in python, filter'ing the queryset changes the queryset, and we
+      # end up hitting the database again.
+      annotations = [a for a in build_entry.annotationstable_set.all() if
+                     a.deleted == False]
+      costly_annotations_qs = build_entry.annotationstable_set.filter(
+          deleted=False)
+
+      build_row = BuildRow(build_entry, build_stage_entries, cl_action_entries,
+                           failure_entries, annotations, costly_annotations_qs)
+
+      self._build_rows_map[build_entry.id] = build_row
+      build_rows.append(build_row)
+
+    if build_entries:
+      self._latest_build_id = build_entries[0].id
+
+    return build_rows
+
+  def GetHandlingTimeHistogram(self, latest_build_id=None,
+                               num_builds=DEFAULT_NUM_BUILDS,
+                               extra_filter_q=None):
+    """Get CL handling time histogram."""
+    # If we're not given any latest_build_id, we fetch the latest builds
+    if latest_build_id is not None:
+      build_qs = ba_models.BuildTable.objects.filter(id__lte=latest_build_id)
+    else:
+      build_qs = ba_models.BuildTable.objects.all()
+
+    if extra_filter_q is not None:
+      build_qs = build_qs.filter(extra_filter_q)
+    build_qs = build_qs.order_by('-id')
+    build_qs = build_qs[:num_builds]
+
+    # Hit the database.
+    build_entries = list(build_qs)
+    claction_qs = ba_models.ClActionTable.objects.select_related('build_id')
+    claction_qs = claction_qs.filter(
+        build_id__in=set(b.id for b in build_entries))
+    # Hit the database.
+    claction_entries = [c for c in claction_qs]
+
+    claction_history = clactions.CLActionHistory(
+        self._JoinBuildTableClActionTable(build_entries, claction_entries))
+    # Convert times seconds -> minutes.
+    return {k: v / 60.0
+            for k, v in claction_history.GetPatchHandlingTimes().iteritems()}
+
+  def _JoinBuildTableClActionTable(self, build_entries, claction_entries):
+    """Perform the join operation in python.
+
+    Args:
+      build_entries: A list of buildTable entries.
+      claction_entries: A list of claction_entries.
+
+    Returns:
+      A list fo claction.CLAction objects created by joining the list of builds
+      and list of claction entries.
+    """
+    claction_entries_by_build_id = {}
+    for entry in claction_entries:
+      entries = claction_entries_by_build_id.setdefault(entry.build_id.id, [])
+      entries.append(entry)
+
+    claction_list = []
+    for build_entry in build_entries:
+      for claction_entry in claction_entries_by_build_id.get(build_entry.id,
+                                                             []):
+        claction_list.append(clactions.CLAction(
+            id=claction_entry.id,
+            build_id=build_entry.id,
+            action=claction_entry.action,
+            reason=claction_entry.reason,
+            build_config=build_entry.build_config,
+            change_number=claction_entry.change_number,
+            patch_number=claction_entry.patch_number,
+            change_source=claction_entry.change_source,
+            timestamp=claction_entry.timestamp))
+
+    return claction_list
+
+  ############################################################################
+  # GetQ* methods are intended to be used in nifty search expressions to search
+  # for builds.
+  @classmethod
+  def GetQNoAnnotations(cls):
+    """Return a Q for builds with no annotations yet."""
+    return models.Q(annotationstable__isnull=True)
+
+  @classmethod
+  def GetQRestrictToBuildConfig(cls, build_config):
+    """Return a Q for builds with the given build_config."""
+    return models.Q(build_config=build_config)
+
+  @property
+  def num_builds(self):
+    return len(self._build_rows_map)
+
+  @property
+  def latest_build_id(self):
+    return self._latest_build_id
diff --git a/appengine/cq_stats/build_annotations/fields.py b/appengine/cq_stats/build_annotations/fields.py
new file mode 100644
index 0000000..e7d840c
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/fields.py
@@ -0,0 +1,116 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Custom django model field definitions.
+
+This module defines some convenience fields and readonly versions of required
+django field types.
+"""
+
+from __future__ import print_function
+
+from django.db import models
+
+
+class BlobField(models.Field):
+  """A binary blob field."""
+  description = 'Blob'
+
+  def db_type(self, connection):
+    return 'blob'
+
+
+class EnumField(models.CharField):
+  """An enumeration field.
+
+  This is a text field that additionally provides attributes to access the
+  choices available for the enum values.
+  """
+
+  def __init__(self, *args, **kwargs):
+    choices = kwargs.get('choices', [])
+    max_length = max(len(x) for x in choices)
+    kwargs['max_length'] = max_length
+    for choice in choices:
+      setattr(self, choice.upper(), choice)
+    super(EnumField, self).__init__(*args, **kwargs)
+
+
+# For all ReadOnly* fields, set null=True
+# This allows us to use test data that has null values. Without this option,
+# tests complain during loaddata if any of the fields (that we don't care about
+# in the test itself) are null. Since this data is readonly, this data storage
+# option is irrelevant in prod.
+
+class ReadOnlyIntegerField(models.IntegerField):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+  def __init__(self, *args, **kwargs):
+    kwargs['editable'] = False
+    kwargs['blank'] = True
+    if not kwargs.get('primary_key', False):
+      kwargs['null'] = True
+    super(ReadOnlyIntegerField, self).__init__(*args, **kwargs)
+
+
+class ReadOnlyBooleanField(models.NullBooleanField):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+  def __init__(self, *args, **kwargs):
+    kwargs['editable'] = False
+    kwargs['blank'] = True
+    super(ReadOnlyBooleanField, self).__init__(*args, **kwargs)
+
+
+class ReadOnlyDateTimeField(models.DateTimeField):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+  def __init__(self, *args, **kwargs):
+    kwargs['editable'] = False
+    kwargs['blank'] = True
+    if not kwargs.get('primary_key', False):
+      kwargs['null'] = True
+    super(ReadOnlyDateTimeField, self).__init__(*args, **kwargs)
+
+
+class ReadOnlyForeignKey(models.ForeignKey):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+  def __init__(self, *args, **kwargs):
+    kwargs['editable'] = False
+    kwargs['blank'] = True
+    if not kwargs.get('primary_key', False):
+      kwargs['null'] = True
+    super(ReadOnlyForeignKey, self).__init__(*args, **kwargs)
+
+
+class ReadOnlyCharField(models.CharField):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+  def __init__(self, *args, **kwargs):
+    kwargs['editable'] = False
+    kwargs['blank'] = True
+    if not kwargs.get('primary_key', False):
+      kwargs['null'] = True
+    kwargs['max_length'] = 1024
+    super(ReadOnlyCharField, self).__init__(*args, **kwargs)
+
+
+class ReadOnlyBlobField(BlobField):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+  def __init__(self, *args, **kwargs):
+    kwargs['editable'] = False
+    kwargs['blank'] = True
+    if not kwargs.get('primary_key', False):
+      kwargs['null'] = True
+    super(ReadOnlyBlobField, self).__init__(*args, **kwargs)
+
+
+class ReadOnlyEnumField(ReadOnlyCharField):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+
+
+class ReadOnlyURLField(models.URLField):
+  """Thou shalt not edit this field, otherwise, we're very accomodating."""
+  def __init__(self, *args, **kwargs):
+    kwargs['editable'] = False
+    kwargs['blank'] = True
+    if not kwargs.get('primary_key', False):
+      kwargs['null'] = True
+    super(ReadOnlyURLField, self).__init__(*args, **kwargs)
diff --git a/appengine/cq_stats/build_annotations/forms.py b/appengine/cq_stats/build_annotations/forms.py
new file mode 100644
index 0000000..852394a
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/forms.py
@@ -0,0 +1,37 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Forms used by the build_annotations app."""
+
+from __future__ import print_function
+
+from django import forms
+
+from build_annotations import models as ba_models
+
+
+class SearchForm(forms.Form):
+  """Form to limit builds shown on the landing page."""
+  latest_build_id = forms.IntegerField()
+  num_builds = forms.IntegerField(label='Number of results')
+
+
+class AnnotationsForm(forms.ModelForm):
+  """Form to add/edit a single annotation to a build."""
+
+  # pylint: disable=no-init, old-style-class
+  class Meta:
+    """Set meta options for the form."""
+    model = ba_models.AnnotationsTable
+    fields = ['failure_category', 'failure_message', 'blame_url', 'notes',
+              'deleted']
+
+
+# NB: Explicitly set can_delete=False for clarity.
+# Due to a bug in (< django-1.7), models get deleted when the formset is saved
+# even if we request not to commit changes.
+AnnotationsFormSet = forms.models.modelformset_factory(
+    ba_models.AnnotationsTable,
+    form=AnnotationsForm,
+    can_delete=False)
diff --git a/appengine/cq_stats/build_annotations/models.py b/appengine/cq_stats/build_annotations/models.py
new file mode 100644
index 0000000..fc860d6
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/models.py
@@ -0,0 +1,224 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Django models for cidb tables."""
+
+from __future__ import print_function
+
+from django.db import models
+
+from build_annotations import fields as ba_fields
+
+
+class BaseModel(models.Model):
+  """Abstract base class to store all app-wide Meta options."""
+
+  class Meta(object):
+    """Set meta options for all models in this module."""
+    # This property not inherited.
+    abstract = True
+
+    # The schema for CIDB is maintained external to this app.
+    managed = False
+    # Allow us to split the models.py file into different modules.
+    app_label = 'cq_stats_sheet'
+    # Each model should explicitly set this option. The default django model to
+    # table name mapping does not work for us.
+    db_table = 'Please define me'
+
+  def __iter__(self):
+    for field_name in self._meta.get_all_field_names():
+      value = None
+      if hasattr(self, field_name):
+        value = getattr(self, field_name)
+      yield field_name, value
+
+  def __unicode__(self):
+    result = []
+    for _, value in self:
+      result.append(unicode(value))
+    return u', '.join(result)
+
+  def __str__(self):
+    return str(unicode(self))
+
+
+class BuildTable(BaseModel):
+  """Model for cidb.buildTable."""
+
+  class Meta(object):
+    """Set extra table options."""
+    db_table = 'buildTable'
+
+  id = ba_fields.ReadOnlyIntegerField(primary_key=True)
+  last_updated = ba_fields.ReadOnlyDateTimeField()
+  master_build_id = ba_fields.ReadOnlyForeignKey('self',
+                                                 db_column='master_build_id')
+  buildbot_generation = ba_fields.ReadOnlyIntegerField()
+  builder_name = ba_fields.ReadOnlyCharField()
+  waterfall = ba_fields.ReadOnlyCharField()
+  build_number = ba_fields.ReadOnlyIntegerField()
+  build_config = ba_fields.ReadOnlyCharField()
+  bot_hostname = ba_fields.ReadOnlyCharField()
+  start_time = ba_fields.ReadOnlyDateTimeField()
+  finish_time = ba_fields.ReadOnlyDateTimeField()
+  status = ba_fields.ReadOnlyCharField()
+  status_pickle = ba_fields.ReadOnlyBlobField()
+  build_type = ba_fields.ReadOnlyCharField()
+  chrome_version = ba_fields.ReadOnlyCharField()
+  milestone_version = ba_fields.ReadOnlyCharField()
+  platform_version = ba_fields.ReadOnlyCharField()
+  full_version = ba_fields.ReadOnlyCharField()
+  sdk_version = ba_fields.ReadOnlyCharField()
+  toolchain_url = ba_fields.ReadOnlyURLField()
+  final = ba_fields.ReadOnlyBooleanField()
+  metadata_url = ba_fields.ReadOnlyURLField()
+  summary = ba_fields.ReadOnlyCharField()
+  deadline = ba_fields.ReadOnlyDateTimeField()
+
+
+class BuildStageTable(BaseModel):
+  """Model for cidb.buildStageTable."""
+
+  class Meta(object):
+    """Set extra table options."""
+    db_table = 'buildStageTable'
+
+  # Not used directly in field definition for readonly tables, but used
+  # elsewhere as constants.
+  FAIL = 'fail'
+  PASS = 'pass'
+  INFLIGHT = 'inflight'
+  MISSING = 'missing'
+  PLANNED = 'planned'
+  SKIPPED = 'skipped'
+  FORGIVEN = 'forgiven'
+  STATUS_CHOICES = (
+      (FAIL, 'Stage failed'),
+      (PASS, 'Stage passed! Hurray!'),
+      (INFLIGHT, 'Stage is inflight'),
+      (MISSING, 'Status missing'),
+      (PLANNED, 'Stage is planned'),
+      (SKIPPED, 'Stage skipped'),
+      (FORGIVEN, 'Stage failed but forgiven'))
+
+  id = ba_fields.ReadOnlyIntegerField(primary_key=True)
+  build_id = ba_fields.ReadOnlyForeignKey('BuildTable',
+                                          db_column='build_id')
+  name = ba_fields.ReadOnlyCharField()
+  board = ba_fields.ReadOnlyCharField()
+  status = ba_fields.ReadOnlyEnumField()
+  last_updated = ba_fields.ReadOnlyDateTimeField()
+  start_time = ba_fields.ReadOnlyDateTimeField()
+  finish_time = ba_fields.ReadOnlyDateTimeField()
+  final = ba_fields.ReadOnlyBooleanField()
+
+
+class ClActionTable(BaseModel):
+  """Model for cidb.clActionTable."""
+
+  class Meta(object):
+    """Set extra table options."""
+    db_table = 'clActionTable'
+
+  # Not used directly in field definition for readonly tables, but used
+  # elsewhere as constants.
+  PICKED_UP = 'picked_up'
+  SUBMITTED = 'submitted'
+  KICKED_OUT = 'kicked_out'
+  SUBMIT_FAILED = 'submit_failed'
+  VERIFIED = 'verified'
+  FORGIVEN = 'forgiven'
+  # This list of choices is not exhaustive yet. It's only enough for CQ stats.
+  # (stored value, human readable description) pairs, django "choices" style.
+  ACTION_CHOICES = (
+      (PICKED_UP, 'CL picked up by CQ'),
+      (SUBMITTED, 'CL submitted by CQ'),
+      (KICKED_OUT, 'CL kicked out by CQ'),
+      (SUBMIT_FAILED, 'CQ failed to submit CL'),
+      (VERIFIED, 'CL verified by CQ'),
+      (FORGIVEN, 'CQ run failed, but CL forgiven'))
+
+  # Read-only mirror of the cidb clActionTable columns.
+  id = ba_fields.ReadOnlyIntegerField(primary_key=True)
+  # The build during which this action was taken.
+  build_id = ba_fields.ReadOnlyForeignKey('BuildTable',
+                                          db_column='build_id')
+  # Change number and patch number together identify the CL patch acted on.
+  change_number = ba_fields.ReadOnlyIntegerField()
+  patch_number = ba_fields.ReadOnlyIntegerField()
+  change_source = ba_fields.ReadOnlyEnumField()
+  # Presumably one of the ACTION_CHOICES constants above -- TODO confirm.
+  action = ba_fields.ReadOnlyEnumField()
+  reason = ba_fields.ReadOnlyCharField()
+  timestamp = ba_fields.ReadOnlyDateTimeField()
+
+
+class FailureTable(BaseModel):
+  """Model for cidb.failureTable."""
+
+  class Meta(object):
+    """Set extra table options."""
+    db_table = 'failureTable'
+
+  # Read-only mirror of the cidb failureTable columns.
+  id = ba_fields.ReadOnlyIntegerField(primary_key=True)
+  # The build stage in which this failure occurred.
+  build_stage_id = ba_fields.ReadOnlyForeignKey('BuildStageTable',
+                                                db_column='build_stage_id')
+  # Self-referential link: a failure may be nested inside an outer failure.
+  outer_failure_id = ba_fields.ReadOnlyForeignKey('self',
+                                                  db_column='outer_failure_id')
+  exception_type = ba_fields.ReadOnlyCharField()
+  exception_message = ba_fields.ReadOnlyCharField()
+  exception_category = ba_fields.ReadOnlyEnumField()
+  extra_info = ba_fields.ReadOnlyCharField()
+  timestamp = ba_fields.ReadOnlyDateTimeField()
+
+
+class AnnotationsTable(BaseModel):
+  """Model for cidb.annotationsTable."""
+
+  class Meta(object):
+    """Set extra table options."""
+    db_table = 'annotationsTable'
+
+  BAD_CL = 'bad_cl'
+  BUG_IN_TOT = 'bug_in_tot'
+  MERGE_CONFLICT = 'merge_conflict'
+  TREE_CONFLICT = 'tree_conflict'
+  SCHEDULED_ABORT = 'scheduled_abort'
+  CL_NOT_READY = 'cl_not_ready'
+  BAD_CHROME = 'bad_chrome'
+  TEST_FLAKE = 'test_flake'
+  GERRIT_FAILURE = 'gerrit_failure'
+  GS_FAILURE = 'gs_failure'
+  LAB_FAILURE = 'lab_failure'
+  BAD_BINARY_FAILURE = 'bad_binary_failure'
+  BUILD_FLAKE = 'build_flake'
+  INFRA_FAILURE = 'infra_failure'
+  MYSTERY = 'mystery'
+  FAILURE_CATEGORY_CHOICES = (
+      (BAD_CL, 'Bad CL (Please specify CL)'),
+      (BUG_IN_TOT, 'Bug in ToT (Please specify bug)'),
+      (MERGE_CONFLICT, 'Merge conflict'),
+      (TREE_CONFLICT, 'Tree conflict'),
+      (SCHEDULED_ABORT, 'Scheduled Abort'),
+      (CL_NOT_READY, 'CL was marked not ready (Please specify CL)'),
+      (BAD_CHROME, 'Bad chrome (Please speficy bug)'),
+      (TEST_FLAKE, 'Test flake'),
+      (GERRIT_FAILURE, 'Gerrit failure'),
+      (GS_FAILURE, 'Google Storage failure'),
+      (LAB_FAILURE, 'Lab failure'),
+      (BAD_BINARY_FAILURE, 'Bad binary packages'),
+      (BUILD_FLAKE, 'Local build flake'),
+      (INFRA_FAILURE, 'Other Infrastructure failure'),
+      (MYSTERY, 'Unknown failure: MyStErY'))
+
+  # Warning: Some field constraints are duplicated here from the database
+  # schema in CIDB.
+  id = models.AutoField(primary_key=True)
+  build_id = models.ForeignKey('BuildTable', db_column='build_id')
+  last_updated = models.DateTimeField(auto_now=True)
+  last_annotator = models.CharField(max_length=80)
+  failure_category = models.CharField(
+      max_length=max(len(x) for x, y in FAILURE_CATEGORY_CHOICES),
+      choices=FAILURE_CATEGORY_CHOICES,
+      default='mystery')
+  failure_message = models.CharField(max_length=1024, blank=True, null=True)
+  blame_url = models.CharField(max_length=80, blank=True, null=True)
+  notes = models.CharField(max_length=1024, blank=True, null=True)
+  deleted = models.BooleanField(default=False, null=False)
diff --git a/appengine/cq_stats/build_annotations/static/build_annotations/base.css b/appengine/cq_stats/build_annotations/static/build_annotations/base.css
new file mode 100644
index 0000000..a95a00e
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/static/build_annotations/base.css
@@ -0,0 +1,100 @@
+/* Copyright 2015 The Chromium OS Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+body {
+  margin-left: 20px;
+  margin-right: 20px;
+}
+
+/* Navigation bar */
+ul.navbar {
+  list-style-type: none;
+  margin: 0;
+  padding: 0;
+  overflow: hidden;
+}
+li.navbar {
+  float: left;
+  padding: 5px;
+}
+a.navbar {
+  display: block;
+  width: 150px;
+  background-color: #B2CFB9;
+  font-weight: bold;
+  text-align: center;
+}
+
+/* Generic table / heading / form-error styling. */
+td, th {
+  text-align: left;
+}
+ul.errorlist {
+  color: #DF0101;
+  font-weight: bold;
+}
+table {
+  margin-top: 30px;
+  margin-bottom: 30px;
+}
+h1, h2, h3 {
+  color: #0B3B17;
+}
+
+/* Builds-list and annotations-list tables. */
+table.build_list, table.annotations_list {
+  border: 1px solid green;
+  width: 100%;
+  display: inline;
+}
+th.build_list, th.annotations_list {
+  background-color: #0B3B17;
+  color: #F2F5A9;
+  text-align: center;
+}
+td.build_list, td.annotations_list {
+  padding-left: 10px;
+  padding-right: 10px;
+  text-align: left;
+  display: inherit;
+}
+/* Per-status row colors in the builds list (class is
+ * build_item_{{ build_row.status }} in the template). */
+tr.build_item_pass {
+  background-color: #B2DF99;
+}
+tr.build_item_inflight {
+  background-color: #DDDF99;
+}
+tr.build_item_fail {
+  background-color: #DF99A9;
+}
+tr.build_item_aborted {
+  background-color: #C499DF;
+}
+
+/* Build-details summary table on the annotations edit page. */
+th.build_details, td.build_details {
+  padding-right: 30px;
+  background-color: #E6E6E6;
+}
+
+/* Visibility helpers, toggled by editAnnotation() in base.js. */
+.hidden {
+  display: none;
+}
+.shown {
+  display: block;
+}
+
+/* Append a popup icon to links that open an external page. */
+a.external_link:after {
+  display: inline-block;
+  content: "";
+  width: 15px;
+  height: 15px;
+  background: transparent url("images/popup_link.png") no-repeat;
+}
+
+/* Centering helpers (used for the charts on the builds list page). */
+div.centered_div_container {
+  text-align: center;
+}
+div.centered_div_block {
+  display: inline-block;
+  margin: auto;
+}
diff --git a/appengine/cq_stats/build_annotations/static/build_annotations/base.js b/appengine/cq_stats/build_annotations/static/build_annotations/base.js
new file mode 100644
index 0000000..846b84d
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/static/build_annotations/base.js
@@ -0,0 +1,69 @@
+// Copyright 2015 The Chromium OS Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Page entry point: localize all UTC timestamps rendered by the server.
+window.onload = function() {
+  localize_times()
+}
+
+// Switch one annotation row from the read-only view to the inline edit form
+// (hides the "_noedit" row, reveals the "_edit" row; ids come from the
+// forloop counter in edit_annotations.html). Returns false so onclick
+// handlers suppress the default link action.
+function editAnnotation(base_id_str) {
+  document.getElementById("annotation_" + base_id_str + "_noedit").className =
+      "hidden";
+  document.getElementById("annotation_" + base_id_str + "_edit").className = "";
+  return false;
+}
+
+// Copy the server-cached latest build id (hidden input) into the search
+// form's latest_build_id field. Returns false to suppress default action.
+function populateLatest() {
+  var latest_build_id = document.getElementById("latest_build_id_cached").value;
+  document.getElementById("id_latest_build_id").value = latest_build_id;
+  return false;
+}
+
+// Copied/forked from
+// https://chromium.googlesource.com/infra/infra.git/+/master/appengine/chromium_status/static/js/main/main.js
+function localize_times() {
+  // Localize all the UTC timestamps coming from the server to whatever
+  // the user has set in their browser.
+  require(["dojo/date/locale"], function(locale) {
+    // Format 'date' using separate date and time patterns.
+    function format(date, datePattern, timePattern) {
+      // The dojo guys like to add a sep between the date and the time
+      // fields for us (based on locale).  Since we want a standards
+      // format, that sep is pure noise, so kill it with {...}.
+      // https://bugs.dojotoolkit.org/ticket/17544
+      return locale.format(new Date(date), {
+          formatLength: 'short',
+          datePattern: datePattern + '{',
+          timePattern: '}' + timePattern
+        }).replace(/{.*}/, ' ');
+    }
+    function long_date(date) { // RFC2822
+      return format(date, 'EEE, dd MMM yyyy', 'HH:mm:ss z');
+    }
+    function short_date(date) {
+      return format(date, 'EEE, dd MMM', 'HH:mm');
+    }
+    var now = new Date();
+    var curr_year = now.getFullYear();
+    // Local timezone abbreviation, e.g. for fields that only show a tz.
+    var tzname = locale.format(now, {
+        selector: 'time',
+        timePattern: 'z'
+      });
+    var i, elements;
+    // Convert all the fields that have a timezone already.
+    elements = document.getElementsByName('date.datetz');
+    for (i = 0; i < elements.length; ++i)
+      elements[i].innerText = long_date(elements[i].innerText);
+    // Convert all the fields that lack a timezone (which we know is UTC).
+    // We'll assume the timestamps represent the current year as it'll only
+    // really affect the short day-of-week name, and even then it'll only be
+    // slightly off during the ~1st week of January.
+    elements = document.getElementsByName('date.date');
+    for (i = 0; i < elements.length; ++i)
+      elements[i].innerText = short_date(elements[i].innerText + ' ' + curr_year
+                                         + ' UTC');
+    // Convert all the fields that are just a timezone.
+    elements = document.getElementsByName('date.tz');
+    for (i = 0; i < elements.length; ++i)
+      elements[i].innerText = tzname;
+  });
+}
diff --git a/appengine/cq_stats/build_annotations/static/build_annotations/images/popup_link.png b/appengine/cq_stats/build_annotations/static/build_annotations/images/popup_link.png
new file mode 100644
index 0000000..78e8f9d
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/static/build_annotations/images/popup_link.png
Binary files differ
diff --git a/appengine/cq_stats/build_annotations/templates/build_annotations/base.html b/appengine/cq_stats/build_annotations/templates/build_annotations/base.html
new file mode 100644
index 0000000..52d7575
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/templates/build_annotations/base.html
@@ -0,0 +1,28 @@
+<html lang="en">
+  <head>
+  {% load staticfiles %}
+  <link rel="stylesheet" type="text/css" href="{% static 'build_annotations/base.css' %}"/>
+  <script type="text/javascript"
+          data-dojo-config="async:true"
+          src="//ajax.googleapis.com/ajax/libs/dojo/1.10.3/dojo/dojo.js"></script>
+  <script type="text/javascript" src="{% static 'build_annotations/base.js' %}"></script>
+
+  {% block template-private-imports %}
+  <!-- This is especially needed for the django 'load' command which works like
+       python import - loaded files are private to the template -->
+  {% endblock %}
+  </head>
+
+  <body>
+    {% block welcome-header %}
+    <p style="text-align: right">Welcome {{ username }}! 'notate 'em all!</p>
+    {% endblock %}
+
+    {% block content %}
+    <p style="color: red; layout: block">
+      All the page's real content should have gone here.
+      If you're reading me, someone forgot to do their homework.
+    </p>
+    {% endblock %}  <!-- content -->
+  </body>
+</html>
diff --git a/appengine/cq_stats/build_annotations/templates/build_annotations/edit_annotations.html b/appengine/cq_stats/build_annotations/templates/build_annotations/edit_annotations.html
new file mode 100644
index 0000000..663611d
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/templates/build_annotations/edit_annotations.html
@@ -0,0 +1,111 @@
+{% extends 'build_annotations/base.html' %}
+
+{% block template-private-imports %}
+{% load build_annotations_filters %}
+{% endblock %}
+
+{% block content %}
+<h1>Annotate {{ build_config|title }} Build {{ build_row.id }}</h1>
+<ul class="navbar">
+  <li class="navbar">
+    <a class="navbar" href="{% url 'build_annotations:builds_list' build_config %}">Builds List</a>
+  </li>
+</ul>
+
+<table class="build_details">
+  <tr>
+    <th class="build_details">Build Number</th>
+    <td class="build_details"><a class="external_link" href="https://uberchromegw.corp.google.com/i/{{ build_row.waterfall }}/builders/{{ build_row.builder_name }}/builds/{{ build_row.build_number }}" target="_blank" rel="nofollow">{{ build_row.build_number }}</a></td>
+  </tr>
+  <tr>
+    <th class="build_details">Status</th>
+    <td class="build_details">{{ build_row.status }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">Summary</th>
+    <td class="build_details">{{ build_row.summary|linebreaksbr }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">Start Time</th>
+    <td class="build_details" name="date.datetz">{{ build_row.start_time|date:"D, d M Y H:i" }} UTC</td>
+  </tr>
+  <tr>
+    <th class="build_details">Runtime</th>
+    <td class="build_details">{{ build_row.run_time }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">Weekday</th>
+    <td class="build_details">{{ build_row.weekday }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">ChromeOS Version</th>
+    <td class="build_details">{{ build_row.chromeos_version }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">Chrome Version</th>
+    <td class="build_details">{{ build_row.chrome_version }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">Failed Stages</th>
+    <td class="build_details">{{ build_row.failed_stages }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">CLs Picked Up</th>
+    <td class="build_details">{{ build_row.picked_up_count }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">CLs Submitted</th>
+    <td class="build_details">{{ build_row.submitted_count }}</td>
+  </tr>
+  <tr>
+    <th class="build_details">CLs Kicked Out</th>
+    <td class="build_details">{{ build_row.kicked_out_count }}</td>
+  </tr>
+</table>
+<form action="{% url 'build_annotations:edit_annotations' build_config build_row.id %}"
+      method="post">
+  {% csrf_token %}
+  {{ annotations_formset.management_form }}
+  <table class="annotations_list">
+    <tr>
+    {% for form in annotations_formset %}
+      {% if forloop.first %}
+        <tr>
+          <th class="annotations_list">Failure Category</th>
+          <th class="annotations_list">Failure Message</th>
+          <th class="annotations_list">Blame URL</th>
+          <th class="annotations_list">Notes</th>
+          <th class="annotations_list">Update</th>
+        </tr>
+      {% endif %}
+      {% for hidden in form.hidden_fields %}
+        {{ hidden }}
+      {% endfor %}
+
+      {% if not forloop.last %}
+        <tr id="annotation_{{ forloop.counter }}_noedit">
+          <td>{{ form.failure_category.value|default_if_none:""|striptags|crosurlize|linebreaks }}</td>
+          <td>{{ form.failure_message.value|default_if_none:""|striptags|crosurlize|linebreaks }}</td>
+          <td>{{ form.blame_url.value|default_if_none:""|striptags|crosurlize|linebreaks }}</td>
+          <td>{{ form.notes.value|default_if_none:""|striptags|crosurlize|linebreaks }}</td>
+          <td><a href="javascript:void(0)" onclick="editAnnotation('{{ forloop.counter }}')">edit</a>
+        </tr>
+      {% endif %}
+
+      {% if not forloop.last %}
+        <tr id="annotation_{{ forloop.counter }}_edit" class="hidden">
+      {% else %}
+        <tr id="annotation_{{ forloop.counter }}_edit">
+      {% endif %}
+            <td class="annotations_list">{{ form.failure_category }}<br><div class="errorlist">{{ form.failure_category.errors }}</div></td>
+            <td class="annotations_list">{{ form.failure_message }}<br><div class="errorlist">{{ form.failure_message.errors }}</div></td>
+            <td class="annotations_list">{{ form.blame_url }}<br><div class="errorlist">{{ form.blame_url.errors }}</div></td>
+            <td class="annotations_list">{{ form.notes }}<br><div class="errorlist">{{ form.notes.errors }}</div></td>
+            <td class="annotations_list">Mark for deletion: {{ form.deleted }}<br><div class="errorlist">{{ form.deleted.errors }}</div></td>
+        </tr>
+    {% endfor %}
+  </table>
+  <input type="submit" value="Save Changes"/>
+</form>
+{% endblock %}  <!-- content -->
+
diff --git a/appengine/cq_stats/build_annotations/templates/build_annotations/index.html b/appengine/cq_stats/build_annotations/templates/build_annotations/index.html
new file mode 100644
index 0000000..4591d2b
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/templates/build_annotations/index.html
@@ -0,0 +1,108 @@
+{% extends 'build_annotations/base.html' %}
+
+{% block template-private-imports %}
+  <!-- Call this once on every web page. -->
+  <script type="text/javascript" src="https://www.google.com/jsapi"></script>
+
+  <script type="text/javascript">
+    // Load the Google Charts "core" charts (bar, line, pie...)
+    google.load("visualization", '1', {packages:['corechart']});
+    // When the page loads call the drawChart() function.
+    google.setOnLoadCallback(drawChart);
+
+    function drawChart() {
+      var data = google.visualization.arrayToDataTable([          // The chart data.
+        ['CL', 'Handling Time (minutes)'],
+        {% for key, value in histogram_data.items %}
+        ['{{ key }}', {{ value }}],
+        {% endfor %}
+      ]);
+
+      var options = {      // Customize a few of Google Charts' hundreds of options.
+        title: 'Histogram: Patch handling times',
+        width: 600,
+        height: 400,
+        hAxis: { title: 'Patch handling time', },
+        vAxis: { title: '# Patches', },
+      };
+
+      // Create and draw a Google Column Chart.
+      // To experiment with different types of charts, replace "ColumnChart" with
+      // the desired chart type (e.g., "PieChart", "LineChart").
+      var chart = new google.visualization.Histogram(document.getElementById('chart_div'));
+      google.visualization.events.addListener(chart, 'ready', function() {
+        document.getElementById('chart_img_div').innerHTML = '<a href="' + chart.getImageURI() + '">Save Chart</a>'
+      })
+      chart.draw(data, options);
+    }
+  </script>
+
+{% endblock %}  <!-- template-private-imports -->
+{% block content %}
+<h1> {{ build_config|title }} Builds</h1>
+<ul class="navbar">
+  <li class="navbar">
+    <a class="navbar" href="{% url 'build_annotations:builds_list' 'master-paladin' %}">Master-Paladin</a>
+  </li>
+  <li class="navbar">
+    <a class="navbar" href="{% url 'build_annotations:builds_list' 'master-release' %}">Master-Release</a>
+  </li>
+  <li class="navbar">
+    <a class="navbar" href="{% url 'build_annotations:builds_list' 'master-chromium-pfq' %}">Chromium-PFQ</a>
+  </li>
+</ul>
+<form action="{% url 'build_annotations:builds_list' build_config %}" method="post">
+  {% csrf_token %}
+  <table>
+    <tr>
+      <th><label>{{ search_form.latest_build_id.label }}</label></th>
+      <td>{{ search_form.latest_build_id }}</td>
+      <td>
+        <button type="button" onclick="populateLatest()">Get latest</button>
+        <input type="hidden" id="latest_build_id_cached" value="{{ latest_build_id_cached }}"/>
+      </td>
+      <td class="error_message">{{ search_form.latest_build_id.errors }}</td>
+    </tr>
+    <tr>
+      <th><label>{{ search_form.num_builds.label }}</label></th>
+      <td>{{ search_form.num_builds }}</td>
+      <td class="error_message">{{ search_form.num_builds.errors }}</td>
+    </tr>
+  </table>
+  <input type="submit" value="Update List"/>
+</form>
+
+<div id='chart_div_container' class='centered_div_container'>
+  <div id='chart_div' class='centered_div_block'></div>
+  <div id='chart_img_div' class='centered_div_block'></div>
+</div>
+
+<table class="build_list">
+  <tr>
+    <th class="build_list">Build ID</th>
+    <th class="build_list">Build Number</th>
+    <th class="build_list">Status</th>
+    <th class="build_list">Summary</th>
+    <th class="build_list">Start Time</th>
+    <th class="build_list">Runtime</th>
+    <th class="build_list">CLs Picked Up</th>
+    <th class="build_list">Submitted</th>
+    <th class="build_list">Rejected</th>
+    <th class="build_list">Annotation Summary</th>
+  </tr>
+{% for build_row in builds_list %}
+  <tr class="build_item_{{ build_row.status }}">
+    <td class="build_list"><a href="{% url 'build_annotations:edit_annotations' build_config build_row.id %}">{{ build_row.id }}</a></td>
+    <td class="build_list">{{ build_row.build_number }}</td>
+    <td class="build_list">{{ build_row.status|truncatechars:20 }}</td>
+    <td class="build_list">{{ build_row.summary|truncatechars:50|cut:'\n\r' }}</td>
+    <td class="build_list" name="date.datetz">{{ build_row.start_time|date:"D, d M Y H:i" }} UTC</td>
+    <td class="build_list">{{ build_row.run_time }}</td>
+    <td class="build_list">{{ build_row.picked_up_count }}</td>
+    <td class="build_list">{{ build_row.submitted_count }}</td>
+    <td class="build_list">{{ build_row.kicked_out_count }}</td>
+    <td class="build_list">{{ build_row.annotation_summary|truncatechars:150 }}</td>
+  </tr>
+{% endfor %}
+</table>
+{% endblock %}  <!-- content -->
diff --git a/appengine/cq_stats/build_annotations/templatetags/__init__.py b/appengine/cq_stats/build_annotations/templatetags/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/templatetags/__init__.py
diff --git a/appengine/cq_stats/build_annotations/templatetags/build_annotations_filters.py b/appengine/cq_stats/build_annotations/templatetags/build_annotations_filters.py
new file mode 100644
index 0000000..771feaf
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/templatetags/build_annotations_filters.py
@@ -0,0 +1,46 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Custom template tags for the build_annotations app."""
+
+from __future__ import print_function
+
+from django import template
+from django.template import defaultfilters
+from django.utils import safestring
+
+register = template.Library()
+
+
+@register.filter(needs_autoescape=True, is_safe=True)
+@defaultfilters.stringfilter
+def crosurlize(value, autoescape=None):
+  """URLize strings.
+
+  This builds on top of the url'ize function from django. In addition, it
+  creates links for cros specific regexs.
+
+  TODO(pprabhu) This should be merged with the (much more thorough) urlize
+  functionality in the chromium_status AE app.
+  """
+  words = value.split(' ')
+  # NOTE(review): xrange is Python 2 only -- presumably this app targets the
+  # Python 2 appengine runtime; revisit on any runtime upgrade.
+  for i in xrange(len(words)):
+    is_url = False
+    word = words[i]
+    # Shorthand links like crbug.com/123, crosreview.com/456 or b/789.
+    if (word.startswith('crbug.com/') or word.startswith('crosreview.com/') or
+        word.startswith('b/')):
+      parts = word.split('/')
+      # Only linkify when the part after the '/' is a plain integer.
+      if len(parts) == 2:
+        try:
+          int(parts[1])
+          is_url = True
+        except ValueError:
+          pass
+
+    if is_url:
+      # In-place urlize.
+      words[i] = '<a href="http://%s" rel="nofollow">%s</a>' % (word, word)
+
+  # mark_safe so the anchors built above survive django's escaping before
+  # handing off to the stock urlize filter.
+  value = safestring.mark_safe(' '.join(words))
+  return defaultfilters.urlize(value, autoescape)
diff --git a/appengine/cq_stats/build_annotations/urls.py b/appengine/cq_stats/build_annotations/urls.py
new file mode 100644
index 0000000..21a643b
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/urls.py
@@ -0,0 +1,25 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Url disptacher for the build_annotations app."""
+
+from __future__ import print_function
+
+from django import http
+from django.conf import urls
+
+from build_annotations import views
+
+
+urlpatterns = urls.patterns(
+    '',
+    urls.url(r'^$',
+             lambda r: http.HttpResponseRedirect(
+                 'builds_list/master-paladin/')),
+    urls.url(r'^builds_list/(?P<build_config>[\w-]+)/$',
+             views.ListBuildsView.as_view(),
+             name='builds_list'),
+    urls.url(r'edit_annotations/(?P<build_config>[\w-]+)/(?P<build_id>\d+)/$',
+             views.EditAnnotationsView.as_view(),
+             name='edit_annotations'))
diff --git a/appengine/cq_stats/build_annotations/views.py b/appengine/cq_stats/build_annotations/views.py
new file mode 100644
index 0000000..10c0f1f
--- /dev/null
+++ b/appengine/cq_stats/build_annotations/views.py
@@ -0,0 +1,191 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""All django views for the build_annotations app."""
+
+from __future__ import print_function
+
+from django import http
+from django import shortcuts
+from django.core import urlresolvers
+from django.views import generic
+from google.appengine.api import users
+
+from build_annotations import build_row_controller
+from build_annotations import models as ba_models
+from build_annotations import forms as ba_forms
+
+
+_DEFAULT_USERNAME = "SomeoneGotHereWithoutLoggingIn"
+
+class ListBuildsView(generic.list.ListView):
+  """The landing page view of the app. Lists requested builds."""
+
+  template_name = 'build_annotations/index.html'
+
+  def __init__(self, *args, **kwargs):
+    """Reset all per-request state to safe defaults."""
+    super(ListBuildsView, self).__init__(*args, **kwargs)
+    self._username = _DEFAULT_USERNAME
+    self._build_config = None
+    self._search_form = None
+    self._controller = None
+    self._session = None
+    self._builds_list = None
+    self._hist = None
+
+  def get_queryset(self):
+    """Fetch the builds to display, plus the handling-time histogram.
+
+    Uses the search range stored in the session (latest_build_id,
+    num_builds), optionally restricted to the requested build_config.
+    """
+    self._EnsureSessionInitialized()
+    self._controller = build_row_controller.BuildRowController()
+    build_config_q = None
+    if self._build_config is not None:
+      build_config_q = self._controller.GetQRestrictToBuildConfig(
+          self._build_config)
+    self._builds_list = self._controller.GetStructuredBuilds(
+        latest_build_id=self._session['latest_build_id'],
+        num_builds=self._session['num_builds'],
+        extra_filter_q=build_config_q)
+    # Histogram data is computed here (rather than in get_context_data) so
+    # it uses the exact same search range as the builds list.
+    self._hist = self._controller.GetHandlingTimeHistogram(
+        latest_build_id=self._session['latest_build_id'],
+        num_builds=self._session['num_builds'],
+        extra_filter_q=build_config_q)
+    return self._builds_list
+
+  def get_context_object_name(self, _):
+    """Name under which the queryset is exposed to the template."""
+    return 'builds_list'
+
+  def get_context_data(self, **kwargs):
+    """Add username, search form, build config and histogram to context."""
+    context = super(ListBuildsView, self).get_context_data(**kwargs)
+    context['username'] = self._username
+    context['search_form'] = self._GetSearchForm()
+    context['latest_build_id_cached'] = self._GetLatestBuildId()
+    context['build_config'] = self._build_config
+    context['histogram_data'] = self._hist
+    return context
+
+  # pylint: disable=arguments-differ
+  def get(self, request, build_config=None):
+    # We're assured that a username exists in prod because our app sits behind
+    # appengine login. Not so when running from dev_appserver.
+    self._username = users.get_current_user()
+    self._session = request.session
+    self._build_config = build_config
+    return super(ListBuildsView, self).get(request)
+
+  def post(self, request, build_config):
+    """Handle a search-form submission, then render the list like a GET."""
+    self._session = request.session
+    form = ba_forms.SearchForm(request.POST)
+    self._search_form = form
+    # Invalid forms fall through to get() so errors render with the list.
+    if form.is_valid():
+      self._session['latest_build_id'] = form.cleaned_data['latest_build_id']
+      self._session['num_builds'] = form.cleaned_data['num_builds']
+    return self.get(request, build_config)
+
+  def put(self, *args, **kwargs):
+    """Treat PUT exactly like POST."""
+    return self.post(*args, **kwargs)
+
+  def _GetSearchForm(self):
+    """Return the form bound in post(), or one rebuilt from the session."""
+    if self._search_form is not None:
+      return self._search_form
+    return ba_forms.SearchForm(
+        {'latest_build_id': self._session['latest_build_id'],
+         'num_builds': self._session['num_builds']})
+
+  def _EnsureSessionInitialized(self):
+    """Seed the session's search parameters with defaults on first visit."""
+    latest_build_id = self._session.get('latest_build_id', None)
+    num_results = self._session.get('num_builds', None)
+    if latest_build_id is None or num_results is None:
+      # We don't have a valid search history in this session, obtain defaults.
+      controller = build_row_controller.BuildRowController()
+      controller.GetStructuredBuilds(num_builds=1)
+      self._session['latest_build_id'] = controller.latest_build_id
+      self._session['num_builds'] = controller.DEFAULT_NUM_BUILDS
+
+  def _GetLatestBuildId(self):
+    """Return the id of the most recent build known to the controller."""
+    controller = build_row_controller.BuildRowController()
+    controller.GetStructuredBuilds(num_builds=1)
+    return controller.latest_build_id
+
+
+class EditAnnotationsView(generic.base.View):
+  """View that handles annotation editing page."""
+
+  template_name = 'build_annotations/edit_annotations.html'
+
+  def __init__(self, *args, **kwargs):
+    """Reset all per-request state to safe defaults."""
+    self._username = _DEFAULT_USERNAME
+    self._formset = None
+    self._context = {}
+    self._request = None
+    self._session = None
+    self._build_config = None
+    self._build_id = None
+    super(EditAnnotationsView, self).__init__(*args, **kwargs)
+
+  def get(self, request, build_config, build_id):
+    """Render the annotations edit page for the given build."""
+    # We're assured that a username exists in prod because our app sits behind
+    # appengine login. Not so when running from dev_appserver.
+    self._username = users.get_current_user()
+    self._request = request
+    self._build_config = build_config
+    self._build_id = build_id
+    self._session = request.session
+    self._PopulateContext()
+    return shortcuts.render(request, self.template_name, self._context)
+
+  def post(self, request, build_config, build_id):
+    """Validate and save the submitted annotations formset.
+
+    On success, redirect back to the edit page (post/redirect/get); on
+    validation failure, re-render the page with form errors.
+    """
+    # We're assured that a username exists in prod because our app sits behind
+    # appengine login. Not so when running from dev_appserver.
+    self._username = users.get_current_user()
+    self._request = request
+    self._build_config = build_config
+    self._build_id = build_id
+    self._session = request.session
+    self._formset = ba_forms.AnnotationsFormSet(request.POST)
+    if self._formset.is_valid():
+      self._SaveAnnotations()
+      return http.HttpResponseRedirect(
+          urlresolvers.reverse('build_annotations:edit_annotations',
+                               args=[self._build_config, self._build_id]))
+    else:
+      self._PopulateContext()
+      return shortcuts.render(request, self.template_name, self._context)
+
+  def _PopulateContext(self):
+    """Build the template context; 404 if the build does not exist."""
+    build_row = self._GetBuildRow()
+    if build_row is None:
+      raise http.Http404
+
+    self._context = {}
+    self._context['username'] = self._username
+    self._context['build_config'] = self._build_config
+    self._context['build_row'] = build_row
+    self._context['annotations_formset'] = self._GetAnnotationsFormSet()
+
+  def _GetBuildRow(self):
+    """Return the build row for self._build_id, or None if not found."""
+    controller = build_row_controller.BuildRowController()
+    build_row_list = controller.GetStructuredBuilds(
+        latest_build_id=self._build_id,
+        num_builds=1)
+    if not build_row_list:
+      return None
+    return build_row_list[0]
+
+  def _GetAnnotationsFormSet(self):
+    """Return the formset bound in post(), or one built from existing rows."""
+    if self._formset is None:
+      build_row = self._GetBuildRow()
+      if build_row is not None:
+        queryset = build_row.GetAnnotationsQS()
+      else:
+        queryset = ba_models.AnnotationsTable.objects.none()
+      self._formset = ba_forms.AnnotationsFormSet(queryset=queryset)
+    return self._formset
+
+  def _SaveAnnotations(self):
+    """Persist validated annotations, stamping build id and annotator."""
+    models_to_save = self._formset.save(commit=False)
+    build_row = self._GetBuildRow()
+    for model in models_to_save:
+      # New annotations have no build association yet; attach them to the
+      # build being edited.
+      if not hasattr(model, 'build_id') or model.build_id is None:
+        model.build_id = build_row.build_entry
+      model.last_annotator = self._username
+      model.save()
diff --git a/appengine/cq_stats/cq_stats/__init__.py b/appengine/cq_stats/cq_stats/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/appengine/cq_stats/cq_stats/__init__.py
diff --git a/appengine/cq_stats/cq_stats/fake_system_modules.py b/appengine/cq_stats/cq_stats/fake_system_modules.py
new file mode 100644
index 0000000..a7393cc
--- /dev/null
+++ b/appengine/cq_stats/cq_stats/fake_system_modules.py
@@ -0,0 +1,30 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Fake out system python modules that are not available on AE.
+
+Chromite imports some standard python modules that are not available on the
+restricted sandbox environment on appengine. Fake out these modules such that
+imports don't break, but any attempt to use the modules blows up obviously.
+"""
+
+from __future__ import print_function
+
+import os
+
+
+FAKE_HOMEDIR = '/tmp/an_obviously_non_existent_home_dir'
+def _expanduser(_):
+  """A fake implementation of os.path.expanduser.
+
+  os.path.expanduser needs to import 'pwd' that is not available on appengine.
+  In fact, the concept of HOMEDIR doesn't make sense at all. So, patch it to
+  return a safe fake path. If we try to use it anywhere, it will fail obviously.
+  """
+  return FAKE_HOMEDIR
+
+
+# Importing this module has the side effect of patching all of the following
+# library modules / classes / functions.
+os.path.expanduser = _expanduser
diff --git a/appengine/cq_stats/cq_stats/middleware.py b/appengine/cq_stats/cq_stats/middleware.py
new file mode 100644
index 0000000..92df90e
--- /dev/null
+++ b/appengine/cq_stats/cq_stats/middleware.py
@@ -0,0 +1,50 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Custom middlewares applicable to all apps on this site."""
+
+from __future__ import print_function
+
+from django.db import connection
+
+
+class SqlPrintingMiddleware(object):
+  """Middleware to print SQL stats for each page load."""
+
+  # We hard code the terminal width because appengine SDK does not support the
+  # fcntl python module. Without that, there's no reliable way to obtain the
+  # terminal width.
+  TERMINAL_WIDTH = 80
+  INDENTATION = 2
+  SQL_WIDTH = TERMINAL_WIDTH - INDENTATION
+  INDENTATION_SPACE = ' ' * INDENTATION
+
+  def _DisplayRed(self, value):
+    return '\033[1;31m%s\033[0m' % value
+
+  def _DisplayGreen(self, value):
+    return '\033[1;32m%s\033[0m' % value
+
+  def _PrintWithIndentation(self, value):
+    print ('%s%s' % (self.INDENTATION_SPACE, value))
+
+  def process_response(self, _, response):
+    """Log SQL stats before forwarding response to the user."""
+    if len(connection.queries) > 0:
+      total_time = 0.0
+      for query in connection.queries:
+        total_time = total_time + float(query['time'])
+
+        nice_sql = query['sql']
+        sql = '[%s] %s' % (self._DisplayRed(query['time']), nice_sql)
+
+        while len(sql) > self.SQL_WIDTH:
+          self._PrintWithIndentation(sql[:self.SQL_WIDTH])
+          sql = sql[self.SQL_WIDTH:]
+        self._PrintWithIndentation(sql)
+      self._PrintWithIndentation(self._DisplayGreen(
+          '[TOTAL QUERIES: %s]' % len(connection.queries)))
+      self._PrintWithIndentation(self._DisplayGreen(
+          '[TOTAL TIME: %s seconds]' % total_time))
+    return response
diff --git a/appengine/cq_stats/cq_stats/settings.py b/appengine/cq_stats/cq_stats/settings.py
new file mode 100644
index 0000000..2f54b39
--- /dev/null
+++ b/appengine/cq_stats/cq_stats/settings.py
@@ -0,0 +1,254 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Django settings for cq_stats project
+
+TODO(pprabhu): These settings should be instead stored in the app local DB that
+AE provides.
+It's probably safer that way (no settings in source), and it's easier to manage
+the instance from the AE admin interface than having to update source and
+relaunch.
+"""
+
+from __future__ import print_function
+
+import os
+
+
+def _IsOnAppEngine():
+  return os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine')
+
+
+def _AssertFileExists(path):
+  assert os.path.isfile(path), '%s must exist on %s' % (
+      path,
+      'deployed app' if _IsOnAppEngine() else 'local dev_appserver')
+
+
+if _IsOnAppEngine():
+  # Import into our namespace. This only contains auto-generated constants.
+  # It only exists on the deployed app.
+  # pylint: disable=no-name-in-module
+  # pylint: disable=import-error
+  # pylint: disable=wildcard-import
+  from cq_stats.deploy_settings import *
+else:
+  # All the settings that would be defined by deploy_settings.
+  DEBUG = True
+  TEMPLATE_DEBUG = True
+  SECRET_KEY = 'PLACEHOLDER_NON_KEY'
+  CIDB_PROJECT_NAME = 'cosmic-strategy-646'
+  CIDB_INSTANCE_NAME = 'debug-cidb'
+
+# ##############################################################################
+# DEPLOY OVERRIDES
+# Some settings are autogenerated by the deploy script.
+# If you want to override any of them, just define them here after the generated
+# module has been imported.
+#
+# The most common case. When something goes wrong only after deploy.
+# DEBUG = True
+# TEMPLATE_DEBUG = True
+# ##############################################################################
+
+BASE_DIR = os.path.dirname(os.path.dirname(__file__))
+PROJECT_NAME = os.path.basename(os.path.dirname(__file__))
+PROJECT_DIR = os.path.join(BASE_DIR, PROJECT_NAME)
+# dev_appserver.py doesn't pass in any environment variables, so you *must*
+# create this symlink yourself. :(
+# These credentials must be for the 'annotator' cidb user for the debug
+# instance of cidb. See go/cros-cidb-admin
+CIDB_CREDS_DIR = os.path.join(BASE_DIR, 'annotator_cidb_creds')
+
+ANNOTATOR_PASSWORD_PATH = os.path.join(CIDB_CREDS_DIR, 'password.txt')
+_AssertFileExists(ANNOTATOR_PASSWORD_PATH)
+with open(ANNOTATOR_PASSWORD_PATH, 'r') as f:
+  annotator_password = f.read().strip()
+
+if not _IsOnAppEngine():
+  CIDB_HOST_PATH = os.path.join(CIDB_CREDS_DIR, 'host.txt')
+  CIDB_SERVER_CA_PATH = os.path.join(CIDB_CREDS_DIR, 'server-ca.pem')
+  CIDB_CLIENT_CERT_PATH = os.path.join(CIDB_CREDS_DIR, 'client-cert.pem')
+  CIDB_CLIENT_KEY_PATH = os.path.join(CIDB_CREDS_DIR, 'client-key.pem')
+  _AssertFileExists(CIDB_HOST_PATH)
+  _AssertFileExists(CIDB_SERVER_CA_PATH)
+  _AssertFileExists(CIDB_CLIENT_CERT_PATH)
+  _AssertFileExists(CIDB_CLIENT_KEY_PATH)
+  with open(CIDB_HOST_PATH, 'r') as f:
+    cidb_host = f.read().strip()
+
+# Setup database map.
+if os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine'):
+  # Running on production AppEngine. Use CloudSQL via unix socket.
+  default_database = {
+      'ENGINE': 'django.db.backends.mysql',
+      'HOST': '/cloudsql/%s:%s' % (CIDB_PROJECT_NAME,
+                                   CIDB_INSTANCE_NAME),
+      'NAME': 'cidb',
+      'USER': 'annotator',
+      'PASSWORD': annotator_password}
+else:
+  default_database = {
+      'ENGINE': 'django.db.backends.mysql',
+      'HOST': cidb_host,
+      'PORT': '3306',
+      'NAME': 'cidb',
+      'USER': 'annotator',
+      'PASSWORD': annotator_password,
+      'OPTIONS': {
+          'ssl': {'ca': CIDB_SERVER_CA_PATH,
+                  'cert': CIDB_CLIENT_CERT_PATH,
+                  'key': CIDB_CLIENT_KEY_PATH}}}
+
+DATABASES = {'default': default_database}
+
+# Hosts/domain names that are valid for this site; required if DEBUG is False
+# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
+ALLOWED_HOSTS = ['chromiumos-build-annotator.googleplex.com',
+                 'chromiumos-build-annotator-dbg.googleplex.com']
+
+# Local time zone for this installation. Choices can be found here:
+# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
+# although not all choices may be available on all operating systems.
+# In a Windows environment this must be set to your system time zone.
+TIME_ZONE = 'America/Los_Angeles'
+
+# Language code for this installation. All choices can be found here:
+# http://www.i18nguy.com/unicode/language-identifiers.html
+LANGUAGE_CODE = 'en-us'
+
+SITE_ID = 1
+
+# If you set this to False, Django will make some optimizations so as not
+# to load the internationalization machinery.
+USE_I18N = True
+
+# If you set this to False, Django will not format dates, numbers and
+# calendars according to the current locale.
+USE_L10N = True
+
+# If you set this to False, Django will not use timezone-aware datetimes.
+USE_TZ = True
+
+# Absolute filesystem path to the directory that will hold user-uploaded files.
+# Example: '/var/www/example.com/media/'
+MEDIA_ROOT = ''
+
+# URL that handles the media served from MEDIA_ROOT. Make sure to use a
+# trailing slash.
+# Examples: 'http://example.com/media/', 'http://media.example.com/'
+MEDIA_URL = ''
+
+# Absolute path to the directory static files should be collected to.
+# Don't put anything in this directory yourself; store your static files
+# in apps' 'static/' subdirectories and in STATICFILES_DIRS.
+# Example: '/var/www/example.com/static/'
+STATIC_ROOT = os.path.join(PROJECT_DIR, 'static')
+
+# URL prefix for static files.
+# Example: 'http://example.com/static/', 'http://static.example.com/'
+STATIC_URL = '/static/'
+
+# Additional locations of static files
+STATICFILES_DIRS = (
+    # Put strings here, like '/home/html/static' or 'C:/www/django/static'.
+    # Always use forward slashes, even on Windows.
+    # Don't forget to use absolute paths, not relative paths.
+)
+
+# List of finder classes that know how to find static files in
+# various locations.
+STATICFILES_FINDERS = (
+    'django.contrib.staticfiles.finders.FileSystemFinder',
+    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
+    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
+)
+
+# Make this unique, and don't share it with anybody.
+# TODO(pprabhu): Add secret key to valentine, must be updated before pushing to
+# prod.
+SECRET_KEY = 'NotSoSecretKeyThatSomeOneDreamtUp'
+
+# List of callables that know how to import templates from various sources.
+TEMPLATE_LOADERS = (
+    'django.template.loaders.filesystem.Loader',
+    'django.template.loaders.app_directories.Loader',
+    # 'django.template.loaders.eggs.Loader',
+)
+
+MIDDLEWARE_CLASSES = [
+    'django.middleware.common.CommonMiddleware',
+    'django.contrib.sessions.middleware.SessionMiddleware',
+    'django.middleware.csrf.CsrfViewMiddleware',
+    # 'django.contrib.auth.middleware.AuthenticationMiddleware',
+    'django.contrib.messages.middleware.MessageMiddleware',
+    # Uncomment the next line for simple clickjacking protection:
+    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
+]
+if DEBUG:
+  MIDDLEWARE_CLASSES.append('cq_stats.middleware.SqlPrintingMiddleware')
+MIDDLEWARE_CLASSES = tuple(MIDDLEWARE_CLASSES)
+
+ROOT_URLCONF = 'cq_stats.urls'
+
+# Python dotted path to the WSGI application used by Django's runserver.
+WSGI_APPLICATION = 'cq_stats.wsgi.application'
+
+TEMPLATE_DIRS = (
+    # Put strings here, like '/home/html/django_templates' or
+    # 'C:/www/django/templates'.
+    # Always use forward slashes, even on Windows.
+    # Don't forget to use absolute paths, not relative paths.
+)
+
+INSTALLED_APPS = (
+    # 'django.contrib.auth',
+    'django.contrib.contenttypes',
+    'django.contrib.sessions',
+    'django.contrib.sites',
+    'django.contrib.messages',
+    'django.contrib.staticfiles',
+    # 'django.contrib.admin',
+    # Uncomment the next line to enable admin documentation:
+    # 'django.contrib.admindocs',
+
+    # Apps in this project
+    'build_annotations'
+)
+
+SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
+
+# (pprabhu): Cookie based sessions are temporary. They have various drawbacks,
+# including a load time penalty if the size grows. OTOH, they are the easiest to
+# bringup on AppEngine. Let's use these to get started.
+SESSION_ENGINE = 'django.contrib.sessions.backends.signed_cookies'
+
+# A sample logging configuration. The only tangible logging
+# performed by this configuration is to send an email to
+# the site admins on every HTTP 500 error when DEBUG=False.
+# See http://docs.djangoproject.com/en/dev/topics/logging for
+# more details on how to customize your logging configuration.
+LOGGING = {
+    'version': 1,
+    'disable_existing_loggers': False,
+    'filters': {
+        'require_debug_false': {
+            '()': 'django.utils.log.RequireDebugFalse'
+        }
+    },
+    'handlers': {
+        'mail_admins': {
+            'level': 'ERROR',
+            'filters': ['require_debug_false'],
+            'class': 'django.utils.log.AdminEmailHandler'
+        }
+    },
+    'loggers': {
+        'django.request': {
+            'handlers': ['mail_admins'],
+            'level': 'ERROR',
+            'propagate': True,
+        },
+    }
+}
diff --git a/appengine/cq_stats/cq_stats/urls.py b/appengine/cq_stats/cq_stats/urls.py
new file mode 100644
index 0000000..79bf8fe
--- /dev/null
+++ b/appengine/cq_stats/cq_stats/urls.py
@@ -0,0 +1,21 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""The main url dispatcher for this project."""
+
+from __future__ import print_function
+
+from django import http
+from django.conf import urls
+
+
+# Uncomment the next two lines to enable the admin:
+# from django.contrib import admin
+# admin.autodiscover()
+urlpatterns = urls.patterns(
+    '',
+    urls.url(r'^$', lambda r: http.HttpResponseRedirect('build_annotations/')),
+    urls.url(r'^build_annotations/', urls.include(
+        'build_annotations.urls',
+        namespace='build_annotations')))
diff --git a/appengine/cq_stats/cq_stats/wsgi.py b/appengine/cq_stats/cq_stats/wsgi.py
new file mode 100644
index 0000000..e3e6c8f
--- /dev/null
+++ b/appengine/cq_stats/cq_stats/wsgi.py
@@ -0,0 +1,34 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""WSGI config for cq_stats project.
+
+This module contains the WSGI application used by Django's development server
+and any production WSGI deployments. It should expose a module-level variable
+named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
+this application via the ``WSGI_APPLICATION`` setting.
+
+Usually you will have the standard Django WSGI application here, but it also
+might make sense to replace the whole Django WSGI application with a custom one
+that later delegates to the Django one. For example, you could introduce WSGI
+middleware here, or combine a Django application with an application of another
+framework.
+"""
+
+from __future__ import print_function
+
+import os
+
+
+# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
+# if running multiple sites in the same mod_wsgi process. To fix this, use
+# mod_wsgi daemon mode with each site in its own daemon process, or use
+# os.environ["DJANGO_SETTINGS_MODULE"] = "cq_stats.settings"
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'cq_stats.settings')
+
+# This application object is used by any WSGI server configured to use this
+# file. This includes Django's development server, if the WSGI_APPLICATION
+# setting points here.
+from django.core.wsgi import get_wsgi_application
+application = get_wsgi_application()
diff --git a/appengine/cq_stats/deploy_app b/appengine/cq_stats/deploy_app
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/appengine/cq_stats/deploy_app
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/appengine/cq_stats/deploy_app.py b/appengine/cq_stats/deploy_app.py
new file mode 100644
index 0000000..2b5698f
--- /dev/null
+++ b/appengine/cq_stats/deploy_app.py
@@ -0,0 +1,145 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper script to deploy the cq_stats app to our appengine instances."""
+
+from __future__ import print_function
+
+import os
+import time
+
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+
+
+APP_INSTANCE_DEBUG = 'debug'
+APP_INSTANCE_PROD = 'prod'
+
+APP_INSTANCE_NAME = {
+    APP_INSTANCE_DEBUG: 'google.com:chromiumos-build-annotator-dbg',
+    APP_INSTANCE_PROD: 'google.com:chromiumos-build-annotator',
+}
+APP_INSTANCE_CIDB = {
+    APP_INSTANCE_DEBUG: 'debug-cidb',
+    APP_INSTANCE_PROD: 'cidb',
+}
+
+
+def _GetParser():
+  """Get parser for deploy_app cli.
+
+  Returns:
+    commandline.ArgumentParser object to parse the commandline args.
+  """
+  parser = commandline.ArgumentParser()
+  parser.add_argument('instance', type=str,
+                      choices=(APP_INSTANCE_DEBUG, APP_INSTANCE_PROD),
+                      help='The app instance to deploy to')
+  parser.add_argument('--secret-key', type=str, required=True,
+                      help='The secret key to sign django cookies.')
+  return parser
+
+
+def _GetDeploySettings(options):
+  """The autogenerated part of django settings.
+
+  Returns:
+    python "code" as str to be written to the settings file.
+  """
+  content = [
+      '# DO NOT EDIT! Autogenerated by %s.' % os.path.basename(__file__),
+      'DEBUG = False',
+      'TEMPLATE_DEBUG = False',
+      'SECRET_KEY = "%s"' % options.secret_key,
+      'CIDB_PROJECT_NAME = "cosmic-strategy-646"',
+      'CIDB_INSTANCE_NAME = "%s"' % APP_INSTANCE_CIDB[options.instance],
+  ]
+  return '\n'.join(content)
+
+
+def _DeployApp(basedir):
+  """Deploy the prepared app from basedir.
+
+  Args:
+    basedir: The base directory where the app has already been prepped.
+  """
+  cros_build_lib.RunCommand(
+      ['./ae_shell', 'cq_stats', '--',
+       'python', 'cq_stats/manage.py', 'collectstatic', '--noinput'],
+      cwd=basedir)
+
+  # Remove sensitive files that are needed to run tools locally to prepare the
+  # deploy directory, but that we don't want to push to AE.
+  cidb_cred_path = os.path.join(basedir, 'cq_stats', 'annotator_cidb_creds')
+  osutils.SafeUnlink(os.path.join(cidb_cred_path, 'client-cert.pem'))
+  osutils.SafeUnlink(os.path.join(cidb_cred_path, 'client-key.pem'))
+  osutils.SafeUnlink(os.path.join(cidb_cred_path, 'server-ca.pem'))
+  cros_build_lib.RunCommand(
+      ['./ae_shell', 'cq_stats', '--',
+       'appcfg.py', '--oauth2', 'update', 'cq_stats'],
+      cwd=basedir)
+
+
+def _Hang(tempdir):
+  """How else will you ever work on this script?
+
+  Args:
+    tempdir: The directory prepared for deploying the app.
+  """
+  logging.info('All the real stuff\'s done. Tempdir: %s', tempdir)
+  while True:
+    logging.info('Sleeping... Hit Ctrl-C to exit.')
+    time.sleep(30)
+
+
+def main(argv):
+  parser = _GetParser()
+  options = parser.parse_args(argv)
+  options.Freeze()
+
+  with osutils.TempDir() as tempdir:
+    # This is rsync in 'archive' mode, but symlinks are followed to copy actual
+    # files/directories.
+    rsync_cmd = ['rsync', '-qrLgotD', '--exclude=\'*/*.pyc\'']
+    chromite_dir = os.path.dirname(
+        os.path.dirname(
+            os.path.dirname(
+                os.path.abspath(__file__))))
+
+    cmd = rsync_cmd + [
+        'chromite/appengine/', tempdir,
+        '--exclude=google_appengine_*',
+    ]
+    cros_build_lib.RunCommand(cmd, cwd=os.path.dirname(chromite_dir))
+
+    cmd = rsync_cmd + [
+        'chromite', os.path.join(tempdir, 'cq_stats'),
+        '--exclude=appengine',
+        '--exclude=third_party',
+        '--exclude=ssh_keys',
+        '--exclude=contrib',
+        '--exclude=.git',
+    ]
+    cros_build_lib.RunCommand(cmd, cwd=os.path.dirname(chromite_dir))
+
+    osutils.WriteFile(os.path.join(tempdir, 'cq_stats', 'cq_stats',
+                                   'deploy_settings.py'),
+                      _GetDeploySettings(options))
+
+    # Rewrite the application name in app.yaml for the target instance.
+    # Use absolute path. Let's not update sourcedir by mistake.
+    app_yaml_path = os.path.join(tempdir, 'cq_stats', 'app.yaml')
+    regex = (r's/^application:[ \t]*[a-zA-Z0-9_-\.:]\+[ \t]*$'
+             '/application: %s/')
+    cmd = [
+        'sed', '-i',
+        '-e', regex % APP_INSTANCE_NAME[options.instance],
+        app_yaml_path,
+    ]
+    cros_build_lib.RunCommand(cmd, cwd=tempdir)
+
+    _DeployApp(tempdir)
+    # _Hang(tempdir)
diff --git a/appengine/cq_stats/dev_appserver b/appengine/cq_stats/dev_appserver
new file mode 100755
index 0000000..112b29a
--- /dev/null
+++ b/appengine/cq_stats/dev_appserver
@@ -0,0 +1,11 @@
+#!/bin/bash
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+this_dir="$(readlink -e "$(dirname "${BASH_SOURCE}")")"
+ae_shell="${this_dir}/../ae_shell"
+"${ae_shell}" cq_stats -- python cq_stats/manage.py collectstatic --noinput
+if [[ $? -eq 0 ]]; then
+  "${ae_shell}" cq_stats -- dev_appserver.py cq_stats
+fi
diff --git a/appengine/cq_stats/manage.py b/appengine/cq_stats/manage.py
new file mode 100755
index 0000000..294feaf
--- /dev/null
+++ b/appengine/cq_stats/manage.py
@@ -0,0 +1,17 @@
+#!/usr/bin/python2
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""(Semi-)Autogenerated django module for app management."""
+
+from __future__ import print_function
+
+import os
+import sys
+
+
+if __name__ == '__main__':
+  os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'cq_stats.settings')
+  from django.core.management import execute_from_command_line
+  execute_from_command_line(sys.argv)
diff --git a/appengine/dev_appserver b/appengine/dev_appserver
new file mode 100755
index 0000000..727c135
--- /dev/null
+++ b/appengine/dev_appserver
@@ -0,0 +1,24 @@
+#!/bin/bash -e
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a simple wrapper around ./google_appengine/dev_appserver.py
+
+# https://developers.google.com/appengine/downloads#Google_App_Engine_SDK_for_Python
+SDK_VER="1.8.6"
+
+srcdir="${0%/*}"
+pushd "${srcdir}" >/dev/null
+
+if [ ! -d google_appengine ]; then
+  zip="google_appengine_${SDK_VER}.zip"
+  wget -c http://googleappengine.googlecode.com/files/${zip}
+  echo "Unpacking ${zip}"
+  unzip -q ${zip}
+fi
+
+popd >/dev/null
+
+HOST=$(hostname | awk -F. '{print $1}')
+exec "${srcdir}"/google_appengine/dev_appserver.py --host ${HOST} "$@"
diff --git a/bin/__init__.py b/bin/__init__.py
new file mode 100644
index 0000000..1c53994
--- /dev/null
+++ b/bin/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(ferringb): remove this once depot_tools is updated to no longer
+# have any real logic in chromite_wrapper.
diff --git a/bin/account_tool b/bin/account_tool
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/account_tool
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/autotest_quickmerge b/bin/autotest_quickmerge
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/autotest_quickmerge
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cbuildbot b/bin/cbuildbot
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cbuildbot
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cbuildbot_view_config b/bin/cbuildbot_view_config
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cbuildbot_view_config
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/check_gdata_token b/bin/check_gdata_token
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/check_gdata_token
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/chrome_update_extension_cache b/bin/chrome_update_extension_cache
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/chrome_update_extension_cache
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cidb_admin b/bin/cidb_admin
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cidb_admin
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros b/bin/cros
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_best_revision b/bin/cros_best_revision
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_best_revision
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_brick_utils b/bin/cros_brick_utils
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_brick_utils
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_check_patches b/bin/cros_check_patches
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_check_patches
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_deps_diff b/bin/cros_deps_diff
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_deps_diff
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_extract_deps b/bin/cros_extract_deps
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_extract_deps
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_gdb b/bin/cros_gdb
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_gdb
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_generate_breakpad_symbols b/bin/cros_generate_breakpad_symbols
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_generate_breakpad_symbols
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_generate_deps_graphs b/bin/cros_generate_deps_graphs
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_generate_deps_graphs
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_generate_local_binhosts b/bin/cros_generate_local_binhosts
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_generate_local_binhosts
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_generate_sysroot b/bin/cros_generate_sysroot
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_generate_sysroot
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_install_debug_syms b/bin/cros_install_debug_syms
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_install_debug_syms
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_list_modified_packages b/bin/cros_list_modified_packages
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_list_modified_packages
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_list_overlays b/bin/cros_list_overlays
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_list_overlays
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_mark_as_stable b/bin/cros_mark_as_stable
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_mark_as_stable
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_mark_chrome_as_stable b/bin/cros_mark_chrome_as_stable
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_mark_chrome_as_stable
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_mark_mojo_as_stable b/bin/cros_mark_mojo_as_stable
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_mark_mojo_as_stable
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_merge_to_branch b/bin/cros_merge_to_branch
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_merge_to_branch
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_portage_upgrade b/bin/cros_portage_upgrade
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_portage_upgrade
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_run_unit_tests b/bin/cros_run_unit_tests
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_run_unit_tests
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_sdk b/bin/cros_sdk
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_sdk
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_set_lsb_release b/bin/cros_set_lsb_release
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_set_lsb_release
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_setup_toolchains b/bin/cros_setup_toolchains
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_setup_toolchains
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_show_waterfall_layout b/bin/cros_show_waterfall_layout
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_show_waterfall_layout
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_sysroot_utils b/bin/cros_sysroot_utils
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_sysroot_utils
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/cros_workon b/bin/cros_workon
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/cros_workon
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/crosfw b/bin/crosfw
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/crosfw
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/dep_tracker b/bin/dep_tracker
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/dep_tracker
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/deploy_chrome b/bin/deploy_chrome
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/deploy_chrome
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/diff_license_html b/bin/diff_license_html
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/diff_license_html
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/fwgdb b/bin/fwgdb
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/fwgdb
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/gconv_strip b/bin/gconv_strip
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/gconv_strip
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/generate_container_spec b/bin/generate_container_spec
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/generate_container_spec
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/generate_delta_sysroot b/bin/generate_delta_sysroot
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/generate_delta_sysroot
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/gerrit b/bin/gerrit
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/gerrit
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/gs_fetch_binpkg b/bin/gs_fetch_binpkg
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/gs_fetch_binpkg
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/lddtree b/bin/lddtree
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/lddtree
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/loman b/bin/loman
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/loman
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/merge_package_status b/bin/merge_package_status
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/merge_package_status
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/parallel_emerge b/bin/parallel_emerge
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/parallel_emerge
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/preupload_dump_config b/bin/preupload_dump_config
new file mode 100755
index 0000000..fc92c04
--- /dev/null
+++ b/bin/preupload_dump_config
@@ -0,0 +1,16 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+bin/cbuildbot_view_config --update_config
+if [ "$(git diff cbuildbot/config_dump.json)" ]; then
+  echo "You have uncommitted changes to cbuildbot/config_dump.json"
+  echo "This is likely because you have modified:"
+  echo "  cbuildbot/chromeos_config.py"
+  echo "Please --amend your commit to include config_dump.json."
+  echo
+  echo "In future, you can update the dump file with the command"
+  echo "bin/cbuildbot_view_config --update_config"
+  exit 1
+fi
diff --git a/bin/pushimage b/bin/pushimage
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/pushimage
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/refresh_package_status b/bin/refresh_package_status
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/refresh_package_status
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/summarize_build_stats b/bin/summarize_build_stats
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/summarize_build_stats
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/sync_chrome b/bin/sync_chrome
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/sync_chrome
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/sync_package_status b/bin/sync_package_status
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/sync_package_status
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/test_image b/bin/test_image
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/test_image
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/update_manifest_remotes b/bin/update_manifest_remotes
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/update_manifest_remotes
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/upload_command_stats b/bin/upload_command_stats
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/upload_command_stats
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/upload_package_status b/bin/upload_package_status
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/upload_package_status
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/upload_prebuilts b/bin/upload_prebuilts
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/upload_prebuilts
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bin/upload_symbols b/bin/upload_symbols
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bin/upload_symbols
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bootstrap/__init__.py b/bootstrap/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/bootstrap/__init__.py
diff --git a/bootstrap/brillo b/bootstrap/brillo
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/bootstrap/brillo
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/bootstrap/cbuildbot b/bootstrap/cbuildbot
new file mode 120000
index 0000000..3a4369d
--- /dev/null
+++ b/bootstrap/cbuildbot
@@ -0,0 +1 @@
+support/chromite_wrapper
\ No newline at end of file
diff --git a/bootstrap/cros b/bootstrap/cros
new file mode 120000
index 0000000..3a4369d
--- /dev/null
+++ b/bootstrap/cros
@@ -0,0 +1 @@
+support/chromite_wrapper
\ No newline at end of file
diff --git a/bootstrap/cros_sdk b/bootstrap/cros_sdk
new file mode 120000
index 0000000..3a4369d
--- /dev/null
+++ b/bootstrap/cros_sdk
@@ -0,0 +1 @@
+support/chromite_wrapper
\ No newline at end of file
diff --git a/bootstrap/repo b/bootstrap/repo
new file mode 100755
index 0000000..c6be5c1
--- /dev/null
+++ b/bootstrap/repo
@@ -0,0 +1,866 @@
+#!/usr/bin/python2
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+## repo default configuration
+##
+REPO_URL='https://chromium.googlesource.com/external/repo'
+REPO_REV='stable'
+
+# Copyright (C) 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# increment this whenever we make important changes to this script
+VERSION = (1, 21)
+
+# increment this if the MAINTAINER_KEYS block is modified
+KEYRING_VERSION = (1, 4)
+MAINTAINER_KEYS = """
+
+     Repo Maintainer <repo@android.kernel.org>
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1.4.2.2 (GNU/Linux)
+
+mQGiBEj3ugERBACrLJh/ZPyVSKeClMuznFIrsQ+hpNnmJGw1a9GXKYKk8qHPhAZf
+WKtrBqAVMNRLhL85oSlekRz98u41H5si5zcuv+IXJDF5MJYcB8f22wAy15lUqPWi
+VCkk1l8qqLiuW0fo+ZkPY5qOgrvc0HW1SmdH649uNwqCbcKb6CxaTxzhOwCgj3AP
+xI1WfzLqdJjsm1Nq98L0cLcD/iNsILCuw44PRds3J75YP0pze7YF/6WFMB6QSFGu
+aUX1FsTTztKNXGms8i5b2l1B8JaLRWq/jOnZzyl1zrUJhkc0JgyZW5oNLGyWGhKD
+Fxp5YpHuIuMImopWEMFIRQNrvlg+YVK8t3FpdI1RY0LYqha8pPzANhEYgSfoVzOb
+fbfbA/4ioOrxy8ifSoga7ITyZMA+XbW8bx33WXutO9N7SPKS/AK2JpasSEVLZcON
+ae5hvAEGVXKxVPDjJBmIc2cOe7kOKSi3OxLzBqrjS2rnjiP4o0ekhZIe4+ocwVOg
+e0PLlH5avCqihGRhpoqDRsmpzSHzJIxtoeb+GgGEX8KkUsVAhbQpUmVwbyBNYWlu
+dGFpbmVyIDxyZXBvQGFuZHJvaWQua2VybmVsLm9yZz6IYAQTEQIAIAUCSPe6AQIb
+AwYLCQgHAwIEFQIIAwQWAgMBAh4BAheAAAoJEBZTDV6SD1xl1GEAn0x/OKQpy7qI
+6G73NJviU0IUMtftAKCFMUhGb/0bZvQ8Rm3QCUpWHyEIu7kEDQRI97ogEBAA2wI6
+5fs9y/rMwD6dkD/vK9v4C9mOn1IL5JCPYMJBVSci+9ED4ChzYvfq7wOcj9qIvaE0
+GwCt2ar7Q56me5J+byhSb32Rqsw/r3Vo5cZMH80N4cjesGuSXOGyEWTe4HYoxnHv
+gF4EKI2LK7xfTUcxMtlyn52sUpkfKsCpUhFvdmbAiJE+jCkQZr1Z8u2KphV79Ou+
+P1N5IXY/XWOlq48Qf4MWCYlJFrB07xjUjLKMPDNDnm58L5byDrP/eHysKexpbakL
+xCmYyfT6DV1SWLblpd2hie0sL3YejdtuBMYMS2rI7Yxb8kGuqkz+9l1qhwJtei94
+5MaretDy/d/JH/pRYkRf7L+ke7dpzrP+aJmcz9P1e6gq4NJsWejaALVASBiioqNf
+QmtqSVzF1wkR5avZkFHuYvj6V/t1RrOZTXxkSk18KFMJRBZrdHFCWbc5qrVxUB6e
+N5pja0NFIUCigLBV1c6I2DwiuboMNh18VtJJh+nwWeez/RueN4ig59gRTtkcc0PR
+35tX2DR8+xCCFVW/NcJ4PSePYzCuuLvp1vEDHnj41R52Fz51hgddT4rBsp0nL+5I
+socSOIIezw8T9vVzMY4ArCKFAVu2IVyBcahTfBS8q5EM63mONU6UVJEozfGljiMw
+xuQ7JwKcw0AUEKTKG7aBgBaTAgT8TOevpvlw91cAAwUP/jRkyVi/0WAb0qlEaq/S
+ouWxX1faR+vU3b+Y2/DGjtXQMzG0qpetaTHC/AxxHpgt/dCkWI6ljYDnxgPLwG0a
+Oasm94BjZc6vZwf1opFZUKsjOAAxRxNZyjUJKe4UZVuMTk6zo27Nt3LMnc0FO47v
+FcOjRyquvgNOS818irVHUf12waDx8gszKxQTTtFxU5/ePB2jZmhP6oXSe4K/LG5T
++WBRPDrHiGPhCzJRzm9BP0lTnGCAj3o9W90STZa65RK7IaYpC8TB35JTBEbrrNCp
+w6lzd74LnNEp5eMlKDnXzUAgAH0yzCQeMl7t33QCdYx2hRs2wtTQSjGfAiNmj/WW
+Vl5Jn+2jCDnRLenKHwVRFsBX2e0BiRWt/i9Y8fjorLCXVj4z+7yW6DawdLkJorEo
+p3v5ILwfC7hVx4jHSnOgZ65L9s8EQdVr1ckN9243yta7rNgwfcqb60ILMFF1BRk/
+0V7wCL+68UwwiQDvyMOQuqkysKLSDCLb7BFcyA7j6KG+5hpsREstFX2wK1yKeraz
+5xGrFy8tfAaeBMIQ17gvFSp/suc9DYO0ICK2BISzq+F+ZiAKsjMYOBNdH/h0zobQ
+HTHs37+/QLMomGEGKZMWi0dShU2J5mNRQu3Hhxl3hHDVbt5CeJBb26aQcQrFz69W
+zE3GNvmJosh6leayjtI9P2A6iEkEGBECAAkFAkj3uiACGwwACgkQFlMNXpIPXGWp
+TACbBS+Up3RpfYVfd63c1cDdlru13pQAn3NQy/SN858MkxN+zym86UBgOad2
+=CMiZ
+-----END PGP PUBLIC KEY BLOCK-----
+
+     Conley Owens <cco3@android.com>
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1.4.11 (GNU/Linux)
+
+mQENBFHRvc8BCADFg45Xx/y6QDC+T7Y/gGc7vx0ww7qfOwIKlAZ9xG3qKunMxo+S
+hPCnzEl3cq+6I1Ww/ndop/HB3N3toPXRCoN8Vs4/Hc7by+SnaLFnacrm+tV5/OgT
+V37Lzt8lhay1Kl+YfpFwHYYpIEBLFV9knyfRXS/428W2qhdzYfvB15/AasRmwmor
+py4NIzSs8UD/SPr1ihqNCdZM76+MQyN5HMYXW/ALZXUFG0pwluHFA7hrfPG74i8C
+zMiP7qvMWIl/r/jtzHioH1dRKgbod+LZsrDJ8mBaqsZaDmNJMhss9g76XvfMyLra
+9DI9/iFuBpGzeqBv0hwOGQspLRrEoyTeR6n1ABEBAAG0H0NvbmxleSBPd2VucyA8
+Y2NvM0BhbmRyb2lkLmNvbT6JATgEEwECACIFAlHRvc8CGwMGCwkIBwMCBhUIAgkK
+CwQWAgMBAh4BAheAAAoJEGe35EhpKzgsP6AIAJKJmNtn4l7hkYHKHFSo3egb6RjQ
+zEIP3MFTcu8HFX1kF1ZFbrp7xqurLaE53kEkKuAAvjJDAgI8mcZHP1JyplubqjQA
+xvv84gK+OGP3Xk+QK1ZjUQSbjOpjEiSZpRhWcHci3dgOUH4blJfByHw25hlgHowd
+a/2PrNKZVcJ92YienaxxGjcXEUcd0uYEG2+rwllQigFcnMFDhr9B71MfalRHjFKE
+fmdoypqLrri61YBc59P88Rw2/WUpTQjgNubSqa3A2+CKdaRyaRw+2fdF4TdR0h8W
+zbg+lbaPtJHsV+3mJC7fq26MiJDRJa5ZztpMn8su20gbLgi2ShBOaHAYDDi5AQ0E
+UdG9zwEIAMoOBq+QLNozAhxOOl5GL3StTStGRgPRXINfmViTsihrqGCWBBUfXlUE
+OytC0mYcrDUQev/8ToVoyqw+iGSwDkcSXkrEUCKFtHV/GECWtk1keyHgR10YKI1R
+mquSXoubWGqPeG1PAI74XWaRx8UrL8uCXUtmD8Q5J7mDjKR5NpxaXrwlA0bKsf2E
+Gp9tu1kKauuToZhWHMRMqYSOGikQJwWSFYKT1KdNcOXLQF6+bfoJ6sjVYdwfmNQL
+Ixn8QVhoTDedcqClSWB17VDEFDFa7MmqXZz2qtM3X1R/MUMHqPtegQzBGNhRdnI2
+V45+1Nnx/uuCxDbeI4RbHzujnxDiq70AEQEAAYkBHwQYAQIACQUCUdG9zwIbDAAK
+CRBnt+RIaSs4LNVeB/0Y2pZ8I7gAAcEM0Xw8drr4omg2fUoK1J33ozlA/RxeA/lJ
+I3KnyCDTpXuIeBKPGkdL8uMATC9Z8DnBBajRlftNDVZS3Hz4G09G9QpMojvJkFJV
+By+01Flw/X+eeN8NpqSuLV4W+AjEO8at/VvgKr1AFvBRdZ7GkpI1o6DgPe7ZqX+1
+dzQZt3e13W0rVBb/bUgx9iSLoeWP3aq/k+/GRGOR+S6F6BBSl0SQ2EF2+dIywb1x
+JuinEP+AwLAUZ1Bsx9ISC0Agpk2VeHXPL3FGhroEmoMvBzO0kTFGyoeT7PR/BfKv
++H/g3HsL2LOB9uoIm8/5p2TTU5ttYCXMHhQZ81AY
+=AUp4
+-----END PGP PUBLIC KEY BLOCK-----
+
+     Stefan Zager <szager@chromium.org>
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1.4.11 (GNU/Linux)
+
+mQINBFIJOcgBEADwZIq4GRGoO1RJFKlrtVK501cwT5H+Acbizc9N5RxTkFmqxDjb
+9ApUaPW6S1b8+nrzE9P1Ri5erfzipuStfaZ/Wl3mP1JjKulibddmgnPOEbAJ673k
+Vj85RUO4rt2oZAHnZN3D3gFJzVY8JVlZ47Enj9fTqzcW78FVsPCpIT9P2LpTLWeE
+jX9Cjxeimy6VvyJstIcDLYhlpUN5UWen79L4LFAkHf3luLuU4W3p9NriqUsy5UG2
+8vO6QdhKrCr5wsjDFFeVnpMtjlSeZJAWH+XhFFibMX1xP5R9BTuJfzw3kOVKvcE0
+e9ClxgoulepXPv2xnDkqO3pG2gQVzl8LA+Aol8/IXfa7KP5FBkxK/g1cDuDtXRk4
+YLpLaLYeeKEhhOHLpsKYkK2DXTIcN+56UnTLGolummpZnCM8UUSZxQgbkFgk4YJL
+Elip0hgLZzqEl5h9vjmnQp89AZIHKcgNmzn+szLTOR9x24joaLyQ534x8OSC8lmu
+tJv2tQjDOVGWVwvY4gOTpyxCWMwur6WOiMk/TPWdiVRFWAGrAHwf0/CTBEqNhosh
+sVXfPeMADBA0PorDbJ6kwcOkLUTGf8CT7OG1R9TuKPEmSjK7BYu/pT4DXitaRCiv
+uPVlwbVFpLFr0/jwaKJVMLUjL5MaYwzjJqI2c4RdROZhpMhkn4LvCMmFSQARAQAB
+tCJTdGVmYW4gWmFnZXIgPHN6YWdlckBjaHJvbWl1bS5vcmc+iQI4BBMBAgAiBQJS
+CTnIAhsDBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgAAKCRDcuoHPGCdZNU0UD/9y
+0zwwOJH2UGPAzZ0YVzr7p0HtKedoxuFvPkdQxlBIaUOueMzFRmNQu3GI9irAu3MQ
+Jkip8/gi7dnLVmJyS/zWARBaRGwSVd1++87XDjw8n7l181p7394X0Agq/heri599
+YheHXkxXKVMPqByWNEPHu4eDbxeJTaDIjcKC2pzKQkm6HbWgW4wA9gCh1TRki8FP
+LMv1Fu/dr13STCR9P2evsTRZ+ZSJhTSboHNHeEAJGiGZQAsN94oht7647lYj+AyR
+ThzyHDMXXiDr8jPJIkyRilY+y82bCOatOfPoCkce3VI+LRUGJ19hJY01m4RRneIE
+55l7fXR3zggcsONjV5b+oLcGQPGgX9w64BJ7khT7Wb9+kuyrdJBIBzJsaACFEbri
+pPi02FS/HahYpLC3J66REAeNyofgVXau6WQsHrHMGsBTL9aAr0nrCrkF4Nyyc2Jd
+do6nYuljuUhORqbEECmmBM2eBtkL6Ac92D6WMBIwBOC5tCNHO2YFIvi8Y8EuE8sc
+1zB5U5Ai4SIu2icRAhzAhCRaUq02cMWuELKH6Vuh9nzgEefFWty6vPbKEyZLu19D
+B80aqP1cTN88FjtKQ/eTF29TUB6AefUeBS17e2e3WUMy4nc8tduuOFYfiHP40ScP
+wOoatwfzpiTIPGbocUEPL+pS0O/Xy8SINxFMCud3zA==
+=Vd2S
+-----END PGP PUBLIC KEY BLOCK-----
+
+     David James <davidjames@google.com>
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1
+
+mQINBFQKWWsBEACjAxD8xLqNVFX/qOAKFW7R63J3KkkXQKyH5KmSWZnmdfTg4AeR
+h9sAUls16nHiOFp/MRLFFhax8dm33zfED+zHpISFUkMq2Q3UyP6Z6eSpJyYriEF1
+hP7PpwksEnh+hoQ36fhsY1vaQRgTCO8XkFVcChb1CoKUl104PornVlZ378RBUUnK
+FAPhRSTEJtK1QXv6JtQXFzEQbX3jgxsKvpw/Zg7V3FnaMRhHw84YvCAbWz9ayTov
+SBOIczOscD9T/F3NbSlgFwWlQ7JeixdOsCMaYh7gYcXqdq2jluHuKQlTGmGlFwGm
+5TOh6NwvVUV68JZfer2CGMQv4JImQfousy9V+KGddTBfjYkwtmG9oTkSWBLuO91/
+q+TFdHkzNxivPcC+iluJkzrJHcS6aUg8vkLZfT2wrGZUBFH7GsZiKht2env1HyVZ
+64md/auhee4ED3V0mtWSWYyjriAQUIE0LHVHP1zyEf5gVwDZyuE2HlFZr1eFJWiH
+jcxQnGi7IpxF2//NCTvO2dc3eTi4f1EexOyomu9AWk/iIDCgCpkU38XlWgVrvmM1
+Mw5pDm691L1Xn3v3yMRZZUCottUpUEnz5qAa0eQHWBU4PpXUCaWElwwuT+3Lcx1U
+Rdq74UPNb+hBGzrID/KmeU0NxGmhzRIwl+LKdCvnM2v4AvRHIjQPBqC5fQARAQAB
+tCNEYXZpZCBKYW1lcyA8ZGF2aWRqYW1lc0Bnb29nbGUuY29tPokCOAQTAQIAIgUC
+VApZawIbAwYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AACgkQSlDprdejN6zH5A//
+XRAytpjxTIHTtMWp1c7vpi1BMiKF0XRSa8iizbVgZIk6i/jftK8tverRvOzQhUEK
+mwP6WDoX9SbkvxxQd+AxaRmDCQSf7h/fFMB+q9WycH5Mj+N4mc7iivsf1RdZzlmF
+l1wcJoGVsOTFrccca/ZcXjMhWCfpVNDGn29nFtHKddSORhQgy8x0NVf/8NXOF1OL
+Le4cZKBwSokPJEL1Ta4bNQPkzY251CSjH9feHCE1ac16/wh1qhkozl8/QbIVFVTA
+wk1m6q7raj22+2HifrM/w5YkNXYcEL/SfusbCo/rtax75fG0lT9whB6OXuzk0CTu
+zsdBHaYGKCQ+gcalpxqQ/o+xFo0HNI6duCo1zBFAkSX20HZcU5IWr8C2psTuB5zo
+3vPT89GMNlFVhG4JBvuSHcgJFBoTEALugDX1xiRqidjhKPpDMl3Gcezakg2ethQM
+9zwmdlsbh/stcLh9U6eNOqxrjMgmrMRjDocaMu0gFXoGbEMeVVJWrLGgF51k6Q9w
+U3/pvyws6OukV4y3Sr57ACbeQ1am0pCKir2HXB2jmShJfINSyPqhluMz/q1CbYEE
+R7oWoVIL70qhCr4hdJ4yVtqajkUr5jk+IV9L2pny6zt3+3e/132O6yzQ/1NJ1vj9
+hxSNFwdO/JWdqgYtvsFvWQGdKp+RwYBJBp1XIOBA+5W5Ag0EVApZawEQAMC/t6AF
+1eU2wZcLQaahmv+1yaQCV7VfwH8/Lh1AZbMNEITnp97gJ/6SlQqL0fDfjX8DKGE+
+U23o3fKMJr8tIxJqLVzPROomeG+9zhtq5hI3qu53zhR3bCqQpYPQcIHRHxtttYkP
+p+rdTZlYX09TaSsTITNs0/1dCHEgyDS48ujOSmA0fr9eGyxv/2Chr0sDEkSaerJp
+teDKmUdkKoF9SCR7ntfrSFP3eXYFFy+wb+IQjVVHAdTgossXKPtNxzdEKQQHJESJ
+e1jD5BlOpvysOcbDJaRCq7TE2o3Grwy8Um1/Fv+n9naIAN6bZNSrPtiH2G7nX4l6
+126so5sBhJTSGbIV/fb93PZCIfzfJCA4pinYPJH46zn2Ih3AF9mi4eguBK9/oGBe
+03LsNBsfoEI81rRuAl5NeFNa+YXf3w7olF2qbwZXcGmRBteUBBvfonW64nk8w+Ui
+x14gzHJXH6l9jsIavA1AMtFulmh6eEf8hsDUzq8s0Yg9PphVmknxPVW44EttOwCi
+OnlVelRSbABcCNNTv1vOC8ubvt191YRNwAgGMRmXfeEFce76ckVJei/tiENycMXl
+Ff3+km6WmswsDmKxz+DfNtf5SXM24EifO2Q6uX9pbg+AcIWI9Sc2WAfmqCooTU8g
+H2Ua0dskiAi9qq4DPYrwPO+OzAT10nn/TqmDABEBAAGJAh8EGAECAAkFAlQKWWsC
+GwwACgkQSlDprdejN6wHURAAncjYkIkSseO8lldTVu0qJi2vetc2Q6bR8Lw1hTAT
+TB2LcbFheTu6Q/sxDSC5slovFSgyDp8wNkDf88+fxV38LC00IeWz7a9EGPzLzA+D
+fNFdctnxXZGaYB3cQ17TkKFj4AMqbzKPkt4xYWU/WdSWPPd4feFJVjg7l8BIxafF
+58ZYbWN3DwAgKE9DDZ9praTNC/2ytWh21a2j8LR4GlYERW1pMGrMt37IGvZqbU6W
+a7HWaB7f0eXg5M5GTr7KP6TTGwY/500cI4fDme6bih/jXDS4vV53b1HHgvzQFXw/
+XURueobmqsbQQzDGsqPzkYJM4fxXu0TWNhW8CieZMMypPq3uSgvN3jTu2JB9NAEz
+21Pso0NzKm6wxhMzPA6KWILmR2KQn/t51NTE6u0+8e9RmQeg9Ce+IpPzPLsGuNca
+u+r4LcB98D8jIUXz9PPbIHiDLJjMWOG8olZz1zcHpt86b+bf8c9TxFAE8p3G/jpQ
+qanHjtbgNmkz+JpvJ9CTEEo69tkcbmOaCNwCWQL+Doqqi7tWMYUbAw0Rk+lOSu/N
+4cAccd41XU/GmIs9zKkbORWubhfFndc7AXnPUU2otjqMQq0f+QCQrHPdyARf2QCm
+j8zzwdwkRpt3SSvqzh3+L3Zq8xeb2M6u/QLz4aLFTR7yQJed0DJFUcISii9ccJr/
+IM4=
+=6VNc
+-----END PGP PUBLIC KEY BLOCK-----
+"""
+
+GIT = 'git'                     # our git command
+MIN_GIT_VERSION = (1, 7, 2)     # minimum supported git version
+repodir = '.repo'               # name of repo's private directory
+S_repo = 'repo'                 # special repo repository
+S_manifests = 'manifests'       # special manifest repository
+REPO_MAIN = S_repo + '/main.py' # main script
+MIN_PYTHON_VERSION = (2, 6)     # minimum supported python version
+
+
+import errno
+import optparse
+import os
+import re
+import stat
+import subprocess
+import sys
+
+if sys.version_info[0] == 3:
+  import urllib.request
+  import urllib.error
+else:
+  import imp
+  import urllib2
+  urllib = imp.new_module('urllib')
+  urllib.request = urllib2
+  urllib.error = urllib2
+
+
+def _print(*objects, **kwargs):
+  sep = kwargs.get('sep', ' ')
+  end = kwargs.get('end', '\n')
+  out = kwargs.get('file', sys.stdout)
+  out.write(sep.join(objects) + end)
+
+
+# Python version check
+ver = sys.version_info
+if ver[0] == 3:
+  _print('warning: Python 3 support is currently experimental. YMMV.\n'
+         'Please use Python 2.6 - 2.7 instead.',
+         file=sys.stderr)
+if (ver[0], ver[1]) < MIN_PYTHON_VERSION:
+  _print('error: Python version %s unsupported.\n'
+         'Please use Python 2.6 - 2.7 instead.'
+         % sys.version.split(' ')[0], file=sys.stderr)
+  sys.exit(1)
+
+home_dot_repo = os.path.expanduser('~/.repoconfig')
+gpg_dir = os.path.join(home_dot_repo, 'gnupg')
+
+extra_args = []
+init_optparse = optparse.OptionParser(usage="repo init -u url [options]")
+
+# Logging
+group = init_optparse.add_option_group('Logging options')
+group.add_option('-q', '--quiet',
+                 dest="quiet", action="store_true", default=False,
+                 help="be quiet")
+
+# Manifest
+group = init_optparse.add_option_group('Manifest options')
+group.add_option('-u', '--manifest-url',
+                 dest='manifest_url',
+                 help='manifest repository location', metavar='URL')
+group.add_option('-b', '--manifest-branch',
+                 dest='manifest_branch',
+                 help='manifest branch or revision', metavar='REVISION')
+group.add_option('-m', '--manifest-name',
+                 dest='manifest_name',
+                 help='initial manifest file', metavar='NAME.xml')
+group.add_option('--mirror',
+                 dest='mirror', action='store_true',
+                 help='create a replica of the remote repositories '
+                      'rather than a client working directory')
+group.add_option('--reference',
+                 dest='reference',
+                 help='location of mirror directory', metavar='DIR')
+group.add_option('--depth', type='int', default=None,
+                 dest='depth',
+                 help='create a shallow clone with given depth; see git clone')
+group.add_option('--archive',
+                 dest='archive', action='store_true',
+                 help='checkout an archive instead of a git repository for '
+                      'each project. See git archive.')
+group.add_option('-g', '--groups',
+                 dest='groups', default='default',
+                 help='restrict manifest projects to ones with specified '
+                      'group(s) [default|all|G1,G2,G3|G4,-G5,-G6]',
+                 metavar='GROUP')
+group.add_option('-p', '--platform',
+                 dest='platform', default="auto",
+                 help='restrict manifest projects to ones with a specified '
+                      'platform group [auto|all|none|linux|darwin|...]',
+                 metavar='PLATFORM')
+
+
+# Tool
+group = init_optparse.add_option_group('repo Version options')
+group.add_option('--repo-url',
+                 dest='repo_url',
+                 help='repo repository location', metavar='URL')
+group.add_option('--repo-branch',
+                 dest='repo_branch',
+                 help='repo branch or revision', metavar='REVISION')
+group.add_option('--no-repo-verify',
+                 dest='no_repo_verify', action='store_true',
+                 help='do not verify repo source code')
+
+# Other
+group = init_optparse.add_option_group('Other options')
+group.add_option('--config-name',
+                 dest='config_name', action="store_true", default=False,
+                 help='Always prompt for name/e-mail')
+
+class CloneFailure(Exception):
+  """Indicate the remote clone of repo itself failed.
+  """
+
+
+def _Init(args):
+  """Installs repo by cloning it over the network.
+  """
+  opt, args = init_optparse.parse_args(args)
+  if args:
+    init_optparse.print_usage()
+    sys.exit(1)
+
+  url = opt.repo_url
+  if not url:
+    url = REPO_URL
+    extra_args.append('--repo-url=%s' % url)
+
+  branch = opt.repo_branch
+  if not branch:
+    branch = REPO_REV
+    extra_args.append('--repo-branch=%s' % branch)
+
+  if branch.startswith('refs/heads/'):
+    branch = branch[len('refs/heads/'):]
+  if branch.startswith('refs/'):
+    _print("fatal: invalid branch name '%s'" % branch, file=sys.stderr)
+    raise CloneFailure()
+
+  try:
+    os.mkdir(repodir)
+  except OSError as e:
+    if e.errno != errno.EEXIST:
+      _print('fatal: cannot make %s directory: %s'
+             % (repodir, e.strerror), file=sys.stderr)
+      # Don't raise CloneFailure; that would delete the
+      # name. Instead exit immediately.
+      #
+      sys.exit(1)
+
+  _CheckGitVersion()
+  try:
+    if NeedSetupGnuPG():
+      can_verify = SetupGnuPG(opt.quiet)
+    else:
+      can_verify = True
+
+    dst = os.path.abspath(os.path.join(repodir, S_repo))
+    _Clone(url, dst, opt.quiet)
+
+    if can_verify and not opt.no_repo_verify:
+      rev = _Verify(dst, branch, opt.quiet)
+    else:
+      rev = 'refs/remotes/origin/%s^0' % branch
+
+    _Checkout(dst, branch, rev, opt.quiet)
+  except CloneFailure:
+    if opt.quiet:
+      _print('fatal: repo init failed; run without --quiet to see why',
+             file=sys.stderr)
+    raise
+
+
+def ParseGitVersion(ver_str):
+  if not ver_str.startswith('git version '):
+    return None
+
+  num_ver_str = ver_str[len('git version '):].strip().split('-')[0]
+  to_tuple = []
+  for num_str in num_ver_str.split('.')[:3]:
+    if num_str.isdigit():
+      to_tuple.append(int(num_str))
+    else:
+      to_tuple.append(0)
+  return tuple(to_tuple)
+
+
+def _CheckGitVersion():
+  cmd = [GIT, '--version']
+  try:
+    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+  except OSError as e:
+    _print(file=sys.stderr)
+    _print("fatal: '%s' is not available" % GIT, file=sys.stderr)
+    _print('fatal: %s' % e, file=sys.stderr)
+    _print(file=sys.stderr)
+    _print('Please make sure %s is installed and in your path.' % GIT,
+           file=sys.stderr)
+    raise CloneFailure()
+
+  ver_str = proc.stdout.read().strip()
+  proc.stdout.close()
+  proc.wait()
+
+  ver_act = ParseGitVersion(ver_str)
+  if ver_act is None:
+    _print('error: "%s" unsupported' % ver_str, file=sys.stderr)
+    raise CloneFailure()
+
+  if ver_act < MIN_GIT_VERSION:
+    need = '.'.join(map(str, MIN_GIT_VERSION))
+    _print('fatal: git %s or later required' % need, file=sys.stderr)
+    raise CloneFailure()
+
+
+def NeedSetupGnuPG():
+  if not os.path.isdir(home_dot_repo):
+    return True
+
+  kv = os.path.join(home_dot_repo, 'keyring-version')
+  if not os.path.exists(kv):
+    return True
+
+  kv = open(kv).read()
+  if not kv:
+    return True
+
+  kv = tuple(map(int, kv.split('.')))
+  if kv < KEYRING_VERSION:
+    return True
+  return False
+
+
+def SetupGnuPG(quiet):
+  try:
+    os.mkdir(home_dot_repo)
+  except OSError as e:
+    if e.errno != errno.EEXIST:
+      _print('fatal: cannot make %s directory: %s'
+             % (home_dot_repo, e.strerror), file=sys.stderr)
+      sys.exit(1)
+
+  try:
+    os.mkdir(gpg_dir, stat.S_IRWXU)
+  except OSError as e:
+    if e.errno != errno.EEXIST:
+      _print('fatal: cannot make %s directory: %s' % (gpg_dir, e.strerror),
+             file=sys.stderr)
+      sys.exit(1)
+
+  env = os.environ.copy()
+  env['GNUPGHOME'] = gpg_dir.encode()
+
+  cmd = ['gpg', '--import']
+  try:
+    proc = subprocess.Popen(cmd,
+                            env = env,
+                            stdin = subprocess.PIPE)
+  except OSError as e:
+    if not quiet:
+      _print('warning: gpg (GnuPG) is not available.', file=sys.stderr)
+      _print('warning: Installing it is strongly encouraged.', file=sys.stderr)
+      _print(file=sys.stderr)
+    return False
+
+  proc.stdin.write(MAINTAINER_KEYS)
+  proc.stdin.close()
+
+  if proc.wait() != 0:
+    _print('fatal: registering repo maintainer keys failed', file=sys.stderr)
+    sys.exit(1)
+  _print()
+
+  fd = open(os.path.join(home_dot_repo, 'keyring-version'), 'w')
+  fd.write('.'.join(map(str, KEYRING_VERSION)) + '\n')
+  fd.close()
+  return True
+
+
+def _SetConfig(local, name, value):
+  """Set a git configuration option to the specified value.
+  """
+  cmd = [GIT, 'config', name, value]
+  if subprocess.Popen(cmd, cwd = local).wait() != 0:
+    raise CloneFailure()
+
+
+def _InitHttp():
+  handlers = []
+
+  mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
+  try:
+    import netrc
+    n = netrc.netrc()
+    for host in n.hosts:
+      p = n.hosts[host]
+      mgr.add_password(p[1], 'http://%s/'  % host, p[0], p[2])
+      mgr.add_password(p[1], 'https://%s/' % host, p[0], p[2])
+  except:
+    pass
+  handlers.append(urllib.request.HTTPBasicAuthHandler(mgr))
+  handlers.append(urllib.request.HTTPDigestAuthHandler(mgr))
+
+  if 'http_proxy' in os.environ:
+    url = os.environ['http_proxy']
+    handlers.append(urllib.request.ProxyHandler({'http': url, 'https': url}))
+  if 'REPO_CURL_VERBOSE' in os.environ:
+    handlers.append(urllib.request.HTTPHandler(debuglevel=1))
+    handlers.append(urllib.request.HTTPSHandler(debuglevel=1))
+  urllib.request.install_opener(urllib.request.build_opener(*handlers))
+
+def _Fetch(url, local, src, quiet):
+  if not quiet:
+    _print('Get %s' % url, file=sys.stderr)
+
+  cmd = [GIT, 'fetch']
+  if quiet:
+    cmd.append('--quiet')
+    err = subprocess.PIPE
+  else:
+    err = None
+  cmd.append(src)
+  cmd.append('+refs/heads/*:refs/remotes/origin/*')
+  cmd.append('refs/tags/*:refs/tags/*')
+
+  proc = subprocess.Popen(cmd, cwd = local, stderr = err)
+  if err:
+    proc.stderr.read()
+    proc.stderr.close()
+  if proc.wait() != 0:
+    raise CloneFailure()
+
+def _DownloadBundle(url, local, quiet):
+  if not url.endswith('/'):
+    url += '/'
+  url += 'clone.bundle'
+
+  proc = subprocess.Popen(
+    [GIT, 'config', '--get-regexp', 'url.*.insteadof'],
+    cwd = local,
+    stdout = subprocess.PIPE)
+  for line in proc.stdout:
+    m = re.compile(r'^url\.(.*)\.insteadof (.*)$').match(line)
+    if m:
+      new_url = m.group(1)
+      old_url = m.group(2)
+      if url.startswith(old_url):
+        url = new_url + url[len(old_url):]
+        break
+  proc.stdout.close()
+  proc.wait()
+
+  if not url.startswith('http:') and not url.startswith('https:'):
+    return False
+
+  dest = open(os.path.join(local, '.git', 'clone.bundle'), 'w+b')
+  try:
+    try:
+      r = urllib.request.urlopen(url)
+    except urllib.error.HTTPError as e:
+      if e.code in [403, 404]:
+        return False
+      _print('fatal: Cannot get %s' % url, file=sys.stderr)
+      _print('fatal: HTTP error %s' % e.code, file=sys.stderr)
+      raise CloneFailure()
+    except urllib.error.URLError as e:
+      _print('fatal: Cannot get %s' % url, file=sys.stderr)
+      _print('fatal: error %s' % e.reason, file=sys.stderr)
+      raise CloneFailure()
+    try:
+      if not quiet:
+        _print('Get %s' % url, file=sys.stderr)
+      while True:
+        buf = r.read(8192)
+        if buf == '':
+          return True
+        dest.write(buf)
+    finally:
+      r.close()
+  finally:
+    dest.close()
+
+def _ImportBundle(local):
+  path = os.path.join(local, '.git', 'clone.bundle')
+  try:
+    _Fetch(local, local, path, True)
+  finally:
+    os.remove(path)
+
+def _Clone(url, local, quiet):
+  """Clones a git repository to a new subdirectory of repodir
+  """
+  try:
+    os.mkdir(local)
+  except OSError as e:
+    _print('fatal: cannot make %s directory: %s' % (local, e.strerror),
+           file=sys.stderr)
+    raise CloneFailure()
+
+  cmd = [GIT, 'init', '--quiet']
+  try:
+    proc = subprocess.Popen(cmd, cwd = local)
+  except OSError as e:
+    _print(file=sys.stderr)
+    _print("fatal: '%s' is not available" % GIT, file=sys.stderr)
+    _print('fatal: %s' % e, file=sys.stderr)
+    _print(file=sys.stderr)
+    _print('Please make sure %s is installed and in your path.' % GIT,
+          file=sys.stderr)
+    raise CloneFailure()
+  if proc.wait() != 0:
+    _print('fatal: could not create %s' % local, file=sys.stderr)
+    raise CloneFailure()
+
+  _InitHttp()
+  _SetConfig(local, 'remote.origin.url', url)
+  _SetConfig(local, 'remote.origin.fetch',
+                    '+refs/heads/*:refs/remotes/origin/*')
+  if _DownloadBundle(url, local, quiet):
+    _ImportBundle(local)
+  else:
+    _Fetch(url, local, 'origin', quiet)
+
+
+def _Verify(cwd, branch, quiet):
+  """Verify the branch has been signed by a tag.
+  """
+  cmd = [GIT, 'describe', 'origin/%s' % branch]
+  proc = subprocess.Popen(cmd,
+                          stdout=subprocess.PIPE,
+                          stderr=subprocess.PIPE,
+                          cwd = cwd)
+  cur = proc.stdout.read().strip()
+  proc.stdout.close()
+
+  proc.stderr.read()
+  proc.stderr.close()
+
+  if proc.wait() != 0 or not cur:
+    _print(file=sys.stderr)
+    _print("fatal: branch '%s' has not been signed" % branch, file=sys.stderr)
+    raise CloneFailure()
+
+  m = re.compile(r'^(.*)-[0-9]{1,}-g[0-9a-f]{1,}$').match(cur)
+  if m:
+    cur = m.group(1)
+    if not quiet:
+      _print(file=sys.stderr)
+      _print("info: Ignoring branch '%s'; using tagged release '%s'"
+            % (branch, cur), file=sys.stderr)
+      _print(file=sys.stderr)
+
+  env = os.environ.copy()
+  env['GNUPGHOME'] = gpg_dir.encode()
+
+  cmd = [GIT, 'tag', '-v', cur]
+  proc = subprocess.Popen(cmd,
+                          stdout = subprocess.PIPE,
+                          stderr = subprocess.PIPE,
+                          cwd = cwd,
+                          env = env)
+  out = proc.stdout.read()
+  proc.stdout.close()
+
+  err = proc.stderr.read()
+  proc.stderr.close()
+
+  if proc.wait() != 0:
+    _print(file=sys.stderr)
+    _print(out, file=sys.stderr)
+    _print(err, file=sys.stderr)
+    _print(file=sys.stderr)
+    raise CloneFailure()
+  return '%s^0' % cur
+
+
+def _Checkout(cwd, branch, rev, quiet):
+  """Checkout an upstream branch into the repository and track it.
+  """
+  cmd = [GIT, 'update-ref', 'refs/heads/default', rev]
+  if subprocess.Popen(cmd, cwd = cwd).wait() != 0:
+    raise CloneFailure()
+
+  _SetConfig(cwd, 'branch.default.remote', 'origin')
+  _SetConfig(cwd, 'branch.default.merge', 'refs/heads/%s' % branch)
+
+  cmd = [GIT, 'symbolic-ref', 'HEAD', 'refs/heads/default']
+  if subprocess.Popen(cmd, cwd = cwd).wait() != 0:
+    raise CloneFailure()
+
+  cmd = [GIT, 'read-tree', '--reset', '-u']
+  if not quiet:
+    cmd.append('-v')
+  cmd.append('HEAD')
+  if subprocess.Popen(cmd, cwd = cwd).wait() != 0:
+    raise CloneFailure()
+
+
+def _FindRepo():
+  """Look for a repo installation, starting at the current directory.
+  """
+  curdir = os.getcwd()
+  repo = None
+
+  olddir = None
+  while curdir != '/' \
+    and curdir != olddir \
+    and not repo:
+    repo = os.path.join(curdir, repodir, REPO_MAIN)
+    if not os.path.isfile(repo):
+      repo = None
+      olddir = curdir
+      curdir = os.path.dirname(curdir)
+  return (repo, os.path.join(curdir, repodir))
+
+
+class _Options:
+  help = False
+
+
+def _ParseArguments(args):
+  cmd = None
+  opt = _Options()
+  arg = []
+
+  for i in range(len(args)):
+    a = args[i]
+    if a == '-h' or a == '--help':
+      opt.help = True
+
+    elif not a.startswith('-'):
+      cmd = a
+      arg = args[i + 1:]
+      break
+  return cmd, opt, arg
+
+
def _Usage():
  """Print bootstrap usage help to stderr and exit with status 1."""
  _print(
"""usage: repo COMMAND [ARGS]

repo is not yet installed.  Use "repo init" to install it here.

The most commonly used repo commands are:

  init      Install repo in the current working directory
  help      Display detailed help on a command

For access to the full online help, install repo ("repo init").
""", file=sys.stderr)
  sys.exit(1)
+
+
def _Help(args):
  """Show help for a bootstrap command; always exits the process.

  Only 'init' has real help before repo is installed; any other
  command name produces an error message.
  """
  if not args:
    _Usage()  # exits with status 1
  elif args[0] == 'init':
    init_optparse.print_help()
    sys.exit(0)
  else:
    _print("error: '%s' is not a bootstrap command.\n"
           '        For access to online help, install repo ("repo init").'
           % args[0], file=sys.stderr)
  sys.exit(1)
+
+
def _NotInstalled():
  """Complain that repo has not been installed here, then exit."""
  msg = 'error: repo is not installed.  Use "repo init" to install it here.'
  _print(msg, file=sys.stderr)
  sys.exit(1)
+
+
def _NoCommands(cmd):
  """Complain that cmd needs an installed repo, then exit."""
  msg = """error: command '%s' requires repo to be installed first.
         Use "repo init" to install it here.""" % cmd
  _print(msg, file=sys.stderr)
  sys.exit(1)
+
+
+def _RunSelf(wrapper_path):
+  my_dir = os.path.dirname(wrapper_path)
+  my_main = os.path.join(my_dir, 'main.py')
+  my_git = os.path.join(my_dir, '.git')
+
+  if os.path.isfile(my_main) and os.path.isdir(my_git):
+    for name in ['git_config.py',
+                 'project.py',
+                 'subcmds']:
+      if not os.path.exists(os.path.join(my_dir, name)):
+        return None, None
+    return my_main, my_git
+  return None, None
+
+
def _SetDefaultsTo(gitdir):
  """Point REPO_URL/REPO_REV at a local git checkout of repo itself.

  Used when the wrapper runs from inside a repo source checkout, so
  "repo init" installs from that checkout instead of the default URL.
  """
  global REPO_URL
  global REPO_REV

  REPO_URL = gitdir
  proc = subprocess.Popen([GIT,
                           '--git-dir=%s' % gitdir,
                           'symbolic-ref',
                           'HEAD'],
                          stdout = subprocess.PIPE,
                          stderr = subprocess.PIPE)
  # The symbolic ref of HEAD (e.g. refs/heads/master) becomes the revision.
  REPO_REV = proc.stdout.read().strip()
  proc.stdout.close()

  # Drain stderr so the child cannot block on a full pipe; contents unused.
  proc.stderr.read()
  proc.stderr.close()

  if proc.wait() != 0:
    # symbolic-ref fails when HEAD is detached; we cannot pick a revision.
    _print('fatal: %s has no current branch' % gitdir, file=sys.stderr)
    sys.exit(1)
+
+
def main(orig_args):
  """Entry point for the repo wrapper script.

  Finds (or installs, for 'repo init') a repo checkout for the current
  directory, then re-execs the real repo main.py with wrapper metadata
  flags prepended.
  """
  repo_main, rel_repo_dir = _FindRepo()
  cmd, opt, args = _ParseArguments(orig_args)

  wrapper_path = os.path.abspath(__file__)
  my_main, my_git = _RunSelf(wrapper_path)

  if not repo_main:
    # repo is not installed here yet; only help/init make sense.
    if opt.help:
      _Usage()
    if cmd == 'help':
      _Help(args)
    if not cmd:
      _NotInstalled()
    if cmd == 'init':
      if my_git:
        # Running from a source checkout: install from that checkout.
        _SetDefaultsTo(my_git)
      try:
        _Init(args)
      except CloneFailure:
        # Remove the partially-populated repodir so a later init can
        # start from a clean slate.
        for root, dirs, files in os.walk(repodir, topdown=False):
          for name in files:
            os.remove(os.path.join(root, name))
          for name in dirs:
            os.rmdir(os.path.join(root, name))
        os.rmdir(repodir)
        sys.exit(1)
      repo_main, rel_repo_dir = _FindRepo()
    else:
      _NoCommands(cmd)

  if cmd == 'sync' and NeedSetupGnuPG():
    SetupGnuPG(False)

  # Prefer the checkout's own main.py when running from a source tree.
  if my_main:
    repo_main = my_main

  ver_str = '.'.join(map(str, VERSION))
  me = [sys.executable, repo_main,
        '--repo-dir=%s' % rel_repo_dir,
        '--wrapper-version=%s' % ver_str,
        '--wrapper-path=%s' % wrapper_path,
        '--']
  me.extend(orig_args)
  me.extend(extra_args)
  try:
    # Replace this process with the installed repo.
    os.execv(sys.executable, me)
  except OSError as e:
    _print("fatal: unable to start %s" % repo_main, file=sys.stderr)
    _print("fatal: %s" % e, file=sys.stderr)
    sys.exit(148)
+
+
# Script entry point: strip the program name off argv.
if __name__ == '__main__':
  main(sys.argv[1:])
diff --git a/bootstrap/scripts/__init__.py b/bootstrap/scripts/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/bootstrap/scripts/__init__.py
diff --git a/bootstrap/scripts/brillo.py b/bootstrap/scripts/brillo.py
new file mode 100644
index 0000000..1841a47
--- /dev/null
+++ b/bootstrap/scripts/brillo.py
@@ -0,0 +1,59 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Bootstrap wrapper for 'brillo' command.
+
+For most commands of the form "brillo XYZ", we reinvoke
+REPO_DIR/chromite/bin/brillo XYZ, after detecting REPO_DIR based on the CWD.
+
+For the "brillo sdk" command, we reinvoke "../bin/brillo sdk" from the current
+git repository. This allows the SDK command to be run, even if there is no repo
+checkout.
+"""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import bootstrap_lib
+from chromite.lib import cros_build_lib
+from chromite.lib import git
+from chromite.lib import workspace_lib
+
+
def LocateBrilloCommand(args):
  """Resolve which 'real' brillo binary should handle these arguments.

  'brillo sdk' always runs from the bootstrap checkout itself; other
  commands run from the active workspace SDK when there is one, falling
  back to the repo checkout containing the CWD.  Dies when no suitable
  location can be found.
  """
  bootstrap_path = bootstrap_lib.FindBootstrapPath(save_to_env=True)

  is_sdk_cmd = bool(args) and args[0] == 'sdk'
  if is_sdk_cmd:
    if not bootstrap_path:
      cros_build_lib.Die(
          'You are bootstrapping chromite from a repo checkout.\n'
          'You must use a git clone. (brbug.com/580: link docs)')
    # Run 'brillo sdk' from the repository containing this command.
    return os.path.join(bootstrap_path, 'bin', 'brillo')

  # If we are in a workspace, and the workspace has an associated SDK, use it.
  workspace = workspace_lib.WorkspacePath()
  if workspace:
    sdk = bootstrap_lib.GetActiveSdkPath(bootstrap_path, workspace)
    if not sdk:
      cros_build_lib.Die(
          'The current workspace has no valid SDK.\n'
          'Please run "brillo sdk --update" (brbug.com/580: link docs)')
    # Use SDK associated with workspace, or nothing.
    return os.path.join(sdk, 'chromite', 'bin', 'brillo')

  # Run all other commands from 'brillo' wrapper in repo detected via CWD.
  checkout = git.FindRepoCheckoutRoot(os.getcwd())
  if checkout:
    return os.path.join(checkout, 'chromite', 'bin', 'brillo')

  # Couldn't find the real brillo command to run.
  cros_build_lib.Die('Unable to detect which SDK you want to use.')
+
def main(args):
  """Exec the located brillo wrapper, replacing this process."""
  command = LocateBrilloCommand(args)
  os.execv(command, [command] + args)
diff --git a/bootstrap/scripts/brillo_unittest b/bootstrap/scripts/brillo_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/bootstrap/scripts/brillo_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/bootstrap/scripts/brillo_unittest.py b/bootstrap/scripts/brillo_unittest.py
new file mode 100644
index 0000000..c4c5226
--- /dev/null
+++ b/bootstrap/scripts/brillo_unittest.py
@@ -0,0 +1,176 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the bootstrap brillo command."""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+
+from chromite.bootstrap.scripts import brillo
+
+
class TestBootstrapBrilloCmd(cros_test_lib.WorkspaceTestCase):
  """Tests for the bootstrap brillo command."""

  # NOTE(review): self.mock_workspace_path / self.mock_bootstrap_path used
  # below appear to be provided by CreateWorkspace/CreateBootstrap in
  # WorkspaceTestCase — TODO confirm.

  def setUp(self):
    # Make certain we never exec anything.
    self.mock_exec = self.PatchObject(os, 'execv', autospec=True)

    self.mock_repo_root = self.PatchObject(
        git, 'FindRepoCheckoutRoot', autospec=True)

  def _verifyLocateBrilloCommand(self, expected):
    """Assert that non-sdk commands resolve to the expected wrapper."""
    self.assertEqual(expected,
                     brillo.LocateBrilloCommand(['flash']))
    self.assertEqual(expected,
                     brillo.LocateBrilloCommand(['flash', '--help']))

  def _verifyLocateBrilloCommandSdkHandling(self, expected):
    """Assert that 'sdk' commands resolve to the expected wrapper."""
    self.assertEqual(expected,
                     brillo.LocateBrilloCommand(['sdk']))
    self.assertEqual(expected,
                     brillo.LocateBrilloCommand(['sdk', '--help']))

  def _verifyLocateBrilloCommandFail(self):
    """Assert that a non-sdk command lookup dies."""
    with self.assertRaises(cros_build_lib.DieSystemExit):
      brillo.LocateBrilloCommand(['flash'])

  def _verifyLocateBrilloCommandSdkFail(self):
    """Assert that an 'sdk' command lookup dies."""
    with self.assertRaises(cros_build_lib.DieSystemExit):
      brillo.LocateBrilloCommand(['sdk'])

  def testCommandLookupActiveWorkspace(self):
    """Test command lookup when the workspace has an active SDK."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace('1.2.3')

    sdk_wrapper = os.path.join(
        self.bootstrap_path, 'sdk_checkouts/1.2.3/chromite/bin/brillo')
    bootstrap_wrapper = os.path.join(self.bootstrap_path, 'bin/brillo')

    # We are not inside a repo.
    self.mock_repo_root.return_value = None

    self._verifyLocateBrilloCommand(sdk_wrapper)
    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)

    # We are inside a repo, shouldn't affect the result.
    self.mock_repo_root.return_value = '/repo'

    self._verifyLocateBrilloCommand(sdk_wrapper)
    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)

  def testCommandLookupInactiveWorkspace(self):
    """Test command lookup when the workspace has no active SDK."""
    self.CreateBootstrap()
    self.CreateWorkspace()
    self.mock_repo_root.return_value = None

    bootstrap_wrapper = os.path.join(self.bootstrap_path, 'bin/brillo')

    self._verifyLocateBrilloCommandFail()
    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)

    # Having a repo root shouldn't affect the result.
    self.mock_repo_root.return_value = '/repo'

    self._verifyLocateBrilloCommandFail()
    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)

  def testCommandLookupRepoFromBootstrap(self):
    """Test command lookup falling back to the repo checkout."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace()
    self.mock_workspace_path.return_value = None
    self.mock_repo_root.return_value = '/repo'

    bootstrap_wrapper = os.path.join(self.bootstrap_path, 'bin/brillo')
    repo_wrapper = '/repo/chromite/bin/brillo'

    self._verifyLocateBrilloCommand(repo_wrapper)
    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)

  def testCommandLookupBootstrapOnly(self):
    """Test command lookup with only a bootstrap checkout available."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace()
    self.mock_workspace_path.return_value = None
    self.mock_repo_root.return_value = None

    bootstrap_wrapper = os.path.join(self.bootstrap_path, 'bin/brillo')

    self._verifyLocateBrilloCommandFail()
    self._verifyLocateBrilloCommandSdkHandling(bootstrap_wrapper)

  def testCommandLookupRepoOnly(self):
    """Test command lookup with only a repo checkout available."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace()
    self.mock_bootstrap_path.return_value = None
    self.mock_workspace_path.return_value = None
    self.mock_repo_root.return_value = '/repo'

    repo_wrapper = '/repo/chromite/bin/brillo'

    self._verifyLocateBrilloCommand(repo_wrapper)
    self._verifyLocateBrilloCommandSdkFail()

  def testMainInActiveWorkspace(self):
    """Test that main() execs the workspace SDK wrapper."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace('1.2.3')
    self.mock_repo_root.return_value = None

    brillo.main(['flash', '--help'])

    expected_cmd = os.path.join(
        self.bootstrap_path, 'sdk_checkouts/1.2.3/chromite/bin/brillo')

    self.assertEqual(
        [mock.call(expected_cmd, [expected_cmd, 'flash', '--help'])],
        self.mock_exec.call_args_list)

  def testMainInRepo(self):
    """Test that main() execs the repo checkout wrapper."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace('1.2.3')
    self.mock_workspace_path.return_value = None
    self.mock_repo_root.return_value = '/repo'

    brillo.main(['flash', '--help'])

    expected_cmd = '/repo/chromite/bin/brillo'

    self.assertEqual(
        [mock.call(expected_cmd, [expected_cmd, 'flash', '--help'])],
        self.mock_exec.call_args_list)

  def testMainNoCmd(self):
    """Test that main() dies (and execs nothing) with no SDK found."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace('1.2.3')
    self.mock_workspace_path.return_value = None
    self.mock_repo_root.return_value = None

    with self.assertRaises(cros_build_lib.DieSystemExit):
      brillo.main(['flash', '--help'])

    self.assertEqual([], self.mock_exec.call_args_list)

  def testMainSdkCmd(self):
    """Test that main() execs 'sdk' from the bootstrap checkout."""
    self.CreateBootstrap('1.2.3')
    self.CreateWorkspace('1.2.3')
    self.mock_workspace_path.return_value = None
    self.mock_repo_root.return_value = None

    brillo.main(['sdk', '--help'])

    expected_cmd = os.path.join(self.bootstrap_path, 'bin/brillo')

    self.assertEqual(
        [mock.call(expected_cmd, [expected_cmd, 'sdk', '--help'])],
        self.mock_exec.call_args_list)
diff --git a/bootstrap/support/chromite_wrapper b/bootstrap/support/chromite_wrapper
new file mode 100755
index 0000000..43cb14b
--- /dev/null
+++ b/bootstrap/support/chromite_wrapper
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper for chromite tools.
+
+The script is intended to be symlinked to any number of chromite tools; it
+attempts to find the path for chromite, and hands off to the right tool via
+exec if possible.
+
+It is intended to be used strictly outside of the chroot.
+
+If you're looking at a copy and want to know where the original looks at, look
+here:
+  http://git.chromium.org/gitweb/?p=chromite.git;a=blob;f=bin/chromite
+
+Since this script is _copied_, it should remain small and not use internal libs.
+
+"""
+
+import errno
+import os
+import sys
+
+# Due to historical reasons, and the fact depot_tools ToT is used by older
+# factory branches (lacking chromite script cleanups), note we have to
+# fallback to some odd import locations.  This is the only reason for the
+# fallback code- any/all new scripts symlinked to this script *must* exist
+# in chromite/bin/ .
+
+def _FindChromite(path):
+  """Find the chromite dir in a repo, gclient, or submodule checkout."""
+  path = os.path.abspath(path)
+  # Depending on the checkout type (whether repo chromeos or gclient chrome)
+  # Chromite lives in a different location.
+  roots = (
+    ('.repo', 'chromite/.git'),
+    ('.gclient', 'src/third_party/chromite/.git'),
+    ('src/.gitmodules', 'src/third_party/chromite/.git'),
+  )
+
+  while path != '/':
+    for root, chromite_git_dir in roots:
+      if all(os.path.exists(os.path.join(path, x))
+             for x in [root, chromite_git_dir]):
+        return os.path.dirname(os.path.join(path, chromite_git_dir))
+    path = os.path.dirname(path)
+  return None
+
+
+def _MissingErrorOut(target):
+  sys.stderr.write(
+"""ERROR: Couldn't find the chromite tool %s.
+
+Please change to a directory inside your Chromium OS source tree
+and retry.  If you need to setup a Chromium OS source tree, see
+  http://www.chromium.org/chromium-os/developer-guide
+""" % target)
+  return 127
+
+
def main():
  """Locate the chromite checkout and exec (or import) the wrapped tool.

  Returns:
    An exit status when the tool could not be run, otherwise the return
    value of the fallback module's main().
  """
  chromite_dir = _FindChromite(os.getcwd())
  target = os.path.basename(sys.argv[0])
  if chromite_dir is None:
    return _MissingErrorOut(target)

  path = os.path.join(chromite_dir, 'bin', target)
  try:
    os.execv(path, [path] + sys.argv[1:])
  except EnvironmentError as e:
    # 'as' form works on python 2.6+ and python 3; the old
    # 'except EnvironmentError, e' comma form is a python-3 syntax error.
    if e.errno not in (errno.ENOENT, errno.EPERM):
      raise

  # Reaching here means it's either a bad target, or we're working against
  # an old (pre 6be2efcf5bb575b03862113eec097c44d8d7f93e) revision of
  # chromite.  Fallback to trying to import it; this code works at least as
  # far back as branch 0.11.241.B; likely further.

  if target == 'cbuildbot':
    target = 'chromite.buildbot.cbuildbot'
  else:
    target = 'chromite.bin.%s' % (target,)

  # Adjust the path importation so we can import our target.
  sys.path.insert(0, os.path.dirname(chromite_dir))

  try:
    module = __import__(target, fromlist=['main'])
  except ImportError:
    return _MissingErrorOut(target)
  return module.main()
+
# Script entry point: propagate the tool's exit status.
if __name__ == '__main__':
  sys.exit(main())
diff --git a/cbuildbot/__init__.py b/cbuildbot/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cbuildbot/__init__.py
diff --git a/cbuildbot/afdo.py b/cbuildbot/afdo.py
new file mode 100644
index 0000000..be22e73
--- /dev/null
+++ b/cbuildbot/afdo.py
@@ -0,0 +1,504 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the various utilities to build Chrome with AFDO.
+
+For a description of AFDO see gcc.gnu.org/wiki/AutoFDO.
+"""
+
+from __future__ import print_function
+
+import datetime
+import os
+import re
+
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import path_util
+from chromite.lib import timeout_util
+
+
# AFDO-specific constants.
# Chrome URL where AFDO data is stored.
AFDO_PROD_URL = 'gs://chromeos-prebuilt/afdo-job/canonicals/'
AFDO_TEST_URL = '%s/afdo-job/canonicals/' % constants.TRASH_BUCKET
AFDO_BASE_URL = AFDO_PROD_URL
# The '%(...)s' placeholders below are filled in per build via the
# '%' operator with a spec dictionary (see GenerateAFDOData et al.).
AFDO_CHROOT_ROOT = os.path.join('%(build_root)s', constants.DEFAULT_CHROOT_DIR)
AFDO_LOCAL_DIR = os.path.join('%(root)s', 'tmp')
AFDO_BUILDROOT_LOCAL = AFDO_LOCAL_DIR % {'root': AFDO_CHROOT_ROOT}
CHROME_ARCH_VERSION = '%(package)s-%(arch)s-%(version)s'
CHROME_PERF_AFDO_FILE = '%s.perf.data' % CHROME_ARCH_VERSION
CHROME_PERF_AFDO_URL = '%s%s.bz2' % (AFDO_BASE_URL, CHROME_PERF_AFDO_FILE)
CHROME_AFDO_FILE = '%s.afdo' % CHROME_ARCH_VERSION
CHROME_AFDO_URL = '%s%s.bz2' % (AFDO_BASE_URL, CHROME_AFDO_FILE)
CHROME_ARCH_RELEASE = '%(package)s-%(arch)s-%(release)s'
LATEST_CHROME_AFDO_FILE = 'latest-%s.afdo' % CHROME_ARCH_RELEASE
LATEST_CHROME_AFDO_URL = AFDO_BASE_URL + LATEST_CHROME_AFDO_FILE
CHROME_DEBUG_BIN = os.path.join('%(root)s',
                                'build/%(board)s/usr/lib/debug',
                                'opt/google/chrome/chrome.debug')
CHROME_DEBUG_BIN_URL = '%s%s.debug.bz2' % (AFDO_BASE_URL, CHROME_ARCH_VERSION)

# Tool that converts a 'perf' profile into an AFDO (.afdo) profile;
# built inside the chroot (see GenerateAFDOData).
AFDO_GENERATE_GCOV_TOOL = '/usr/bin/create_gcov'

# regex to find AFDO file for specific architecture within the ebuild file.
CHROME_EBUILD_AFDO_EXP = r'^(?P<bef>AFDO_FILE\["%s"\]=")(?P<name>.*)(?P<aft>")'
# and corresponding replacement string.
CHROME_EBUILD_AFDO_REPL = r'\g<bef>%s\g<aft>'

# How old can the AFDO data be? (in days).
AFDO_ALLOWED_STALE = 7

# TODO(llozano): Currently using sandybridge boards. We should move to
# a more modern platform.
# Set of boards that can generate the AFDO profile (can generate 'perf'
# data with LBR events).
AFDO_DATA_GENERATORS = ('butterfly', 'lumpy', 'parrot', 'stumpy')

# For a given architecture, which architecture is used to generate
# the AFDO profile. Some architectures are not able to generate their
# own profile.
AFDO_ARCH_GENERATORS = {'amd64': 'amd64',
                        'arm': 'amd64',
                        'x86': 'amd64'}

AFDO_ALERT_RECIPIENTS = ['chromeos-toolchain@google.com']
+
+
# NOTE(review): raised by callers when required AFDO inputs cannot be
# located — confirm against call sites outside this module.
class MissingAFDOData(failures_lib.StepFailure):
  """Exception thrown when necessary AFDO data is missing."""
+
+
# Raised by PatchChromeEbuildAFDOFile when an arch's AFDO_FILE marker is
# absent from the Chrome ebuild.
class MissingAFDOMarkers(failures_lib.StepFailure):
  """Exception thrown when necessary ebuild markers for AFDO are missing."""
+
+
def CompressAFDOFile(to_compress, buildroot):
  """Compress file used by AFDO process.

  Args:
    to_compress: File to compress.
    buildroot: buildroot where to store the compressed data.

  Returns:
    Name of the compressed data file.
  """
  out_dir = AFDO_BUILDROOT_LOCAL % {'build_root': buildroot}
  compressed = '%s.bz2' % os.path.join(out_dir, os.path.basename(to_compress))
  cros_build_lib.CompressFile(to_compress, compressed)
  return compressed
+
+
def UncompressAFDOFile(to_decompress, buildroot):
  """Decompress file used by AFDO process.

  Args:
    to_decompress: File to decompress.
    buildroot: buildroot where to store the decompressed data.

  Returns:
    Path of the decompressed file inside the buildroot temp dir.
  """
  local_dir = AFDO_BUILDROOT_LOCAL % {'build_root': buildroot}
  basename = os.path.basename(to_decompress)
  # Drop the last extension (e.g. '.bz2') to name the output file.
  dest_basename = basename.rsplit('.', 1)[0]
  dest = os.path.join(local_dir, dest_basename)
  cros_build_lib.UncompressFile(to_decompress, dest)
  return dest
+
+
def GSUploadIfNotPresent(gs_context, src, dest):
  """Upload a file to GS only if the file does not exist.

  Will not generate an error if the file already exist in GS. It will
  only emit a warning.

  I could use GSContext.Copy(src,dest,version=0) here but it does not seem
  to work for large files. Using GSContext.Exists(dest) instead. See
  crbug.com/395858.

  Args:
    gs_context: GS context instance.
    src: File to copy.
    dest: Destination location.

  Returns:
    True if file was uploaded. False otherwise.
  """
  # Early return instead of if/else: skip the upload when already present.
  if gs_context.Exists(dest):
    logging.warning('File %s already in GS', dest)
    return False
  gs_context.Copy(src, dest, acl='public-read')
  return True
+
+
def GetAFDOPerfDataURL(cpv, arch):
  """Return the location URL for the AFDO perf data file.

  Build the URL for the 'perf' data file given the release and architecture.

  Args:
    cpv: The portage_util.CPV object for chromeos-chrome.
    arch: architecture we're going to build Chrome for.

  Returns:
    URL of the location of the 'perf' data file.
  """
  # The file name of the perf data is based only in the chrome version.
  # The test case that produces it does not know anything about the
  # revision number.
  # TODO(llozano): perf data filename should include the revision number.
  version_number = cpv.version_no_rev.split('_')[0]
  return CHROME_PERF_AFDO_URL % {'package': cpv.package,
                                 'arch': arch,
                                 'version': version_number}
+
+
def CheckAFDOPerfData(cpv, arch, gs_context):
  """Check whether AFDO perf data exists for the given architecture.

  Check if 'perf' data file for this architecture and release is available
  in GS.

  Args:
    cpv: The portage_util.CPV object for chromeos-chrome.
    arch: architecture we're going to build Chrome for.
    gs_context: GS context to retrieve data.

  Returns:
    True if AFDO perf data is available. False otherwise.
  """
  url = GetAFDOPerfDataURL(cpv, arch)
  found = gs_context.Exists(url)
  if found:
    logging.info('Found AFDO perf data at %s', url)
  else:
    logging.info('Could not find AFDO perf data at %s', url)
  return found
+
+
def WaitForAFDOPerfData(cpv, arch, buildroot, gs_context,
                        timeout=constants.AFDO_GENERATE_TIMEOUT):
  """Wait for AFDO perf data to show up (with an appropriate timeout).

  Wait for AFDO 'perf' data to show up in GS and copy it into a temp
  directory in the buildroot.

  Args:
    cpv: CPV object for Chrome.
    arch: architecture we're going to build Chrome for.
    buildroot: buildroot where AFDO data should be stored.
    gs_context: GS context to retrieve data.
    timeout: How long to wait total, in seconds.

  Returns:
    True if found the AFDO perf data before the timeout expired.
    False otherwise.
  """
  try:
    # Poll GS until the perf file appears or the timeout expires.
    timeout_util.WaitForReturnTrue(
        CheckAFDOPerfData,
        func_args=(cpv, arch, gs_context),
        timeout=timeout, period=constants.SLEEP_TIMEOUT)
  except timeout_util.TimeoutError:
    logging.info('Could not find AFDO perf data before timeout')
    return False

  url = GetAFDOPerfDataURL(cpv, arch)
  dest_dir = AFDO_BUILDROOT_LOCAL % {'build_root': buildroot}
  dest_path = os.path.join(dest_dir, url.rsplit('/', 1)[1])
  gs_context.Copy(url, dest_path)

  UncompressAFDOFile(dest_path, buildroot)
  logging.info('Retrieved AFDO perf data to %s', dest_path)
  return True
+
+
def PatchChromeEbuildAFDOFile(ebuild_file, arch_profiles):
  """Patch the Chrome ebuild with the dictionary of {arch: afdo_file} pairs.

  Args:
    ebuild_file: path of the ebuild file within the chroot.
    arch_profiles: {arch: afdo_file} pairs to put into the ebuild.

  Raises:
    MissingAFDOMarkers: if the ebuild lacks the AFDO marker for some arch.
  """
  original_ebuild = path_util.FromChrootPath(ebuild_file)
  modified_ebuild = '%s.new' % original_ebuild

  # Build per-arch regexps and replacements, and track which arch markers
  # were actually seen in the ebuild.  Iterate the dict directly instead
  # of .keys() — same behavior, idiomatic.
  arch_patterns = {}
  arch_repls = {}
  arch_markers = {}
  for arch in arch_profiles:
    arch_patterns[arch] = re.compile(CHROME_EBUILD_AFDO_EXP % arch)
    arch_repls[arch] = CHROME_EBUILD_AFDO_REPL % arch_profiles[arch]
    arch_markers[arch] = False

  with open(original_ebuild, 'r') as original:
    with open(modified_ebuild, 'w') as modified:
      for line in original:
        for arch in arch_profiles:
          matched = arch_patterns[arch].match(line)
          if matched:
            arch_markers[arch] = True
            modified.write(arch_patterns[arch].sub(arch_repls[arch], line))
            break
        else:  # line without markers, just copy it.
          modified.write(line)

  # .items() (not the py2-only .iteritems()) keeps this py3-compatible.
  for arch, found in arch_markers.items():
    if not found:
      raise MissingAFDOMarkers('Chrome ebuild file does not have appropriate '
                               'AFDO markers for arch %s' % arch)

  os.rename(modified_ebuild, original_ebuild)
+
+
def UpdateChromeEbuildAFDOFile(board, arch_profiles):
  """Update chrome ebuild with the dictionary of {arch: afdo_file} pairs.

  Modifies the Chrome ebuild to set the appropriate AFDO file for each
  given architecture. Regenerates the associated Manifest file and
  commits the new ebuild and Manifest.

  Args:
    board: board we are building Chrome for.
    arch_profiles: {arch: afdo_file} pairs to put into the ebuild.
  """
  # Find the Chrome ebuild file.  Board-specific wrappers (equery-$BOARD,
  # ebuild-$BOARD) are used when a board is given.
  equery_prog = 'equery'
  ebuild_prog = 'ebuild'
  if board:
    equery_prog += '-%s' % board
    ebuild_prog += '-%s' % board

  equery_cmd = [equery_prog, 'w', 'chromeos-chrome']
  ebuild_file = cros_build_lib.RunCommand(equery_cmd,
                                          enter_chroot=True,
                                          redirect_stdout=True).output.rstrip()

  # Patch the ebuild file with the names of the available afdo_files.
  PatchChromeEbuildAFDOFile(ebuild_file, arch_profiles)

  # Also patch the 9999 ebuild. This is necessary because the uprev
  # process starts from the 9999 ebuild file and then compares to the
  # current version to see if the uprev is really necessary. We dont
  # want the names of the available afdo_files to show as differences.
  # It also allows developers to do USE=afdo_use when using the 9999
  # ebuild.
  ebuild_9999 = os.path.join(os.path.dirname(ebuild_file),
                             'chromeos-chrome-9999.ebuild')
  PatchChromeEbuildAFDOFile(ebuild_9999, arch_profiles)

  # Regenerate the Manifest file.
  ebuild_gs_dir = None
  # If using the GS test location, pass this location to the
  # chrome ebuild.
  if AFDO_BASE_URL == AFDO_TEST_URL:
    ebuild_gs_dir = {'AFDO_GS_DIRECTORY': AFDO_TEST_URL}
  gen_manifest_cmd = [ebuild_prog, ebuild_file, 'manifest', '--force']
  cros_build_lib.RunCommand(gen_manifest_cmd, enter_chroot=True,
                            extra_env=ebuild_gs_dir, print_cmd=True)

  ebuild_dir = path_util.FromChrootPath(os.path.dirname(ebuild_file))
  git.RunGit(ebuild_dir, ['add', 'Manifest'])

  # Check if anything changed compared to the previous version.
  mod_files = ['Manifest', os.path.basename(ebuild_file),
               os.path.basename(ebuild_9999)]
  modifications = git.RunGit(ebuild_dir,
                             ['status', '--porcelain', '--'] + mod_files,
                             capture_output=True, print_cmd=True).output
  if not modifications:
    logging.info('AFDO info for the Chrome ebuild did not change. '
                 'Nothing to commit')
    return

  # If there are changes to ebuild or Manifest, commit them.
  commit_msg = ('"Set {arch: afdo_file} pairs %s and updated Manifest"'
                % arch_profiles)
  git.RunGit(ebuild_dir,
             ['commit', '-m', commit_msg, '--'] + mod_files,
             print_cmd=True)
+
+
def VerifyLatestAFDOFile(afdo_release_spec, buildroot, gs_context):
  """Verify that the latest AFDO profile for a release is suitable.

  Find the latest AFDO profile file for a particular release and check
  that it is not too stale. The latest AFDO profile name for a release
  can be found in a file in GS under the name
  latest-chrome-<arch>-<release>.afdo.

  Args:
    afdo_release_spec: architecture and release to find the latest AFDO
        profile for.
    buildroot: buildroot where AFDO data should be stored.
    gs_context: GS context to retrieve data.

  Returns:
    The name of the AFDO profile file if a suitable one was found.
    None otherwise.
  """
  latest_afdo_url = LATEST_CHROME_AFDO_URL % afdo_release_spec

  # Check if latest-chrome-<arch>-<release>.afdo exists.
  try:
    latest_detail = gs_context.List(latest_afdo_url, details=True)
  except gs.GSNoSuchKey:
    # Lazy %s args (not eager % formatting), consistent with the other
    # logging calls in this module.
    logging.info('Could not find latest AFDO info file %s', latest_afdo_url)
    return None

  # Verify the AFDO profile file is not too stale.
  mod_date = latest_detail[0].creation_time
  curr_date = datetime.datetime.now()
  allowed_stale_days = datetime.timedelta(days=AFDO_ALLOWED_STALE)
  if (curr_date - mod_date) > allowed_stale_days:
    logging.info('Found latest AFDO info file %s but it is too old',
                 latest_afdo_url)
    return None

  # Then get the name of the latest valid AFDO profile file.
  local_dir = AFDO_BUILDROOT_LOCAL % {'build_root': buildroot}
  latest_afdo_file = LATEST_CHROME_AFDO_FILE % afdo_release_spec
  latest_afdo_path = os.path.join(local_dir, latest_afdo_file)
  gs_context.Copy(latest_afdo_url, latest_afdo_path)

  return osutils.ReadFile(latest_afdo_path).strip()
+
+
def GetLatestAFDOFile(cpv, arch, buildroot, gs_context):
  """Try to find the latest suitable AFDO profile file.

  Try to find the latest AFDO profile generated for current release
  and architecture. If there is none, check the previous release (mostly
  in case we have just branched).

  Args:
    cpv: cpv object for Chrome.
    arch: architecture for which we are looking for AFDO profile.
    buildroot: buildroot where AFDO data should be stored.
    gs_context: GS context to retrieve data.

  Returns:
    Name of latest suitable AFDO profile file if one is found.
    None otherwise.
  """
  generator_arch = AFDO_ARCH_GENERATORS[arch]
  current_release = cpv.version.split('.')[0]

  def check_release(release):
    # One GS lookup for the given release's latest suitable profile.
    spec = {'package': cpv.package,
            'arch': generator_arch,
            'release': release}
    return VerifyLatestAFDOFile(spec, buildroot, gs_context)

  afdo_file = check_release(current_release)
  if afdo_file:
    return afdo_file

  # Could not find suitable AFDO file for the current release.
  # Let's see if there is one from the previous release.
  return check_release(str(int(current_release) - 1))
+
+
def GenerateAFDOData(cpv, arch, board, buildroot, gs_context):
  """Generate AFDO profile data from 'perf' data.

  Given the 'perf' profile, generate an AFDO profile using create_gcov.
  It also creates a latest-chrome-<arch>-<release>.afdo file pointing
  to the generated AFDO profile.
  Uploads the generated data to GS for retrieval by the chrome ebuild
  file when doing an 'afdo_use' build.
  It is possible the generated data has previously been uploaded to GS
  in which case this routine will not upload the data again. Uploading
  again may cause verification failures for the ebuild file referencing
  the previous contents of the data.

  Args:
    cpv: cpv object for Chrome.
    arch: architecture for which we are looking for AFDO profile.
    board: board we are building for.
    buildroot: buildroot where AFDO data should be stored.
    gs_context: GS context to retrieve/store data.

  Returns:
    Name of the AFDO profile file generated if successful.
  """
  CHROME_UNSTRIPPED_NAME = 'chrome.unstripped'

  version_number = cpv.version
  afdo_spec = {'package': cpv.package,
               'arch': arch,
               'version': version_number}
  chroot_root = AFDO_CHROOT_ROOT % {'build_root': buildroot}
  local_dir = AFDO_LOCAL_DIR % {'root': chroot_root}
  # Same directory as seen from inside the chroot (root prefix stripped).
  in_chroot_local_dir = AFDO_LOCAL_DIR % {'root': ''}

  # Upload compressed chrome debug binary to GS for triaging purposes.
  # TODO(llozano): This simplifies things in case of need of triaging
  # problems but is it really necessary?
  debug_bin = CHROME_DEBUG_BIN % {'root': chroot_root,
                                  'board': board}
  comp_debug_bin_path = CompressAFDOFile(debug_bin, buildroot)
  GSUploadIfNotPresent(gs_context, comp_debug_bin_path,
                       CHROME_DEBUG_BIN_URL % afdo_spec)

  # create_gcov demands the name of the profiled binary exactly matches
  # the name of the unstripped binary or it is named 'chrome.unstripped'.
  # So create a symbolic link with the appropriate name.
  local_debug_sym = os.path.join(local_dir, CHROME_UNSTRIPPED_NAME)
  in_chroot_debug_bin = CHROME_DEBUG_BIN % {'root': '', 'board': board}
  osutils.SafeUnlink(local_debug_sym)
  os.symlink(in_chroot_debug_bin, local_debug_sym)

  # Call create_gcov tool to generated AFDO profile from 'perf' profile
  # and upload it to GS. Need to call from within chroot since this tool
  # was built inside chroot.
  debug_sym = os.path.join(in_chroot_local_dir, CHROME_UNSTRIPPED_NAME)
  # The name of the 'perf' file is based only on the version of chrome. The
  # revision number is not included.
  afdo_spec_no_rev = {'package': cpv.package,
                      'arch': arch,
                      'version': cpv.version_no_rev.split('_')[0]}
  perf_afdo_file = CHROME_PERF_AFDO_FILE % afdo_spec_no_rev
  perf_afdo_path = os.path.join(in_chroot_local_dir, perf_afdo_file)
  afdo_file = CHROME_AFDO_FILE % afdo_spec
  afdo_path = os.path.join(in_chroot_local_dir, afdo_file)
  afdo_cmd = [AFDO_GENERATE_GCOV_TOOL,
              '--binary=%s' % debug_sym,
              '--profile=%s' % perf_afdo_path,
              '--gcov=%s' % afdo_path]
  cros_build_lib.RunCommand(afdo_cmd, enter_chroot=True, capture_output=True,
                            print_cmd=True)

  afdo_local_path = os.path.join(local_dir, afdo_file)
  comp_afdo_path = CompressAFDOFile(afdo_local_path, buildroot)
  uploaded_afdo_file = GSUploadIfNotPresent(gs_context, comp_afdo_path,
                                            CHROME_AFDO_URL % afdo_spec)

  if uploaded_afdo_file:
    # Create latest-chrome-<arch>-<release>.afdo pointing to the name
    # of the AFDO profile file and upload to GS.
    current_release = version_number.split('.')[0]
    afdo_release_spec = {'package': cpv.package,
                         'arch': arch,
                         'release': current_release}
    latest_afdo_file = LATEST_CHROME_AFDO_FILE % afdo_release_spec
    latest_afdo_path = os.path.join(local_dir, latest_afdo_file)
    osutils.WriteFile(latest_afdo_path, afdo_file)
    gs_context.Copy(latest_afdo_path,
                    LATEST_CHROME_AFDO_URL % afdo_release_spec,
                    acl='public-read')

  return afdo_file
+
+
def CanGenerateAFDOData(board):
  """Whether this board is capable of generating its own AFDO data."""
  return board in AFDO_DATA_GENERATORS
diff --git a/cbuildbot/archive_lib.py b/cbuildbot/archive_lib.py
new file mode 100644
index 0000000..a4cef35
--- /dev/null
+++ b/cbuildbot/archive_lib.py
@@ -0,0 +1,184 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module with utilities for archiving functionality."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import osutils
+
+
+def GetBaseUploadURI(config, archive_base=None, bot_id=None,
+                     remote_trybot=False):
+  """Get the base URL where artifacts from this builder are uploaded.
+
+  Each build run stores its artifacts in a subdirectory of the base URI.
+  We also have LATEST files under the base URI which help point to the
+  latest build available for a given builder.
+
+  Args:
+    config: The build config to examine.
+    archive_base: Optional. The root URL under which objects from all
+      builders are uploaded. If not specified, we use the default archive
+      bucket.
+    bot_id: The bot ID to archive files under.
+    remote_trybot: Whether this is a remote trybot run. This is used to
+      make sure that uploads from remote trybot runs do not conflict with
+      uploads from production builders.
+
+  Returns:
+    Google Storage URI (i.e. 'gs://...') under which all archived files
+      should be uploaded.  In other words, a path like a directory, even
+      though GS has no real directories.
+  """
+  if not bot_id:
+    bot_id = config.GetBotId(remote_trybot=remote_trybot)
+
+  if archive_base:
+    return '%s/%s' % (archive_base, bot_id)
+  elif remote_trybot or config.gs_path == config_lib.GS_PATH_DEFAULT:
+    return '%s/%s' % (constants.DEFAULT_ARCHIVE_BUCKET, bot_id)
+  else:
+    return config.gs_path
+
+
+def GetUploadACL(config):
+  """Get the ACL we should use to upload artifacts for a given config."""
+  if config.internal:
+    # Use the bucket default ACL.
+    return None
+
+  return 'public-read'
+
+
+class Archive(object):
+  """Class to represent the archive for one builder run.
+
+  An Archive object is a read-only object with attributes and methods useful
+  for archive purposes.  Most of the attributes are supported as properties
+  because they depend on the ChromeOS version and if they are calculated too
+  soon (i.e. before the sync stage) they will raise an exception.
+
+  Attributes:
+    archive_path: The full local path where output from this builder is stored.
+    download_url: The URL where we can download artifacts.
+    upload_url: The Google Storage location where we should upload artifacts.
+    version: The ChromeOS version for this archive.
+  """
+
+  _BUILDBOT_ARCHIVE = 'buildbot_archive'
+  _TRYBOT_ARCHIVE = 'trybot_archive'
+
+  def __init__(self, bot_id, version_getter, options, config):
+    """Initialize.
+
+    Args:
+      bot_id: The bot id associated with this archive.
+      version_getter: Functor that should return the ChromeOS version for
+        this run when called, if the version is known.  Typically, this
+        is BuilderRun.GetVersion.
+      options: The command options object for this run.
+      config: The build config for this run.
+    """
+    self._options = options
+    self._config = config
+    self._version_getter = version_getter
+    self._version = None
+
+    self.bot_id = bot_id
+
+  @property
+  def version(self):
+    if self._version is None:
+      self._version = self._version_getter()
+
+    return self._version
+
+  @property
+  def archive_path(self):
+    return os.path.join(self.GetLocalArchiveRoot(), self.bot_id, self.version)
+
+  @property
+  def upload_url(self):
+    base_upload_url = GetBaseUploadURI(
+        self._config,
+        archive_base=self._options.archive_base,
+        bot_id=self.bot_id,
+        remote_trybot=self._options.remote_trybot)
+    return '%s/%s' % (base_upload_url, self.version)
+
+  @property
+  def upload_acl(self):
+    """Get the ACL we should use to upload artifacts for a given config."""
+    return GetUploadACL(self._config)
+
+  @property
+  def download_url(self):
+    if self._options.buildbot or self._options.remote_trybot:
+      # Translate the gs:// URI to the URL for downloading the same files.
+      return self.upload_url.replace('gs://', gs.PRIVATE_BASE_HTTPS_URL)
+    else:
+      return self.archive_path
+
+  def GetLocalArchiveRoot(self, trybot=None):
+    """Return the location on disk where archive images are kept."""
+    buildroot = os.path.abspath(self._options.buildroot)
+
+    if trybot is None:
+      trybot = not self._options.buildbot or self._options.debug
+
+    archive_base = self._TRYBOT_ARCHIVE if trybot else self._BUILDBOT_ARCHIVE
+    return os.path.join(buildroot, archive_base)
+
+  def SetupArchivePath(self):
+    """Create a fresh directory for archiving a build."""
+    logging.info('Preparing local archive directory at "%s".',
+                 self.archive_path)
+    if self._options.buildbot:
+      # Buildbot: Clear out any leftover build artifacts, if present, for
+      # this particular run.  The Clean stage is responsible for trimming
+      # back the number of archive paths to the last X runs.
+      osutils.RmDir(self.archive_path, ignore_missing=True)
+    else:
+      # Clear the list of uploaded files if it exists.  In practice, the Clean
+      # stage deletes everything in the archive root, so this may not be
+      # doing anything at all.
+      osutils.SafeUnlink(os.path.join(self.archive_path,
+                                      commands.UPLOADED_LIST_FILENAME))
+
+    osutils.SafeMakedirs(self.archive_path)
+
+  def UpdateLatestMarkers(self, manifest_branch, debug, upload_urls=None):
+    """Update the LATEST markers in GS archive area.
+
+    Args:
+      manifest_branch: The name of the branch in the manifest for this run.
+      debug: Boolean debug value for this run.
+      upload_urls: Google storage urls to upload the Latest Markers to.
+    """
+    if not upload_urls:
+      upload_urls = [self.upload_url]
+    # self.version will be one of these forms, shown through examples:
+    # R35-1234.5.6 or R35-1234.5.6-b123.  In either case, we want "1234.5.6".
+    version_marker = self.version.split('-')[1]
+
+    filenames = ('LATEST-%s' % manifest_branch,
+                 'LATEST-%s' % version_marker)
+    base_archive_path = os.path.dirname(self.archive_path)
+    base_upload_urls = [os.path.dirname(url) for url in upload_urls]
+    for base_upload_url in base_upload_urls:
+      for filename in filenames:
+        latest_path = os.path.join(base_archive_path, filename)
+        osutils.WriteFile(latest_path, self.version, mode='w')
+        commands.UploadArchivedFile(
+            base_archive_path, [base_upload_url], filename,
+            debug, acl=self.upload_acl)
diff --git a/cbuildbot/archive_lib_unittest b/cbuildbot/archive_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/archive_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/archive_lib_unittest.py b/cbuildbot/archive_lib_unittest.py
new file mode 100644
index 0000000..96dc2c7
--- /dev/null
+++ b/cbuildbot/archive_lib_unittest.py
@@ -0,0 +1,221 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the archive_lib module."""
+
+from __future__ import print_function
+
+import mock
+
+from chromite.cbuildbot import archive_lib
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import config_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import parallel_unittest
+
+
+DEFAULT_ARCHIVE_PREFIX = 'bogus_bucket/TheArchiveBase'
+DEFAULT_ARCHIVE_BASE = 'gs://%s' % DEFAULT_ARCHIVE_PREFIX
+DEFAULT_BUILDROOT = '/tmp/foo/bar/buildroot'
+DEFAULT_BUILDNUMBER = 12345
+DEFAULT_BRANCH = 'TheBranch'
+DEFAULT_CHROME_BRANCH = 'TheChromeBranch'
+DEFAULT_VERSION_STRING = 'TheVersionString'
+DEFAULT_BOARD = 'TheBoard'
+DEFAULT_BOT_NAME = 'TheCoolBot'
+
+# Access to protected member.
+# pylint: disable=W0212
+
+DEFAULT_OPTIONS = cros_test_lib.EasyAttr(
+    archive_base=DEFAULT_ARCHIVE_BASE,
+    buildroot=DEFAULT_BUILDROOT,
+    buildnumber=DEFAULT_BUILDNUMBER,
+    buildbot=True,
+    branch=DEFAULT_BRANCH,
+    remote_trybot=False,
+    debug=False,
+)
+DEFAULT_CONFIG = config_lib.BuildConfig(
+    name=DEFAULT_BOT_NAME,
+    master=True,
+    boards=[DEFAULT_BOARD],
+    child_configs=[config_lib.BuildConfig(name='foo'),
+                   config_lib.BuildConfig(name='bar'),
+                  ],
+)
+
+
+def _ExtendDefaultOptions(**kwargs):
+  """Extend DEFAULT_OPTIONS with keys/values in kwargs."""
+  options_kwargs = DEFAULT_OPTIONS.copy()
+  options_kwargs.update(kwargs)
+  return cros_test_lib.EasyAttr(**options_kwargs)
+
+
+def _ExtendDefaultConfig(**kwargs):
+  """Extend DEFAULT_CONFIG with keys/values in kwargs."""
+  config_kwargs = DEFAULT_CONFIG.copy()
+  config_kwargs.update(kwargs)
+  return config_lib.BuildConfig(**config_kwargs)
+
+
+def _NewBuilderRun(options=None, config=None):
+  """Create a BuilderRun object from options and config values.
+
+  Args:
+    options: Specify options or default to DEFAULT_OPTIONS.
+    config: Specify build config or default to DEFAULT_CONFIG.
+
+  Returns:
+    BuilderRun object.
+  """
+  manager = parallel_unittest.FakeMultiprocessManager()
+  options = options or DEFAULT_OPTIONS
+  config = config or DEFAULT_CONFIG
+  site_config = config_lib_unittest.MockSiteConfig()
+  site_config[config.name] = config
+
+  return cbuildbot_run.BuilderRun(options, site_config, config, manager)
+
+
+class GetBaseUploadURITest(cros_test_lib.TestCase):
+  """Test the GetBaseUploadURI function."""
+
+  ARCHIVE_BASE = '/tmp/the/archive/base'
+  BOT_ID = 'TheNewBotId'
+
+  def setUp(self):
+    self.cfg = DEFAULT_CONFIG
+
+  def _GetBaseUploadURI(self, *args, **kwargs):
+    """Call GetBaseUploadURI on self.cfg with the given args."""
+    return archive_lib.GetBaseUploadURI(self.cfg, *args, **kwargs)
+
+  def testArchiveBaseRemoteTrybotFalse(self):
+    expected_result = '%s/%s' % (self.ARCHIVE_BASE, DEFAULT_BOT_NAME)
+    result = self._GetBaseUploadURI(archive_base=self.ARCHIVE_BASE,
+                                    remote_trybot=False)
+    self.assertEqual(expected_result, result)
+
+  def testArchiveBaseRemoteTrybotTrue(self):
+    expected_result = '%s/trybot-%s' % (self.ARCHIVE_BASE, DEFAULT_BOT_NAME)
+    result = self._GetBaseUploadURI(archive_base=self.ARCHIVE_BASE,
+                                    remote_trybot=True)
+    self.assertEqual(expected_result, result)
+
+  def testArchiveBaseBotIdRemoteTrybotFalse(self):
+    expected_result = '%s/%s' % (self.ARCHIVE_BASE, self.BOT_ID)
+    result = self._GetBaseUploadURI(archive_base=self.ARCHIVE_BASE,
+                                    bot_id=self.BOT_ID, remote_trybot=False)
+    self.assertEqual(expected_result, result)
+
+  def testArchiveBaseBotIdRemoteTrybotTrue(self):
+    expected_result = '%s/%s' % (self.ARCHIVE_BASE, self.BOT_ID)
+    result = self._GetBaseUploadURI(archive_base=self.ARCHIVE_BASE,
+                                    bot_id=self.BOT_ID, remote_trybot=True)
+    self.assertEqual(expected_result, result)
+
+  def testRemoteTrybotTrue(self):
+    """Test GetBaseUploadURI with no archive base but remote_trybot is True."""
+    expected_result = ('%s/trybot-%s' %
+                       (archive_lib.constants.DEFAULT_ARCHIVE_BUCKET,
+                        DEFAULT_BOT_NAME))
+    result = self._GetBaseUploadURI(remote_trybot=True)
+    self.assertEqual(expected_result, result)
+
+  def testBotIdRemoteTrybotTrue(self):
+    expected_result = ('%s/%s' %
+                       (archive_lib.constants.DEFAULT_ARCHIVE_BUCKET,
+                        self.BOT_ID))
+    result = self._GetBaseUploadURI(bot_id=self.BOT_ID, remote_trybot=True)
+    self.assertEqual(expected_result, result)
+
+  def testDefaultGSPathRemoteTrybotFalse(self):
+    """Test GetBaseUploadURI with default gs_path value in config."""
+    self.cfg = _ExtendDefaultConfig(gs_path=config_lib.GS_PATH_DEFAULT)
+
+    # Test without bot_id.
+    expected_result = ('%s/%s' %
+                       (archive_lib.constants.DEFAULT_ARCHIVE_BUCKET,
+                        DEFAULT_BOT_NAME))
+    result = self._GetBaseUploadURI(remote_trybot=False)
+    self.assertEqual(expected_result, result)
+
+    # Test with bot_id.
+    expected_result = ('%s/%s' %
+                       (archive_lib.constants.DEFAULT_ARCHIVE_BUCKET,
+                        self.BOT_ID))
+    result = self._GetBaseUploadURI(bot_id=self.BOT_ID, remote_trybot=False)
+    self.assertEqual(expected_result, result)
+
+  def testOverrideGSPath(self):
+    """Test GetBaseUploadURI with an overridden gs_path value in config."""
+    self.cfg = _ExtendDefaultConfig(gs_path='gs://funkytown/foo/bar')
+
+    # Test without bot_id.
+    expected_result = self.cfg.gs_path
+    result = self._GetBaseUploadURI(remote_trybot=False)
+    self.assertEqual(expected_result, result)
+
+    # Test with bot_id.
+    expected_result = self.cfg.gs_path
+    result = self._GetBaseUploadURI(bot_id=self.BOT_ID, remote_trybot=False)
+    self.assertEqual(expected_result, result)
+
+
+class ArchiveTest(cros_test_lib.TestCase):
+  """Test the Archive class."""
+  _VERSION = '6543.2.1'
+
+  def _GetAttributeValue(self, attr, options=None, config=None):
+    with mock.patch.object(cbuildbot_run._BuilderRunBase, 'GetVersion') as m:
+      m.return_value = self._VERSION
+
+      run = _NewBuilderRun(options, config)
+      return getattr(run.GetArchive(), attr)
+
+  def testVersion(self):
+    value = self._GetAttributeValue('version')
+    self.assertEqual(self._VERSION, value)
+
+  def testVersionNotReady(self):
+    run = _NewBuilderRun()
+    self.assertRaises(AttributeError, getattr, run, 'version')
+
+  def testArchivePathTrybot(self):
+    options = _ExtendDefaultOptions(buildbot=False)
+    value = self._GetAttributeValue('archive_path', options=options)
+    expected_value = ('%s/%s/%s/%s' %
+                      (DEFAULT_BUILDROOT,
+                       archive_lib.Archive._TRYBOT_ARCHIVE,
+                       DEFAULT_BOT_NAME,
+                       self._VERSION))
+    self.assertEqual(expected_value, value)
+
+  def testArchivePathBuildbot(self):
+    value = self._GetAttributeValue('archive_path')
+    expected_value = ('%s/%s/%s/%s' %
+                      (DEFAULT_BUILDROOT,
+                       archive_lib.Archive._BUILDBOT_ARCHIVE,
+                       DEFAULT_BOT_NAME,
+                       self._VERSION))
+    self.assertEqual(expected_value, value)
+
+  def testUploadUri(self):
+    value = self._GetAttributeValue('upload_url')
+    expected_value = '%s/%s/%s' % (DEFAULT_ARCHIVE_BASE,
+                                   DEFAULT_BOT_NAME,
+                                   self._VERSION)
+    self.assertEqual(expected_value, value)
+
+  def testDownloadURLBuildbot(self):
+    value = self._GetAttributeValue('download_url')
+    expected_value = ('%s%s/%s/%s' %
+                      (archive_lib.gs.PRIVATE_BASE_HTTPS_URL,
+                       DEFAULT_ARCHIVE_PREFIX,
+                       DEFAULT_BOT_NAME,
+                       self._VERSION))
+    self.assertEqual(expected_value, value)
diff --git a/cbuildbot/autotest_rpc_errors.py b/cbuildbot/autotest_rpc_errors.py
new file mode 100644
index 0000000..816df9f
--- /dev/null
+++ b/cbuildbot/autotest_rpc_errors.py
@@ -0,0 +1,23 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Error codes used for the Autotest RPC Client, Proxy, and Server.
+
+This is a copy of scripts/slave-internal/autotest_rpc/autotest_rpc_errors.py
+from https://chrome-internal.googlesource.com/chrome/tools/build.
+"""
+
+PROXY_CANNOT_SEND_REQUEST = 11
+PROXY_CONNECTION_LOST = 12
+PROXY_TIMED_OUT = 13
+
+SERVER_NO_COMMAND = 21
+SERVER_NO_ARGUMENTS = 22
+SERVER_UNKNOWN_COMMAND = 23
+SERVER_BAD_ARGUMENT_COUNT = 24
+
+CLIENT_CANNOT_CONNECT = 31
+CLIENT_HTTP_CODE = 32
+CLIENT_EMPTY_RESPONSE = 33
+CLIENT_NO_RETURN_CODE = 34
diff --git a/cbuildbot/binhost.py b/cbuildbot/binhost.py
new file mode 100644
index 0000000..a8ef86c
--- /dev/null
+++ b/cbuildbot/binhost.py
@@ -0,0 +1,321 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions for calculating compatible binhosts."""
+
+from __future__ import print_function
+
+import collections
+import json
+import os
+import tempfile
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import parallel
+
+
+# A unique identifier for looking up CompatIds by board/useflags.
+_BoardKey = collections.namedtuple('_BoardKey', ['board', 'useflags'])
+
+
+def BoardKey(board, useflags):
+  """Create a new _BoardKey object.
+
+  Args:
+    board: The board associated with this config.
+    useflags: A sequence of extra useflags associated with this config.
+  """
+  return _BoardKey(board, tuple(useflags))
+
+
+def GetBoardKey(config, board=None):
+  """Get the BoardKey associated with a given config.
+
+  Args:
+    config: A config_lib.BuildConfig object.
+    board: Board to use. Defaults to the first board in the config.
+      Optional if len(config.boards) == 1.
+  """
+  if board is None:
+    assert len(config.boards) == 1
+    board = config.boards[0]
+  else:
+    assert board in config.boards
+  return BoardKey(board, config.useflags)
+
+
+def GetAllImportantBoardKeys(site_config):
+  """Get a list of all board keys used in a top-level config.
+
+  Args:
+    site_config: A config_lib.SiteConfig instance.
+  """
+  boards = set()
+  for config in site_config.values():
+    if config.important:
+      for board in config.boards:
+        boards.add(GetBoardKey(config, board))
+  return boards
+
+
+def GetChromePrebuiltConfigs(site_config):
+  """Get a mapping of the boards used in the Chrome PFQ.
+
+  Args:
+    site_config: A config_lib.SiteConfig instance.
+
+  Returns:
+    A dict mapping BoardKey objects to configs.
+  """
+  boards = {}
+  master_chromium_pfq = site_config['master-chromium-pfq']
+  for config in site_config.GetSlavesForMaster(master_chromium_pfq):
+    if config.prebuilts:
+      for board in config.boards:
+        boards[GetBoardKey(config, board)] = config
+  return boards
+
+
+# A tuple of dicts describing our Chrome PFQs.
+# by_compat_id: A dict mapping CompatIds to sets of BoardKey objects.
+# by_arch_useflags: A dict mapping (arch, useflags) tuples to sets of
+#     BoardKey objects.
+_PrebuiltMapping = collections.namedtuple(
+    '_PrebuiltMapping', ['by_compat_id', 'by_arch_useflags'])
+
+
+class PrebuiltMapping(_PrebuiltMapping):
+  """A tuple of dicts describing our Chrome PFQs.
+
+  Members:
+    by_compat_id: A dict mapping CompatIds to sets of BoardKey objects.
+    by_arch_useflags: A dict mapping (arch, useflags) tuples to sets of
+      BoardKey objects.
+  """
+
+  # The location in a ChromeOS checkout where we should store our JSON dump.
+  INTERNAL_MAP_LOCATION = ('%s/src/private-overlays/chromeos-partner-overlay/'
+                           'chromeos/binhost/%s.json')
+
+  # The location in an external Chromium OS checkout where we should store our
+  # JSON dump.
+  EXTERNAL_MAP_LOCATION = ('%s/src/third_party/chromiumos-overlay/chromeos/'
+                           'binhost/%s.json')
+
+  @classmethod
+  def GetFilename(cls, buildroot, suffix, internal=True):
+    """Get the filename where we should store our JSON dump.
+
+    Args:
+      buildroot: The root of the source tree.
+      suffix: The base filename used for the dump (e.g. "chrome").
+      internal: If true, use the internal binhost location. Otherwise, use the
+        public one.
+    """
+    if internal:
+      return cls.INTERNAL_MAP_LOCATION % (buildroot, suffix)
+
+    return cls.EXTERNAL_MAP_LOCATION % (buildroot, suffix)
+
+  @classmethod
+  def Get(cls, keys, compat_ids):
+    """Get a mapping of the Chrome PFQ configs.
+
+    Args:
+      keys: A list of the BoardKey objects that are considered part of the
+        Chrome PFQ.
+      compat_ids: A dict mapping BoardKey objects to CompatId objects.
+
+    Returns:
+      A PrebuiltMapping object.
+    """
+    configs = cls(by_compat_id=collections.defaultdict(set),
+                  by_arch_useflags=collections.defaultdict(set))
+    for key in keys:
+      compat_id = compat_ids[key]
+      configs.by_compat_id[compat_id].add(key)
+      partial_compat_id = (compat_id.arch, compat_id.useflags)
+      configs.by_arch_useflags[partial_compat_id].add(key)
+    return configs
+
+  def Dump(self, filename, internal=True):
+    """Save a mapping of the Chrome PFQ configs to disk (JSON format).
+
+    Args:
+      filename: A location to write the Chrome PFQ configs.
+      internal: Whether the dump should include internal configurations.
+    """
+    output = []
+    for compat_id, keys in self.by_compat_id.items():
+      for key in keys:
+        # Filter internal prebuilts out of external dumps.
+        if not internal and 'chrome_internal' in key.useflags:
+          continue
+
+        output.append({'key': key.__dict__, 'compat_id': compat_id.__dict__})
+
+    with open(filename, 'w') as f:
+      json.dump(output, f, sort_keys=True, indent=2)
+
+  @classmethod
+  def Load(cls, filename):
+    """Load a mapping of the Chrome PFQ configs from disk (JSON format).
+
+    Args:
+      filename: A location to read the Chrome PFQ configs from.
+    """
+    with open(filename) as f:
+      output = json.load(f)
+
+    compat_ids = {}
+    for d in output:
+      key = BoardKey(**d['key'])
+      compat_ids[key] = CompatId(**d['compat_id'])
+
+    return cls.Get(compat_ids.keys(), compat_ids)
+
+  def GetPrebuilts(self, compat_id):
+    """Get the matching BoardKey objects associated with |compat_id|.
+
+    Args:
+      compat_id: The CompatId to use to look up prebuilts.
+    """
+    if compat_id in self.by_compat_id:
+      return self.by_compat_id[compat_id]
+
+    partial_compat_id = (compat_id.arch, compat_id.useflags)
+    if partial_compat_id in self.by_arch_useflags:
+      return self.by_arch_useflags[partial_compat_id]
+
+    return set()
+
+
+def GetChromeUseFlags(board, extra_useflags):
+  """Get a list of the use flags turned on for Chrome on a given board.
+
+  This function requires that the board has been set up first (e.g. using
+  GenConfigsForBoard)
+
+  Args:
+    board: The board to use.
+    extra_useflags: A sequence of use flags to enable or disable.
+
+  Returns:
+    A tuple of the use flags that are enabled for Chrome on the given board.
+    Use flags that are disabled are not listed.
+  """
+  assert cros_build_lib.IsInsideChroot()
+  assert os.path.exists('/build/%s' % board), 'Board %s not set up' % board
+  extra_env = {'USE': ' '.join(extra_useflags)}
+  cmd = ['equery-%s' % board, 'uses', constants.CHROME_CP]
+  chrome_useflags = cros_build_lib.RunCommand(
+      cmd, capture_output=True, print_cmd=False,
+      extra_env=extra_env).output.rstrip().split()
+  return tuple(x[1:] for x in chrome_useflags if x.startswith('+'))
+
+
+def GenConfigsForBoard(board, regen, error_code_ok):
+  """Set up the configs for the specified board.
+
+  This must be run from within the chroot. It sets up the board but does not
+  fully initialize it (it skips the initialization of the toolchain and the
+  board packages)
+
+  Args:
+    board: Board to set up.
+    regen: Whether to regen configs if the board already exists.
+    error_code_ok: Whether errors are acceptable. We set this to True in some
+      tests for configs that are not on the waterfall.
+  """
+  assert cros_build_lib.IsInsideChroot()
+  if regen or not os.path.exists('/build/%s' % board):
+    cmd = ['%s/src/scripts/setup_board' % constants.CHROOT_SOURCE_ROOT,
+           '--board=%s' % board, '--regen_configs', '--skip_toolchain_update',
+           '--skip_chroot_upgrade', '--skip_board_pkg_init', '--quiet']
+    cros_build_lib.RunCommand(cmd, error_code_ok=error_code_ok)
+
+
+_CompatId = collections.namedtuple('_CompatId', ['arch', 'useflags', 'cflags'])
+
+
+def CompatId(arch, useflags, cflags):
+  """Create a new _CompatId object.
+
+  Args:
+    arch: The architecture of this builder.
+    useflags: The full list of use flags for Chrome.
+    cflags: The full list of CFLAGS.
+  """
+  return _CompatId(arch, tuple(useflags), tuple(cflags))
+
+
+def CalculateCompatId(board, extra_useflags):
+  """Calculate the CompatId for board with the specified extra useflags.
+
+  This function requires that the board has been set up first (e.g. using
+  GenConfigsForBoard)
+
+  Args:
+    board: The board to use.
+    extra_useflags: A sequence of use flags to enable or disable.
+
+  Returns:
+    A CompatId object for the board with the specified extra_useflags.
+  """
+  assert cros_build_lib.IsInsideChroot()
+  useflags = GetChromeUseFlags(board, extra_useflags)
+  cmd = ['portageq-%s' % board, 'envvar', 'ARCH', 'CFLAGS']
+  arch_cflags = cros_build_lib.RunCommand(
+      cmd, print_cmd=False, capture_output=True).output.rstrip()
+  arch, cflags = arch_cflags.split('\n', 1)
+  cflags_split = cflags.split()
+
+  # We will add -clang-syntax to the falco and nyan boards, so we need to
+  # filter out -clang-syntax to make sure the flags from the PFQ are the
+  # same as those from the release board. See crbug.com/499115
+  # TODO(yunlian): Remove this when all the boards are build with -clang-syntax
+  if '-clang-syntax' in cflags_split:
+    cflags_split.remove('-clang-syntax')
+  return CompatId(arch, useflags, cflags_split)
+
+
+class CompatIdFetcher(object):
+  """Class for calculating CompatIds in parallel."""
+
+  def __init__(self, caching=False):
+    """Create a new CompatIdFetcher object.
+
+    Args:
+      caching: Whether to cache setup from run to run. See
+        PrebuiltCompatibilityTest.CACHING for details.
+    """
+    self.compat_ids = None
+    if caching:
+      # This import occurs here rather than at the top of the file because we
+      # don't want to force developers to install joblib. The caching argument
+      # is only set to True if PrebuiltCompatibilityTest.CACHING is hand-edited
+      # (for testing purposes).
+      # pylint: disable=import-error
+      from joblib import Memory
+      memory = Memory(cachedir=tempfile.gettempdir(), verbose=0)
+      self.FetchCompatIds = memory.cache(self.FetchCompatIds)
+
+  def _FetchCompatId(self, board, extra_useflags):
+    self.compat_ids[(board, extra_useflags)] = (
+        CalculateCompatId(board, extra_useflags))
+
+  def FetchCompatIds(self, board_keys):
+    """Generate a dict mapping BoardKeys to their associated CompatId.
+
+    Args:
+      board_keys: A list of BoardKey objects to fetch.
+    """
+    # pylint: disable=method-hidden
+    logging.info('Fetching CompatId objects...')
+    with parallel.Manager() as manager:
+      self.compat_ids = manager.dict()
+      parallel.RunTasksInProcessPool(self._FetchCompatId, board_keys)
+      return dict(self.compat_ids)
diff --git a/cbuildbot/binhost_test b/cbuildbot/binhost_test
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/binhost_test
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/binhost_test.py b/cbuildbot/binhost_test.py
new file mode 100644
index 0000000..0bab437
--- /dev/null
+++ b/cbuildbot/binhost_test.py
@@ -0,0 +1,272 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for verifying prebuilts."""
+
+from __future__ import print_function
+
+import collections
+import inspect
+import os
+import unittest
+import warnings
+
+from chromite.cbuildbot import binhost
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+
+
+class PrebuiltCompatibilityTest(cros_test_lib.TestCase):
+  """Ensure that prebuilts are present for all builders and are compatible."""
+
+  # Whether to cache setup from run to run. If set, requires that you install
+  # joblib (sudo easy_install joblib). This is useful for iterating on the
+  # unit tests, but note that if you 'repo sync', you'll need to clear out
+  # /tmp/joblib and blow away /build in order to update the caches. Note that
+  # this is never normally set to True -- if you want to use this feature,
+  # you'll need to hand-edit this file.
+  # TODO(davidjames): Add a --caching option.
+  CACHING = False
+
+  # A dict mapping BoardKeys to their associated compat ids.
+  COMPAT_IDS = None
+
+  # Boards that don't have Chromium PFQs.
+  # TODO(davidjames): Empty this list.
+  BOARDS_WITHOUT_CHROMIUM_PFQS = ['rush_ryu', 'smaug']
+
+  site_config = config_lib.LoadConfigFromFile()
+
+  @classmethod
+  def setUpClass(cls):
+    assert cros_build_lib.IsInsideChroot()
+    logging.info('Generating board configs. This takes about 10m...')
+    board_keys = binhost.GetAllImportantBoardKeys(cls.site_config)
+    boards = set(key.board for key in board_keys)
+    for board in sorted(boards):
+      binhost.GenConfigsForBoard(board, regen=not cls.CACHING,
+                                 error_code_ok=False)
+    fetcher = binhost.CompatIdFetcher(caching=cls.CACHING)
+    cls.COMPAT_IDS = fetcher.FetchCompatIds(list(board_keys))
+
+  def setUp(self):
+    self.complaints = []
+    self.fatal_complaints = []
+
+  def tearDown(self):
+    if self.complaints:
+      warnings.warn('\n' + '\n'.join(self.complaints))
+    if self.fatal_complaints:
+      self.assertFalse(self.fatal_complaints, '\n'.join(self.fatal_complaints))
+
+  def Complain(self, msg, fatal):
+    """Complain about an error when the test exits.
+
+    Args:
+      msg: The message to print.
+      fatal: Whether the message should be fatal. If not, the message will be
+        considered a warning.
+    """
+    if fatal:
+      self.fatal_complaints.append(msg)
+    else:
+      self.complaints.append(msg)
+
+  def GetCompatIdDiff(self, expected, actual):
+    """Return a string describing the differences between expected and actual.
+
+    Args:
+      expected: Expected value for CompatId.
+      actual: Actual value for CompatId.
+    """
+    if expected.arch != actual.arch:
+      return 'arch differs: %s != %s' % (expected.arch, actual.arch)
+    elif expected.useflags != actual.useflags:
+      msg = self.GetSequenceDiff(expected.useflags, actual.useflags)
+      return msg.replace('Sequences', 'useflags')
+    elif expected.cflags != actual.cflags:
+      msg = self.GetSequenceDiff(expected.cflags, actual.cflags)
+      return msg.replace('Sequences', 'cflags')
+    else:
+      assert expected == actual
+      return 'no differences'
+
+  def AssertChromePrebuilts(self, pfq_configs, config, skip_useflags=False):
+    """Verify that the specified config has Chrome prebuilts.
+
+    Args:
+      pfq_configs: A PrebuiltMapping object.
+      config: The config to check.
+      skip_useflags: Don't use extra useflags from the config.
+    """
+    # Drop the extra useflags from the config if requested.
+    msg_prefix = ''
+    if skip_useflags:
+      config = config.deepcopy()
+      config.useflags = []
+      msg_prefix = 'When we take out extra useflags, '
+
+    compat_id = self.GetCompatId(config)
+    pfqs = pfq_configs.by_compat_id.get(compat_id, set())
+    if not pfqs:
+      arch_useflags = (compat_id.arch, compat_id.useflags)
+      for key in pfq_configs.by_arch_useflags[arch_useflags]:
+        # If there wasn't an exact match for this CompatId, but there
+        # was an (arch, useflags) match, then we'll be using mismatched
+        # Chrome prebuilts. Complain.
+        # TODO(davidjames): This should be a fatal error for important
+        # builders, but we need to clean up existing cases first.
+        pfq_compat_id = self.COMPAT_IDS[key]
+        err = self.GetCompatIdDiff(compat_id, pfq_compat_id)
+        msg = '%s%s uses mismatched Chrome prebuilts from %s -- %s'
+        self.Complain(msg % (msg_prefix, config.name, key.board, err),
+                      fatal=False)
+        pfqs.add(key)
+
+    if not pfqs:
+      pre_cq = (config.build_type == config_lib.CONFIG_TYPE_PRECQ)
+      msg = '%s%s cannot find Chrome prebuilts -- %s'
+      self.Complain(msg % (msg_prefix, config.name, compat_id),
+                    fatal=pre_cq or config.important)
+
+  def GetCompatId(self, config, board=None):
+    """Get the CompatId for a config.
+
+    Args:
+      config: A config_lib.BuildConfig object.
+      board: Board to use. Defaults to the first board in the config.
+          Optional if len(config.boards) == 1.
+    """
+    if board is None:
+      assert len(config.boards) == 1
+      board = config.boards[0]
+    else:
+      assert board in config.boards
+
+    board_key = binhost.GetBoardKey(config, board)
+    compat_id = self.COMPAT_IDS.get(board_key)
+    if compat_id is None:
+      compat_id = binhost.CalculateCompatId(board, config.useflags)
+      self.COMPAT_IDS[board_key] = compat_id
+    return compat_id
+
+  def testChromePrebuiltsPresent(self, filename=None):
+    """Verify Chrome prebuilts exist for all configs that build Chrome.
+
+    Args:
+      filename: Filename to load our PFQ mappings from. By default, generate
+        the PFQ mappings based on the current config.
+    """
+    if filename is not None:
+      pfq_configs = binhost.PrebuiltMapping.Load(filename)
+    else:
+      keys = binhost.GetChromePrebuiltConfigs(self.site_config).keys()
+      pfq_configs = binhost.PrebuiltMapping.Get(keys, self.COMPAT_IDS)
+
+    for compat_id, pfqs in pfq_configs.by_compat_id.items():
+      if len(pfqs) > 1:
+        msg = 'The following Chrome PFQs produce identical prebuilts: %s -- %s'
+        self.Complain(msg % (', '.join(str(x) for x in pfqs), compat_id),
+                      fatal=False)
+
+    for _name, config in sorted(self.site_config.items()):
+      # Skip over configs that don't have Chrome or have >1 board.
+      if config.sync_chrome is False or len(config.boards) != 1:
+        continue
+
+      # Look for boards with missing prebuilts.
+      pre_cq = (config.build_type == config_lib.CONFIG_TYPE_PRECQ)
+      if ((config.usepkg_build_packages and not config.chrome_rev) and
+          (config.active_waterfall or pre_cq)):
+        self.AssertChromePrebuilts(pfq_configs, config)
+
+        # Check that we have a builder for the version w/o custom useflags as
+        # well.
+        if (config.useflags and
+            config.boards[0] not in self.BOARDS_WITHOUT_CHROMIUM_PFQS):
+          self.AssertChromePrebuilts(pfq_configs, config, skip_useflags=True)
+
+  def testCurrentChromePrebuiltsEnough(self):
+    """Verify Chrome prebuilts exist for all configs that build Chrome.
+
+    This loads the list of Chrome prebuilts that were generated during the last
+    Chrome PFQ run from disk and verifies that it is sufficient.
+    """
+    filename = binhost.PrebuiltMapping.GetFilename(constants.SOURCE_ROOT,
+                                                   'chrome')
+    if os.path.exists(filename):
+      self.testChromePrebuiltsPresent(filename)
+
+  def testReleaseGroupSharing(self):
+    """Verify that the boards built in release groups have compatible settings.
+
+    This means that all of the subconfigs in the release group have matching
+    use flags, cflags, and architecture.
+    """
+    for config in self.site_config.values():
+      # Only test release groups.
+      if not config.name.endswith('-release-group'):
+        continue
+
+      # Get a list of the compatibility IDs.
+      compat_ids_for_config = collections.defaultdict(set)
+      for subconfig in config.child_configs:
+        if subconfig.sync_chrome is not False:
+          for board in subconfig.boards:
+            compat_id = self.GetCompatId(subconfig, board)
+            compat_ids_for_config[compat_id].add(board)
+
+      if len(compat_ids_for_config) > 1:
+        arch_useflags = set(tuple(x[:-1]) for x in compat_ids_for_config)
+        if len(arch_useflags) > 1:
+          # If two configs in the same group have mismatched Chrome binaries
+          # (e.g. different use flags), Chrome may be built twice in parallel
+          # and this may result in flaky, slow, and possibly incorrect builds.
+          msg = '%s: %s and %s have mismatched Chrome binaries -- %s'
+          fatal = True
+        else:
+          # TODO(davidjames): This should be marked fatal once the
+          # ivybridge-freon-release-group is cleaned up.
+          msg = '%s: %s and %s have mismatched cflags -- %s'
+          fatal = False
+        ids, board_sets = zip(*compat_ids_for_config.iteritems())
+        boards = [next(iter(x)) for x in board_sets]
+        err = self.GetCompatIdDiff(ids[0], ids[1])
+        msg %= (config.name, boards[0], boards[1], err)
+        self.Complain(msg, fatal=fatal)
+
+  def testDumping(self):
+    """Verify that PrebuiltMapping objects can be dumped and reloaded.
+
+    This dumps the current Chrome prebuilt mapping to a temporary JSON file
+    and verifies that loading it back yields an equal PrebuiltMapping.
+    """
+    with osutils.TempDir() as tempdir:
+      keys = binhost.GetChromePrebuiltConfigs(self.site_config).keys()
+      pfq_configs = binhost.PrebuiltMapping.Get(keys, self.COMPAT_IDS)
+      filename = os.path.join(tempdir, 'foo.json')
+      pfq_configs.Dump(filename)
+      self.assertEqual(pfq_configs, binhost.PrebuiltMapping.Load(filename))
+
+
+def NoIncremental():
+  """Creates a suite containing only non-incremental tests.
+
+  This suite should be used on the Chrome PFQ as we don't need to preserve
+  incremental compatibility of prebuilts.
+
+  Returns:
+    A unittest.TestSuite that does not contain any incremental tests.
+  """
+  suite = unittest.TestSuite()
+  method_names = [f[0] for f in inspect.getmembers(PrebuiltCompatibilityTest,
+                                                   predicate=inspect.ismethod)]
+  for m in method_names:
+    if m.startswith('test') and m != 'testCurrentChromePrebuiltsEnough':
+      suite.addTest(PrebuiltCompatibilityTest(m))
+  return suite
diff --git a/cbuildbot/builders/__init__.py b/cbuildbot/builders/__init__.py
new file mode 100644
index 0000000..33a4309
--- /dev/null
+++ b/cbuildbot/builders/__init__.py
@@ -0,0 +1,90 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module for instantiating builders.
+
+Typically builder classes/objects are obtained indirectly via the helpers in
+this module.  This is because the config_lib settings can't import this
+module (and children directly): it might lead to circular references, and it
+would add a lot of overhead to that module.  Generally only the main cbuildbot
+module needs to care about the builder classes.
+
+If you're looking for a specific builder implementation, then check out the
+*_builders.py modules that are in this same directory.  The config_lib
+has a builder_class_name member that controls the type of builder that is used
+for each config.  e.g. builder_class_name='Simple' would look for the class
+whose name is 'SimpleBuilder' in all the *_builders.py modules.
+"""
+
+from __future__ import print_function
+
+import glob
+import os
+
+from chromite.lib import cros_import
+
+
+def GetBuilderClass(name):
+  """Locate the builder class with |name|.
+
+  Examples:
+    If you want to create a new SimpleBuilder, you'd do:
+    cls = builders.GetBuilderClass('simple_builders.SimpleBuilder')
+    builder = cls(...)
+
+    If you want a site specific builder class, do:
+    cls = builders.GetBuilderClass('config.my_builders.MyBuilder')
+    builder = cls(...)
+
+  Args:
+    name: The base name of the builder class.
+
+  Returns:
+    The class used to instantiate this type of builder.
+
+  Raises:
+    AttributeError when |name| could not be found.
+  """
+  if '.' not in name:
+    raise ValueError('name should be "<module>.<builder>" not "%s"' % name)
+
+  name_parts = name.split('.')
+
+  # Last part is the class name.
+  builder_class_name = name_parts.pop()
+
+  if name_parts[0] == 'config':
+    # config means pull from the site specific config.
+    # config.my_builders -> chromite.config.my_builders
+    name_parts = ['chromite'] + name_parts
+  else:
+    # Otherwise pull from chromite.
+    # simple_builders -> chromite.cbuildbot.builders.simple_builders
+    name_parts = ['chromite', 'cbuildbot', 'builders'] + name_parts
+
+  target = '.'.join(name_parts)
+  module = cros_import.ImportModule(target)
+
+  # See if this module has the builder we care about.
+  if hasattr(module, builder_class_name):
+    return getattr(module, builder_class_name)
+
+  raise AttributeError('could not locate %s builder' % builder_class_name)
+
+
+def Builder(builder_run):
+  """Given a |builder_run| runtime, return an instantiated builder
+
+  This is a helper wrapper that resolves the builder_class_name field in the
+  builder settings (which was declared in the build config) to the actual class
+  found in the builder modules.
+
+  Args:
+    builder_run: A cbuildbot_run.BuilderRun object.
+
+  Returns:
+    An object of type generic_builders.Builder.
+  """
+  cls = GetBuilderClass(builder_run.config.builder_class_name)
+  return cls(builder_run)
diff --git a/cbuildbot/builders/builders_unittest b/cbuildbot/builders/builders_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/builders/builders_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/builders/builders_unittest.py b/cbuildbot/builders/builders_unittest.py
new file mode 100644
index 0000000..0843651
--- /dev/null
+++ b/cbuildbot/builders/builders_unittest.py
@@ -0,0 +1,56 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for main builder logic (__init__.py)."""
+
+from __future__ import print_function
+
+import mock
+
+from chromite.cbuildbot import builders
+from chromite.cbuildbot.builders import simple_builders
+from chromite.lib import cros_import
+from chromite.lib import cros_test_lib
+
+
+class ModuleTest(cros_test_lib.MockTempDirTestCase):
+  """Module loading related tests"""
+
+  def testGetBuilderClass(self):
+    """Check behavior when requesting a valid builder."""
+    result = builders.GetBuilderClass('simple_builders.SimpleBuilder')
+    self.assertEqual(result, simple_builders.SimpleBuilder)
+
+  def testGetBuilderClassError(self):
+    """Check behavior when requesting missing builders."""
+    self.assertRaises(ValueError, builders.GetBuilderClass, 'Foalksdjo')
+    self.assertRaises(ImportError, builders.GetBuilderClass, 'foo.Foalksdjo')
+    self.assertRaises(AttributeError, builders.GetBuilderClass,
+                      'misc_builders.Foalksdjo')
+
+  def testGetBuilderClassConfig(self):
+    """Check behavior when requesting config builders.
+
+    This can't be done with live classes since the site config may or may not
+    be there.
+    """
+    # Setup
+    mock_module = mock.Mock()
+    mock_module.MyBuilder = 'fake_class'
+    mock_import = self.PatchObject(cros_import, 'ImportModule',
+                                   return_value=mock_module)
+    # Test
+    result = builders.GetBuilderClass('config.my_builders.MyBuilder')
+    # Verify
+    mock_import.assert_called_once_with('chromite.config.my_builders')
+    self.assertEqual(result, 'fake_class')
+
+    # Test again with a nested builder class name.
+    mock_import.reset_mock()
+
+    # Test
+    result = builders.GetBuilderClass('config.nested.my_builders.MyBuilder')
+    # Verify
+    mock_import.assert_called_once_with('chromite.config.nested.my_builders')
+    self.assertEqual(result, 'fake_class')
diff --git a/cbuildbot/builders/generic_builders.py b/cbuildbot/builders/generic_builders.py
new file mode 100644
index 0000000..5614774
--- /dev/null
+++ b/cbuildbot/builders/generic_builders.py
@@ -0,0 +1,341 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the generic builders."""
+
+from __future__ import print_function
+
+import multiprocessing
+import os
+import sys
+import tempfile
+import traceback
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import results_lib
+from chromite.cbuildbot import trybot_patch_pool
+from chromite.cbuildbot.stages import build_stages
+from chromite.cbuildbot.stages import report_stages
+from chromite.cbuildbot.stages import sync_stages
+from chromite.lib import cidb
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import parallel
+
+
+class Builder(object):
+  """Parent class for all builder types.
+
+  This class functions as an abstract parent class for various build types.
+  Its intended use is builder_instance.Run().
+
+  Attributes:
+    _run: The BuilderRun object for this run.
+    archive_stages: Dict of BuildConfig keys to ArchiveStage values.
+    patch_pool: TrybotPatchPool.
+  """
+
+  def __init__(self, builder_run):
+    """Initializes instance variables. Must be called by all subclasses."""
+    self._run = builder_run
+
+    # TODO: all the fields below should not be part of the generic builder.
+    # We need to restructure our SimpleBuilder and see about creating a new
+    # base in there for holding them.
+    if self._run.config.chromeos_official:
+      os.environ['CHROMEOS_OFFICIAL'] = '1'
+
+    self.archive_stages = {}
+    self.patch_pool = trybot_patch_pool.TrybotPatchPool()
+    self._build_image_lock = multiprocessing.Lock()
+
+  def Initialize(self):
+    """Runs through the initialization steps of an actual build."""
+    if self._run.options.resume:
+      results_lib.LoadCheckpoint(self._run.buildroot)
+
+    self._RunStage(report_stages.BuildStartStage)
+
+    self._RunStage(build_stages.CleanUpStage)
+
+  def _GetStageInstance(self, stage, *args, **kwargs):
+    """Helper function to get a stage instance given the args.
+
+    Useful as almost all stages just take in builder_run.
+    """
+    # Normally the default BuilderRun (self._run) is used, but it can
+    # be overridden with "builder_run" kwargs (e.g. for child configs).
+    builder_run = kwargs.pop('builder_run', self._run)
+    return stage(builder_run, *args, **kwargs)
+
+  def _SetReleaseTag(self):
+    """Sets run.attrs.release_tag from the manifest manager used in sync.
+
+    Must be run after sync stage as syncing enables us to have a release tag,
+    and must be run before any usage of attrs.release_tag.
+
+    TODO(mtennant): Find a bottleneck place in syncing that can set this
+    directly.  Be careful, as there are several kinds of syncing stages, and
+    sync stages have been known to abort with sys.exit calls.
+    """
+    manifest_manager = getattr(self._run.attrs, 'manifest_manager', None)
+    if manifest_manager:
+      self._run.attrs.release_tag = manifest_manager.current_version
+    else:
+      self._run.attrs.release_tag = None
+
+    logging.debug('Saved release_tag value for run: %r',
+                  self._run.attrs.release_tag)
+
+  def _RunStage(self, stage, *args, **kwargs):
+    """Wrapper to run a stage.
+
+    Args:
+      stage: A BuilderStage class.
+      args: args to pass to stage constructor.
+      kwargs: kwargs to pass to stage constructor.
+
+    Returns:
+      Whatever the stage's Run method returns.
+    """
+    stage_instance = self._GetStageInstance(stage, *args, **kwargs)
+    return stage_instance.Run()
+
+  @staticmethod
+  def _RunParallelStages(stage_objs):
+    """Run the specified stages in parallel.
+
+    Args:
+      stage_objs: BuilderStage objects.
+    """
+    steps = [stage.Run for stage in stage_objs]
+    try:
+      parallel.RunParallelSteps(steps)
+
+    except BaseException as ex:
+      # If a stage threw an exception, it might not have correctly reported
+      # results (e.g. because it was killed before it could report the
+      # results.) In this case, attribute the exception to any stages that
+      # didn't report back correctly (if any).
+      for stage in stage_objs:
+        for name in stage.GetStageNames():
+          if not results_lib.Results.StageHasResults(name):
+            results_lib.Results.Record(name, ex, str(ex))
+
+      if cidb.CIDBConnectionFactory.IsCIDBSetup():
+        db = cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()
+        if db:
+          for stage in stage_objs:
+            for build_stage_id in stage.GetBuildStageIDs():
+              if not db.HasBuildStageFailed(build_stage_id):
+                failures_lib.ReportStageFailureToCIDB(db,
+                                                      build_stage_id,
+                                                      ex)
+
+      raise
+
+  def _RunSyncStage(self, sync_instance):
+    """Run given |sync_instance| stage and be sure attrs.release_tag set."""
+    try:
+      sync_instance.Run()
+    finally:
+      self._SetReleaseTag()
+
+  def SetVersionInfo(self):
+    """Sync the builder's version info with the buildbot runtime."""
+    self._run.attrs.version_info = self.GetVersionInfo()
+
+  def GetVersionInfo(self):
+    """Returns a manifest_version.VersionInfo object for this build.
+
+    Subclasses must override this method.
+    """
+    raise NotImplementedError()
+
+  def GetSyncInstance(self):
+    """Returns an instance of a SyncStage that should be run.
+
+    Subclasses must override this method.
+    """
+    raise NotImplementedError()
+
+  def GetCompletionInstance(self):
+    """Returns the MasterSlaveSyncCompletionStage for this build.
+
+    Subclasses may override this method.
+
+    Returns:
+      None
+    """
+    return None
+
+  def RunStages(self):
+    """Subclasses must override this method.  Runs the appropriate code."""
+    raise NotImplementedError()
+
+  def _ReExecuteInBuildroot(self, sync_instance):
+    """Reexecutes self in buildroot and returns True if build succeeds.
+
+    This allows the buildbot code to test itself when changes are patched for
+    buildbot-related code.  This is a no-op if the buildroot == buildroot
+    of the running chromite checkout.
+
+    Args:
+      sync_instance: Instance of the sync stage that was run to sync.
+
+    Returns:
+      True if the Build succeeded.
+    """
+    if not self._run.options.resume:
+      results_lib.WriteCheckpoint(self._run.options.buildroot)
+
+    args = sync_stages.BootstrapStage.FilterArgsForTargetCbuildbot(
+        self._run.options.buildroot, constants.PATH_TO_CBUILDBOT,
+        self._run.options)
+
+    # Specify a buildroot explicitly (just in case, for local trybot).
+    # Suppress any timeout options given from the commandline in the
+    # invoked cbuildbot; our timeout will enforce it instead.
+    args += ['--resume', '--timeout', '0', '--notee', '--nocgroups',
+             '--buildroot', os.path.abspath(self._run.options.buildroot)]
+
+    # Set --version. Note that --version isn't legal without --buildbot.
+    if (self._run.options.buildbot and
+        hasattr(self._run.attrs, 'manifest_manager')):
+      ver = self._run.attrs.manifest_manager.current_version
+      args += ['--version', ver]
+
+    pool = getattr(sync_instance, 'pool', None)
+    if pool:
+      filename = os.path.join(self._run.options.buildroot,
+                              'validation_pool.dump')
+      pool.Save(filename)
+      args += ['--validation_pool', filename]
+
+    # Reset the cache dir so that the child will calculate it automatically.
+    if not self._run.options.cache_dir_specified:
+      commandline.BaseParser.ConfigureCacheDir(None)
+
+    with tempfile.NamedTemporaryFile(prefix='metadata') as metadata_file:
+      metadata_file.write(self._run.attrs.metadata.GetJSON())
+      metadata_file.flush()
+      args += ['--metadata_dump', metadata_file.name]
+
+      # Re-run the command in the buildroot.
+      # Finally, be generous and give the invoked cbuildbot 30s to shutdown
+      # when something occurs.  It should exit quicker, but the sigterm may
+      # hit while the system is particularly busy.
+      return_obj = cros_build_lib.RunCommand(
+          args, cwd=self._run.options.buildroot, error_code_ok=True,
+          kill_timeout=30)
+      return return_obj.returncode == 0
+
+  def _InitializeTrybotPatchPool(self):
+    """Generate patch pool from patches specified on the command line.
+
+    Do this only if we need to patch changes later on.
+    """
+    changes_stage = sync_stages.PatchChangesStage.StageNamePrefix()
+    check_func = results_lib.Results.PreviouslyCompletedRecord
+    if not check_func(changes_stage) or self._run.options.bootstrap:
+      options = self._run.options
+      self.patch_pool = trybot_patch_pool.TrybotPatchPool.FromOptions(
+          gerrit_patches=options.gerrit_patches,
+          local_patches=options.local_patches,
+          sourceroot=options.sourceroot,
+          remote_patches=options.remote_patches)
+
+  def _GetBootstrapStage(self):
+    """Constructs and returns the BootStrapStage object.
+
+    We return None when there are no chromite patches to test, and
+    --test-bootstrap wasn't passed in.
+    """
+    stage = None
+
+    patches_needed = sync_stages.BootstrapStage.BootstrapPatchesNeeded(
+        self._run, self.patch_pool)
+
+    chromite_branch = git.GetChromiteTrackingBranch()
+
+    if (patches_needed or
+        self._run.options.test_bootstrap or
+        chromite_branch != self._run.options.branch):
+      stage = sync_stages.BootstrapStage(self._run, self.patch_pool)
+    return stage
+
+  def Run(self):
+    """Main runner for this builder class.  Runs build and prints summary.
+
+    Returns:
+      Whether the build succeeded.
+    """
+    self._InitializeTrybotPatchPool()
+
+    if self._run.options.bootstrap:
+      bootstrap_stage = self._GetBootstrapStage()
+      if bootstrap_stage:
+        # BootstrapStage blocks on re-execution of cbuildbot.
+        bootstrap_stage.Run()
+        return bootstrap_stage.returncode == 0
+
+    print_report = True
+    exception_thrown = False
+    success = True
+    sync_instance = None
+    try:
+      self.Initialize()
+      sync_instance = self.GetSyncInstance()
+      self._RunSyncStage(sync_instance)
+
+      if self._run.ShouldPatchAfterSync():
+        # Filter out patches to manifest, since PatchChangesStage can't handle
+        # them.  Manifest patches are patched in the BootstrapStage.
+        non_manifest_patches = self.patch_pool.FilterManifest(negate=True)
+        if non_manifest_patches:
+          self._RunStage(sync_stages.PatchChangesStage, non_manifest_patches)
+
+      # Now that we have a fully synced & patched tree, we can let the builder
+      # extract version information from the sources for this particular build.
+      self.SetVersionInfo()
+      if self._run.ShouldReexecAfterSync():
+        print_report = False
+        success = self._ReExecuteInBuildroot(sync_instance)
+      else:
+        self._RunStage(report_stages.BuildReexecutionFinishedStage)
+        self.RunStages()
+
+    except Exception as ex:
+      exception_thrown = True
+      if results_lib.Results.BuildSucceededSoFar():
+        # If the build is marked as successful, but threw exceptions, that's a
+        # problem. Print the traceback for debugging.
+        if isinstance(ex, failures_lib.CompoundFailure):
+          print(str(ex))
+
+        traceback.print_exc(file=sys.stdout)
+        raise
+
+      if not (print_report and isinstance(ex, failures_lib.StepFailure)):
+        # If the failed build threw a non-StepFailure exception, we
+        # should raise it.
+        raise
+
+    finally:
+      if print_report:
+        results_lib.WriteCheckpoint(self._run.options.buildroot)
+        completion_instance = self.GetCompletionInstance()
+        self._RunStage(report_stages.ReportStage, completion_instance)
+        success = results_lib.Results.BuildSucceededSoFar()
+        if exception_thrown and success:
+          success = False
+          logging.PrintBuildbotStepWarnings()
+          print("""\
+Exception thrown, but all stages marked successful. This is an internal error,
+because the stage that threw the exception should be marked as failing.""")
+
+    return success
diff --git a/cbuildbot/builders/misc_builders.py b/cbuildbot/builders/misc_builders.py
new file mode 100644
index 0000000..7481d68
--- /dev/null
+++ b/cbuildbot/builders/misc_builders.py
@@ -0,0 +1,21 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing various one-off builders."""
+
+from __future__ import print_function
+
+from chromite.cbuildbot.builders import simple_builders
+from chromite.cbuildbot.stages import build_stages
+from chromite.cbuildbot.stages import report_stages
+
+
+class RefreshPackagesBuilder(simple_builders.SimpleBuilder):
+  """Run the refresh packages status update."""
+
+  def RunStages(self):
+    """Runs through build process."""
+    self._RunStage(build_stages.InitSDKStage)
+    self.RunSetupBoard()
+    self._RunStage(report_stages.RefreshPackageStatusStage)
diff --git a/cbuildbot/builders/release_builders.py b/cbuildbot/builders/release_builders.py
new file mode 100644
index 0000000..4b106e5
--- /dev/null
+++ b/cbuildbot/builders/release_builders.py
@@ -0,0 +1,35 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing release engineering related builders."""
+
+from __future__ import print_function
+
+from chromite.lib import parallel
+
+from chromite.cbuildbot.builders import simple_builders
+from chromite.cbuildbot.stages import branch_stages
+from chromite.cbuildbot.stages import release_stages
+
+
+class CreateBranchBuilder(simple_builders.SimpleBuilder):
+  """Create release branches in the manifest."""
+
+  def RunStages(self):
+    """Runs through build process."""
+    self._RunStage(branch_stages.BranchUtilStage)
+
+
+class GeneratePayloadsBuilder(simple_builders.SimpleBuilder):
+  """Run the PaygenStage once for each board."""
+
+  def RunStages(self):
+    """Runs through build process."""
+    def _RunStageWrapper(board):
+      self._RunStage(release_stages.PaygenStage, board=board,
+                     channels=self._run.options.channels, archive_stage=None)
+
+    with parallel.BackgroundTaskRunner(_RunStageWrapper) as queue:
+      for board in self._run.config.boards:
+        queue.put([board])
diff --git a/cbuildbot/builders/sdk_builders.py b/cbuildbot/builders/sdk_builders.py
new file mode 100644
index 0000000..2771275
--- /dev/null
+++ b/cbuildbot/builders/sdk_builders.py
@@ -0,0 +1,37 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing SDK builders."""
+
+from __future__ import print_function
+
+import datetime
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot.builders import simple_builders
+from chromite.cbuildbot.stages import artifact_stages
+from chromite.cbuildbot.stages import build_stages
+from chromite.cbuildbot.stages import chrome_stages
+from chromite.cbuildbot.stages import sdk_stages
+
+
+class ChrootSdkBuilder(simple_builders.SimpleBuilder):
+  """Build the SDK chroot."""
+
+  def RunStages(self):
+    """Runs through build process."""
+    # Unlike normal CrOS builds, the SDK has no concept of pinned CrOS manifest
+    # or specific Chrome version.  Use a datestamp instead.
+    version = datetime.datetime.now().strftime('%Y.%m.%d.%H%M%S')
+    self._RunStage(build_stages.UprevStage, boards=[])
+    self._RunStage(build_stages.InitSDKStage)
+    self._RunStage(build_stages.SetupBoardStage, constants.CHROOT_BUILDER_BOARD)
+    self._RunStage(chrome_stages.SyncChromeStage)
+    self._RunStage(chrome_stages.PatchChromeStage)
+    self._RunStage(sdk_stages.SDKBuildToolchainsStage)
+    self._RunStage(sdk_stages.SDKPackageStage, version=version)
+    self._RunStage(sdk_stages.SDKPackageToolchainOverlaysStage, version=version)
+    self._RunStage(sdk_stages.SDKTestStage)
+    self._RunStage(artifact_stages.UploadPrebuiltsStage,
+                   constants.CHROOT_BUILDER_BOARD, version=version)
diff --git a/cbuildbot/builders/simple_builders.py b/cbuildbot/builders/simple_builders.py
new file mode 100644
index 0000000..ce6ce19
--- /dev/null
+++ b/cbuildbot/builders/simple_builders.py
@@ -0,0 +1,414 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the simple builders."""
+
+from __future__ import print_function
+
+import collections
+
+from chromite.cbuildbot import afdo
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot import results_lib
+from chromite.cbuildbot.builders import generic_builders
+from chromite.cbuildbot.stages import afdo_stages
+from chromite.cbuildbot.stages import artifact_stages
+from chromite.cbuildbot.stages import build_stages
+from chromite.cbuildbot.stages import chrome_stages
+from chromite.cbuildbot.stages import completion_stages
+from chromite.cbuildbot.stages import generic_stages
+from chromite.cbuildbot.stages import release_stages
+from chromite.cbuildbot.stages import report_stages
+from chromite.cbuildbot.stages import sync_stages
+from chromite.cbuildbot.stages import test_stages
+from chromite.lib import cros_logging as logging
+from chromite.lib import patch as cros_patch
+from chromite.lib import parallel
+
+
+# TODO: SimpleBuilder needs to be broken up big time.
+
+
+BoardConfig = collections.namedtuple('BoardConfig', ['board', 'name'])
+
+
class SimpleBuilder(generic_builders.Builder):
  """Builder that performs basic vetting operations."""

  def GetSyncInstance(self):
    """Sync to lkgm or TOT as necessary.

    Returns:
      The instance of the sync stage to run.
    """
    # Precedence: an explicitly forced version wins, then the LKGM manifest,
    # then the Chrome LKGM, and finally a plain tip-of-tree sync.
    if self._run.options.force_version:
      sync_stage = self._GetStageInstance(
          sync_stages.ManifestVersionedSyncStage)
    elif self._run.config.use_lkgm:
      sync_stage = self._GetStageInstance(sync_stages.LKGMSyncStage)
    elif self._run.config.use_chrome_lkgm:
      sync_stage = self._GetStageInstance(chrome_stages.ChromeLKGMSyncStage)
    else:
      sync_stage = self._GetStageInstance(sync_stages.SyncStage)

    return sync_stage

  def GetVersionInfo(self):
    """Returns the CrOS version info from the chromiumos-overlay."""
    return manifest_version.VersionInfo.from_repo(self._run.buildroot)

  def _GetChangesUnderTest(self):
    """Returns the list of GerritPatch changes under test."""
    # Collected in a set so a change recorded both in metadata and in the
    # patch pool is only counted once.
    changes = set()

    changes_json_list = self._run.attrs.metadata.GetDict().get('changes', [])
    for change_dict in changes_json_list:
      change = cros_patch.GerritFetchOnlyPatch.FromAttrDict(change_dict)
      changes.add(change)

    # Also add the changes applied by PatchChangeStage; PatchChangeStage does
    # not write its changes into metadata.
    if self._run.ShouldPatchAfterSync():
      changes.update(set(self.patch_pool.gerrit_patches))

    return list(changes)

  def _RunHWTests(self, builder_run, board):
    """Run hwtest-related stages for the specified board.

    Args:
      builder_run: BuilderRun object for these background stages.
      board: Board name.
    """
    parallel_stages = []

    # We can not run hw tests without archiving the payloads.
    if builder_run.options.archive:
      for suite_config in builder_run.config.hw_tests:
        stage_class = None
        # NOTE(review): 'async' became a reserved keyword in Python 3.7, so
        # this attribute access is only valid under Python 2.
        if suite_config.async:
          stage_class = test_stages.ASyncHWTestStage
        elif suite_config.suite == constants.HWTEST_AU_SUITE:
          stage_class = test_stages.AUTestStage
        else:
          stage_class = test_stages.HWTestStage
        # Blocking suites run serially right here; the rest are collected and
        # run together in parallel below.
        if suite_config.blocking:
          self._RunStage(stage_class, board, suite_config,
                         builder_run=builder_run)
        else:
          new_stage = self._GetStageInstance(stage_class, board,
                                             suite_config,
                                             builder_run=builder_run)
          parallel_stages.append(new_stage)

    self._RunParallelStages(parallel_stages)

  def _RunBackgroundStagesForBoardAndMarkAsSuccessful(self, builder_run, board):
    """Run background board-specific stages for the specified board.

    After finishing the build, mark it as successful.

    Args:
      builder_run: BuilderRun object for these background stages.
      board: Board name.
    """
    self._RunBackgroundStagesForBoard(builder_run, board)
    # Only reached if the stages above did not raise; waiters on this
    # parallel run attribute learn that this board's build succeeded.
    board_runattrs = builder_run.GetBoardRunAttrs(board)
    board_runattrs.SetParallel('success', True)

  def _RunBackgroundStagesForBoard(self, builder_run, board):
    """Run background board-specific stages for the specified board.

    Used by _RunBackgroundStagesForBoardAndMarkAsSuccessful. Callers should use
    that method instead.

    Args:
      builder_run: BuilderRun object for these background stages.
      board: Board name.
    """
    config = builder_run.config

    # TODO(mtennant): This is the last usage of self.archive_stages.  We can
    # kill it once we migrate its uses to BuilderRun so that none of the
    # stages below need it as an argument.
    archive_stage = self.archive_stages[BoardConfig(board, config.name)]
    if config.afdo_generate_min:
      # afdo_generate_min builds only need to archive; skip everything else.
      self._RunParallelStages([archive_stage])
      return

    # paygen can't complete without push_image.
    assert not config.paygen or config.push_image

    if config.build_packages_in_background:
      self._RunStage(build_stages.BuildPackagesStage, board,
                     update_metadata=True, builder_run=builder_run,
                     afdo_use=config.afdo_use)

    if builder_run.config.compilecheck or builder_run.options.compilecheck:
      # Compile-check builds stop after unit tests; no images or artifacts.
      self._RunStage(test_stages.UnitTestStage, board,
                     builder_run=builder_run)
      return

    # Build the image first before doing anything else.
    # TODO(davidjames): Remove this lock once http://crbug.com/352994 is fixed.
    with self._build_image_lock:
      self._RunStage(build_stages.BuildImageStage, board,
                     builder_run=builder_run, afdo_use=config.afdo_use)

    # While this stage list is run in parallel, the order here dictates the
    # order that things will be shown in the log.  So group things together
    # that make sense when read in order.  Also keep in mind that, since we
    # gather output manually, early slow stages will prevent any output from
    # later stages showing up until it finishes.

    # Determine whether to run the DetectIrrelevantChangesStage
    stage_list = []
    changes = self._GetChangesUnderTest()
    if changes:
      stage_list += [[report_stages.DetectIrrelevantChangesStage, board,
                      changes]]
    stage_list += [[chrome_stages.ChromeSDKStage, board]]

    if config.vm_test_runs > 1:
      # Run the VMTests multiple times to see if they fail.
      stage_list += [
          [generic_stages.RepeatStage, config.vm_test_runs,
           test_stages.VMTestStage, board]]
    else:
      # Give the VMTests one retry attempt in case failures are flaky.
      stage_list += [[generic_stages.RetryStage, 1, test_stages.VMTestStage,
                      board]]

    if config.afdo_generate:
      stage_list += [[afdo_stages.AFDODataGenerateStage, board]]

    stage_list += [
        [release_stages.SignerTestStage, board, archive_stage],
        [release_stages.PaygenStage, board, archive_stage],
        [test_stages.ImageTestStage, board],
        [test_stages.UnitTestStage, board],
        [artifact_stages.UploadPrebuiltsStage, board],
        [artifact_stages.DevInstallerPrebuiltsStage, board],
        [artifact_stages.DebugSymbolsStage, board],
        [artifact_stages.CPEExportStage, board],
        [artifact_stages.UploadTestArtifactsStage, board],
    ]

    stage_objs = [self._GetStageInstance(*x, builder_run=builder_run)
                  for x in stage_list]

    # Run the stage list (plus the archive stage) and the HW test suites as
    # two parallel steps.
    parallel.RunParallelSteps([
        lambda: self._RunParallelStages(stage_objs + [archive_stage]),
        lambda: self._RunHWTests(builder_run, board),
    ])

  def RunSetupBoard(self):
    """Run the SetupBoard stage for all child configs and boards."""
    for builder_run in self._run.GetUngroupedBuilderRuns():
      for board in builder_run.config.boards:
        self._RunStage(build_stages.SetupBoardStage, board,
                       builder_run=builder_run)

  def _RunMasterPaladinOrChromePFQBuild(self):
    """Runs through the stages of the paladin or chrome PFQ master build."""
    self._RunStage(build_stages.UprevStage)
    self._RunStage(build_stages.InitSDKStage)
    # The CQ/Chrome PFQ master will not actually run the SyncChrome stage, but
    # we want the logic that gets triggered when SyncChrome stage is skipped.
    self._RunStage(chrome_stages.SyncChromeStage)
    # Only the paladin master regenerates the portage cache here.
    if self._run.config.build_type == constants.PALADIN_TYPE:
      self._RunStage(build_stages.RegenPortageCacheStage)
    self._RunStage(test_stages.BinhostTestStage)
    self._RunStage(test_stages.BranchUtilTestStage)
    self._RunStage(artifact_stages.MasterUploadPrebuiltsStage)

  def _RunDefaultTypeBuild(self):
    """Runs through the stages of a non-special-type build."""
    self._RunStage(build_stages.UprevStage)
    self._RunStage(build_stages.InitSDKStage)
    self._RunStage(build_stages.RegenPortageCacheStage)
    self.RunSetupBoard()
    self._RunStage(chrome_stages.SyncChromeStage)
    self._RunStage(chrome_stages.PatchChromeStage)
    self._RunStage(test_stages.BinhostTestStage)
    self._RunStage(test_stages.BranchUtilTestStage)

    # Prepare stages to run in background.  If child_configs exist then
    # run each of those here, otherwise use default config.
    builder_runs = self._run.GetUngroupedBuilderRuns()

    tasks = []
    for builder_run in builder_runs:
      # Prepare a local archive directory for each "run".
      builder_run.GetArchive().SetupArchivePath()

      for board in builder_run.config.boards:
        archive_stage = self._GetStageInstance(
            artifact_stages.ArchiveStage, board, builder_run=builder_run,
            chrome_version=self._run.attrs.chrome_version)
        board_config = BoardConfig(board, builder_run.config.name)
        # Stashed for later lookup by _RunBackgroundStagesForBoard.
        self.archive_stages[board_config] = archive_stage
        tasks.append((builder_run, board))

    # Set up a process pool to run test/archive stages in the background.
    # This process runs task(board) for each board added to the queue.
    task_runner = self._RunBackgroundStagesForBoardAndMarkAsSuccessful
    with parallel.BackgroundTaskRunner(task_runner) as queue:
      for builder_run, board in tasks:
        if not builder_run.config.build_packages_in_background:
          # Run BuildPackages in the foreground, generating or using AFDO data
          # if requested.
          kwargs = {'builder_run': builder_run}
          if builder_run.config.afdo_generate_min:
            kwargs['afdo_generate_min'] = True
          elif builder_run.config.afdo_use:
            kwargs['afdo_use'] = True

          self._RunStage(build_stages.BuildPackagesStage, board,
                         update_metadata=True, **kwargs)

          if (builder_run.config.afdo_generate_min and
              afdo.CanGenerateAFDOData(board)):
            # Generate the AFDO data before allowing any other tasks to run.
            self._RunStage(build_stages.BuildImageStage, board, **kwargs)
            self._RunStage(artifact_stages.UploadTestArtifactsStage, board,
                           builder_run=builder_run,
                           suffix='[afdo_generate_min]')
            for suite in builder_run.config.hw_tests:
              self._RunStage(test_stages.HWTestStage, board, suite,
                             builder_run=builder_run)
            self._RunStage(afdo_stages.AFDODataGenerateStage, board,
                           builder_run=builder_run)

          if (builder_run.config.afdo_generate_min and
              builder_run.config.afdo_update_ebuild):
            self._RunStage(afdo_stages.AFDOUpdateEbuildStage,
                           builder_run=builder_run)

        # Kick off our background stages.
        queue.put([builder_run, board])

  def RunStages(self):
    """Runs through build process."""
    # TODO(sosa): Split these out into classes.
    if self._run.config.build_type == constants.PRE_CQ_LAUNCHER_TYPE:
      self._RunStage(sync_stages.PreCQLauncherStage)
    elif ((self._run.config.build_type == constants.PALADIN_TYPE or
           self._run.config.build_type == constants.CHROME_PFQ_TYPE) and
          self._run.config.master):
      self._RunMasterPaladinOrChromePFQBuild()
    else:
      self._RunDefaultTypeBuild()
+
+
class DistributedBuilder(SimpleBuilder):
  """Build class that has special logic to handle distributed builds.

  These builds sync using git/manifest logic in manifest_versions.  In general
  they use a non-distributed builder code for the bulk of the work.
  """

  def __init__(self, *args, **kwargs):
    """Initializes a buildbot builder.

    Extra variables:
      completion_stage_class:  Stage used to complete a build.  Set in the Sync
        stage.
    """
    super(DistributedBuilder, self).__init__(*args, **kwargs)
    self.completion_stage_class = None
    self.sync_stage = None
    # Cached completion stage instance, created in Publish().
    self._completion_stage = None

  def GetSyncInstance(self):
    """Syncs the tree using one of the distributed sync logic paths.

    Also selects self.completion_stage_class as a side effect.

    Returns:
      The instance of the sync stage to run.
    """
    # Determine sync class to use.  CQ overrides PFQ bits so should check it
    # first.
    if self._run.config.pre_cq:
      sync_stage = self._GetStageInstance(sync_stages.PreCQSyncStage,
                                          self.patch_pool.gerrit_patches)
      self.completion_stage_class = completion_stages.PreCQCompletionStage
      # NOTE(review): the pool is emptied here, presumably so the patches are
      # applied only by PreCQSyncStage and not re-applied later -- confirm.
      self.patch_pool.gerrit_patches = []
    elif config_lib.IsCQType(self._run.config.build_type):
      if self._run.config.do_not_apply_cq_patches:
        sync_stage = self._GetStageInstance(
            sync_stages.MasterSlaveLKGMSyncStage)
      else:
        sync_stage = self._GetStageInstance(sync_stages.CommitQueueSyncStage)
      self.completion_stage_class = completion_stages.CommitQueueCompletionStage
    elif config_lib.IsPFQType(self._run.config.build_type):
      sync_stage = self._GetStageInstance(sync_stages.MasterSlaveLKGMSyncStage)
      self.completion_stage_class = (
          completion_stages.MasterSlaveSyncCompletionStage)
    elif config_lib.IsCanaryType(self._run.config.build_type):
      sync_stage = self._GetStageInstance(
          sync_stages.ManifestVersionedSyncStage)
      self.completion_stage_class = (
          completion_stages.CanaryCompletionStage)
    else:
      sync_stage = self._GetStageInstance(
          sync_stages.ManifestVersionedSyncStage)
      self.completion_stage_class = (
          completion_stages.ManifestVersionedSyncCompletionStage)

    self.sync_stage = sync_stage
    return self.sync_stage

  def GetCompletionInstance(self):
    """Returns the completion_stage_class instance that was used for this build.

    Returns:
      None if the completion_stage instance was not yet created (this
      occurs during Publish).
    """
    return self._completion_stage

  def Publish(self, was_build_successful, build_finished):
    """Completes build by publishing any required information.

    Args:
      was_build_successful: Whether the build succeeded.
      build_finished: Whether the build completed. A build can be successful
        without completing if it exits early with sys.exit(0).
    """
    completion_stage = self._GetStageInstance(self.completion_stage_class,
                                              self.sync_stage,
                                              was_build_successful)
    self._completion_stage = completion_stage
    completion_successful = False
    try:
      completion_stage.Run()
      completion_successful = True
      if (self._run.config.afdo_update_ebuild and
          not self._run.config.afdo_generate_min):
        self._RunStage(afdo_stages.AFDOUpdateEbuildStage)
    finally:
      # The publish stage runs even if completion raised; the boolean passed
      # in reflects whether build, completion, and finish all succeeded.
      # NOTE(review): 'publish' presumably gates the actual push inside
      # PublishUprevChangesStage -- confirm against that stage.
      if self._run.config.push_overlays:
        publish = (was_build_successful and completion_successful and
                   build_finished)
        self._RunStage(completion_stages.PublishUprevChangesStage, publish)

  def RunStages(self):
    """Runs simple builder logic and publishes information to overlays."""
    was_build_successful = False
    build_finished = False
    try:
      super(DistributedBuilder, self).RunStages()
      was_build_successful = results_lib.Results.BuildSucceededSoFar()
      build_finished = True
    except SystemExit as ex:
      # If a stage calls sys.exit(0), it's exiting with success, so that means
      # we should mark ourselves as successful.
      logging.info('Detected sys.exit(%s)', ex.code)
      if ex.code == 0:
        was_build_successful = True
      raise
    finally:
      # Publish always runs, no matter how the build ended, so completion
      # status is reported for every outcome.
      self.Publish(was_build_successful, build_finished)
diff --git a/cbuildbot/builders/simple_builders_unittest b/cbuildbot/builders/simple_builders_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/builders/simple_builders_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/builders/simple_builders_unittest.py b/cbuildbot/builders/simple_builders_unittest.py
new file mode 100644
index 0000000..b013fd1
--- /dev/null
+++ b/cbuildbot/builders/simple_builders_unittest.py
@@ -0,0 +1,105 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for simpler builders."""
+
+from __future__ import print_function
+
+import copy
+import os
+
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot import chromeos_config
+from chromite.cbuildbot import constants
+from chromite.cbuildbot.builders import generic_builders
+from chromite.cbuildbot.builders import simple_builders
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.scripts import cbuildbot
+
+
+# pylint: disable=protected-access
+
+
class SimpleBuilderTest(cros_test_lib.MockTempDirTestCase):
  """Tests for the main code paths in simple_builders.SimpleBuilder"""

  def setUp(self):
    self.buildroot = os.path.join(self.tempdir, 'buildroot')
    osutils.SafeMakedirs(
        os.path.join(self.buildroot, constants.DEFAULT_CHROOT_DIR, 'tmp'))

    # Stub out stage execution; these tests only exercise stage selection.
    self.PatchObject(generic_builders.Builder, '_RunStage')
    self.PatchObject(simple_builders.SimpleBuilder, '_RunParallelStages')
    self.PatchObject(cbuildbot_run._BuilderRunBase, 'GetVersion',
                     return_value='R32-1234.0.0')

    self._manager = parallel.Manager()
    self._manager.__enter__()

  def tearDown(self):
    # We entered the manager by hand in setUp, so exit it by hand here.
    self._manager.__exit__(None, None, None)

  def _initConfig(self, bot_id, extra_argv=None):
    """Return normal options/build_config for |bot_id|"""
    site_config = chromeos_config.GetConfig()
    build_config = copy.deepcopy(site_config[bot_id])
    build_config['master'] = False
    build_config['important'] = False

    # Use the cbuildbot parser to create properties and populate default values.
    parser = cbuildbot._CreateParser()
    argv = ['-r', self.buildroot, '--buildbot', '--debug', '--nochromesdk']
    argv += extra_argv or []
    argv.append(bot_id)
    options, _ = cbuildbot._ParseCommandLine(parser, argv)

    # Yikes.
    options.managed_chrome = build_config['sync_chrome']

    return cbuildbot_run.BuilderRun(
        options, site_config, build_config, self._manager)

  def _RunStagesFor(self, bot_id, extra_argv=None, chrome_version=None):
    """Construct a SimpleBuilder for |bot_id| and drive it through RunStages."""
    builder_run = self._initConfig(bot_id, extra_argv=extra_argv)
    if chrome_version is not None:
      builder_run.attrs.chrome_version = chrome_version
    simple_builders.SimpleBuilder(builder_run).RunStages()

  def testRunStagesPreCQ(self):
    """Verify RunStages for PRE_CQ_LAUNCHER_TYPE builders"""
    self._RunStagesFor('pre-cq-launcher')

  def testRunStagesBranchUtil(self):
    """Verify RunStages for CREATE_BRANCH_TYPE builders"""
    self._RunStagesFor(constants.BRANCH_UTIL_CONFIG,
                       extra_argv=['--branch-name', 'foo', '--version', '1234'])

  def testRunStagesChrootBuilder(self):
    """Verify RunStages for CHROOT_BUILDER_TYPE builders"""
    self._RunStagesFor('chromiumos-sdk')

  def testRunStagesRefreshPackages(self):
    """Verify RunStages for REFRESH_PACKAGES_TYPE builders"""
    self._RunStagesFor('refresh-packages')

  def testRunStagesDefaultBuild(self):
    """Verify RunStages for standard board builders"""
    self._RunStagesFor('x86-generic-full', chrome_version='TheChromeVersion')

  def testRunStagesDefaultBuildCompileCheck(self):
    """Verify RunStages for standard board builders (compile only)"""
    self._RunStagesFor('x86-generic-full', extra_argv=['--compilecheck'],
                       chrome_version='TheChromeVersion')

  def testRunStagesDefaultBuildHwTests(self):
    """Verify RunStages for boards w/hwtests"""
    self._RunStagesFor('lumpy-release', extra_argv=['--hwtest'],
                       chrome_version='TheChromeVersion')
diff --git a/cbuildbot/builders/test_builders.py b/cbuildbot/builders/test_builders.py
new file mode 100644
index 0000000..7a158e1
--- /dev/null
+++ b/cbuildbot/builders/test_builders.py
@@ -0,0 +1,36 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing builders intended for testing cbuildbot behaviors."""
+
+from __future__ import print_function
+
+from chromite.lib import cros_logging as logging
+
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot.builders import generic_builders
+from chromite.cbuildbot.stages import generic_stages
+from chromite.cbuildbot.stages import sync_stages
+
+
class SuccessStage(generic_stages.BuilderStage):
  """Build stage declares success!"""

  def Run(self):
    """Emit a celebratory marker so the log shows the stage executed."""
    logging.info('!!!SuccessStage, FTW!!!')
+
+
class ManifestVersionedSyncBuilder(generic_builders.Builder):
  """Builder that performs sync, then exits."""

  def GetVersionInfo(self):
    """Return the CrOS VersionInfo parsed from the chromiumos-overlay."""
    return manifest_version.VersionInfo.from_repo(self._run.buildroot)

  def GetSyncInstance(self):
    """Return the ManifestVersionedSyncStage instance used to sync."""
    return self._GetStageInstance(sync_stages.ManifestVersionedSyncStage)

  def RunStages(self):
    """After sync/reexec, run only the trivial SuccessStage."""
    self._RunStage(SuccessStage)
diff --git a/cbuildbot/cbuildbot b/cbuildbot/cbuildbot
new file mode 120000
index 0000000..a179d39
--- /dev/null
+++ b/cbuildbot/cbuildbot
@@ -0,0 +1 @@
+cbuildbot.py
\ No newline at end of file
diff --git a/cbuildbot/cbuildbot.py b/cbuildbot/cbuildbot.py
new file mode 100755
index 0000000..41572b3
--- /dev/null
+++ b/cbuildbot/cbuildbot.py
@@ -0,0 +1,24 @@
+#!/usr/bin/python2
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(ferringb): remove this as soon as depot_tools is gutted of its
+# import logic, and is just a re-exec.
+
+"""Dynamic wrapper to invoke cbuildbot with standardized import paths."""
+
+from __future__ import print_function
+
+import os
+import sys
+
def main():
  """Replace this process with chromite's real bin/cbuildbot wrapper."""
  # Skip chromite_wrapper's attempted import/'help' machinery entirely and
  # execve the actual cbuildbot helper shipped under chromite's bin/.
  chromite_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
  wrapper = os.path.join(chromite_dir, 'bin', 'cbuildbot')
  os.execv(wrapper, [wrapper] + sys.argv[1:])


if __name__ == '__main__':
  main()
diff --git a/cbuildbot/cbuildbot_run.py b/cbuildbot/cbuildbot_run.py
new file mode 100644
index 0000000..ee2d67b
--- /dev/null
+++ b/cbuildbot/cbuildbot_run.py
@@ -0,0 +1,937 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provide a class for collecting info on one builder run.
+
+There are two public classes, BuilderRun and ChildBuilderRun, that serve
+this function.  The first is for most situations, the second is for "child"
+configs within a builder config that has entries in "child_configs".
+
+Almost all functionality is within the common _BuilderRunBase class.  The
+only thing the BuilderRun and ChildBuilderRun classes are responsible for
+is overriding the self.config value in the _BuilderRunBase object whenever
+it is accessed.
+
+It is important to note that for one overall run, there will be one
+BuilderRun object and zero or more ChildBuilderRun objects, but they
+will all share the same _BuilderRunBase *object*.  This means, for example,
+that run attributes (e.g. self.attrs.release_tag) are shared between them
+all, as intended.
+"""
+
+from __future__ import print_function
+
+import cPickle
+import functools
+import os
+try:
+  import Queue
+except ImportError:
+  # Python-3 renamed to "queue".  We still use Queue to avoid collisions
+  # with naming variables as "queue".  Maybe we'll transition at some point.
+  # pylint: disable=F0401
+  import queue as Queue
+import types
+
+from chromite.cbuildbot import archive_lib
+from chromite.cbuildbot import metadata_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import tree_status
+from chromite.lib import cidb
+from chromite.lib import portage_util
+
+
class RunAttributesError(Exception):
  """Base class for exceptions related to RunAttributes behavior."""

  def __str__(self):
    """Handle stringify because base class will just spit out self.args.

    Subclasses are expected to set self.msg in their __init__.  Fall back to
    the standard Exception formatting when msg is absent, so that raising or
    stringifying the base class directly does not itself raise AttributeError.
    """
    msg = getattr(self, 'msg', None)
    if msg is not None:
      return msg
    return super(RunAttributesError, self).__str__()
+
+
class VersionNotSetError(RuntimeError):
  """Raised when version_info is read before anything has set it."""
+
+
class ParallelAttributeError(AttributeError):
  """Custom version of AttributeError."""

  def __init__(self, attr, board=None, target=None, *args):
    """Compose a message describing the missing parallel run attribute.

    Args:
      attr: Name of the missing parallel run attribute.
      board: Board name, when the attribute is board-specific.
      target: Build config name, when the attribute is board-specific.
      *args: Extra positional args forwarded to AttributeError.
    """
    if board or target:
      detail = ('No such board-specific parallel run attribute %r for %s/%s' %
                (attr, board, target))
    else:
      detail = 'No such parallel run attribute %r' % attr
    self.msg = detail
    super(ParallelAttributeError, self).__init__(self.msg, *args)
    self.args = (attr, board, target) + tuple(args)

  def __str__(self):
    """Report the composed message rather than the raw args tuple."""
    return self.msg
+
+
class AttrSepCountError(ValueError):
  """Custom version of ValueError for when BOARD_ATTR_SEP is misused."""

  def __init__(self, attr, *args):
    """Compose a message reporting the separator misuse in |attr|.

    Args:
      attr: Attribute name containing the wrong number of separators.
      *args: Extra positional args forwarded to ValueError.
    """
    sep = RunAttributes.BOARD_ATTR_SEP
    self.msg = ('Attribute name has an unexpected number of "%s" occurrences'
                ' in it: %s' % (sep, attr))
    super(AttrSepCountError, self).__init__(self.msg, *args)
    self.args = (attr,) + tuple(args)

  def __str__(self):
    """Report the composed message rather than the raw args tuple."""
    return self.msg
+
+
class AttrNotPickleableError(RunAttributesError):
  """For when attribute value to queue is not pickleable."""

  def __init__(self, attr, value, *args):
    """Record the attribute/value pair that failed to pickle.

    Args:
      attr: Name of the run attribute whose value could not be pickled.
      value: The offending value.
      *args: Extra positional args forwarded to the base exception.
    """
    self.msg = 'Run attribute "%s" value cannot be pickled: %r' % (attr, value)
    super(AttrNotPickleableError, self).__init__(self.msg, *args)
    self.args = (attr, value) + tuple(args)
+
+
class AttrTimeoutError(RunAttributesError):
  """For when timeout is reached while waiting for attribute value."""

  def __init__(self, attr, *args):
    """Record which attribute timed out and compose the message.

    Args:
      attr: Name of the run attribute whose value never arrived.
      *args: Extra positional args forwarded to the base exception.
    """
    self.msg = 'Timed out waiting for value for run attribute "%s".' % attr
    super(AttrTimeoutError, self).__init__(self.msg, *args)
    self.args = (attr,) + tuple(args)
+
+
class LockableQueue(object):
  """A multiprocessing queue paired with a recursive lock.

  Instances behave exactly like the manager-backed Queue they wrap: every
  attribute other than rlock is delegated to that queue.  The rlock attribute
  is a multiprocessing RLock callers can use to guard multi-step queue
  operations -- taking the lock is entirely the caller's responsibility.
  Example usage:

  with queue.rlock:
    ... process the queue in some way.
  """

  def __init__(self, manager):
    """Create the companion lock and the backing queue from |manager|."""
    self.rlock = manager.RLock()
    self._queue = manager.Queue()

  def __getattr__(self, attr):
    """Delegate any unknown attribute lookup to the wrapped queue."""
    return getattr(self._queue, attr)
+
+
+class RunAttributes(object):
+  """Hold all run attributes for a particular builder run.
+
+  There are two supported flavors of run attributes: REGULAR attributes are
+  available only to stages that are run sequentially as part of the main (top)
+  process and PARALLEL attributes are available to all stages, no matter what
+  process they are in.  REGULAR attributes are accessed directly as normal
+  attributes on a RunAttributes object, while PARALLEL attributes are accessed
+  through the {Set|Has|Get}Parallel methods.  PARALLEL attributes also have the
+  restriction that their values must be pickle-able (in order to be sent
+  through multiprocessing queue).
+
+  The currently supported attributes of each kind are listed in REGULAR_ATTRS
+  and PARALLEL_ATTRS below.  To add support for a new run attribute simply
+  add it to one of those sets.
+
+  A subset of PARALLEL_ATTRS is BOARD_ATTRS.  These attributes only have meaning
+  in the context of a specific board and config target.  The attributes become
+  available once a board/config is registered for a run, and then they can be
+  accessed through the {Set|Has|Get}BoardParallel methods or through the
+  {Get|Set|Has}Parallel methods of a BoardRunAttributes object.  The latter is
+  encouraged.
+
+  To add a new BOARD attribute simply add it to the BOARD_ATTRS set below, which
+  will also add it to PARALLEL_ATTRS (all BOARD attributes are assumed to need
+  PARALLEL support).
+  """
+
+  REGULAR_ATTRS = frozenset((
+      'chrome_version',   # Set by SyncChromeStage, if it runs.
+      'manifest_manager', # Set by ManifestVersionedSyncStage.
+      'release_tag',      # Set by cbuildbot after sync stage.
+      'version_info',     # Set by the builder after sync+patch stage.
+      'metadata',         # Used by various build stages to record metadata.
+  ))
+
+  # TODO(mtennant): It might be useful to have additional info for each board
+  # attribute:  1) a log-friendly pretty name, 2) a rough upper bound timeout
+  # value for consumers of the attribute to use when waiting for it.
+  BOARD_ATTRS = frozenset((
+      'breakpad_symbols_generated', # Set by DebugSymbolsStage.
+      'debug_tarball_generated',    # Set by DebugSymbolsStage.
+      'images_generated',           # Set by BuildImageStage.
+      'payloads_generated',         # Set by UploadHWTestArtifacts.
+      'delta_payloads_generated',   # Set by UploadHWTestArtifacts.
+      'instruction_urls_per_channel', # Set by ArchiveStage.
+      'success',                    # Set by cbuildbot.py:Builder.
+      'packages_under_test',        # Set by BuildPackagesStage.
+      'gce_tarball_generated',      # Set by ArchiveStage.
+  ))
+
+  # Attributes that need to be set by stages that can run in parallel
+  # (i.e. in a subprocess) must be included here.  All BOARD_ATTRS are
+  # assumed to fit into this category.
+  PARALLEL_ATTRS = BOARD_ATTRS | frozenset((
+      'unittest_value',   # For unittests.  An example of a PARALLEL attribute
+                          # that is not also a BOARD attribute.
+  ))
+
+  # This separator is used to create a unique attribute name for any
+  # board-specific attribute.  For example:
+  # breakpad_symbols_generated||stumpy||stumpy-full-config
+  BOARD_ATTR_SEP = '||'
+
+  # Sanity check, make sure there is no overlap between the attr groups.
+  # Runs once at class-definition time.
+  assert not REGULAR_ATTRS & PARALLEL_ATTRS
+
+  # REGULAR_ATTRS show up as attributes directly on the RunAttributes object.
+  # PARALLEL_ATTRS do not; they live in the _queues dict instead.
+  __slots__ = tuple(REGULAR_ATTRS) + (
+      '_board_targets', # Set of registered board/target combinations.
+      '_manager',       # The multiprocessing.Manager to use.
+      '_queues',        # Dict of parallel attribute names to LockableQueues.
+  )
+
+  def __init__(self, multiprocess_manager):
+    """Initialize, creating queues for non-board parallel attributes.
+
+    Args:
+      multiprocess_manager: The multiprocessing.Manager used to back the
+        LockableQueue created for each parallel attribute.
+    """
+    # The __slots__ logic above confuses pylint.
+    # https://bitbucket.org/logilab/pylint/issue/380/
+    # pylint: disable=assigning-non-slot
+
+    # Create queues for all non-board-specific parallel attributes now.
+    # Parallel board attributes must wait for the board to be registered.
+    self._manager = multiprocess_manager
+    self._queues = {}
+    for attr in RunAttributes.PARALLEL_ATTRS:
+      if attr not in RunAttributes.BOARD_ATTRS:
+        # pylint: disable=E1101
+        self._queues[attr] = LockableQueue(self._manager)
+
+    # Set of known <board>||<target> combinations.
+    self._board_targets = set()
+
+  def RegisterBoardAttrs(self, board, target):
+    """Register a new valid board/target combination.  Safe to repeat.
+
+    Args:
+      board: Board name to register.
+      target: Build config name to register.
+
+    Returns:
+      A new BoardRunAttributes object for more convenient access to the newly
+        registered attributes specific to this board/target combination.
+    """
+    board_target = RunAttributes.BOARD_ATTR_SEP.join((board, target))
+
+    if not board_target in self._board_targets:
+      # Register board/target as a known board/target.
+      self._board_targets.add(board_target)
+
+      # For each board attribute that should be queue-able, create its queue
+      # now.  Queues are kept by the uniquified run attribute name.
+      for attr in RunAttributes.BOARD_ATTRS:
+        # Every attr in BOARD_ATTRS is in PARALLEL_ATTRS, by construction.
+        # pylint: disable=E1101
+        uniquified_attr = self._GetBoardAttrName(attr, board, target)
+        self._queues[uniquified_attr] = LockableQueue(self._manager)
+
+    return BoardRunAttributes(self, board, target)
+
+  # TODO(mtennant): Complain if a child process attempts to set a non-parallel
+  # run attribute?  It could be done something like this:
+  #def __setattr__(self, attr, value):
+  #  """Override __setattr__ to prevent misuse of run attributes."""
+  #  if attr in self.REGULAR_ATTRS:
+  #    assert not self._IsChildProcess()
+  #  super(RunAttributes, self).__setattr__(attr, value)
+
+  def _GetBoardAttrName(self, attr, board, target):
+    """Translate plain |attr| to uniquified board attribute name.
+
+    Args:
+      attr: Plain run attribute name.
+      board: Board name.
+      target: Build config name.
+
+    Returns:
+      The uniquified board-specific attribute name.
+
+    Raises:
+      AssertionError if the board/target combination does not exist.
+    """
+    board_target = RunAttributes.BOARD_ATTR_SEP.join((board, target))
+    # NOTE(review): assert is stripped under python -O; in that mode an
+    # unregistered board/target would fall through to a name with no queue.
+    assert board_target in self._board_targets, \
+        'Unknown board/target combination: %s/%s' % (board, target)
+
+    # Translate to the unique attribute name for attr/board/target.
+    return RunAttributes.BOARD_ATTR_SEP.join((attr, board, target))
+
+  def SetBoardParallel(self, attr, value, board, target):
+    """Set board-specific parallel run attribute value.
+
+    Args:
+      attr: Plain board run attribute name.
+      value: Value to set.
+      board: Board name.
+      target: Build config name.
+
+    Raises:
+      ParallelAttributeError (with board/target context) if |attr| is not a
+        known parallel attribute for this board/target.
+    """
+    unique_attr = self._GetBoardAttrName(attr, board, target)
+    try:
+      self.SetParallel(unique_attr, value)
+    except ParallelAttributeError:
+      # Clarify the AttributeError by including board/target context.
+      raise ParallelAttributeError(attr, board=board, target=target)
+
+  def HasBoardParallel(self, attr, board, target):
+    """Return True if board-specific parallel run attribute is known and set.
+
+    Args:
+      attr: Plain board run attribute name.
+      board: Board name.
+      target: Build config name.
+
+    Returns:
+      True if the attribute has a value available, False otherwise.
+    """
+    unique_attr = self._GetBoardAttrName(attr, board, target)
+    return self.HasParallel(unique_attr)
+
+  def SetBoardParallelDefault(self, attr, default_value, board, target):
+    """Set board-specific parallel run attribute value, if not already set.
+
+    Args:
+      attr: Plain board run attribute name.
+      default_value: Value to set if the attribute is currently unset.
+      board: Board name.
+      target: Build config name.
+    """
+    # Check-then-set; not atomic across processes, mirroring
+    # SetParallelDefault.
+    if not self.HasBoardParallel(attr, board, target):
+      self.SetBoardParallel(attr, default_value, board, target)
+
+  def GetBoardParallel(self, attr, board, target, timeout=0):
+    """Get board-specific parallel run attribute value.
+
+    Args:
+      attr: Plain board run attribute name.
+      board: Board name.
+      target: Build config name.
+      timeout: See GetParallel for description.
+
+    Returns:
+      The value found.
+
+    Raises:
+      ParallelAttributeError (with board/target context) if the attribute is
+        unset and timeout was 0.
+      AttrTimeoutError if a non-zero timeout elapsed with no value available.
+    """
+    unique_attr = self._GetBoardAttrName(attr, board, target)
+    try:
+      return self.GetParallel(unique_attr, timeout=timeout)
+    except ParallelAttributeError:
+      # Clarify the AttributeError by including board/target context.
+      raise ParallelAttributeError(attr, board=board, target=target)
+
+  def _GetQueue(self, attr, strict=False):
+    """Return the queue for the given attribute, if it exists.
+
+    Args:
+      attr: The run attribute name.
+      strict: If True, then complain if queue for |attr| is not found.
+
+    Returns:
+      The LockableQueue for this attribute, if it has one, or None
+        (assuming strict is False).
+
+    Raises:
+      ParallelAttributeError only when |strict| is True and no queue for this
+        attribute is registered, meaning no parallel attribute by this name
+        is known.
+    """
+    queue = self._queues.get(attr)
+
+    if queue is None and strict:
+      raise ParallelAttributeError(attr)
+
+    return queue
+
+  def SetParallel(self, attr, value):
+    """Set the given parallel run attribute value.
+
+    Called to set the value of any parallel run attribute.  The value is
+    saved onto a multiprocessing queue for that attribute.  Any previous
+    value is discarded: last writer wins.
+
+    Args:
+      attr: Name of the attribute.
+      value: Value to give the attribute.  This value must be pickleable.
+
+    Raises:
+      ParallelAttributeError if attribute is not a valid parallel attribute.
+      AttrNotPickleableError if value cannot be pickled, meaning it cannot
+        go through the queue system.
+    """
+    # Confirm that value can be pickled, because otherwise it will fail
+    # in the queue.
+    try:
+      cPickle.dumps(value, cPickle.HIGHEST_PROTOCOL)
+    except cPickle.PicklingError:
+      raise AttrNotPickleableError(attr, value)
+
+    queue = self._GetQueue(attr, strict=True)
+
+    # Hold the queue's lock so drain-then-put is atomic with respect to
+    # other lock-holding readers/writers.
+    with queue.rlock:
+      # First empty the queue.  Any value already on the queue is now stale.
+      while True:
+        try:
+          queue.get(False)
+        except Queue.Empty:
+          break
+
+      queue.put(value)
+
+  def HasParallel(self, attr):
+    """Return True if the given parallel run attribute is known and set.
+
+    Args:
+      attr: Name of the attribute.
+
+    Returns:
+      True if a queue exists for |attr| and currently holds a value;
+      False for unknown attributes (ParallelAttributeError is swallowed).
+    """
+    try:
+      queue = self._GetQueue(attr, strict=True)
+
+      with queue.rlock:
+        return not queue.empty()
+    except ParallelAttributeError:
+      return False
+
+  def SetParallelDefault(self, attr, default_value):
+    """Set the given parallel run attribute only if it is not already set.
+
+    This leverages HasParallel and SetParallel in a convenient pattern.
+    Note the check-then-set is not atomic across processes.
+
+    Args:
+      attr: Name of the attribute.
+      default_value: Value to give the attribute if it is not set.  This value
+        must be pickleable.
+
+    Raises:
+      ParallelAttributeError if attribute is not a valid parallel attribute.
+      AttrNotPickleableError if value cannot be pickled, meaning it cannot
+        go through the queue system.
+    """
+    if not self.HasParallel(attr):
+      self.SetParallel(attr, default_value)
+
+  # TODO(mtennant): Add an option to log access, including the time to wait
+  # or waited.  It could be enabled with an optional announce=False argument.
+  # See GetParallel helper on BoardSpecificBuilderStage class for ideas.
+  def GetParallel(self, attr, timeout=0):
+    """Get value for the given parallel run attribute, optionally waiting.
+
+    If the given parallel run attr already has a value in the queue it will
+    return that value right away.  Otherwise, it will wait for a value to
+    appear in the queue up to the timeout specified (timeout of None means
+    wait forever) before returning the value found or raising AttrTimeoutError
+    if a timeout was reached.  The value read is re-queued so subsequent
+    readers see it too.
+
+    Args:
+      attr: The name of the run attribute.
+      timeout: Timeout, in seconds.  A None value means wait forever,
+        which is probably never a good idea.  A value of 0 does not wait at all.
+
+    Returns:
+      The value found on the attribute's queue.
+
+    Raises:
+      ParallelAttributeError if attribute is not set and timeout was 0.
+      AttrTimeoutError if timeout is greater than 0 and timeout is reached
+        before a value is available on the queue.
+    """
+    got_value = False
+    queue = self._GetQueue(attr, strict=True)
+
+    # First attempt to get a value off the queue, without the lock.  This
+    # allows a blocking get to wait for a value to appear.
+    try:
+      value = queue.get(True, timeout)
+      got_value = True
+    except Queue.Empty:
+      # This means there is nothing on the queue.  Let this fall through to
+      # the locked code block to see if another process is in the process
+      # of re-queuing a value.  Any process doing that will have a lock.
+      pass
+
+    # Now grab the queue lock and flush any other values that are on the queue.
+    # This should only happen if another process put a value in after our first
+    # queue.get above.  If so, accept the updated value.
+    with queue.rlock:
+      while True:
+        try:
+          value = queue.get(False)
+          got_value = True
+        except Queue.Empty:
+          break
+
+      if got_value:
+        # First re-queue the value, then return it.
+        queue.put(value)
+        return value
+
+      else:
+        # Handle no value differently depending on whether timeout is 0.
+        if timeout == 0:
+          raise ParallelAttributeError(attr)
+        else:
+          raise AttrTimeoutError(attr)
+
+
+class BoardRunAttributes(object):
+  """Convenience class for accessing board-specific run attributes.
+
+  Board-specific run attributes (actually board/target-specific) are saved in
+  the RunAttributes object but under uniquified names.  A BoardRunAttributes
+  object provides access to these attributes using their plain names by
+  providing the board/target information where needed.
+
+  For example, to access the breakpad_symbols_generated board run attribute on
+  a regular RunAttributes object requires this:
+
+    value = attrs.GetBoardParallel('breakpad_symbols_generated', board, target)
+
+  But on a BoardRunAttributes object:
+
+    boardattrs = BoardRunAttributes(attrs, board, target)
+    ...
+    value = boardattrs.GetParallel('breakpad_symbols_generated')
+
+  The same goes for setting values.
+  """
+
+  # Thin wrapper: only the backing RunAttributes and the bound board/target.
+  __slots__ = ('_attrs', '_board', '_target')
+
+  def __init__(self, attrs, board, target):
+    """Initialize.
+
+    Args:
+      attrs: The main RunAttributes object.
+      board: The board name this is specific to.
+      target: The build config name this is specific to.
+    """
+    self._attrs = attrs
+    self._board = board
+    self._target = target
+
+  def SetParallel(self, attr, value, *args, **kwargs):
+    """Set the value of parallel board attribute |attr| to |value|.
+
+    Relay to SetBoardParallel on self._attrs, supplying board and target.
+    See documentation on RunAttributes.SetBoardParallel for more details.
+    """
+    self._attrs.SetBoardParallel(attr, value, self._board, self._target,
+                                 *args, **kwargs)
+
+  def HasParallel(self, attr, *args, **kwargs):
+    """Return True if parallel board attribute |attr| exists.
+
+    Relay to HasBoardParallel on self._attrs, supplying board and target.
+    See documentation on RunAttributes.HasBoardParallel for more details.
+    """
+    return self._attrs.HasBoardParallel(attr, self._board, self._target,
+                                        *args, **kwargs)
+
+  def SetParallelDefault(self, attr, default_value, *args, **kwargs):
+    """Set parallel board attribute |attr| to |default_value|, if not set.
+
+    Relay to SetBoardParallelDefault on self._attrs, supplying board and target.
+    See documentation on RunAttributes.SetBoardParallelDefault for more details.
+    """
+    self._attrs.SetBoardParallelDefault(attr, default_value, self._board,
+                                        self._target, *args, **kwargs)
+
+  def GetParallel(self, attr, *args, **kwargs):
+    """Get the value of parallel board attribute |attr|.
+
+    Relay to GetBoardParallel on self._attrs, supplying board and target.
+    See documentation on RunAttributes.GetBoardParallel for more details.
+    """
+    return self._attrs.GetBoardParallel(attr, self._board, self._target,
+                                        *args, **kwargs)
+
+
+# TODO(mtennant): Consider renaming this _BuilderRunState, then renaming
+# _RealBuilderRun to _BuilderRunBase.
+class _BuilderRunBase(object):
+  """Class to represent one run of a builder.
+
+  This class should never be instantiated directly, but instead be
+  instantiated as part of a BuilderRun object.
+  """
+
+  # Class-level dict of RunAttributes objects to make it less
+  # problematic to send BuilderRun objects between processes through
+  # pickle.  The 'attrs' attribute on a BuilderRun object will look
+  # up the RunAttributes for that particular BuilderRun here.
+  _ATTRS = {}
+
+  __slots__ = (
+      'site_config',     # SiteConfig for this run.
+      'config',          # BuildConfig for this run.
+      'options',         # The cbuildbot options object for this run.
+
+      # Run attributes set/accessed by stages during the run.  To add support
+      # for a new run attribute add it to the RunAttributes class above.
+      '_attrs_id',       # Object ID for looking up self.attrs.
+
+      # Some pre-computed run configuration values.
+      'buildnumber',     # The build number for this run.
+      'buildroot',       # The build root path for this run.
+      'debug',           # Boolean, represents "dry run" concept, really.
+      'manifest_branch', # The manifest branch to build and test for this run.
+
+      # Some attributes are available as properties.  In particular, attributes
+      # that use self.config must be determined after __init__.
+      # self.bot_id      # Effective name of builder for this run.
+
+      # TODO(mtennant): Other candidates here include:
+      # trybot, buildbot, remote_trybot, chrome_root,
+      # test = (config build_tests AND option tests)
+  )
+
+  def __init__(self, site_config, options, multiprocess_manager):
+    """Initialize with the run's site config, options, and process manager.
+
+    Args:
+      site_config: SiteConfig for this run.
+      options: The cbuildbot options object for this run.
+      multiprocess_manager: A multiprocessing.Manager, used to back the
+        RunAttributes queues and the metadata object.
+    """
+    self.site_config = site_config
+    self.options = options
+
+    # Note that self.config is filled in dynamically by either of the classes
+    # that are actually instantiated: BuilderRun and ChildBuilderRun.  In other
+    # words, self.config can be counted on anywhere except in this __init__.
+    # The implication is that any plain attributes that are calculated from
+    # self.config contents must be provided as properties (or methods).
+    # See the _RealBuilderRun class and its __getattr__ method for details.
+    self.config = None
+
+    # Create the RunAttributes object for this BuilderRun and save
+    # the id number for it in order to look it up via attrs property.
+    attrs = RunAttributes(multiprocess_manager)
+    self._ATTRS[id(attrs)] = attrs
+    self._attrs_id = id(attrs)
+
+    # Fill in values for all pre-computed "run configs" now, which are frozen
+    # by this time.
+
+    # TODO(mtennant): Should this use os.path.abspath like builderstage does?
+    self.buildroot = self.options.buildroot
+    self.buildnumber = self.options.buildnumber
+    self.manifest_branch = self.options.branch
+
+    # For remote_trybot runs, options.debug is implied, but we want true dryrun
+    # mode only if --debug was actually specified (i.e. options.debug_forced).
+    # TODO(mtennant): Get rid of confusing debug and debug_forced, if at all
+    # possible.  Also, eventually use "dry_run" and "verbose" options instead to
+    # represent two distinct concepts.
+    self.debug = self.options.debug
+    if self.options.remote_trybot:
+      self.debug = self.options.debug_forced
+
+    # The __slots__ logic above confuses pylint.
+    # https://bitbucket.org/logilab/pylint/issue/380/
+    # pylint: disable=assigning-non-slot
+
+    # Certain run attributes have sensible defaults which can be set here.
+    # This allows all code to safely assume that the run attribute exists.
+    attrs.chrome_version = None
+    attrs.metadata = metadata_lib.CBuildbotMetadata(
+        multiprocess_manager=multiprocess_manager)
+
+  @property
+  def bot_id(self):
+    """Return the bot_id for this run."""
+    return self.config.GetBotId(remote_trybot=self.options.remote_trybot)
+
+  @property
+  def attrs(self):
+    """Look up the RunAttributes object for this BuilderRun object."""
+    return self._ATTRS[self._attrs_id]
+
+  def IsToTBuild(self):
+    """Returns True if Builder is running on ToT."""
+    return self.manifest_branch == 'master'
+
+  def GetArchive(self):
+    """Create an Archive object for this BuilderRun object."""
+    # The Archive class is very lightweight, and is read-only, so it
+    # is ok to generate a new one on demand.  This also avoids worrying
+    # about whether it can go through pickle.
+    # Almost everything the Archive class does requires GetVersion(),
+    # which means it cannot be used until the version has been settled on.
+    # However, because it does have some use before then we provide
+    # the GetVersion function itself to be called when needed later.
+    return archive_lib.Archive(self.bot_id, self.GetVersion, self.options,
+                               self.config)
+
+  def GetBoardRunAttrs(self, board):
+    """Create a BoardRunAttributes object for this run and given |board|."""
+    return BoardRunAttributes(self.attrs, board, self.config.name)
+
+  def GetWaterfall(self):
+    """Gets the waterfall of the current build."""
+    # Metadata dictionary may not have been written at this time (it
+    # should be written in the BuildStartStage), fall back to get the
+    # environment variable in that case. Assume we are on the trybot
+    # waterfall if no waterfall can be found.
+    return (self.attrs.metadata.GetDict().get('buildbot-master-name') or
+            os.environ.get('BUILDBOT_MASTERNAME') or
+            constants.WATERFALL_TRYBOT)
+
+  def GetBuildbotUrl(self):
+    """Gets the URL of the waterfall hosting the current build."""
+    # Metadata dictionary may not have been written at this time (it
+    # should be written in the BuildStartStage), fall back to the
+    # environment variable in that case. Assume we are on the trybot
+    # waterfall if no waterfall can be found.
+    return (self.attrs.metadata.GetDict().get('buildbot-url') or
+            os.environ.get('BUILDBOT_BUILDBOTURL') or
+            constants.TRYBOT_DASHBOARD)
+
+  def GetBuilderName(self):
+    """Get the name of this builder on the current waterfall."""
+    return os.environ.get('BUILDBOT_BUILDERNAME', self.config.name)
+
+  def ConstructDashboardURL(self, stage=None):
+    """Return the dashboard URL
+
+    This is the direct link to buildbot logs as seen in build.chromium.org
+
+    Args:
+      stage: Link to a specific |stage|, otherwise the general buildbot log
+
+    Returns:
+      The fully formed URL
+    """
+    return tree_status.ConstructDashboardURL(
+        self.GetBuildbotUrl(),
+        self.GetBuilderName(),
+        self.options.buildnumber, stage=stage)
+
+  def ShouldBuildAutotest(self):
+    """Return True if this run should build autotest and artifacts."""
+    return self.config.build_tests and self.options.tests
+
+  def ShouldUploadPrebuilts(self):
+    """Return True if this run should upload prebuilts."""
+    return self.options.prebuilts and self.config.prebuilts
+
+  def GetCIDBHandle(self):
+    """Get the build_id and cidb handle, if available.
+
+    Returns:
+      A (build_id, CIDBConnection) tuple if cidb is set up and a build_id is
+      known in metadata. Otherwise, (None, None).
+    """
+    try:
+      build_id = self.attrs.metadata.GetValue('build_id')
+    except KeyError:
+      # No build_id in metadata yet (BuildStartStage has not recorded it).
+      return (None, None)
+
+    if not cidb.CIDBConnectionFactory.IsCIDBSetup():
+      return (None, None)
+
+    cidb_handle = cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()
+    if cidb_handle:
+      return (build_id, cidb_handle)
+    else:
+      return (None, None)
+
+  def ShouldReexecAfterSync(self):
+    """Return True if this run should re-exec itself after sync stage."""
+    if self.options.postsync_reexec and self.config.postsync_reexec:
+      # Return True if this source is not in designated buildroot.
+      abs_buildroot = os.path.abspath(self.buildroot)
+      return not os.path.abspath(__file__).startswith(abs_buildroot)
+
+    return False
+
+  def ShouldPatchAfterSync(self):
+    """Return True if this run should patch changes after sync stage."""
+    return self.options.postsync_patch and self.config.postsync_patch
+
+  def InProduction(self):
+    """Return True if this is a production run."""
+    return cidb.CIDBConnectionFactory.GetCIDBConnectionType() == 'prod'
+
+  def GetVersionInfo(self):
+    """Helper for picking apart various version bits.
+
+    The Builder must set attrs.version_info before calling this.  Further, it
+    should do so only after the sources have been fully synced & patched, else
+    it could return a confusing value.
+
+    Returns:
+      A manifest_version.VersionInfo object.
+
+    Raises:
+      VersionNotSetError if the version has not yet been set.
+    """
+    if not hasattr(self.attrs, 'version_info'):
+      raise VersionNotSetError('builder must call SetVersionInfo first')
+    return self.attrs.version_info
+
+  def GetVersion(self):
+    """Calculate full R<chrome_version>-<chromeos_version> version string.
+
+    See GetVersionInfo() notes about runtime usage.
+
+    Returns:
+      The version string for this run.
+    """
+    verinfo = self.GetVersionInfo()
+    release_tag = self.attrs.release_tag
+    if release_tag:
+      calc_version = 'R%s-%s' % (verinfo.chrome_branch, release_tag)
+    else:
+      # Non-versioned builds need the build number to uniquify the image.
+      calc_version = 'R%s-%s-b%s' % (verinfo.chrome_branch,
+                                     verinfo.VersionString(),
+                                     self.buildnumber)
+
+    return calc_version
+
+  def DetermineChromeVersion(self):
+    """Determine the current Chrome version in buildroot now and return it.
+
+    This uses the typical portage logic to determine which version of Chrome
+    is active right now in the buildroot.
+
+    Returns:
+      The current Chrome version string (e.g. "35.0.1863.0").  Note this
+      method only returns the value; it does not update attrs.chrome_version.
+    """
+    cpv = portage_util.BestVisible(constants.CHROME_CP,
+                                   buildroot=self.buildroot)
+    return cpv.version_no_rev.partition('_')[0]
+
+
+class _RealBuilderRun(object):
+  """Base BuilderRun class that manages self.config access.
+
+  For any builder run, sometimes the build config is the top-level config and
+  sometimes it is a "child" config.  In either case, the config to use should
+  override self.config for all cases.  This class provides a mechanism for
+  overriding self.config access generally.
+
+  Also, methods that do more than access state for a BuilderRun should
+  live here.  In particular, any method that uses 'self' as an object
+  directly should be here rather than _BuilderRunBase.
+  """
+
+  __slots__ = _BuilderRunBase.__slots__ + (
+      '_run_base',  # The _BuilderRunBase object where most functionality is.
+      '_config',    # Config to use for dynamically overriding self.config.
+  )
+
+  def __init__(self, run_base, build_config):
+    """Bind a shared _BuilderRunBase to a specific build config.
+
+    Args:
+      run_base: The _BuilderRunBase holding shared run state.
+      build_config: The BuildConfig to present as self.config.
+    """
+    self._run_base = run_base
+    self._config = build_config
+
+    # Make sure self.attrs has board-specific attributes for each board
+    # in build_config.
+    for board in build_config.boards:
+      self.attrs.RegisterBoardAttrs(board, build_config.name)
+
+  def __getattr__(self, attr):
+    # Remember, __getattr__ only called if attribute was not found normally.
+    # In normal usage, the __init__ guarantees that self._run_base and
+    # self._config will be present.  However, the unpickle process bypasses
+    # __init__, and this object must be pickle-able.  That is why we access
+    # self._run_base and self._config through __getattribute__ here, otherwise
+    # unpickling results in infinite recursion.
+    # TODO(mtennant): Revisit this if pickling support is changed to go through
+    # the __init__ method, such as by supplying __reduce__ method.
+    run_base = self.__getattribute__('_run_base')
+    config = self.__getattribute__('_config')
+
+    # TODO(akeshet): This logic seems to have a subtle flaky bug that only
+    # manifests itself when using unit tests with ParallelMock. As a workaround,
+    # we have simply eliminiated ParallelMock from the affected tests. See
+    # crbug.com/470907 for context.
+    try:
+      # run_base.config should always be None except when accessed through
+      # this routine.  Override the value here, then undo later.
+      run_base.config = config
+
+      result = getattr(run_base, attr)
+      if isinstance(result, types.MethodType):
+        # Make sure run_base.config is also managed when the method is called.
+        @functools.wraps(result)
+        def FuncWrapper(*args, **kwargs):
+          # Re-apply the override for the duration of the actual call, since
+          # the outer finally below will have reset it by then.
+          run_base.config = config
+          try:
+            return result(*args, **kwargs)
+          finally:
+            run_base.config = None
+
+        # TODO(mtennant): Find a way to make the following actually work.  It
+        # makes pickling more complicated, unfortunately.
+        # Cache this function wrapper to re-use next time without going through
+        # __getattr__ again.  This ensures that the same wrapper object is used
+        # each time, which is nice for identity and equality checks.  Subtle
+        # gotcha that we accept: if the function itself on run_base is replaced
+        # then this will continue to provide the behavior of the previous one.
+        #setattr(self, attr, FuncWrapper)
+
+        return FuncWrapper
+      else:
+        return result
+
+    finally:
+      run_base.config = None
+
+  def GetChildren(self):
+    """Get ChildBuilderRun objects for child configs, if they exist.
+
+    Returns:
+      List of ChildBuilderRun objects if self.config has child_configs.  []
+        otherwise.
+    """
+    # If there are child configs, construct a list of ChildBuilderRun objects
+    # for those child configs and return that.
+    return [ChildBuilderRun(self, ix)
+            for ix in range(len(self.config.child_configs))]
+
+  def GetUngroupedBuilderRuns(self):
+    """Same as GetChildren, but defaults to [self] if no children exist.
+
+    Returns:
+      Result of self.GetChildren, if children exist, otherwise [self].
+    """
+    return self.GetChildren() or [self]
+
+  def GetBuilderIds(self):
+    """Return a list of builder names for this config and the child configs."""
+    bot_ids = [self.config.name]
+    for config in self.config.child_configs:
+      # Skip child configs with falsy names (e.g. unnamed entries).
+      if config.name:
+        bot_ids.append(config.name)
+    return bot_ids
+
+
+class BuilderRun(_RealBuilderRun):
+  """A standard BuilderRun for a top-level build config.
+
+  Constructs its own _BuilderRunBase, which child runs later share.
+  """
+
+  def __init__(self, options, site_config, build_config, multiprocess_manager):
+    """Initialize.
+
+    Args:
+      options: Command line options from this cbuildbot run.
+      site_config: Site config for this cbuildbot run.
+      build_config: Build config for this cbuildbot run.
+      multiprocess_manager: A multiprocessing.Manager.
+    """
+    run_base = _BuilderRunBase(site_config, options, multiprocess_manager)
+    super(BuilderRun, self).__init__(run_base, build_config)
+
+
+class ChildBuilderRun(_RealBuilderRun):
+  """A BuilderRun for a "child" build config."""
+
+  def __init__(self, builder_run, child_index):
+    """Initialize.
+
+    Args:
+      builder_run: BuilderRun for the parent (main) cbuildbot run.  Extract
+        the _BuilderRunBase from it to make sure the same base is used for
+        both the main cbuildbot run and any child runs.
+      child_index: The child index of this child run, used to index into
+        the main run's config.child_configs.
+    """
+    # Deliberate reach into the parent run's private state; see Args above.
+    # pylint: disable=W0212
+    run_base = builder_run._run_base
+    config = builder_run.config.child_configs[child_index]
+    super(ChildBuilderRun, self).__init__(run_base, config)
diff --git a/cbuildbot/cbuildbot_run_unittest b/cbuildbot/cbuildbot_run_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/cbuildbot_run_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/cbuildbot_run_unittest.py b/cbuildbot/cbuildbot_run_unittest.py
new file mode 100644
index 0000000..e399c5d
--- /dev/null
+++ b/cbuildbot/cbuildbot_run_unittest.py
@@ -0,0 +1,650 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the cbuildbot_run module."""
+
+from __future__ import print_function
+
+import cPickle
+import os
+import mock
+import time
+
+from chromite.cbuildbot import chromeos_config
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import config_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import parallel
+
+
+# Placeholder values used to build the default options/config below.
+DEFAULT_ARCHIVE_GS_PATH = 'bogus_bucket/TheArchiveBase'
+DEFAULT_ARCHIVE_BASE = 'gs://%s' % DEFAULT_ARCHIVE_GS_PATH
+DEFAULT_BUILDROOT = '/tmp/foo/bar/buildroot'
+DEFAULT_BUILDNUMBER = 12345
+DEFAULT_BRANCH = 'TheBranch'
+DEFAULT_CHROME_BRANCH = 'TheChromeBranch'
+DEFAULT_VERSION_STRING = 'TheVersionString'
+DEFAULT_BOARD = 'TheBoard'
+DEFAULT_BOT_NAME = 'TheCoolBot'
+
+# pylint: disable=protected-access
+
+# Default option and config values shared by the tests below; individual
+# tests extend these via _ExtendDefaultOptions/_ExtendDefaultConfig.
+DEFAULT_OPTIONS = cros_test_lib.EasyAttr(
+    archive_base=DEFAULT_ARCHIVE_BASE,
+    buildroot=DEFAULT_BUILDROOT,
+    buildnumber=DEFAULT_BUILDNUMBER,
+    buildbot=True,
+    branch=DEFAULT_BRANCH,
+    remote_trybot=False,
+    debug=False,
+    postsync_patch=True,
+)
+DEFAULT_CONFIG = config_lib.BuildConfig(
+    name=DEFAULT_BOT_NAME,
+    master=True,
+    boards=[DEFAULT_BOARD],
+    postsync_patch=True,
+    child_configs=[
+        config_lib.BuildConfig(
+            name='foo', postsync_patch=False, boards=[]),
+        config_lib.BuildConfig(
+            name='bar', postsync_patch=False, boards=[]),
+    ],
+)
+
+DEFAULT_VERSION = '6543.2.1'
+
+
+def _ExtendDefaultOptions(**kwargs):
+  """Extend DEFAULT_OPTIONS with keys/values in kwargs.
+
+  Args:
+    kwargs: Option keys/values to add to (or override in) the defaults.
+
+  Returns:
+    A new EasyAttr holding the combined option values.
+  """
+  options_kwargs = DEFAULT_OPTIONS.copy()
+  options_kwargs.update(kwargs)
+  return cros_test_lib.EasyAttr(**options_kwargs)
+
+
+def _ExtendDefaultConfig(**kwargs):
+  """Extend DEFAULT_CONFIG with keys/values in kwargs.
+
+  Args:
+    kwargs: Config keys/values to add to (or override in) the defaults.
+
+  Returns:
+    A new BuildConfig holding the combined config values.
+  """
+  config_kwargs = DEFAULT_CONFIG.copy()
+  config_kwargs.update(kwargs)
+  return config_lib.BuildConfig(**config_kwargs)
+
+
+class ExceptionsTest(cros_test_lib.TestCase):
+  """Test that the exceptions in the module are sane."""
+
+  def _TestException(self, err, expected_startswith):
+    """Test that str and pickle behavior of |err| are as expected.
+
+    Args:
+      err: Exception instance to check.
+      expected_startswith: Expected prefix of str(err), both before and
+        after a pickle round-trip.
+    """
+    err2 = cPickle.loads(cPickle.dumps(err, cPickle.HIGHEST_PROTOCOL))
+
+    self.assertTrue(str(err).startswith(expected_startswith))
+    self.assertEqual(str(err), str(err2))
+
+  def testParallelAttributeError(self):
+    """Test ParallelAttributeError message and pickle behavior."""
+    err1 = cbuildbot_run.ParallelAttributeError('SomeAttr')
+    self._TestException(err1, 'No such parallel run attribute')
+
+    err2 = cbuildbot_run.ParallelAttributeError('SomeAttr', 'SomeBoard',
+                                                'SomeTarget')
+    self._TestException(err2, 'No such board-specific parallel run attribute')
+
+  def testAttrSepCountError(self):
+    """Test AttrSepCountError message and pickle behavior."""
+    err1 = cbuildbot_run.AttrSepCountError('SomeAttr')
+    self._TestException(err1, 'Attribute name has an unexpected number')
+
+  def testAttrNotPickleableError(self):
+    """Test AttrNotPickleableError message and pickle behavior."""
+    err1 = cbuildbot_run.AttrNotPickleableError('SomeAttr', 'SomeValue')
+    self._TestException(err1, 'Run attribute "SomeAttr" value cannot')
+
+
+# TODO(mtennant): Turn this into a PartialMock.
+class _BuilderRunTestCase(cros_test_lib.MockTestCase):
+  """Provide methods for creating BuilderRun or ChildBuilderRun."""
+
+  def setUp(self):
+    self._manager = parallel.Manager()
+
+    # Mimic entering a 'with' statement.
+    self._manager.__enter__()
+
+  def tearDown(self):
+    # Mimic exiting a 'with' statement.
+    self._manager.__exit__(None, None, None)
+
+  def _NewRunAttributes(self):
+    """Return a new RunAttributes using this test's multiprocess manager."""
+    return cbuildbot_run.RunAttributes(self._manager)
+
+  def _NewBuilderRun(self, options=None, config=None):
+    """Create a BuilderRun object from options and config values.
+
+    Args:
+      options: Specify options or default to DEFAULT_OPTIONS.
+      config: Specify build config or default to DEFAULT_CONFIG.
+
+    Returns:
+      BuilderRun object.
+    """
+    options = options or DEFAULT_OPTIONS
+    config = config or DEFAULT_CONFIG
+    site_config = config_lib_unittest.MockSiteConfig()
+    site_config[config.name] = config
+
+    return cbuildbot_run.BuilderRun(options, site_config, config, self._manager)
+
+  def _NewChildBuilderRun(self, child_index, options=None, config=None):
+    """Create a ChildBuilderRun object from options and config values.
+
+    Args:
+      child_index: Index of child config to use within config.
+      options: Specify options or default to DEFAULT_OPTIONS.
+      config: Specify build config or default to DEFAULT_CONFIG.
+
+    Returns:
+      ChildBuilderRun object.
+    """
+    run = self._NewBuilderRun(options, config)
+    return cbuildbot_run.ChildBuilderRun(run, child_index)
+
+
+class BuilderRunPickleTest(_BuilderRunTestCase):
+  """Make sure BuilderRun objects can be pickled."""
+
+  def setUp(self):
+    self.real_config = chromeos_config.GetConfig()['x86-alex-release-group']
+    self.PatchObject(cbuildbot_run._BuilderRunBase, 'GetVersion',
+                     return_value=DEFAULT_VERSION)
+
+  def _TestPickle(self, run1):
+    """Round-trip |run1| through pickle and compare the copy to the original."""
+    self.assertEquals(DEFAULT_VERSION, run1.GetVersion())
+    run1.attrs.release_tag = 'TheReleaseTag'
+
+    # Accessing a method on BuilderRun has special behavior, so access and
+    # use one before pickling.
+    patch_after_sync = run1.ShouldPatchAfterSync()
+
+    # Access the archive object before pickling, too.
+    upload_url = run1.GetArchive().upload_url
+
+    # Pickle and unpickle run1 into run2.
+    run2 = cPickle.loads(cPickle.dumps(run1, cPickle.HIGHEST_PROTOCOL))
+
+    self.assertEquals(run1.buildnumber, run2.buildnumber)
+    self.assertEquals(run1.config.boards, run2.config.boards)
+    self.assertEquals(run1.options.branch, run2.options.branch)
+    self.assertEquals(run1.attrs.release_tag, run2.attrs.release_tag)
+    self.assertRaises(AttributeError, getattr, run1.attrs, 'manifest_manager')
+    self.assertRaises(AttributeError, getattr, run2.attrs, 'manifest_manager')
+    self.assertEquals(patch_after_sync, run2.ShouldPatchAfterSync())
+    self.assertEquals(upload_url, run2.GetArchive().upload_url)
+
+    # The attrs objects should be identical.
+    self.assertIs(run1.attrs, run2.attrs)
+
+    # And the run objects themselves are different.
+    self.assertIsNot(run1, run2)
+
+  def testPickleBuilderRun(self):
+    self._TestPickle(self._NewBuilderRun(config=self.real_config))
+
+  def testPickleChildBuilderRun(self):
+    self._TestPickle(self._NewChildBuilderRun(0, config=self.real_config))
+
+
+class BuilderRunTest(_BuilderRunTestCase):
+  """Test the BuilderRun class."""
+
+  def testInit(self):
+    with mock.patch.object(cbuildbot_run._BuilderRunBase, 'GetVersion') as m:
+      m.return_value = DEFAULT_VERSION
+
+      run = self._NewBuilderRun()
+      self.assertEquals(DEFAULT_BUILDROOT, run.buildroot)
+      self.assertEquals(DEFAULT_BUILDNUMBER, run.buildnumber)
+      self.assertEquals(DEFAULT_BRANCH, run.manifest_branch)
+      self.assertEquals(DEFAULT_OPTIONS, run.options)
+      self.assertEquals(DEFAULT_CONFIG, run.config)
+      self.assertTrue(isinstance(run.attrs, cbuildbot_run.RunAttributes))
+      self.assertTrue(isinstance(run.GetArchive(),
+                                 cbuildbot_run.archive_lib.Archive))
+
+      # Make sure methods behave normally, since BuilderRun messes with them.
+      meth1 = run.GetVersionInfo
+      meth2 = run.GetVersionInfo
+      self.assertEqual(meth1.__name__, meth2.__name__)
+
+      # We actually do not support identity and equality checks right now.
+      self.assertNotEqual(meth1, meth2)
+      self.assertIsNot(meth1, meth2)
+
+  def testOptions(self):
+    options = _ExtendDefaultOptions(foo=True, bar=10)
+    run = self._NewBuilderRun(options=options)
+
+    self.assertEquals(True, run.options.foo)
+    self.assertEquals(10, run.options.__getattr__('bar'))
+    self.assertRaises(AttributeError, run.options.__getattr__, 'baz')
+
+  def testConfig(self):
+    config = _ExtendDefaultConfig(foo=True, bar=10)
+    run = self._NewBuilderRun(config=config)
+
+    self.assertEquals(True, run.config.foo)
+    self.assertEquals(10, run.config.__getattr__('bar'))
+    self.assertRaises(AttributeError, run.config.__getattr__, 'baz')
+
+  def testAttrs(self):
+    run = self._NewBuilderRun()
+
+    # manifest_manager is a valid run attribute.  It gives Attribute error
+    # if accessed before being set, but thereafter works fine.
+    self.assertRaises(AttributeError, run.attrs.__getattribute__,
+                      'manifest_manager')
+    run.attrs.manifest_manager = 'foo'
+    self.assertEquals('foo', run.attrs.manifest_manager)
+    self.assertEquals('foo', run.attrs.__getattribute__('manifest_manager'))
+
+    # foobar is not a valid run attribute.  It gives AttributeError when
+    # accessed or changed.
+    self.assertRaises(AttributeError, run.attrs.__getattribute__, 'foobar')
+    self.assertRaises(AttributeError, run.attrs.__setattr__, 'foobar', 'foo')
+
+  def testArchive(self):
+    run = self._NewBuilderRun()
+
+    with mock.patch.object(cbuildbot_run._BuilderRunBase, 'GetVersion') as m:
+      m.return_value = DEFAULT_VERSION
+
+      archive = run.GetArchive()
+
+      # Check archive.archive_path.
+      expected = ('%s/%s/%s/%s' %
+                  (DEFAULT_BUILDROOT,
+                   cbuildbot_run.archive_lib.Archive._BUILDBOT_ARCHIVE,
+                   DEFAULT_BOT_NAME, DEFAULT_VERSION))
+      self.assertEqual(expected, archive.archive_path)
+
+      # Check archive.upload_url.
+      expected = '%s/%s/%s' % (DEFAULT_ARCHIVE_BASE, DEFAULT_BOT_NAME,
+                               DEFAULT_VERSION)
+      self.assertEqual(expected, archive.upload_url)
+
+      # Check archive.download_url.
+      expected = ('%s%s/%s/%s' %
+                  (cbuildbot_run.archive_lib.gs.PRIVATE_BASE_HTTPS_URL,
+                   DEFAULT_ARCHIVE_GS_PATH, DEFAULT_BOT_NAME, DEFAULT_VERSION))
+      self.assertEqual(expected, archive.download_url)
+
+  def _RunAccessor(self, method_name, options_dict, config_dict):
+    """Run the given accessor method of the BuilderRun class.
+
+    Create a BuilderRun object with the options and config provided and
+    then return the result of calling the given method on it.
+
+    Args:
+      method_name: A BuilderRun method to call, specified by name.
+      options_dict: Extend default options with this.
+      config_dict: Extend default config with this.
+
+    Returns:
+      Result of calling the given method.
+    """
+    options = _ExtendDefaultOptions(**options_dict)
+    config = _ExtendDefaultConfig(**config_dict)
+    run = self._NewBuilderRun(options=options, config=config)
+    method = getattr(run, method_name)
+    self.assertEqual(method.__name__, method_name)
+    return method()
+
+  def testDualEnableSetting(self):
+    settings = {
+        'prebuilts': 'ShouldUploadPrebuilts',
+        'postsync_patch': 'ShouldPatchAfterSync',
+    }
+
+    # Both option and config enabled should result in True.
+    # Create truth table with two variables in this order:
+    # <key> option value, <key> config value (e.g. <key> == 'prebuilts').
+    truth_table = cros_test_lib.TruthTable(inputs=[(True, True)])
+
+    for inputs in truth_table:
+      option_val, config_val = inputs
+      for key, accessor in settings.iteritems():
+        self.assertEquals(
+            self._RunAccessor(accessor, {key: option_val}, {key: config_val}),
+            truth_table.GetOutput(inputs))
+
+  def testShouldReexecAfterSync(self):
+    # If option and config have postsync_reexec enabled, and this file is not
+    # in the build root, then we expect ShouldReexecAfterSync to return True.
+
+    # Construct a truth table across three variables in this order:
+    # postsync_reexec option value, postsync_reexec config value, same_root.
+    truth_table = cros_test_lib.TruthTable(inputs=[(True, True, False)])
+
+    for inputs in truth_table:
+      option_val, config_val, same_root = inputs
+
+      if same_root:
+        build_root = os.path.dirname(os.path.dirname(__file__))
+      else:
+        build_root = DEFAULT_BUILDROOT
+
+      result = self._RunAccessor(
+          'ShouldReexecAfterSync',
+          {'postsync_reexec': option_val, 'buildroot': build_root},
+          {'postsync_reexec': config_val})
+
+      self.assertEquals(result, truth_table.GetOutput(inputs))
+
+
+class GetVersionTest(_BuilderRunTestCase):
+  """Test the GetVersion and GetVersionInfo methods of BuilderRun class."""
+
+  # pylint: disable=protected-access
+
+  def testGetVersionInfoNotSet(self):
+    """Verify we throw an error when the version hasn't been set."""
+    run = self._NewBuilderRun()
+    self.assertRaises(RuntimeError, run.GetVersionInfo)
+
+  def testGetVersionInfo(self):
+    """Verify we return the right version info value."""
+    # Prepare a real BuilderRun object with a version_info tag.
+    run = self._NewBuilderRun()
+    verinfo = object()
+    run.attrs.version_info = verinfo
+    result = run.GetVersionInfo()
+    self.assertEquals(verinfo, result)
+
+  def _TestGetVersionReleaseTag(self, release_tag):
+    """Call GetVersion with |release_tag| set on the run; return the result."""
+    with mock.patch.object(cbuildbot_run._BuilderRunBase,
+                           'GetVersionInfo') as m:
+      verinfo_mock = mock.Mock()
+      verinfo_mock.chrome_branch = DEFAULT_CHROME_BRANCH
+      verinfo_mock.VersionString = mock.Mock(return_value='VS')
+      m.return_value = verinfo_mock
+
+      # Prepare a real BuilderRun object with a release tag.
+      run = self._NewBuilderRun()
+      run.attrs.release_tag = release_tag
+
+      # Run the test and return the result.
+      result = run.GetVersion()
+      m.assert_called_once_with()
+      if release_tag is None:
+        verinfo_mock.VersionString.assert_called_once()
+
+      return result
+
+  def testGetVersionReleaseTag(self):
+    result = self._TestGetVersionReleaseTag('RT')
+    self.assertEquals('R%s-%s' % (DEFAULT_CHROME_BRANCH, 'RT'), result)
+
+  def testGetVersionNoReleaseTag(self):
+    result = self._TestGetVersionReleaseTag(None)
+    expected_result = ('R%s-%s-b%s' %
+                       (DEFAULT_CHROME_BRANCH, 'VS', DEFAULT_BUILDNUMBER))
+    self.assertEquals(result, expected_result)
+
+
+class ChildBuilderRunTest(_BuilderRunTestCase):
+  """Test the ChildBuilderRun class."""
+
+  def testInit(self):
+    with mock.patch.object(cbuildbot_run._BuilderRunBase, 'GetVersion') as m:
+      m.return_value = DEFAULT_VERSION
+
+      crun = self._NewChildBuilderRun(0)
+      self.assertEquals(DEFAULT_BUILDROOT, crun.buildroot)
+      self.assertEquals(DEFAULT_BUILDNUMBER, crun.buildnumber)
+      self.assertEquals(DEFAULT_BRANCH, crun.manifest_branch)
+      self.assertEquals(DEFAULT_OPTIONS, crun.options)
+      # The child run reflects child config 0 ('foo'), not the parent config.
+      self.assertEquals(DEFAULT_CONFIG.child_configs[0], crun.config)
+      self.assertEquals('foo', crun.config.name)
+      self.assertTrue(isinstance(crun.attrs, cbuildbot_run.RunAttributes))
+      self.assertTrue(isinstance(crun.GetArchive(),
+                                 cbuildbot_run.archive_lib.Archive))
+
+      # Make sure methods behave normally, since BuilderRun messes with them.
+      meth1 = crun.GetVersionInfo
+      meth2 = crun.GetVersionInfo
+      self.assertEqual(meth1.__name__, meth2.__name__)
+
+      # We actually do not support identity and equality checks right now.
+      self.assertNotEqual(meth1, meth2)
+      self.assertIsNot(meth1, meth2)
+
+
+class RunAttributesTest(_BuilderRunTestCase):
+  """Test the RunAttributes class."""
+
+  BOARD = 'SomeBoard'
+  TARGET = 'SomeConfigName'
+  VALUE = 'AnyValueWillDo'
+
+  # Any valid board-specific attribute will work here.
+  BATTR = 'breakpad_symbols_generated'
+
+  def testRegisterBoardTarget(self):
+    """Test behavior of attributes before and after registering board target."""
+    ra = self._NewRunAttributes()
+
+    with self.assertRaises(AssertionError):
+      ra.HasBoardParallel(self.BATTR, self.BOARD, self.TARGET)
+
+    ra.RegisterBoardAttrs(self.BOARD, self.TARGET)
+
+    self.assertFalse(ra.HasBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+
+    ra.SetBoardParallel(self.BATTR, 'TheValue', self.BOARD, self.TARGET)
+
+    self.assertTrue(ra.HasBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+
+  def testSetGet(self):
+    """Test simple set/get of regular and parallel run attributes."""
+    ra = self._NewRunAttributes()
+    value = 'foobar'
+
+    # The __slots__ logic in RunAttributes confuses pylint.
+    # https://bitbucket.org/logilab/pylint/issue/380/
+    # pylint: disable=assigning-non-slot
+
+    # Set/Get a regular run attribute using direct access.
+    ra.release_tag = value
+    self.assertEqual(value, ra.release_tag)
+
+    # Set/Get of a parallel run attribute using direct access fails.
+    self.assertRaises(AttributeError, setattr, ra, 'unittest_value', value)
+    self.assertRaises(AttributeError, getattr, ra, 'unittest_value')
+
+    # Set/Get of a parallel run attribute with supported interface.
+    ra.SetParallel('unittest_value', value)
+    self.assertEqual(value, ra.GetParallel('unittest_value'))
+
+    # Set/Get a board parallel run attribute, testing both the encouraged
+    # interface and the underlying interface.
+    ra.RegisterBoardAttrs(self.BOARD, self.TARGET)
+    ra.SetBoardParallel(self.BATTR, value, self.BOARD, self.TARGET)
+    self.assertEqual(value,
+                     ra.GetBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+
+  def testSetDefault(self):
+    """Test setting default value of parallel run attributes."""
+    ra = self._NewRunAttributes()
+    value = 'foobar'
+
+    # Attribute starts off not set.
+    self.assertFalse(ra.HasParallel('unittest_value'))
+
+    # Use SetParallelDefault to set it.
+    ra.SetParallelDefault('unittest_value', value)
+    self.assertTrue(ra.HasParallel('unittest_value'))
+    self.assertEqual(value, ra.GetParallel('unittest_value'))
+
+    # Calling SetParallelDefault again has no effect.
+    ra.SetParallelDefault('unittest_value', 'junk')
+    self.assertTrue(ra.HasParallel('unittest_value'))
+    self.assertEqual(value, ra.GetParallel('unittest_value'))
+
+    # Run through same sequence for a board-specific attribute.
+    with self.assertRaises(AssertionError):
+      ra.HasBoardParallel(self.BATTR, self.BOARD, self.TARGET)
+    ra.RegisterBoardAttrs(self.BOARD, self.TARGET)
+    self.assertFalse(ra.HasBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+
+    # Use SetBoardParallelDefault to set it.
+    ra.SetBoardParallelDefault(self.BATTR, value, self.BOARD, self.TARGET)
+    self.assertTrue(ra.HasBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+    self.assertEqual(value,
+                     ra.GetBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+
+    # Calling SetBoardParallelDefault again has no effect.
+    ra.SetBoardParallelDefault(self.BATTR, 'junk', self.BOARD, self.TARGET)
+    self.assertTrue(ra.HasBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+    self.assertEqual(value,
+                     ra.GetBoardParallel(self.BATTR, self.BOARD, self.TARGET))
+
+  def testAttributeError(self):
+    """Test accessing run attributes that do not exist."""
+    ra = self._NewRunAttributes()
+    value = 'foobar'
+
+    # Set/Get on made up attribute name.
+    self.assertRaises(AttributeError, setattr, ra, 'foo', value)
+    self.assertRaises(AttributeError, getattr, ra, 'foo')
+
+    # A board/target value is valid, but only if it is registered first.
+    self.assertRaises(AssertionError, ra.GetBoardParallel,
+                      self.BATTR, self.BOARD, self.TARGET)
+    ra.RegisterBoardAttrs(self.BOARD, self.TARGET)
+    self.assertRaises(AttributeError, ra.GetBoardParallel,
+                      self.BATTR, self.BOARD, self.TARGET)
+
+
+class BoardRunAttributesTest(_BuilderRunTestCase):
+  """Test the BoardRunAttributes class."""
+
+  BOARD = 'SomeBoard'
+  TARGET = 'SomeConfigName'
+  VALUE = 'AnyValueWillDo'
+
+  # Any valid board-specific attribute will work here.
+  BATTR = 'breakpad_symbols_generated'
+
+  class _SetAttr(object):
+    """Stage-like class to set attr on a BoardRunAttributes obj."""
+    def __init__(self, bra, attr, value, delay=1):
+      # delay: seconds to sleep before setting the attribute.
+      self.bra = bra
+      self.attr = attr
+      self.value = value
+      self.delay = delay
+
+    def Run(self):
+      if self.delay:
+        time.sleep(self.delay)
+      self.bra.SetParallel(self.attr, self.value)
+
+  class _WaitForAttr(object):
+    """Stage-like class to wait for attr on BoardRunAttributes obj."""
+    def __init__(self, bra, attr, expected_value, timeout=10):
+      # timeout: max seconds to wait for the attribute to appear.
+      self.bra = bra
+      self.attr = attr
+      self.expected_value = expected_value
+      self.timeout = timeout
+
+    def GetParallel(self):
+      return self.bra.GetParallel(self.attr, timeout=self.timeout)
+
+  class _CheckWaitForAttr(_WaitForAttr):
+    """Stage-like class to wait for then check attr on BoardRunAttributes."""
+    def Run(self):
+      value = self.GetParallel()
+      assert value == self.expected_value, \
+          ('For run attribute %s expected value %r but got %r.' %
+           (self.attr, self.expected_value, value))
+
+  class _TimeoutWaitForAttr(_WaitForAttr):
+    """Stage-like class to time-out waiting for attr on BoardRunAttributes."""
+    def Run(self):
+      try:
+        self.GetParallel()
+        assert False, 'Expected AttrTimeoutError'
+      except cbuildbot_run.AttrTimeoutError:
+        pass
+
+  def setUp(self):
+    self.ra = self._NewRunAttributes()
+    self.bra = self.ra.RegisterBoardAttrs(self.BOARD, self.TARGET)
+
+  def _TestParallelSetGet(self, stage_args):
+    """Helper to run "stages" in parallel, according to |stage_args|.
+
+    Args:
+      stage_args: List of tuples of the form (stage_object, extra_args, ...)
+        where stage_object has a Run method which takes a BoardRunAttributes
+        object as the first argument and extra_args for the remaining arguments.
+    """
+    stages = [a[0](self.bra, *a[1:]) for a in stage_args]
+    steps = [stage.Run for stage in stages]
+
+    parallel.RunParallelSteps(steps)
+
+  def testParallelSetGetFast(self):
+    """Pass the parallel run attribute around with no delay."""
+    stage_args = [
+        (self._CheckWaitForAttr, self.BATTR, self.VALUE),
+        (self._SetAttr, self.BATTR, self.VALUE),
+    ]
+    self._TestParallelSetGet(stage_args)
+    self.assertRaises(AttributeError,
+                      getattr, self.bra, self.BATTR)
+    self.assertEqual(self.VALUE, self.bra.GetParallel(self.BATTR))
+
+  def testParallelSetGetSlow(self):
+    """Pass the parallel run attribute around with a delay."""
+    stage_args = [
+        (self._SetAttr, self.BATTR, self.VALUE, 10),
+        (self._TimeoutWaitForAttr, self.BATTR, self.VALUE, 2),
+    ]
+    self._TestParallelSetGet(stage_args)
+    self.assertEqual(self.VALUE, self.bra.GetParallel(self.BATTR))
+
+  def testParallelSetGetManyGets(self):
+    """Set the parallel run attribute in one stage, access in many stages."""
+    stage_args = [
+        (self._SetAttr, self.BATTR, self.VALUE, 8),
+        (self._CheckWaitForAttr, self.BATTR, self.VALUE, 16),
+        (self._CheckWaitForAttr, self.BATTR, self.VALUE, 16),
+        (self._CheckWaitForAttr, self.BATTR, self.VALUE, 16),
+        (self._TimeoutWaitForAttr, self.BATTR, self.VALUE, 1),
+    ]
+    self._TestParallelSetGet(stage_args)
+    self.assertEqual(self.VALUE, self.bra.GetParallel(self.BATTR))
+
+  def testParallelSetGetManySets(self):
+    """Set the parallel run attribute in many stages, access in one stage."""
+    # Two "stages" set the value, with increasing delays.  The stage that
+    # checks the value should get the first value set.
+    stage_args = [
+        (self._SetAttr, self.BATTR, self.VALUE + '1', 1),
+        (self._SetAttr, self.BATTR, self.VALUE + '2', 11),
+        (self._CheckWaitForAttr, self.BATTR, self.VALUE + '1', 12),
+    ]
+    self._TestParallelSetGet(stage_args)
+    self.assertEqual(self.VALUE + '2', self.bra.GetParallel(self.BATTR))
+
+  def testSetGet(self):
+    """Test that board-specific attrs do not work with set/get directly."""
+    self.assertRaises(AttributeError, setattr,
+                      self.bra, 'breakpad_symbols_generated', self.VALUE)
+    self.assertRaises(AttributeError, getattr,
+                      self.bra, 'breakpad_symbols_generated')
+
+  def testAccessRegularRunAttr(self):
+    """Test that regular attributes are not known to BoardRunAttributes."""
+    self.assertRaises(AttributeError, getattr, self.bra, 'release_tag')
+    self.assertRaises(AttributeError, setattr, self.bra, 'release_tag', 'foo')
diff --git a/cbuildbot/cbuildbot_unittest b/cbuildbot/cbuildbot_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/cbuildbot_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/cbuildbot_unittest.py b/cbuildbot/cbuildbot_unittest.py
new file mode 100644
index 0000000..c17bb78
--- /dev/null
+++ b/cbuildbot/cbuildbot_unittest.py
@@ -0,0 +1,500 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the cbuildbot script."""
+
+from __future__ import print_function
+
+import argparse
+import glob
+import optparse
+import os
+
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import config_lib_unittest
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot.builders import simple_builders
+from chromite.lib import cidb
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import partial_mock
+from chromite.scripts import cbuildbot
+
+
+# pylint: disable=protected-access
+
+
+class BuilderRunMock(partial_mock.PartialMock):
+  """Partial mock for BuilderRun class."""
+
+  TARGET = 'chromite.cbuildbot.cbuildbot_run._BuilderRunBase'
+  ATTRS = ('GetVersionInfo', 'DetermineChromeVersion',)
+
+  def __init__(self, verinfo):
+    super(BuilderRunMock, self).__init__()
+    self._version_info = verinfo
+
+  def GetVersionInfo(self, _inst):
+    """This way builders don't have to set the version from the overlay."""
+    return self._version_info
+
+  def DetermineChromeVersion(self, _inst):
+    """Normally this runs a portage command to look at the chrome ebuild."""
+    return self._version_info.chrome_branch
+
+
+class SimpleBuilderTestCase(cros_test_lib.MockTestCase):
+  """Common stubs for SimpleBuilder tests."""
+
+  CHROME_BRANCH = '27'
+  VERSION = '1234.5.6'
+
+  def setUp(self):
+    verinfo = manifest_version.VersionInfo(
+        version_string=self.VERSION, chrome_branch=self.CHROME_BRANCH)
+
+    # Stub out version lookups so tests don't read the overlay or run
+    # portage commands (see BuilderRunMock above).
+    self.StartPatcher(BuilderRunMock(verinfo))
+
+    self.PatchObject(simple_builders.SimpleBuilder, 'GetVersionInfo',
+                     return_value=verinfo)
+
+
+# Test-only exceptions raised by mocks to stop cbuildbot at a known point.
+class TestArgsparseError(Exception):
+  """Exception used by parser.error() mock to halt execution."""
+
+
+class TestHaltedException(Exception):
+  """Exception used by mocks to halt execution without indicating failure."""
+
+
+class RunBuildStagesTest(cros_build_lib_unittest.RunCommandTempDirTestCase,
+                         SimpleBuilderTestCase):
+  """Test that cbuildbot runs the appropriate stages for a given config."""
+
+  def setUp(self):
+    self.buildroot = os.path.join(self.tempdir, 'buildroot')
+    osutils.SafeMakedirs(self.buildroot)
+    # Always stub RunCommand out as we use it in every method.
+    self.site_config = config_lib_unittest.MockSiteConfig()
+    self.build_config = config_lib_unittest.MockBuildConfig()
+    self.bot_id = self.build_config.name
+    self.build_config['master'] = False
+    self.build_config['important'] = False
+
+    # Use the cbuildbot parser to create properties and populate default values.
+    self.parser = cbuildbot._CreateParser()
+
+    argv = ['-r', self.buildroot, '--buildbot', '--debug', self.bot_id]
+    self.options, _ = cbuildbot._ParseCommandLine(self.parser, argv)
+    self.options.bootstrap = False
+    self.options.clean = False
+    self.options.resume = False
+    self.options.sync = False
+    self.options.build = False
+    self.options.uprev = False
+    self.options.tests = False
+    self.options.archive = False
+    self.options.remote_test_status = False
+    self.options.patches = None
+    self.options.prebuilts = False
+
+    self._manager = parallel.Manager()
+    self._manager.__enter__()
+    self.run = cbuildbot_run.BuilderRun(self.options, self.site_config,
+                                        self.build_config, self._manager)
+
+    # Pretend the checkout supports the current re-exec API so cbuildbot
+    # doesn't try to bootstrap a different chromite.
+    self.rc.AddCmdResult(
+        [constants.PATH_TO_CBUILDBOT, '--reexec-api-version'],
+        output=constants.REEXEC_API_VERSION)
+
+  def tearDown(self):
+    # Mimic exiting a 'with' statement.  Guard with hasattr in case setUp
+    # failed before the manager was created.
+    if hasattr(self, '_manager'):
+      self._manager.__exit__(None, None, None)
+
+  def testChromeosOfficialSet(self):
+    """Verify that CHROMEOS_OFFICIAL is set correctly."""
+    self.build_config['chromeos_official'] = True
+
+    cidb.CIDBConnectionFactory.SetupNoCidb()
+
+    # Clean up before.
+    os.environ.pop('CHROMEOS_OFFICIAL', None)
+    simple_builders.SimpleBuilder(self.run).Run()
+    self.assertIn('CHROMEOS_OFFICIAL', os.environ)
+
+  def testChromeosOfficialNotSet(self):
+    """Verify that CHROMEOS_OFFICIAL is not always set."""
+    self.build_config['chromeos_official'] = False
+
+    cidb.CIDBConnectionFactory.SetupNoCidb()
+
+    # Clean up before.
+    os.environ.pop('CHROMEOS_OFFICIAL', None)
+    simple_builders.SimpleBuilder(self.run).Run()
+    self.assertNotIn('CHROMEOS_OFFICIAL', os.environ)
+
+
+class LogTest(cros_test_lib.TempDirTestCase):
+  """Test logging functionality."""
+
+  def _generateLogs(self, num):
+    """Generates cbuildbot.log and num backups.
+
+    Args:
+      num: Number of backup logs (cbuildbot.log.1 .. cbuildbot.log.num) to
+        create; the main cbuildbot.log gets content str(num + 1).
+    """
+    with open(os.path.join(self.tempdir, 'cbuildbot.log'), 'w') as f:
+      f.write(str(num + 1))
+
+    for i in range(1, num + 1):
+      with open(os.path.join(self.tempdir, 'cbuildbot.log.' + str(i)),
+                'w') as f:
+        f.write(str(i))
+
+  def testZeroToOneLogs(self):
+    """Test beginning corner case."""
+    self._generateLogs(0)
+    cbuildbot._BackupPreviousLog(os.path.join(self.tempdir, 'cbuildbot.log'),
+                                 backup_limit=25)
+    with open(os.path.join(self.tempdir, 'cbuildbot.log.1')) as f:
+      self.assertEquals(f.readline(), '1')
+
+  def testNineToTenLogs(self):
+    """Test handling *.log.9 to *.log.10 (correct sorting)."""
+    self._generateLogs(9)
+    cbuildbot._BackupPreviousLog(os.path.join(self.tempdir, 'cbuildbot.log'),
+                                 backup_limit=25)
+    with open(os.path.join(self.tempdir, 'cbuildbot.log.10')) as f:
+      self.assertEquals(f.readline(), '10')
+
+  def testOverLimit(self):
+    """Test going over the limit and having to purge old logs."""
+    self._generateLogs(25)
+    cbuildbot._BackupPreviousLog(os.path.join(self.tempdir, 'cbuildbot.log'),
+                                 backup_limit=25)
+    with open(os.path.join(self.tempdir, 'cbuildbot.log.26')) as f:
+      self.assertEquals(f.readline(), '26')
+
+    self.assertEquals(len(glob.glob(os.path.join(self.tempdir, 'cbuildbot*'))),
+                      25)
+
+
class InterfaceTest(cros_test_lib.MockTestCase, cros_test_lib.LoggingTestCase):
  """Test the command line interface."""

  # Config name and buildroot shared by most of the cases below.
  _X86_PREFLIGHT = 'x86-generic-paladin'
  _BUILD_ROOT = '/b/test_build1'

  def setUp(self):
    # Each test feeds a fresh parser an argv-style list of strings.
    self.parser = cbuildbot._CreateParser()
    self.site_config = config_lib_unittest.MockSiteConfig()

  def assertDieSysExit(self, *args, **kwargs):
    """Assert that calling args[0](*args[1:], **kwargs) exits via Die()."""
    self.assertRaises(cros_build_lib.DieSystemExit, *args, **kwargs)

  def testDepotTools(self):
    """Test that the entry point used by depot_tools works."""
    path = os.path.join(constants.SOURCE_ROOT, 'chromite', 'bin', 'cbuildbot')

    # Verify the tests below actually are testing correct behaviour;
    # specifically that it doesn't always just return 0.
    self.assertRaises(cros_build_lib.RunCommandError,
                      cros_build_lib.RunCommand,
                      ['cbuildbot', '--monkeys'], cwd=constants.SOURCE_ROOT)

    # Validate depot_tools lookup.
    cros_build_lib.RunCommand(
        ['cbuildbot', '--help'], cwd=constants.SOURCE_ROOT, capture_output=True)

    # Validate buildbot invocation pathway.
    cros_build_lib.RunCommand(
        [path, '--help'], cwd=constants.SOURCE_ROOT, capture_output=True)

  def testDebugBuildBotSetByDefault(self):
    """Test that debug and buildbot flags are set by default."""
    args = ['--local', '-r', self._BUILD_ROOT, self._X86_PREFLIGHT]
    options, args = cbuildbot._ParseCommandLine(self.parser, args)
    self.assertTrue(options.debug)
    self.assertFalse(options.buildbot)

  def testBuildBotOption(self):
    """Test that --buildbot option unsets debug flag."""
    args = ['-r', self._BUILD_ROOT, '--buildbot', self._X86_PREFLIGHT]
    options, args = cbuildbot._ParseCommandLine(self.parser, args)
    self.assertFalse(options.debug)
    self.assertTrue(options.buildbot)

  def testBuildBotWithDebugOption(self):
    """Test that --debug option overrides --buildbot option."""
    args = ['-r', self._BUILD_ROOT, '--buildbot', '--debug',
            self._X86_PREFLIGHT]
    options, args = cbuildbot._ParseCommandLine(self.parser, args)
    self.assertTrue(options.debug)
    self.assertTrue(options.buildbot)

  def testLocalTrybotWithSpacesInPatches(self):
    """Test that we handle spaces in patch arguments."""
    # Leading/trailing/embedded whitespace must be stripped and split.
    args = ['-r', self._BUILD_ROOT, '--remote', '--local-patches',
            ' proj:br \t  proj2:b2 ',
            self._X86_PREFLIGHT]
    options, args = cbuildbot._ParseCommandLine(self.parser, args)
    self.assertEquals(options.local_patches, ['proj:br', 'proj2:b2'])

  def testBuildBotWithRemotePatches(self):
    """Test that --buildbot errors out with patches."""
    args = ['-r', self._BUILD_ROOT, '--buildbot', '-g', '1234',
            self._X86_PREFLIGHT]
    self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)

  def testRemoteBuildBotWithRemotePatches(self):
    """Test that --buildbot and --remote errors out with patches."""
    args = ['-r', self._BUILD_ROOT, '--buildbot', '--remote', '-g', '1234',
            self._X86_PREFLIGHT]
    self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)

  def testBuildbotDebugWithPatches(self):
    """Test we can test patches with --buildbot --debug."""
    args = ['--remote', '-g', '1234', '--debug', '--buildbot',
            self._X86_PREFLIGHT]
    # Parsing alone must succeed (no Die()); no further assertions needed.
    cbuildbot._ParseCommandLine(self.parser, args)

  def testBuildBotWithoutProfileOption(self):
    """Test that no --profile option gets defaulted."""
    args = ['--buildbot', self._X86_PREFLIGHT]
    options, args = cbuildbot._ParseCommandLine(self.parser, args)
    self.assertEquals(options.profile, None)

  def testBuildBotWithProfileOption(self):
    """Test that --profile option gets parsed."""
    args = ['--buildbot', '--profile', 'carp', self._X86_PREFLIGHT]
    options, args = cbuildbot._ParseCommandLine(self.parser, args)
    self.assertEquals(options.profile, 'carp')

  def testValidateClobberUserDeclines_1(self):
    """Test case where user declines in prompt."""
    self.PatchObject(os.path, 'exists', return_value=True)
    self.PatchObject(cros_build_lib, 'GetInput', return_value='No')
    self.assertFalse(commands.ValidateClobber(self._BUILD_ROOT))

  def testValidateClobberUserDeclines_2(self):
    """Test case where user does not enter the full 'yes' pattern."""
    self.PatchObject(os.path, 'exists', return_value=True)
    # First answer is invalid, so the prompt must be re-issued once.
    m = self.PatchObject(cros_build_lib, 'GetInput', side_effect=['asdf', 'No'])
    self.assertFalse(commands.ValidateClobber(self._BUILD_ROOT))
    self.assertEqual(m.call_count, 2)

  def testValidateClobberProtectRunningChromite(self):
    """User should not be clobbering our own source."""
    # The buildroot here is the parent of this very test file's directory.
    cwd = os.path.dirname(os.path.realpath(__file__))
    buildroot = os.path.dirname(cwd)
    self.assertDieSysExit(commands.ValidateClobber, buildroot)

  def testValidateClobberProtectRoot(self):
    """User should not be clobbering /"""
    self.assertDieSysExit(commands.ValidateClobber, '/')

  def testBuildBotWithBadChromeRevOption(self):
    """chrome_rev can't be passed an invalid option after chrome_root."""
    args = [
        '--local',
        '--buildroot=/tmp',
        '--chrome_root=.',
        '--chrome_rev=%s' % constants.CHROME_REV_TOT,
        self._X86_PREFLIGHT,
    ]
    self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)

  def testBuildBotWithBadChromeRootOption(self):
    """chrome_root can't get passed after non-local chrome_rev."""
    args = [
        '--local',
        '--buildroot=/tmp',
        '--chrome_rev=%s' % constants.CHROME_REV_TOT,
        '--chrome_root=.',
        self._X86_PREFLIGHT,
    ]
    self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)

  def testBuildBotWithBadChromeRevOptionLocal(self):
    """chrome_rev can't be local without chrome_root."""
    args = [
        '--local',
        '--buildroot=/tmp',
        '--chrome_rev=%s' % constants.CHROME_REV_LOCAL,
        self._X86_PREFLIGHT,
    ]
    self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)

  def testBuildBotWithGoodChromeRootOption(self):
    """chrome_root can be set without chrome_rev."""
    args = [
        '--local',
        '--buildroot=/tmp',
        '--chrome_root=.',
        self._X86_PREFLIGHT,
    ]
    options, args = cbuildbot._ParseCommandLine(self.parser, args)
    # Passing chrome_root alone implies a LOCAL chrome_rev.
    self.assertEquals(options.chrome_rev, constants.CHROME_REV_LOCAL)
    self.assertNotEquals(options.chrome_root, None)

  def testBuildBotWithGoodChromeRevAndRootOption(self):
    """chrome_rev can get reset around chrome_root."""
    # Repeated --chrome_rev flags must be tolerated; the last value after
    # --chrome_root wins.
    args = [
        '--local',
        '--buildroot=/tmp',
        '--chrome_rev=%s' % constants.CHROME_REV_LATEST,
        '--chrome_rev=%s' % constants.CHROME_REV_STICKY,
        '--chrome_rev=%s' % constants.CHROME_REV_TOT,
        '--chrome_rev=%s' % constants.CHROME_REV_TOT,
        '--chrome_rev=%s' % constants.CHROME_REV_STICKY,
        '--chrome_rev=%s' % constants.CHROME_REV_LATEST,
        '--chrome_rev=%s' % constants.CHROME_REV_LOCAL,
        '--chrome_root=.',
        '--chrome_rev=%s' % constants.CHROME_REV_TOT,
        '--chrome_rev=%s' % constants.CHROME_REV_LOCAL,
        self._X86_PREFLIGHT,
    ]
    options, args = cbuildbot._ParseCommandLine(self.parser, args)
    self.assertEquals(options.chrome_rev, constants.CHROME_REV_LOCAL)
    self.assertNotEquals(options.chrome_root, None)

  def testPassThroughOptions(self):
    """Test we are building up pass-through list properly."""
    args = ['--remote', '-g', '1234', self._X86_PREFLIGHT]
    options, args = cbuildbot._ParseCommandLine(self.parser, args)

    self.assertEquals(options.pass_through_args, ['-g', '1234'])

  def testDebugPassThrough(self):
    """Test we are passing --debug through."""
    args = ['--remote', '--debug', '--buildbot', self._X86_PREFLIGHT]
    options, args = cbuildbot._ParseCommandLine(self.parser, args)
    self.assertEquals(options.pass_through_args, ['--debug', '--buildbot'])

  def testCreateBranch(self):
    """Test a normal create branch run."""
    # branch-util without --version must die during parsing.
    args = ['--branch-name', 'refs/heads/test', constants.BRANCH_UTIL_CONFIG]
    self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)

  def testCreateBranchNoVersion(self):
    """Test we require --version with branch-util."""
    with cros_test_lib.LoggingCapturer() as logger:
      args = [constants.BRANCH_UTIL_CONFIG]
      self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)
      # The error message must point the user at the missing flag.
      self.AssertLogsContain(logger, '--branch-name')

  def testCreateBranchDelete(self):
    """Test we don't require --version with --delete."""
    args = ['--delete-branch', '--branch-name', 'refs/heads/test',
            constants.BRANCH_UTIL_CONFIG]
    cbuildbot._ParseCommandLine(self.parser, args)

  def testBranchOptionsWithoutBranchConfig(self):
    """Error out when branch options passed in without branch-util config."""
    for extra_args in [['--delete-branch'],
                       ['--branch-name', 'refs/heads/test'],
                       ['--rename-to', 'abc']]:
      with cros_test_lib.LoggingCapturer() as logger:
        args = [self._X86_PREFLIGHT] + extra_args
        self.assertDieSysExit(cbuildbot._ParseCommandLine, self.parser, args)
        self.AssertLogsContain(logger, 'Cannot specify')
+
+
class FullInterfaceTest(cros_test_lib.MockTempDirTestCase):
  """Tests that run the cbuildbot.main() function directly.

  Note this explicitly suppresses automatic VerifyAll() calls; thus if you want
  that checked, you have to invoke it yourself.
  """

  def MakeTestRootDir(self, relpath):
    """Create (if needed) and return an absolute dir under the test root."""
    abspath = os.path.join(self.root, relpath)
    osutils.SafeMakedirs(abspath)
    return abspath

  def setUp(self):
    self.root = self.tempdir
    self.buildroot = self.MakeTestRootDir('build_root')
    self.sourceroot = self.MakeTestRootDir('source_root')
    self.trybot_root = self.MakeTestRootDir('trybot')
    self.trybot_internal_root = self.MakeTestRootDir('trybot-internal')
    # '.trybot' marker files; presumably mark a directory as an existing
    # trybot buildroot (testInferBuildRootExists relies on this) — see
    # cbuildbot for the authoritative semantics.
    self.external_marker = os.path.join(self.trybot_root, '.trybot')
    self.internal_marker = os.path.join(self.trybot_internal_root, '.trybot')

    # A minimal fake repo checkout so sourceroot looks valid.
    osutils.SafeMakedirs(os.path.join(self.sourceroot, '.repo', 'manifests'))
    osutils.SafeMakedirs(os.path.join(self.sourceroot, '.repo', 'repo'))

    # Stub out all relevant methods regardless of whether they are called in the
    # specific test case.
    self.PatchObject(optparse.OptionParser, 'error',
                     side_effect=TestArgsparseError())
    self.PatchObject(argparse.ArgumentParser, 'error',
                     side_effect=TestArgsparseError())
    self.inchroot_mock = self.PatchObject(cros_build_lib, 'IsInsideChroot',
                                          return_value=False)
    # Any unexpected prompt raises, so tests fail loudly instead of hanging.
    self.input_mock = self.PatchObject(cros_build_lib, 'GetInput',
                                       side_effect=Exception())
    self.PatchObject(cbuildbot, '_RunBuildStagesWrapper', return_value=True)

  def assertMain(self, args, common_options=True):
    """Invoke cbuildbot.main(args), optionally appending common options."""
    if common_options:
      # Suppress cgroups code.  For cbuildbot invocation, it doesn't hugely
      # care about cgroups- that's a blackbox to it.  As such these unittests
      # should not be sensitive to it.
      args.extend(['--sourceroot', self.sourceroot, '--nocgroups',
                   '--notee'])
    return cbuildbot.main(args)

  def testNullArgsStripped(self):
    """Test that null args are stripped out and don't cause error."""
    self.assertMain(['--local', '-r', self.buildroot, '', '',
                     'x86-generic-paladin'])

  def testMultipleConfigsError(self):
    """Test that multiple configs cause error if --remote is not used."""
    self.assertRaises(cros_build_lib.DieSystemExit, self.assertMain,
                      ['--local',
                       '-r', self.buildroot,
                       'arm-generic-paladin',
                       'x86-generic-paladin'])

  def testDontInferBuildrootForBuildBotRuns(self):
    """Test that we don't infer buildroot if run with --buildbot option."""
    self.assertRaises(TestArgsparseError, self.assertMain,
                      ['--buildbot', 'x86-generic-paladin'])

  def testInferExternalBuildRoot(self):
    """Test that we default to correct buildroot for external config."""
    self.PatchObject(cbuildbot, '_ConfirmBuildRoot',
                     side_effect=TestHaltedException())
    self.assertRaises(TestHaltedException, self.assertMain,
                      ['--local', 'x86-generic-paladin'])

  def testInferInternalBuildRoot(self):
    """Test that we default to correct buildroot for internal config."""
    self.PatchObject(cbuildbot, '_ConfirmBuildRoot',
                     side_effect=TestHaltedException())
    self.assertRaises(TestHaltedException, self.assertMain,
                      ['--local', 'x86-mario-paladin'])

  def testInferBuildRootPromptNo(self):
    """Test that a 'no' answer on the prompt halts execution."""
    # Replace the raise-on-prompt default with a real 'no' answer.
    self.input_mock.side_effect = None
    self.input_mock.return_value = 'no'
    self.assertRaises(SystemExit, self.assertMain,
                      ['--local', 'x86-generic-paladin'])

  def testInferBuildRootExists(self):
    """Test that we don't prompt the user if buildroot already exists."""
    osutils.Touch(self.external_marker)
    os.utime(self.external_marker, None)
    self.assertMain(['--local', 'x86-generic-paladin'])

  def testBuildbotDiesInChroot(self):
    """Buildbot should quit if run inside a chroot."""
    self.inchroot_mock.return_value = True
    self.assertRaises(cros_build_lib.DieSystemExit, self.assertMain,
                      ['--local', '-r', self.buildroot, 'x86-generic-paladin'])
diff --git a/cbuildbot/chromeos_config.py b/cbuildbot/chromeos_config.py
new file mode 100644
index 0000000..77131d0
--- /dev/null
+++ b/cbuildbot/chromeos_config.py
@@ -0,0 +1,2894 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configuration options for various cbuildbot builders."""
+
+from __future__ import print_function
+
+import copy
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.lib import factory
+
+
# Set to True if this is a release branch. This flips every '-release' builder
# configuration to the shape used by the release waterfall (consulted by
# GetDefaultWaterfall below).
IS_RELEASE_BRANCH = False
+
+
def OverrideConfigForTrybot(build_config, options):
  """Apply trybot-specific configuration settings.

  Args:
    build_config: The build configuration dictionary to override.
      The dictionary is not modified.
    options: The options passed on the commandline.

  Returns:
    A build configuration dictionary with the overrides applied.
  """
  result = copy.deepcopy(build_config)
  for cfg in [result] + result['child_configs']:
    # Always uprev on trybots so patched-in changes are actually built.
    cfg['uprev'] = True
    if cfg['internal']:
      cfg['overlays'] = constants.BOTH_OVERLAYS

    # The dev manifest may require elevated access, so only local (not
    # remote) trybot runs switch to it.
    if not options.remote_trybot:
      cfg['manifest'] = cfg['dev_manifest']

    # Trybots never push images.
    cfg['push_image'] = False

    # Payload generation only applies to payload-type builds.
    if cfg['build_type'] != constants.PAYLOADS_TYPE:
      cfg['paygen'] = False

    if options.hwtest and cfg['hw_tests_override'] is not None:
      cfg['hw_tests'] = cfg['hw_tests_override']

    # Remote trybot runs default to starting from a fresh chroot.
    if options.remote_trybot:
      cfg['chroot_replace'] = True

    # Run the override VM test set on trybots so developers get better
    # test coverage for their changes.
    if cfg['vm_tests_override'] is not None:
      cfg['vm_tests'] = cfg['vm_tests_override']

  return result
+
+
def GetDefaultWaterfall(build_config):
  """Return the default waterfall for `build_config`, or None.

  Only non-branch builders that are important or masters get a default
  waterfall; the choice then depends on the build type.
  """
  if not (build_config['important'] or build_config['master']):
    return None
  if build_config['branch']:
    return None

  b_type = build_config['build_type']

  if config_lib.IsCanaryType(b_type):
    # Canaries belong on the release waterfall when building for a
    # release branch, and on the internal waterfall otherwise.
    if IS_RELEASE_BRANCH:
      return constants.WATERFALL_RELEASE
    return constants.WATERFALL_INTERNAL

  if config_lib.IsCQType(b_type):
    # A Paladin's waterfall follows its 'internal' status.
    if build_config['internal']:
      return constants.WATERFALL_INTERNAL
    return constants.WATERFALL_EXTERNAL

  if config_lib.IsPFQType(b_type) or b_type == constants.PRE_CQ_LAUNCHER_TYPE:
    # PFQs and the pre-CQ launcher live on the internal waterfall.
    return constants.WATERFALL_INTERNAL

  # No default active waterfall.
  return None
+
+
class HWTestList(object):
  """Container for methods to generate HWTest lists."""

  @classmethod
  def DefaultList(cls, **kwargs):
    """Returns a default list of HWTestConfig's for a build.

    Args:
      **kwargs: Overrides for the generated configs.
    """
    # Number of tests running in parallel in the AU suite.
    AU_TESTS_NUM = 2
    # Number of tests running in parallel in the asynchronous canary
    # test suite
    ASYNC_TEST_NUM = 2

    # Set the number of machines for the au and qav suites. If we are
    # constrained in the number of duts in the lab, only give 1 dut to each.
    if (kwargs.get('num', constants.HWTEST_DEFAULT_NUM) >=
        constants.HWTEST_DEFAULT_NUM):
      au_dict = dict(num=AU_TESTS_NUM)
      async_dict = dict(num=ASYNC_TEST_NUM)
    else:
      au_dict = dict(num=1)
      async_dict = dict(num=1)

    au_kwargs = kwargs.copy()
    au_kwargs.update(au_dict)

    # Async suites run at post-build priority and never retry.
    async_kwargs = kwargs.copy()
    async_kwargs.update(async_dict)
    async_kwargs['priority'] = constants.HWTEST_POST_BUILD_PRIORITY
    async_kwargs['retry'] = False
    async_kwargs['max_retries'] = None
    # NOTE: 'async' became a reserved word in Python 3; this code targets
    # Python 2.
    async_kwargs['async'] = True
    async_kwargs['suite_min_duts'] = 1

    # BVT + AU suite.
    return [config_lib.HWTestConfig(constants.HWTEST_BVT_SUITE,
                                    blocking=True, **kwargs),
            config_lib.HWTestConfig(constants.HWTEST_AU_SUITE,
                                    blocking=True, **au_kwargs),
            config_lib.HWTestConfig(constants.HWTEST_COMMIT_SUITE,
                                    **async_kwargs),
            config_lib.HWTestConfig(constants.HWTEST_CANARY_SUITE,
                                    **async_kwargs)]

  @classmethod
  def DefaultListCanary(cls, **kwargs):
    """Returns a default list of config_lib.HWTestConfig's for a canary build.

    Args:
      **kwargs: Overrides for the generated configs.
    """
    # Set minimum_duts default to 4, which means that lab will check the
    # number of available duts to meet the minimum requirement before creating
    # the suite job for canary builds.
    kwargs.setdefault('minimum_duts', 4)
    kwargs.setdefault('file_bugs', True)
    return HWTestList.DefaultList(**kwargs)

  @classmethod
  def AFDOList(cls, **kwargs):
    """Returns a default list of HWTestConfig's for a AFDO build.

    Args:
      **kwargs: Overrides for the generated configs.
    """
    afdo_dict = dict(pool=constants.HWTEST_SUITES_POOL,
                     timeout=120 * 60, num=1, async=True, retry=False,
                     max_retries=None)
    afdo_dict.update(kwargs)
    return [config_lib.HWTestConfig('perf_v2', **afdo_dict)]

  @classmethod
  def DefaultListNonCanary(cls, **kwargs):
    """Return a default list of HWTestConfig's for a non-canary build.

    Optional arguments may be overridden in `kwargs`, except that
    the `blocking` setting cannot be provided.
    """
    return [config_lib.HWTestConfig(constants.HWTEST_BVT_SUITE, **kwargs),
            config_lib.HWTestConfig(constants.HWTEST_COMMIT_SUITE, **kwargs)]

  @classmethod
  def DefaultListCQ(cls, **kwargs):
    """Return a default list of HWTestConfig's for a CQ build.

    Optional arguments may be overridden in `kwargs`, except that
    the `blocking` setting cannot be provided.
    """
    default_dict = dict(pool=constants.HWTEST_PALADIN_POOL, timeout=120 * 60,
                        file_bugs=False, priority=constants.HWTEST_CQ_PRIORITY,
                        minimum_duts=4, offload_failures_only=True)
    # Allows kwargs overrides to default_dict for cq.
    default_dict.update(kwargs)
    return HWTestList.DefaultListNonCanary(**default_dict)

  @classmethod
  def DefaultListPFQ(cls, **kwargs):
    """Return a default list of HWTestConfig's for a PFQ build.

    Optional arguments may be overridden in `kwargs`, except that
    the `blocking` setting cannot be provided.
    """
    default_dict = dict(pool=constants.HWTEST_PFQ_POOL, file_bugs=True,
                        priority=constants.HWTEST_PFQ_PRIORITY,
                        retry=False, max_retries=None, minimum_duts=4)
    # Allows kwargs overrides to default_dict for pfq.
    default_dict.update(kwargs)
    return HWTestList.DefaultListNonCanary(**default_dict)

  @classmethod
  def SharedPoolPFQ(cls, **kwargs):
    """Return a list of HWTestConfigs for PFQ which uses a shared pool.

    The returned suites will run in pool:critical by default, which is
    shared with other types of builders (canaries, cq). The first suite in the
    list is a blocking sanity suite that verifies the build will not break dut.
    """
    sanity_dict = dict(pool=constants.HWTEST_MACH_POOL,
                       file_bugs=True, priority=constants.HWTEST_PFQ_PRIORITY,
                       retry=False, max_retries=None)
    sanity_dict.update(kwargs)
    # The sanity settings below always win over caller overrides.
    sanity_dict.update(dict(num=1, minimum_duts=1, suite_min_duts=1,
                            blocking=True))
    default_dict = dict(pool=constants.HWTEST_MACH_POOL,
                        suite_min_duts=3)
    default_dict.update(kwargs)
    suite_list = [config_lib.HWTestConfig(constants.HWTEST_SANITY_SUITE,
                                          **sanity_dict)]
    suite_list.extend(HWTestList.DefaultListPFQ(**default_dict))
    return suite_list

  @classmethod
  def SharedPoolCQ(cls, **kwargs):
    """Return a list of HWTestConfigs for CQ which uses a shared pool.

    The returned suites will run in pool:critical by default, which is
    shared with other types of builder (canaries, pfq). The first suite in the
    list is a blocking sanity suite that verifies the build will not break dut.
    """
    sanity_dict = dict(pool=constants.HWTEST_MACH_POOL, timeout=120 * 60,
                       file_bugs=False, priority=constants.HWTEST_CQ_PRIORITY)
    sanity_dict.update(kwargs)
    # The sanity settings below always win over caller overrides.
    sanity_dict.update(dict(num=1, minimum_duts=1, suite_min_duts=1,
                            blocking=True))
    default_dict = dict(pool=constants.HWTEST_MACH_POOL,
                        suite_min_duts=10)
    default_dict.update(kwargs)
    suite_list = [config_lib.HWTestConfig(constants.HWTEST_SANITY_SUITE,
                                          **sanity_dict)]
    suite_list.extend(HWTestList.DefaultListCQ(**default_dict))
    return suite_list

  @classmethod
  def SharedPoolCanary(cls, **kwargs):
    """Return a list of HWTestConfigs for Canary which uses a shared pool.

    The returned suites will run in pool:critical by default, which is
    shared with CQs. The first suite in the list is a blocking sanity suite
    that verifies the build will not break dut.
    """
    sanity_dict = dict(pool=constants.HWTEST_MACH_POOL, file_bugs=True)
    sanity_dict.update(kwargs)
    # The sanity settings below always win over caller overrides.
    sanity_dict.update(dict(num=1, minimum_duts=1, suite_min_duts=1,
                            blocking=True))
    default_dict = dict(pool=constants.HWTEST_MACH_POOL,
                        suite_min_duts=6)
    default_dict.update(kwargs)
    suite_list = [config_lib.HWTestConfig(constants.HWTEST_SANITY_SUITE,
                                          **sanity_dict)]
    suite_list.extend(HWTestList.DefaultListCanary(**default_dict))
    return suite_list

  @classmethod
  def AFDORecordTest(cls, **kwargs):
    """Return a HWTestConfig for the AFDO hwtest suite.

    NOTE: unlike the *List methods above, this returns a single
    HWTestConfig, not a list.

    Args:
      **kwargs: Overrides for the generated config.
    """
    default_dict = dict(pool=constants.HWTEST_MACH_POOL,
                        warn_only=True, num=1, file_bugs=True,
                        timeout=constants.AFDO_GENERATE_TIMEOUT,
                        priority=constants.HWTEST_PFQ_PRIORITY)
    # Allows kwargs overrides to default_dict for cq.
    default_dict.update(kwargs)
    return config_lib.HWTestConfig(constants.HWTEST_AFDO_SUITE, **default_dict)

  @classmethod
  def WiFiCellPoolPreCQ(cls, **kwargs):
    """Return a list of HWTestConfigs which run wifi tests.

    This should be used by the ChromeOS WiFi team to ensure changes pass the
    wifi tests as a pre-cq sanity check.
    """
    default_dict = dict(pool=constants.HWTEST_WIFICELL_PRE_CQ_POOL,
                        blocking=True, file_bugs=False,
                        priority=constants.HWTEST_DEFAULT_PRIORITY,
                        retry=False, max_retries=None, minimum_duts=1)
    default_dict.update(kwargs)
    suite_list = [config_lib.HWTestConfig(constants.WIFICELL_PRE_CQ,
                                          **default_dict)]
    return suite_list
+
def append_useflags(useflags):
  """Create a callback that merges `useflags` into an existing flag list.

  A flag that shadows a prior flag causes the prior flag to be removed:
  appending '-foo' to a list containing 'foo' drops 'foo', and vice versa.

  Usage:
    new_config = base_config.derive(useflags=append_useflags(['foo', '-bar']))

  Args:
    useflags: List of string useflags to append.

  Returns:
    A function mapping an old useflags list (or None) to the merged,
    sorted list.
  """
  assert isinstance(useflags, (list, set))
  # For each incoming flag, record the opposite-polarity spelling that it
  # should knock out of the existing flag set.
  shadowed = set()
  for flag in useflags:
    if flag.startswith('-'):
      shadowed.add(flag[1:])
    else:
      shadowed.add('-' + flag)

  def handler(old_useflags):
    merged = set(old_useflags or [])
    merged.update(useflags)
    merged.difference_update(shadowed)
    return sorted(merged)

  return handler
+
+
# Test types treated as 'traditional' VM tests by configs in this module.
TRADITIONAL_VM_TESTS_SUPPORTED = [constants.SMOKE_SUITE_TEST_TYPE,
                                  constants.SIMPLE_AU_TEST_TYPE,
                                  constants.CROS_VM_TEST_TYPE]
+
+#
+# Define assorted constants describing various sets of boards.
+#
+
+# Base per-board configuration.
+# Every board must appear in exactly 1 of the following sets.
+
# ARM boards built from the internal release manifest.
_arm_internal_release_boards = frozenset([
    'arkham',
    'beaglebone',
    'beaglebone_servo',
    'daisy',
    'daisy_skate',
    'daisy_spring',
    'daisy_winter',
    'kayle',
    'nyan',
    'nyan_big',
    'nyan_blaze',
    'nyan_freon',
    'nyan_kitty',
    'oak',
    'peach_pi',
    'peach_pit',
    'purin',
    'smaug',
    'storm',
    'rush',
    'rush_ryu',
    'veyron_brain',
    'veyron_danger',
    'veyron_gus',
    'veyron_jaq',
    'veyron_jerry',
    'veyron_mickey',
    'veyron_mighty',
    'veyron_minnie',
    'veyron_pinky',
    'veyron_rialto',
    'veyron_romy',
    'veyron_shark',
    'veyron_speedy',
    'veyron_thea',
    'whirlwind',
])

# ARM boards built from public sources only.
_arm_external_boards = frozenset([
    'arm-generic',
    'arm-generic_freon',
    'arm64-generic',
])

# x86/amd64 boards built from the internal release manifest.
_x86_internal_release_boards = frozenset([
    'auron',
    'auron_paine',
    'auron_yuna',
    'bayleybay',
    'banjo',
    'beltino',
    'bobcat',
    'buddy',
    'butterfly',
    'candy',
    'celes',
    'cid',
    'clapper',
    'cranky',
    'cyan',
    'enguarde',
    'expresso',
    'falco',
    'falco_li',
    'gandof',
    'glados',
    'glimmer',
    'gnawty',
    'guado',
    'guado_moblab',
    'heli',
    'jecht',
    'kip',
    'kunimitsu',
    'lakitu',
    'lakitu_mobbuild',
    'leon',
    'link',
    'lulu',
    'lumpy',
    'mccloud',
    'monroe',
    'ninja',
    'orco',
    'panther',
    'panther_embedded',
    'panther_moblab',
    'parrot',
    'parrot_ivb',
    'parry',
    'peppy',
    'quawks',
    'rambi',
    'rikku',
    'samus',
    'slippy',
    'squawks',
    'stout',
    'strago',
    'stumpy',
    'stumpy_moblab',
    'sumo',
    'swanky',
    'tidus',
    'tricky',
    'ultima',
    'winky',
    'wizpig',
    'wolf',
    'x86-alex',
    'x86-alex_he',
    'x86-mario',
    'x86-zgb',
    'x86-zgb_he',
    'zako',
])

# x86/amd64 boards built from public sources only.
_x86_external_boards = frozenset([
    'amd64-generic',
    'amd64-generic_freon',
    'gizmo',
    'x32-generic',
    'x86-generic',
    'x86-generic_freon',
])
+
# Every board should be in only 1 of the above sets.
# (_distinct_board_sets is consumed by config consistency checks.)
_distinct_board_sets = [
    _arm_internal_release_boards,
    _arm_external_boards,
    _x86_internal_release_boards,
    _x86_external_boards,
]

# Unions and aliases derived purely from the base sets above.
_arm_full_boards = (_arm_internal_release_boards |
                    _arm_external_boards)
_x86_full_boards = (_x86_internal_release_boards |
                    _x86_external_boards)

_arm_boards = _arm_full_boards
_x86_boards = _x86_full_boards

_all_release_boards = (
    _arm_internal_release_boards |
    _x86_internal_release_boards
)
_all_full_boards = (
    _arm_full_boards |
    _x86_full_boards
)
_all_boards = (
    _x86_boards |
    _arm_boards
)

_arm_release_boards = _arm_internal_release_boards
_x86_release_boards = _x86_internal_release_boards

_internal_boards = _all_release_boards
+
# Board can appear in 1 or more of the following sets.
_brillo_boards = frozenset([
    'arkham',
    'gizmo',
    'kayle',
    'lakitu',
    'lakitu_mobbuild',
    'panther_embedded',
    'purin',
    'storm',
    'whirlwind',
])

_moblab_boards = frozenset([
    'stumpy_moblab',
    'panther_moblab',
    'guado_moblab',
])

_minimal_profile_boards = frozenset([
    'bobcat',
])

_nofactory_boards = frozenset([
    'daisy_winter',
    'smaug',
])

_toolchains_from_source = frozenset([
    'x32-generic',
])

_noimagetest_boards = frozenset([
    'lakitu',
    'lakitu_mobbuild',
])

_nohwqual_boards = frozenset([
    'kayle',
    'lakitu',
    'lakitu_mobbuild',
])

# Currently empty.
_norootfs_verification_boards = frozenset([
])

_base_layout_boards = frozenset([
    'lakitu',
    'lakitu_mobbuild',
])

# Currently empty.
_no_unittest_boards = frozenset((
))

_upload_gce_images_boards = frozenset([
    'lakitu',
    'lakitu_mobbuild',
])

_no_vmtest_boards = _arm_boards | _brillo_boards
+
+
# This is a list of configs that should be included on the main waterfall, but
# aren't included by default (see IsDefaultMainWaterfall). This loosely
# corresponds to the set of experimental or self-standing configs.
_waterfall_config_map = {
    constants.WATERFALL_EXTERNAL: frozenset([
        # Experimental Paladins
        'amd64-generic_freon-paladin',

        # Incremental
        'amd64-generic-incremental',
        'daisy-incremental',
        'x86-generic-incremental',

        # Full
        'amd64-generic-full',
        'arm-generic-full',
        'daisy-full',
        'oak-full',
        'x86-generic-full',

        # ASAN
        'amd64-generic-asan',
        'x86-generic-asan',

        # Utility
        'chromiumos-sdk',
        'refresh-packages',

        # LLVM
        'amd64-generic-llvm',
    ]),

    constants.WATERFALL_INTERNAL: frozenset([
        # Experimental Paladins.
        'panther_moblab-paladin',
        'stumpy_moblab-paladin',

        # Experimental Canaries (Group)
        'storm-release-group',
        'strago-release-group',
        'veyron-c-release-group',

        # Experimental Canaries
        'bobcat-release',
        'daisy_winter-release',
        'kayle-release',
        'nyan_freon-release',
        'panther_moblab-release',
        'rush_ryu-release',
        'smaug-release',
        'guado_moblab-release',

        # Incremental Builders.
        'mario-incremental',
        'lakitu-incremental',

        # Firmware Builders.
        'link-depthcharge-full-firmware',

        # Toolchain Builders.
        'internal-toolchain-major',
        'internal-toolchain-minor',
    ]),

    # No extra configs on the release waterfall yet.
    constants.WATERFALL_RELEASE: frozenset([
    ]),
}
+
+
+@factory.CachedFunctionCall
+def GetConfig():
+  # Chrome OS site parameters.
+  site_params = config_lib.DefaultSiteParameters()
+
+  # Helpers for constructing Chrome OS site parameters.
+  manifest_project = 'chromiumos/manifest'
+  manifest_int_project = 'chromeos/manifest-internal'
+  external_remote = 'cros'
+  internal_remote = 'cros-internal'
+  kayle_internal_remote = 'kayle-cros-internal'
+  chromium_remote = 'chromium'
+  chrome_remote = 'chrome'
+  aosp_remote = 'aosp'
+  weave_remote = 'weave'
+
+  # Gerrit instance site parameters.
+  site_params.update(
+      config_lib.GerritInstanceParameters('EXTERNAL', 'chromium'))
+  site_params.update(
+      config_lib.GerritInstanceParameters('INTERNAL', 'chrome-internal'))
+  site_params.update(
+      config_lib.GerritInstanceParameters('AOSP', 'android'))
+  site_params.update(
+      config_lib.GerritInstanceParameters('WEAVE', 'weave'))
+
+  site_params.update(
+      # Parameters to define which manifests to use.
+      MANIFEST_PROJECT=manifest_project,
+      MANIFEST_INT_PROJECT=manifest_int_project,
+      MANIFEST_PROJECTS=(manifest_project, manifest_int_project),
+      MANIFEST_URL='%s/%s' % (
+          site_params['EXTERNAL_GOB_URL'], manifest_project
+      ),
+      MANIFEST_INT_URL='%s/%s' % (
+          site_params['INTERNAL_GERRIT_URL'], manifest_int_project
+      ),
+
+      # CrOS remotes specified in the manifests.
+      EXTERNAL_REMOTE=external_remote,
+      INTERNAL_REMOTE=internal_remote,
+      GOB_REMOTES={
+          site_params['EXTERNAL_GOB_INSTANCE']: external_remote,
+          site_params['INTERNAL_GOB_INSTANCE']: internal_remote
+      },
+      KAYLE_INTERNAL_REMOTE=kayle_internal_remote,
+      CHROMIUM_REMOTE=chromium_remote,
+      CHROME_REMOTE=chrome_remote,
+      AOSP_REMOTE=aosp_remote,
+      WEAVE_REMOTE=weave_remote,
+
+      # Only remotes listed in CROS_REMOTES are considered branchable.
+      # CROS_REMOTES and BRANCHABLE_PROJECTS must be kept in sync.
+      GERRIT_HOSTS={
+          external_remote: site_params['EXTERNAL_GERRIT_HOST'],
+          internal_remote: site_params['INTERNAL_GERRIT_HOST'],
+          aosp_remote: site_params['AOSP_GERRIT_HOST'],
+          weave_remote: site_params['WEAVE_GERRIT_HOST']
+      },
+      CROS_REMOTES={
+          external_remote: site_params['EXTERNAL_GOB_URL'],
+          internal_remote: site_params['INTERNAL_GOB_URL'],
+          kayle_internal_remote: site_params['INTERNAL_GOB_URL'],
+          aosp_remote: site_params['AOSP_GOB_URL'],
+          weave_remote: site_params['WEAVE_GOB_URL']
+      },
+      GIT_REMOTES={
+          chromium_remote: site_params['EXTERNAL_GOB_URL'],
+          chrome_remote: site_params['INTERNAL_GOB_URL'],
+          external_remote: site_params['EXTERNAL_GOB_URL'],
+          internal_remote: site_params['INTERNAL_GOB_URL'],
+          kayle_internal_remote: site_params['INTERNAL_GOB_URL'],
+          aosp_remote: site_params['AOSP_GOB_URL'],
+          weave_remote: site_params['WEAVE_GOB_URL']
+      },
+
+      # Prefix to distinguish internal and external changes. This is used
+      # when a user specifies a patch with "-g", when generating a key for
+      # a patch to use in our PatchCache, and when displaying a custom
+      # string for the patch.
+      CHANGE_PREFIX={
+          external_remote: site_params['EXTERNAL_CHANGE_PREFIX'],
+          internal_remote: site_params['INTERNAL_CHANGE_PREFIX'],
+      },
+
+      # List of remotes that are okay to include in the external manifest.
+      EXTERNAL_REMOTES=(
+          external_remote, chromium_remote
+      ),
+
+      # Mapping 'remote name' -> regexp that matches names of repositories on
+      # that remote that can be branched when creating CrOS branch.
+      # Branching script will actually create a new git ref when branching
+      # these projects. It won't attempt to create a git ref for other projects
+      # that may be mentioned in a manifest. If a remote is missing from this
+      # dictionary, all projects on that remote are considered to not be
+      # branchable.
+      BRANCHABLE_PROJECTS={
+          external_remote: r'chromiumos/(.+)',
+          internal_remote: r'chromeos/(.+)',
+          kayle_internal_remote: r'chromeos/(.+)'
+      },
+
+      # Additional parameters used to filter manifests, create modified
+      # manifests, and to branch manifests.
+      MANIFEST_VERSIONS_GOB_URL=(
+          '%s/chromiumos/manifest-versions' % site_params['EXTERNAL_GOB_URL']
+      ),
+      MANIFEST_VERSIONS_INT_GOB_URL=(
+          '%s/chromeos/manifest-versions' % site_params['INTERNAL_GOB_URL']
+      ),
+      MANIFEST_VERSIONS_GOB_URL_TEST=(
+          '%s/chromiumos/manifest-versions-test' % (
+              site_params['EXTERNAL_GOB_URL']
+          )
+      ),
+      MANIFEST_VERSIONS_INT_GOB_URL_TEST=(
+          '%s/chromeos/manifest-versions-test' % site_params['INTERNAL_GOB_URL']
+      ),
+      MANIFEST_VERSIONS_GS_URL='gs://chromeos-manifest-versions',
+
+      # Standard directories under buildroot for cloning these repos.
+      EXTERNAL_MANIFEST_VERSIONS_PATH='manifest-versions',
+      INTERNAL_MANIFEST_VERSIONS_PATH='manifest-versions-internal',
+
+      # URL of the repo project.
+      REPO_URL='%s/external/repo' % site_params['EXTERNAL_GOB_URL']
+  )
+
+  # Site specific adjustments for default BuildConfig values.
+  defaults = config_lib.DefaultSettings()
+
+  # Git repository URL for our manifests.
+  #  https://chromium.googlesource.com/chromiumos/manifest
+  #  https://chrome-internal.googlesource.com/chromeos/manifest-internal
+  defaults['manifest_repo_url'] = site_params['MANIFEST_URL']
+
+  # Site configuration.
+  site_config = config_lib.SiteConfig(defaults=defaults,
+                                      site_params=site_params)
+
+  default_hw_tests_override = config_lib.BuildConfig(
+      hw_tests_override=HWTestList.DefaultList(
+          num=constants.HWTEST_TRYBOT_NUM, pool=constants.HWTEST_TRYBOT_POOL,
+          file_bugs=False),
+  )
+
+  # Arch-specific mixins.
+
+  # Config parameters for builders that do not run tests on the builder.
+  no_unittest_builder = config_lib.BuildConfig(
+      unittests=False,
+  )
+
+  no_vmtest_builder = config_lib.BuildConfig(
+      vm_tests=[],
+      vm_tests_override=None,
+  )
+
+  no_hwtest_builder = config_lib.BuildConfig(
+      hw_tests=[],
+      hw_tests_override=[],
+  )
+
+  # Builder-specific mixins
+
+  config_lib.BuildConfig(
+      # Full builds that build fully from binaries.
+      build_type=constants.BUILD_FROM_SOURCE_TYPE,
+      archive_build_debug=True,
+      images=['test', 'factory_install'],
+      git_sync=True,
+  )
+
+  full = site_config.AddTemplate(
+      'full',
+      default_hw_tests_override,
+      # Full builds are test builds to show that we can build from scratch,
+      # so use settings to build from scratch, and archive the results.
+      usepkg_build_packages=False,
+      chrome_sdk=True,
+
+      build_type=constants.BUILD_FROM_SOURCE_TYPE,
+      archive_build_debug=True,
+      images=['base', 'recovery', 'test', 'factory_install'],
+      git_sync=True,
+      trybot_list=True,
+      description='Full Builds',
+      image_test=True,
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Continuous',
+  )
+
+  # Full builders with prebuilts.
+  full_prebuilts = full.derive(
+      prebuilts=constants.PUBLIC,
+  )
+
+  pfq = config_lib.BuildConfig(
+      build_type=constants.PFQ_TYPE,
+      important=True,
+      uprev=True,
+      overlays=constants.PUBLIC_OVERLAYS,
+      manifest_version=True,
+      trybot_list=True,
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Chrome-PFQ',
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE,
+                constants.SIMPLE_AU_TEST_TYPE],
+      vm_tests_override=TRADITIONAL_VM_TESTS_SUPPORTED,
+  )
+
+  paladin = site_config.AddTemplate(
+      'paladin',
+      default_hw_tests_override,
+      chroot_replace=False,
+      important=True,
+      build_type=constants.PALADIN_TYPE,
+      overlays=constants.PUBLIC_OVERLAYS,
+      prebuilts=constants.PUBLIC,
+      manifest_version=True,
+      trybot_list=True,
+      description='Commit Queue',
+      upload_standalone_images=False,
+      images=['base', 'test'],
+      image_test=True,
+      chrome_sdk=True,
+      chrome_sdk_build_chrome=False,
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#TOC-CQ',
+
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+      vm_tests_override=TRADITIONAL_VM_TESTS_SUPPORTED,
+  )
+
+  # Incremental builders are intended to test the developer workflow.
+  # For that reason, they don't uprev.
+  incremental = site_config.AddTemplate(
+      'incremental',
+      default_hw_tests_override,
+      build_type=constants.INCREMENTAL_TYPE,
+      chroot_replace=False,
+      uprev=False,
+      overlays=constants.PUBLIC_OVERLAYS,
+      description='Incremental Builds',
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Continuous',
+  )
+
+  # This builds with more source available.
+  internal = config_lib.BuildConfig(
+      internal=True,
+      overlays=constants.BOTH_OVERLAYS,
+      manifest_repo_url=site_params['MANIFEST_INT_URL'],
+  )
+
+  brillo = config_lib.BuildConfig(
+      sync_chrome=False,
+      chrome_sdk=False,
+      afdo_use=False,
+      dev_installer_prebuilts=False,
+      # TODO(gauravsh): crbug.com/356414 Start running tests on Brillo configs.
+      vm_tests=[],
+      hw_tests=[],
+  )
+
+  moblab = config_lib.BuildConfig(
+      image_test=False,
+      vm_tests=[],
+  )
+
+  beaglebone = brillo.derive(image_test=False, rootfs_verification=False)
+
+  # This adds Chrome branding.
+  official_chrome = config_lib.BuildConfig(
+      useflags=[constants.USE_CHROME_INTERNAL],
+  )
+
+  # This sets chromeos_official.
+  official = official_chrome.derive(
+      chromeos_official=True,
+  )
+
+  _cros_sdk = site_config.AddConfigWithoutTemplate(
+      'chromiumos-sdk',
+      full_prebuilts,
+      no_hwtest_builder,
+      # The amd64-host has to be last as that is when the toolchains
+      # are bundled up for inclusion in the sdk.
+      boards=[
+          'x86-generic', 'arm-generic', 'amd64-generic'
+      ],
+      build_type=constants.CHROOT_BUILDER_TYPE,
+      builder_class_name='sdk_builders.ChrootSdkBuilder',
+      use_sdk=False,
+      trybot_list=True,
+      description='Build the SDK and all the cross-compilers',
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Continuous',
+  )
+
+  asan = site_config.AddTemplate(
+      'asan',
+      default_hw_tests_override,
+      profile='asan',
+      disk_layout='2gb-rootfs',
+      # TODO(deymo): ASan builders generate bigger files, in particular a bigger
+      # Chrome binary, that update_engine can't handle in delta payloads due to
+      # memory limits. Remove the following lines once crbug.com/329248 is
+      # fixed.
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+      vm_tests_override=None,
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-ASAN',
+  )
+
+  llvm = site_config.AddTemplate(
+      'llvm',
+      default_hw_tests_override,
+      profile='llvm',
+      description='Build with LLVM',
+  )
+
+  telemetry = site_config.AddTemplate(
+      'telemetry',
+      default_hw_tests_override,
+      build_type=constants.INCREMENTAL_TYPE,
+      uprev=False,
+      overlays=constants.PUBLIC_OVERLAYS,
+      vm_tests=[constants.TELEMETRY_SUITE_TEST_TYPE],
+      description='Telemetry Builds',
+  )
+
+  chromium_pfq = site_config.AddTemplate(
+      'chromium-pfq',
+      default_hw_tests_override,
+      build_type=constants.CHROME_PFQ_TYPE,
+      important=True,
+      uprev=False,
+      overlays=constants.PUBLIC_OVERLAYS,
+      manifest_version=True,
+      chrome_rev=constants.CHROME_REV_LATEST,
+      chrome_sdk=True,
+      description='Preflight Chromium Uprev & Build (public)',
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE,
+                constants.SIMPLE_AU_TEST_TYPE],
+      vm_tests_override=None,
+  )
+
+  # TODO(davidjames): Convert this to an external config once the unified master
+  # logic is ready.
+  internal_chromium_pfq = internal.derive(
+      chromium_pfq,
+      description='Preflight Chromium Uprev & Build (internal)',
+      overlays=constants.BOTH_OVERLAYS,
+      prebuilts=constants.PUBLIC,
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Chrome-PFQ',
+  )
+
+  site_config.AddConfig(
+      internal_chromium_pfq, 'master-chromium-pfq',
+      boards=[],
+      master=True,
+      binhost_test=True,
+      push_overlays=constants.BOTH_OVERLAYS,
+      afdo_update_ebuild=True,
+      chrome_sdk=False,
+      health_alert_recipients=['chromeos-infra-eng@grotations.appspotmail.com',
+                               'tree',
+                               'chrome'],
+  )
+
+  chrome_pfq = site_config.AddTemplate(
+      'chrome-pfq',
+      internal_chromium_pfq,
+      official,
+      important=True,
+      overlays=constants.BOTH_OVERLAYS,
+      description='Preflight Chrome Uprev & Build (internal)',
+      prebuilts=constants.PRIVATE,
+  )
+
+  chrome_try = config_lib.BuildConfig(
+      build_type=constants.CHROME_PFQ_TYPE,
+      chrome_rev=constants.CHROME_REV_TOT,
+      use_lkgm=True,
+      important=False,
+      manifest_version=False,
+  )
+
+  chromium_info = site_config.AddTemplate(
+      'chromium-pfq-informational',
+      chromium_pfq,
+      chrome_try,
+      chrome_sdk=False,
+      description='Informational Chromium Uprev & Build (public)',
+  )
+
+  chrome_info = site_config.AddTemplate(
+      'chrome-pfq-informational',
+      chromium_info,
+      internal, official,
+      description='Informational Chrome Uprev & Build (internal)',
+  )
+
+  chrome_perf = site_config.AddTemplate(
+      'chrome-perf',
+      chrome_info,
+      no_unittest_builder,
+      no_vmtest_builder,
+      description='Chrome Performance test bot',
+      hw_tests=[config_lib.HWTestConfig(
+          'perf_v2', pool=constants.HWTEST_CHROME_PERF_POOL,
+          timeout=90 * 60, critical=True, num=1)],
+      use_chrome_lkgm=True,
+      use_lkgm=False,
+      useflags=append_useflags(['-cros-debug']),
+  )
+
+
+  # A base config for each board.
+  _base_configs = dict()
+
+  def _CreateBaseConfigs():
+    for board in _all_boards:
+      base = config_lib.BuildConfig()
+
+      if board in _internal_boards:
+        base.update(internal)
+        base.update(official_chrome)
+        base.update(manifest=constants.OFFICIAL_MANIFEST)
+      if board in _brillo_boards:
+        base.update(brillo)
+      if board in _moblab_boards:
+        base.update(moblab)
+      if board in _minimal_profile_boards:
+        base.update(profile='minimal')
+      if board in _nofactory_boards:
+        base.update(factory=False)
+        base.update(factory_toolkit=False)
+        base.update(factory_install_netboot=False)
+      if board in _toolchains_from_source:
+        base.update(usepkg_toolchain=False)
+      if board in _noimagetest_boards:
+        base.update(image_test=False)
+      if board in _nohwqual_boards:
+        base.update(hwqual=False)
+      if board in _norootfs_verification_boards:
+        base.update(rootfs_verification=False)
+      if board in _base_layout_boards:
+        base.update(disk_layout='base')
+      if board in _no_unittest_boards:
+        base.update(no_unittest_builder)
+      if board in _no_vmtest_boards:
+        base.update(no_vmtest_builder)
+      if board in _upload_gce_images_boards:
+        base.update(upload_gce_images=True)
+
+      # TODO(akeshet) Eliminate or clean up this special case.
+      # kayle board has a lot of kayle-specific config changes.
+      if board == 'kayle':
+        base.update(manifest='kayle.xml',
+                    dev_manifest='kayle.xml',
+                    factory_toolkit=False,
+                    # TODO(namnguyen): Cannot build factory net install (no
+                    # usbnet).
+                    factory_install_netboot=False,
+                    # TODO(namngyuyen) Cannot build dev or test images due to
+                    # #436523.
+                    images=['base'])
+
+      board_config = base.derive(boards=[board])
+      # Note: base configs should not specify a useflag list. Convert any
+      # useflags that this base config has accrued (for instance,
+      # 'chrome_internal', via official_chrome) into an append_useflags
+      # callable. This is because the board base config is the last config to be
+      # derived from when creating a board-specific config.
+      if 'useflags' in board_config:
+        board_config['useflags'] = append_useflags(board_config['useflags'])
+      _base_configs[board] = board_config
+
+  _CreateBaseConfigs()
+
+  def _CreateConfigsForBoards(config_base, boards, name_suffix, **kwargs):
+    """Create configs based on |config_base| for all boards in |boards|.
+
+    Note: Existing configs will not be overwritten.
+
+    Args:
+      config_base: A BuildConfig instance to inherit from.
+      boards: A set of boards to create configs for.
+      name_suffix: A naming suffix. Configs will have names of the form
+                   board-name_suffix.
+      **kwargs: Additional keyword arguments to be used in AddConfig.
+    """
+    for board in boards:
+      config_name = '%s-%s' % (board, name_suffix)
+      if config_name not in site_config:
+        base = config_lib.BuildConfig()
+        config = site_config.AddConfig(config_base, config_name, base,
+                                       _base_configs[board], **kwargs)
+        if board in _nofactory_boards:
+          try:
+            config.get('images', []).remove('factory_install')
+          except ValueError:
+            pass
+
+
+  _chromium_pfq_important_boards = frozenset([
+      'arm-generic_freon',
+      'arm-generic',
+      'daisy',
+      'veyron_minnie',
+      'x86-generic',
+  ])
+
+  def _AddFullConfigs():
+    """Add x86 and arm full configs."""
+    external_overrides = config_lib.BuildConfig.delete_keys(internal)
+    external_overrides.update(manifest=config_lib.BuildConfig.delete_key())
+    external_overrides.update(
+        useflags=append_useflags(['-%s' % constants.USE_CHROME_INTERNAL]))
+    _CreateConfigsForBoards(full_prebuilts, _all_full_boards,
+                            config_lib.CONFIG_TYPE_FULL,
+                            **external_overrides)
+    _CreateConfigsForBoards(chromium_info, _all_full_boards,
+                            'tot-chromium-pfq-informational', important=False,
+                            **external_overrides)
+    # Create important configs, then non-important configs.
+    _CreateConfigsForBoards(
+        internal_chromium_pfq, _chromium_pfq_important_boards,
+        'chromium-pfq', **external_overrides)
+    _CreateConfigsForBoards(internal_chromium_pfq, _all_full_boards,
+                            'chromium-pfq', important=False,
+                            **external_overrides)
+
+  _AddFullConfigs()
+
+
+  # These remaining chromium pfq configs have eccentricities that are easier to
+  # create manually.
+
+  site_config.AddConfig(
+      internal_chromium_pfq, 'amd64-generic-chromium-pfq',
+      _base_configs['amd64-generic'],
+      disk_layout='2gb-rootfs',
+  )
+
+  site_config.AddConfig(
+      internal_chromium_pfq, 'amd64-generic_freon-chromium-pfq',
+      _base_configs['amd64-generic_freon'],
+      disk_layout='2gb-rootfs',
+      vm_tests=[],
+  )
+
+  site_config.AddConfig(
+      internal_chromium_pfq, 'x86-generic_freon-chromium-pfq',
+      _base_configs['x86-generic_freon'],
+      vm_tests=[],
+  )
+
+  _chrome_pfq_important_boards = frozenset([
+      'peppy',
+      'rush_ryu',
+      'veyron_pinky',
+      'nyan',
+  ])
+
+
+  # TODO(akeshet): Replace this with a config named x86-alex-chrome-pfq.
+  site_config.AddConfig(
+      chrome_pfq, 'alex-chrome-pfq',
+      _base_configs['x86-alex'],
+  )
+
+  site_config.AddConfig(
+      chrome_pfq, 'lumpy-chrome-pfq',
+      _base_configs['lumpy'],
+      afdo_generate=True,
+      hw_tests=[HWTestList.AFDORecordTest()] + HWTestList.SharedPoolPFQ(),
+  )
+
+  site_config.AddConfig(
+      chrome_pfq, 'daisy_skate-chrome-pfq',
+      _base_configs['daisy_skate'],
+      hw_tests=HWTestList.SharedPoolPFQ(),
+  )
+
+  site_config.AddConfig(
+      chrome_pfq, 'falco-chrome-pfq',
+      _base_configs['falco'],
+      hw_tests=HWTestList.SharedPoolPFQ(),
+  )
+
+  site_config.AddConfig(
+      chrome_pfq, 'peach_pit-chrome-pfq',
+      _base_configs['peach_pit'],
+      hw_tests=HWTestList.SharedPoolPFQ(),
+  )
+
+  site_config.AddConfig(
+      chrome_pfq, 'tricky-chrome-pfq',
+      _base_configs['tricky'],
+      hw_tests=HWTestList.SharedPoolPFQ(),
+  )
+
+  _telemetry_boards = frozenset([
+      'amd64-generic',
+      'arm-generic',
+      'x86-generic',
+  ])
+
+  _CreateConfigsForBoards(telemetry, _telemetry_boards, 'telemetry')
+
+  _toolchain_major = site_config.AddConfigWithoutTemplate(
+      'toolchain-major',
+      _cros_sdk,
+      latest_toolchain=True,
+      prebuilts=False,
+      trybot_list=False,
+      gcc_githash='svn-mirror/google/main',
+      description='Test next major toolchain revision',
+  )
+
+  _toolchain_minor = site_config.AddConfigWithoutTemplate(
+      'toolchain-minor',
+      _cros_sdk,
+      latest_toolchain=True,
+      prebuilts=False,
+      trybot_list=False,
+      gcc_githash='svn-mirror/google/gcc-4_9',
+      description='Test next minor toolchain revision',
+  )
+
+  site_config.AddConfig(
+      llvm,
+      'amd64-generic-llvm',
+      incremental,
+      boards=['amd64-generic'],
+      chroot_replace=True,
+      description='Build with LLVM',
+      trybot_list=True,
+  )
+
+  site_config.AddConfig(
+      asan,
+      'x86-generic-asan',
+      incremental,
+      boards=['x86-generic'],
+      chroot_replace=True,
+      description='Build with Address Sanitizer (Clang)',
+      trybot_list=True,
+  )
+
+  tot_asan_info = site_config.AddTemplate(
+      'tot-asan-informational',
+      chromium_info,
+      asan,
+      description='Build TOT Chrome with Address Sanitizer (Clang)',
+  )
+
+  site_config.AddConfig(
+      tot_asan_info,
+      'x86-generic-tot-asan-informational',
+      boards=['x86-generic'],
+  )
+
+  site_config.AddConfig(
+      asan,
+      'amd64-generic-asan',
+      incremental,
+      boards=['amd64-generic'],
+      description='Build with Address Sanitizer (Clang)',
+      trybot_list=True,
+  )
+
+
+  site_config.AddConfig(
+      tot_asan_info, 'amd64-generic-tot-asan-informational',
+      boards=['amd64-generic'],
+  )
+
+  incremental_beaglebone = incremental.derive(beaglebone)
+  site_config.AddConfig(
+      incremental_beaglebone, 'beaglebone-incremental',
+      boards=['beaglebone'],
+      trybot_list=True,
+      description='Incremental Beaglebone Builder',
+  )
+
+  site_config.AddConfigWithoutTemplate(
+      'refresh-packages',
+      no_vmtest_builder,
+      no_hwtest_builder,
+      boards=['x86-generic', 'arm-generic'],
+      builder_class_name='misc_builders.RefreshPackagesBuilder',
+      description='Check upstream Gentoo for package updates',
+  )
+
+  site_config.AddConfig(
+      incremental, 'x86-generic-incremental',
+      _base_configs['x86-generic'],
+  )
+
+  site_config.AddConfig(
+      incremental, 'daisy-incremental',
+      _base_configs['daisy'],
+      config_lib.BuildConfig.delete_keys(internal),
+      manifest=config_lib.BuildConfig.delete_key(),
+      useflags=append_useflags(['-chrome_internal']),
+  )
+
+  site_config.AddConfig(
+      incremental, 'amd64-generic-incremental',
+      _base_configs['amd64-generic'],
+      # This builder runs on a VM, so it can't run VM tests.
+      vm_tests=[],
+  )
+
+  site_config.AddConfig(
+      incremental, 'x32-generic-incremental',
+      _base_configs['x32-generic'],
+      # This builder runs on a VM, so it can't run VM tests.
+      vm_tests=[],
+  )
+
+  site_config.AddConfig(
+      paladin, 'x86-generic-asan-paladin',
+      _base_configs['x86-generic'],
+      asan,
+      description='Paladin build with Address Sanitizer (Clang)',
+      important=False,
+  )
+
+  site_config.AddConfig(
+      paladin, 'amd64-generic-asan-paladin',
+      _base_configs['amd64-generic'],
+      asan,
+      description='Paladin build with Address Sanitizer (Clang)',
+      important=False,
+  )
+
+  _chrome_perf_boards = frozenset([
+      'daisy',
+      'lumpy',
+      'parrot',
+  ])
+
+  _CreateConfigsForBoards(chrome_perf, _chrome_perf_boards, 'chrome-perf',
+                          trybot_list=True)
+
+
+  _CreateConfigsForBoards(chromium_info,
+                          ['x86-generic', 'amd64-generic'],
+                          'telem-chromium-pfq-informational',
+                          **telemetry.derive(chrome_try))
+
+  #
+  # Internal Builds
+  #
+
+  internal_pfq = internal.derive(
+      official_chrome, pfq,
+      overlays=constants.BOTH_OVERLAYS,
+      prebuilts=constants.PRIVATE,
+  )
+
+  # Because branch directories may be shared amongst builders on multiple
+  # branches, they must delete the chroot every time they run.
+  # They also potentially need to build [new] Chrome.
+  internal_pfq_branch = site_config.AddTemplate(
+      'pre-flight-branch',
+      internal_pfq,
+      branch=True,
+      trybot_list=False,
+      sync_chrome=True,
+      active_waterfall=constants.WATERFALL_RELEASE)
+
+  internal_paladin = internal.derive(
+      official_chrome, paladin,
+      manifest=constants.OFFICIAL_MANIFEST,
+      overlays=constants.BOTH_OVERLAYS,
+      prebuilts=constants.PRIVATE,
+      vm_tests=[],
+      description=paladin['description'] + ' (internal)',
+  )
+
+  # Used for paladin builders with nowithdebug flag (a.k.a -cros-debug)
+  internal_nowithdebug_paladin = internal_paladin.derive(
+      useflags=append_useflags(['-cros-debug']),
+      description=paladin['description'] + ' (internal, nowithdebug)',
+      prebuilts=False,
+  )
+
+  _CreateConfigsForBoards(
+      internal_nowithdebug_paladin,
+      ['x86-generic', 'amd64-generic'],
+      'nowithdebug-paladin',
+      important=False,
+  )
+
+  site_config.AddConfig(
+      internal_nowithdebug_paladin,
+      'x86-mario-nowithdebug-paladin',
+      boards=['x86-mario'])
+
+  # Used for builders which build completely from source except Chrome.
+  full_compile_paladin = paladin.derive(
+      board_replace=True,
+      chrome_binhost_only=True,
+      chrome_sdk=False,
+      cpe_export=False,
+      debug_symbols=False,
+      prebuilts=False,
+      unittests=False,
+      upload_hw_test_artifacts=False,
+      vm_tests=[],
+  )
+
+  # falco is the only board that has the -clang-clean CFLAG right now,
+  # so it's important that falco stays as a full-compile builder.
+  # TODO(yunlian): Add -clang-clean to more boards.
+  # See https://chromium-review.googlesource.com/#/c/275862/
+  _CreateConfigsForBoards(
+      full_compile_paladin,
+      ['falco', 'nyan'],
+      'full-compile-paladin',
+  )
+
+  pre_cq = site_config.AddTemplate(
+      'pre-cq',
+      paladin,
+      build_type=constants.INCREMENTAL_TYPE,
+      build_packages_in_background=True,
+      pre_cq=True,
+      archive=False,
+      chrome_sdk=False,
+      chroot_replace=True,
+      debug_symbols=False,
+      prebuilts=False,
+      cpe_export=False,
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+      vm_tests_override=None,
+      description='Verifies compilation, building an image, and vm/unit tests '
+                  'if supported.',
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Pre-CQ',
+      health_alert_recipients=['chromeos-infra-eng@grotations.appspotmail.com'],
+      health_threshold=3,
+  )
+
+  # Pre-CQ targets that only check compilation and unit tests.
+  unittest_only_pre_cq = pre_cq.derive(
+      no_vmtest_builder,
+      description='Verifies compilation and unit tests only',
+      compilecheck=True,
+  )
+
+  # Pre-CQ targets that don't run VMTests.
+  no_vmtest_pre_cq = site_config.AddTemplate(
+      'no-vmtest-pre-cq',
+      pre_cq,
+      no_vmtest_builder,
+      description='Verifies compilation, building an image, and unit tests '
+                  'if supported.',
+  )
+
+  # Pre-CQ targets that only check compilation.
+  compile_only_pre_cq = site_config.AddTemplate(
+      'compile-only-pre-cq',
+      unittest_only_pre_cq,
+      description='Verifies compilation only',
+      unittests=False,
+  )
+
+  site_config.AddConfigWithoutTemplate(
+      constants.BRANCH_UTIL_CONFIG,
+      internal_paladin,
+      no_vmtest_builder,
+      no_hwtest_builder,
+      boards=[],
+      # Disable postsync_patch to prevent conflicting patches from being applied
+      # - e.g., patches from 'master' branch being applied to a branch.
+      postsync_patch=False,
+      # Disable postsync_reexec to continue running the 'master' branch chromite
+      # for all stages, rather than the chromite in the branch buildroot.
+      postsync_reexec=False,
+      # Need to reset the paladin build_type we inherited.
+      build_type=None,
+      builder_class_name='release_builders.CreateBranchBuilder',
+      description='Used for creating/deleting branches (TPMs only)',
+  )
+
+  # Internal incremental builders don't use official chrome because we want
+  # to test the developer workflow.
+  internal_incremental = internal.derive(
+      incremental,
+      overlays=constants.BOTH_OVERLAYS,
+      description='Incremental Builds (internal)',
+  )
+
+  site_config.AddConfig(
+      internal_pfq_branch, 'lumpy-pre-flight-branch',
+      master=True,
+      push_overlays=constants.BOTH_OVERLAYS,
+      boards=['lumpy'],
+      afdo_generate=True,
+      afdo_update_ebuild=True,
+      hw_tests=[HWTestList.AFDORecordTest()],
+  )
+
+  # A test-ap image is just a test image with a special profile enabled.
+  # Note that each board enabled for test-ap use has to have the testbed-ap
+  # profile linked to from its private overlay.
+  _test_ap = site_config.AddTemplate(
+      'test-ap',
+      internal,
+      default_hw_tests_override,
+      description='WiFi AP images used in testing',
+      profile='testbed-ap',
+      vm_tests=[],
+  )
+
+  site_config.AddGroup(
+      'test-ap-group',
+      site_config.AddConfig(_test_ap, 'stumpy-test-ap', boards=['stumpy']),
+      site_config.AddConfig(_test_ap, 'panther-test-ap', boards=['panther']),
+  )
+
+  ### Master paladin (CQ builder).
+
+  site_config.AddConfig(
+      internal_paladin, 'master-paladin',
+      boards=[],
+      master=True,
+      binhost_test=True,
+      push_overlays=constants.BOTH_OVERLAYS,
+      description='Commit Queue master (all others are slaves)',
+
      # This name should remain synced with the name used in
+      # build_internals/masters/master.chromeos/board_config.py.
+      # TODO(mtennant): Fix this.  There should be some amount of auto-
+      # configuration in the board_config.py code.
+      health_threshold=3,
+      health_alert_recipients=['chromeos-infra-eng@grotations.appspotmail.com',
+                               'tree'],
+      sanity_check_slaves=['wolf-tot-paladin'],
+      trybot_list=False,
+  )
+
+  ### Other paladins (CQ builders).
+  # These are slaves of the master paladin by virtue of matching
+  # in a few config values (e.g. 'build_type', 'branch', etc).  If
+  # they are not 'important' then they are ignored slaves.
+  # TODO(mtennant): This master-slave relationship should be specified
+  # here in the configuration, rather than GetSlavesForMaster().
+  # Something like the following:
+  # master_paladin = site_config.AddConfig(internal_paladin, ...)
+  # master_paladin.AddSlave(site_config.AddConfig(internal_paladin, ...))
+
+  # Sanity check builder, part of the CQ but builds without the patches
+  # under test.
+  site_config.AddConfig(
+      internal_paladin, 'wolf-tot-paladin',
+      boards=['wolf'],
+      do_not_apply_cq_patches=True,
+      prebuilts=False,
+      hw_tests=HWTestList.SharedPoolCQ(),
+  )
+
+  _paladin_boards = _all_boards
+
+  # List of paladin boards where the regular paladin config is important.
+  _paladin_important_boards = frozenset([
+      'amd64-generic',
+      'arm-generic',
+      'auron',
+      'beaglebone',
+      'butterfly',
+      'daisy',
+      'daisy_skate',
+      'daisy_spring',
+      'nyan_freon',
+      'falco',
+      'gizmo',
+      'guado_moblab',
+      'kayle',
+      'lakitu',
+      'lakitu_mobbuild',
+      'leon',
+      'link',
+      'lumpy',
+      'monroe',
+      'nyan',
+      'oak',
+      'panther',
+      'parrot',
+      'peach_pit',
+      'peppy',
+      'rambi',
+      'rush_ryu',
+      'samus',
+      'smaug',
+      'storm',
+      'stout',
+      'strago',
+      'stumpy',
+      'tricky',
+      'veyron_pinky',
+      'whirlwind',
+      'wolf',
+      'x86-alex',
+      'x86-generic',
+      'x86-mario',
+      'x86-zgb',
+  ])
+
+  _paladin_simple_vmtest_boards = frozenset([
+      'rambi',
+      'x86-mario',
+  ])
+
+  _paladin_devmode_vmtest_boards = frozenset([
+      'parrot',
+  ])
+
+  _paladin_cros_vmtest_boards = frozenset([
+      'stout',
+  ])
+
+  _paladin_smoke_vmtest_boards = frozenset([
+      'amd64-generic',
+      'x86-generic',
+  ])
+
+  _paladin_default_vmtest_boards = frozenset([
+      'x32-generic',
+  ])
+
+  _paladin_hwtest_boards = frozenset([
+      'daisy_skate',
+      'link',
+      'lumpy',
+      'peach_pit',
+      'peppy',
+      'stumpy',
+      'wolf',
+      'x86-alex',
+      'x86-zgb',
+  ])
+
+  _paladin_moblab_hwtest_boards = frozenset([
+      'guado_moblab',
+  ])
+
+  _paladin_chroot_replace_boards = frozenset([
+      'butterfly',
+      'daisy_spring',
+  ])
+
+  _paladin_separate_symbols = frozenset([
+      'amd64-generic',
+      'gizmo',
+  ])
+
+  def _CreatePaladinConfigs():
+    for board in _paladin_boards:
+      assert board in _base_configs, '%s not in _base_configs' % board
+      config_name = '%s-%s' % (board, constants.PALADIN_TYPE)
+      customizations = config_lib.BuildConfig()
+      base_config = _base_configs[board]
+      if board in _paladin_hwtest_boards:
+        customizations.update(hw_tests=HWTestList.DefaultListCQ())
+      if board in _paladin_moblab_hwtest_boards:
+        customizations.update(
+            hw_tests=[
+                config_lib.HWTestConfig(
+                    constants.HWTEST_MOBLAB_QUICK_SUITE,
+                    blocking=True, num=1, timeout=120*60,
+                    pool=constants.HWTEST_PALADIN_POOL)
+            ])
+      if board not in _paladin_important_boards:
+        customizations.update(important=False)
+      if board in _paladin_chroot_replace_boards:
+        customizations.update(chroot_replace=True)
+      if board in _internal_boards:
+        customizations = customizations.derive(
+            internal, official_chrome,
+            manifest=constants.OFFICIAL_MANIFEST)
+      if board in _paladin_separate_symbols:
+        customizations.update(separate_debug_symbols=True)
+
+      if board not in _paladin_default_vmtest_boards:
+        vm_tests = []
+        if board in _paladin_simple_vmtest_boards:
+          vm_tests.append(constants.SIMPLE_AU_TEST_TYPE)
+        if board in _paladin_cros_vmtest_boards:
+          vm_tests.append(constants.CROS_VM_TEST_TYPE)
+        if board in _paladin_devmode_vmtest_boards:
+          vm_tests.append(constants.DEV_MODE_TEST_TYPE)
+        if board in _paladin_smoke_vmtest_boards:
+          vm_tests.append(constants.SMOKE_SUITE_TEST_TYPE)
+        customizations.update(vm_tests=vm_tests)
+
+        if paladin.vm_tests_override is not None:
+          # Make sure any new tests are also in override.
+          override = paladin.vm_tests_override[:]
+          for test in vm_tests:
+            if test not in override:
+              override.append(test)
+
+          customizations.update(vm_tests_override=override)
+
+      if base_config.get('internal'):
+        customizations.update(
+            prebuilts=constants.PRIVATE,
+            description=paladin['description'] + ' (internal)')
+      else:
+        customizations.update(prebuilts=constants.PUBLIC)
+      site_config.AddConfig(
+          paladin, config_name,
+          customizations,
+          base_config)
+
+
+  _CreatePaladinConfigs()
+
+
+  site_config.AddConfig(
+      internal_paladin, 'lumpy-incremental-paladin',
+      boards=['lumpy'],
+      build_before_patching=True,
+      prebuilts=False,
+      compilecheck=True,
+      unittests=False,
+  )
+
+  ### Paladins (CQ builders) which do not run VM or Unit tests on the builder
+  ### itself.
+  external_brillo_paladin = paladin.derive(brillo)
+
+  site_config.AddConfig(
+      external_brillo_paladin, 'panther_embedded-minimal-paladin',
+      boards=['panther_embedded'],
+      profile='minimal',
+      trybot_list=True,
+  )
+
+  internal_beaglebone_paladin = internal_paladin.derive(beaglebone)
+
+  site_config.AddConfig(
+      internal_beaglebone_paladin, 'beaglebone-paladin',
+      boards=['beaglebone'],
+      trybot_list=True,
+  )
+
+  site_config.AddConfig(
+      internal_beaglebone_paladin, 'beaglebone_servo-paladin',
+      boards=['beaglebone_servo'],
+      important=False,
+  )
+
+
+  def ShardHWTestsBetweenBuilders(*args):
+    """Divide up the hardware tests between the given list of config names.
+
+    Each of the config names must have the same hardware test suites, and the
+    number of suites must be equal to the number of config names.
+
+    Args:
+      *args: A list of config names.
+    """
+    # List of config names.
+    names = args
+    # Verify sanity before sharding the HWTests.
+    for name in names:
+      assert len(site_config[name].hw_tests) == len(names), \
+        '%s should have %d tests, but found %d' % (
+            name, len(names), len(site_config[name].hw_tests))
+    for name in names[1:]:
+      for test1, test2 in zip(site_config[name].hw_tests,
+                              site_config[names[0]].hw_tests):
+        assert test1.__dict__ == test2.__dict__, \
+            '%s and %s have different hw_tests configured' % (names[0], name)
+
+    # Assign each config the Nth HWTest.
+    for i, name in enumerate(names):
+      site_config[name]['hw_tests'] = [site_config[name].hw_tests[i]]
+
+  # Shard the bvt-inline and bvt-cq hw tests between similar builders.
+  # The first builder gets bvt-inline, and the second builder gets bvt-cq.
+  # bvt-cq takes longer, so it usually makes sense to give it the faster board.
+  ShardHWTestsBetweenBuilders('x86-zgb-paladin', 'x86-alex-paladin')
+  ShardHWTestsBetweenBuilders('wolf-paladin', 'peppy-paladin')
+  ShardHWTestsBetweenBuilders('daisy_skate-paladin', 'peach_pit-paladin')
+  ShardHWTestsBetweenBuilders('lumpy-paladin', 'stumpy-paladin')
+
+  # Add a pre-cq config for every board.
+  _CreateConfigsForBoards(pre_cq, _all_boards, 'pre-cq')
+  # Override 'lakitu-pre-cq' - it's in _brillo_boards, but should run vmtests.
+  site_config.AddConfig(
+      pre_cq, 'lakitu-pre-cq',
+      _base_configs['lakitu'],
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+  )
+
+  _CreateConfigsForBoards(no_vmtest_pre_cq, _all_boards, 'no-vmtest-pre-cq')
+  _CreateConfigsForBoards(
+      compile_only_pre_cq, _all_boards, 'compile-only-pre-cq')
+
+  site_config.AddConfig(
+      pre_cq, constants.BINHOST_PRE_CQ,
+      no_vmtest_pre_cq,
+      internal,
+      boards=[],
+      binhost_test=True,
+  )
+
+  # TODO(davidjames): Add peach_pit, nyan, and beaglebone to pre-cq.
+  # TODO(davidjames): Update daisy_spring to build images again.
+  site_config.AddGroup(
+      'mixed-a-pre-cq',
+      # daisy_spring w/kernel 3.8.
+      site_config['daisy_spring-compile-only-pre-cq'],
+      # lumpy w/kernel 3.8.
+      site_config['lumpy-compile-only-pre-cq'],
+  )
+
+  site_config.AddGroup(
+      'mixed-b-pre-cq',
+      # arm64 w/kernel 3.14.
+      site_config['rush_ryu-compile-only-pre-cq'],
+      # samus w/kernel 3.14.
+      site_config['samus-compile-only-pre-cq'],
+  )
+
+  site_config.AddGroup(
+      'mixed-c-pre-cq',
+      # brillo
+      site_config['storm-compile-only-pre-cq'],
+  )
+
+  site_config.AddGroup(
+      'external-mixed-pre-cq',
+      site_config['x86-generic-no-vmtest-pre-cq'],
+      site_config['amd64-generic-no-vmtest-pre-cq'],
+  )
+
+  site_config.AddGroup(
+      'kernel-3_14-a-pre-cq',
+      site_config['x86-generic-no-vmtest-pre-cq'],
+      site_config['arm-generic-no-vmtest-pre-cq']
+  )
+
+  site_config.AddGroup(
+      'kernel-3_14-b-pre-cq',
+      site_config['storm-no-vmtest-pre-cq'],
+  )
+
+  site_config.AddGroup(
+      'kernel-3_14-c-pre-cq',
+      site_config['veyron_pinky-no-vmtest-pre-cq'],
+      site_config['rush_ryu-no-vmtest-pre-cq']
+  )
+
+  site_config.AddConfigWithoutTemplate(
+      'pre-cq-launcher',
+      internal_paladin,
+      no_vmtest_builder,
+      no_hwtest_builder,
+      boards=[],
+      build_type=constants.PRE_CQ_LAUNCHER_TYPE,
+      description='Launcher for Pre-CQ builders',
+      trybot_list=False,
+      manifest_version=False,
+      # Every Pre-CQ launch failure should send out an alert.
+      health_threshold=1,
+      health_alert_recipients=['chromeos-infra-eng@grotations.appspotmail.com',
+                               'tree'],
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Pre-CQ',
+  )
+
+
+  site_config.AddConfig(
+      internal_incremental, 'mario-incremental',
+      boards=['x86-mario'],
+  )
+
+  site_config.AddConfig(
+      internal_incremental, 'lakitu-incremental',
+      _base_configs['lakitu'],
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+  )
+
+  site_config.AddConfigWithoutTemplate(
+      'internal-toolchain-major',
+      _toolchain_major, internal, official,
+      boards=['x86-alex', 'stumpy', 'daisy', 'lakitu'],
+      build_tests=True,
+      description=_toolchain_major['description'] + ' (internal)',
+  )
+
+  site_config.AddConfigWithoutTemplate(
+      'internal-toolchain-minor',
+      _toolchain_minor, internal, official,
+      boards=['x86-alex', 'stumpy', 'daisy', 'lakitu'],
+      build_tests=True,
+      description=_toolchain_minor['description'] + ' (internal)',
+  )
+
+  _release = site_config.AddTemplate(
+      'release',
+      full,
+      official,
+      internal,
+      default_hw_tests_override,
+      build_type=constants.CANARY_TYPE,
+      useflags=append_useflags(['-cros-debug']),
+      build_tests=True,
+      afdo_use=True,
+      manifest=constants.OFFICIAL_MANIFEST,
+      manifest_version=True,
+      images=['base', 'recovery', 'test', 'factory_install'],
+      push_image=True,
+      upload_symbols=True,
+      binhost_bucket='gs://chromeos-dev-installer',
+      binhost_key='RELEASE_BINHOST',
+      binhost_base_url='https://commondatastorage.googleapis.com/'
+                       'chromeos-dev-installer',
+      dev_installer_prebuilts=True,
+      git_sync=False,
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE,
+                constants.DEV_MODE_TEST_TYPE,
+                constants.CROS_VM_TEST_TYPE],
+      hw_tests=HWTestList.SharedPoolCanary(),
+      paygen=True,
+      signer_tests=True,
+      trybot_list=True,
+      hwqual=True,
+      description="Release Builds (canary) (internal)",
+      chrome_sdk=True,
+      image_test=True,
+      doc='http://www.chromium.org/chromium-os/build/builder-overview#'
+          'TOC-Canaries',
+  )
+
+  _grouped_config = config_lib.BuildConfig(
+      build_packages_in_background=True,
+      chrome_sdk_build_chrome=False,
+      unittests=None,
+      vm_tests=[],
+  )
+
+  _grouped_variant_config = _grouped_config.derive(
+      chrome_sdk=False,
+  )
+
+  _grouped_variant_release = _release.derive(_grouped_variant_config)
+
+  ### Master release config.
+
+  site_config.AddConfig(
+      _release, 'master-release',
+      boards=[],
+      master=True,
+      sync_chrome=False,
+      chrome_sdk=False,
+      health_alert_recipients=['chromeos-infra-eng@grotations.appspotmail.com',
+                               'tree'],
+      afdo_use=False,
+      branch_util_test=True,
+  )
+
+  ### Release config groups.
+
+  site_config.AddGroup(
+      'x86-alex-release-group',
+      site_config.AddConfig(
+          _release, 'x86-alex-release',
+          boards=['x86-alex'],
+      ),
+      site_config.AddConfig(
+          _grouped_variant_release, 'x86-alex_he-release',
+          boards=['x86-alex_he'],
+          hw_tests=[],
+          upload_hw_test_artifacts=False,
+          paygen_skip_testing=True,
+      ),
+  )
+
+  site_config.AddGroup(
+      'x86-zgb-release-group',
+      site_config.AddConfig(
+          _release, 'x86-zgb-release',
+          boards=['x86-zgb'],
+      ),
+      site_config.AddConfig(
+          _grouped_variant_release, 'x86-zgb_he-release',
+          boards=['x86-zgb_he'],
+          hw_tests=[],
+          upload_hw_test_artifacts=False,
+          paygen_skip_testing=True,
+      ),
+  )
+
+  ### Release AFDO configs.
+
+  release_afdo = _release.derive(
+      trybot_list=False,
+      hw_tests=(
+          HWTestList.DefaultList(pool=constants.HWTEST_SUITES_POOL, num=4) +
+          HWTestList.AFDOList()
+      ),
+      push_image=False,
+      paygen=False,
+      dev_installer_prebuilts=False,
+  )
+
+  release_afdo_generate = site_config.AddTemplate(
+      config_lib.CONFIG_TYPE_RELEASE_AFDO + '-generate',
+      release_afdo,
+      afdo_generate_min=True,
+      afdo_use=False,
+      afdo_update_ebuild=True,
+
+      hw_tests=[HWTestList.AFDORecordTest()],
+      hw_tests_override=[HWTestList.AFDORecordTest(
+          num=constants.HWTEST_TRYBOT_NUM,
+          pool=constants.HWTEST_TRYBOT_POOL,
+          file_bugs=False,
+          priority=constants.HWTEST_DEFAULT_PRIORITY,
+      )],
+  )
+
+  release_afdo_use = site_config.AddTemplate(
+      config_lib.CONFIG_TYPE_RELEASE_AFDO + '-use',
+      release_afdo,
+      afdo_use=True,
+  )
+
+  # Now generate generic release-afdo configs if we haven't created anything
+  # more specific above already. release-afdo configs are builders that do AFDO
+  # profile collection and optimization in the same builder. Used by developers
+  # that want to measure performance changes caused by their changes.
+  def _AddAFDOConfigs():
+    for board in _all_release_boards:
+      base = _base_configs[board]
+
+      config_name = '%s-%s' % (board, config_lib.CONFIG_TYPE_RELEASE_AFDO)
+      if config_name in site_config:
+        continue
+
+      generate_config_name = (
+          '%s-%s-%s' % (board,
+                        config_lib.CONFIG_TYPE_RELEASE_AFDO,
+                        'generate'))
+      use_config_name = '%s-%s-%s' % (board,
+                                      config_lib.CONFIG_TYPE_RELEASE_AFDO,
+                                      'use')
+
+      # We can't use AFDO data if afdo_use is disabled for this board.
+      if not base.get('afdo_use', True):
+        continue
+
+      site_config.AddGroup(
+          config_name,
+          site_config.AddConfig(
+              release_afdo_generate, generate_config_name, base
+          ),
+          site_config.AddConfig(
+              release_afdo_use, use_config_name, base
+          ),
+      )
+
+  _AddAFDOConfigs()
+
+  ### Release configs.
+
+  _critical_for_chrome_boards = frozenset([
+      'daisy',
+      'lumpy',
+      'parrot',
+  ])
+
+  # bayleybay-release does not enable vm_tests or unittests due to the compiler
+  # flags enabled for baytrail.
+  site_config.AddConfig(
+      _release, 'bayleybay-release',
+      boards=['bayleybay'],
+      hw_tests=[],
+      vm_tests=[],
+      unittests=False,
+  )
+
+  site_config.AddConfig(
+      _release, 'beltino-release',
+      boards=['beltino'],
+      hw_tests=[],
+      vm_tests=[],
+  )
+
+  # bobcat-release does not enable vm_tests or unittests due to the compiler
+  # flags enabled for baytrail.
+  site_config.AddConfig(
+      _release, 'bobcat-release',
+      boards=['bobcat'],
+      hw_tests=[],
+      profile='minimal',
+      # This build doesn't generate signed images, so don't try to release them.
+      paygen=False,
+      signer_tests=False,
+  )
+
+  site_config.AddConfig(
+      _release, 'gizmo-release',
+      _base_configs['gizmo'],
+      important=True,
+      paygen=False,
+      signer_tests=False,
+  )
+
+  site_config.AddConfig(
+      _release, 'samus-release',
+      _base_configs['samus'],
+      important=True,
+  )
+
+  ### Arm release configs.
+
+  site_config.AddConfig(
+      _release, 'veyron_rialto-release',
+      _base_configs['veyron_rialto'],
+      # rialto does not use Chrome.
+      sync_chrome=False,
+      chrome_sdk=False,
+  )
+
+  # Now generate generic release configs if we haven't created anything more
+  # specific above already.
+  def _AddReleaseConfigs():
+    # We have to mark all autogenerated PFQs as not important so the master
+    # does not wait for them.  http://crbug.com/386214
+    # If you want an important PFQ, you'll have to declare it yourself.
+    _CreateConfigsForBoards(
+        chrome_info, _all_release_boards, 'tot-chrome-pfq-informational',
+        important=False)
+    _CreateConfigsForBoards(
+        chrome_pfq, _chrome_pfq_important_boards, 'chrome-pfq')
+    _CreateConfigsForBoards(
+        chrome_pfq, _all_release_boards, 'chrome-pfq', important=False)
+    _CreateConfigsForBoards(
+        _release, _critical_for_chrome_boards, config_lib.CONFIG_TYPE_RELEASE,
+        critical_for_chrome=True)
+    _CreateConfigsForBoards(
+        _release, _all_release_boards, config_lib.CONFIG_TYPE_RELEASE)
+
+  _AddReleaseConfigs()
+
+  site_config.AddConfig(
+      _release, 'panther_embedded-minimal-release',
+      _base_configs['panther_embedded'],
+      profile='minimal',
+      important=True,
+      paygen=False,
+      signer_tests=False,
+  )
+
+  # beaglebone build doesn't generate signed images, so don't try to release
+  # them.
+  _beaglebone_release = _release.derive(beaglebone, paygen=False,
+                                        signer_tests=False,
+                                        images=['base', 'test'])
+
+  site_config.AddGroup(
+      'beaglebone-release-group',
+      site_config.AddConfig(
+          _beaglebone_release, 'beaglebone-release',
+          boards=['beaglebone'],
+      ),
+      site_config.AddConfig(
+          _beaglebone_release, 'beaglebone_servo-release',
+          boards=['beaglebone_servo'],
+          payload_image='base'
+      ).derive(_grouped_variant_config),
+      important=True,
+  )
+
+  site_config.AddConfig(
+      _release, 'kayle-release',
+      _base_configs['kayle'],
+      paygen=False,
+      signer_tests=False,
+  )
+
+  site_config.AddConfig(
+      _release, 'storm-release',
+      _base_configs['storm'],
+
+      # Hw Lab can't test storm, yet.
+      paygen_skip_testing=True,
+      signer_tests=False,
+  )
+
+  moblab_release = site_config.AddTemplate(
+      'moblab-release',
+      _release,
+      description='Moblab release builders',
+      images=['base', 'recovery', 'test'],
+      paygen_skip_delta_payloads=True,
+      # TODO: re-enable paygen testing when crbug.com/386473 is fixed.
+      paygen_skip_testing=True,
+      important=False,
+      afdo_use=False,
+      signer_tests=False,
+      hw_tests=[
+          config_lib.HWTestConfig(constants.HWTEST_MOBLAB_SUITE, blocking=True,
+                                  num=1, timeout=120*60),
+          config_lib.HWTestConfig(constants.HWTEST_BVT_SUITE, blocking=True,
+                                  warn_only=True, num=1),
+          config_lib.HWTestConfig(constants.HWTEST_AU_SUITE, blocking=True,
+                                  warn_only=True, num=1)],
+  )
+
+  site_config.AddConfig(
+      moblab_release, 'stumpy_moblab-release',
+      _base_configs['stumpy_moblab'],
+  )
+
+  site_config.AddConfig(
+      moblab_release, 'guado_moblab-release',
+      _base_configs['guado_moblab'],
+  )
+
+  site_config.AddConfig(
+      moblab_release, 'panther_moblab-release',
+      _base_configs['panther_moblab'],
+  )
+
+  site_config.AddConfig(
+      _release, 'rush-release',
+      _base_configs['rush'],
+      hw_tests=[],
+      # This build doesn't generate signed images, so don't try to release them.
+      paygen=False,
+      signer_tests=False,
+  )
+
+  site_config.AddConfig(
+      _release, 'rush_ryu-release',
+      _base_configs['rush_ryu'],
+      images=['base', 'test', 'factory_install'],
+      dev_installer_prebuilts=False,
+      paygen=False,
+      signer_tests=False,
+      push_image=False,
+      hw_tests=[],
+  )
+
+  site_config.AddConfig(
+      _release, 'veyron_mickey-release',
+      _base_configs['veyron_mickey'],
+      hw_tests=[],
+      vm_tests=[],
+  )
+
+  site_config.AddConfig(
+      _release, 'veyron_romy-release',
+      _base_configs['veyron_romy'],
+      hw_tests=[],
+      vm_tests=[],
+  )
+
+  site_config.AddConfig(
+      _release, 'whirlwind-release',
+      _base_configs['whirlwind'],
+      dev_installer_prebuilts=True,
+  )
+
+  site_config.AddConfig(
+      _release, 'lakitu-release',
+      _base_configs['lakitu'],
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+      important=True,
+  )
+
+  site_config.AddConfig(
+      _release, 'lakitu_mobbuild-release',
+      _base_configs['lakitu_mobbuild'],
+      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE],
+      signer_tests=False,
+      important=True,
+  )
+
+  _wificell_pre_cq = site_config.AddTemplate(
+      'wificell-pre-cq',
+      pre_cq,
+      unittests=False,
+      hw_tests=HWTestList.WiFiCellPoolPreCQ(),
+      hw_tests_override=HWTestList.WiFiCellPoolPreCQ(),
+      archive=True,
+      image_test=False,
+      description='WiFi tests acting as pre-cq for WiFi related changes',
+  )
+
+  site_config.AddGroup(
+      'mixed-wificell-pre-cq',
+      site_config.AddConfig(
+          _wificell_pre_cq,
+          'winky-wificell-pre-cq',
+          _base_configs['winky']),
+      site_config.AddConfig(
+          _wificell_pre_cq,
+          'veyron_speedy-wificell-pre-cq',
+          _base_configs['veyron_speedy']),
+      site_config.AddConfig(
+          _wificell_pre_cq,
+          'veyron_jerry-wificell-pre-cq',
+          _base_configs['veyron_jerry']),
+      site_config.AddConfig(
+          _wificell_pre_cq,
+          'daisy-wificell-pre-cq',
+          _base_configs['daisy']),
+  )
+
+  ### Per-chipset release groups
+
+  def _AddGroupConfig(name, base_board, group_boards=None,
+                      group_variant_boards=None, **kwargs):
+    """Generate full & release group configs."""
+    def _boards_list(x):
+      # Make sure _boards_list is a valid list (not None or tuple)
+      return [] if x is None else list(x)
+
+    group_boards = _boards_list(group_boards)
+    group_variant_boards = _boards_list(group_variant_boards)
+
+    for group in ('release', 'full'):
+      configs = []
+
+      all_boards = [base_board] + group_boards + group_variant_boards
+      desc = '%s; Group config (boards: %s)' % (
+          site_config['%s-%s' % (base_board, group)].description,
+          ', '.join(all_boards))
+
+      for board in all_boards:
+        if board in group_boards:
+          subconfig = _grouped_config
+        elif board in group_variant_boards:
+          subconfig = _grouped_variant_config
+        else:
+          subconfig = {}
+        board_config = '%s-%s' % (board, group)
+        configs.append(site_config[board_config].derive(subconfig, **kwargs))
+
+        config_name = '%s-%s-group' % (name, group)
+        important = group == 'release' and kwargs.get('important', True)
+      site_config.AddGroup(
+          config_name, *configs, description=desc,
+          important=important
+      )
+
+  # pineview chipset boards
+  _AddGroupConfig(
+      'pineview', 'x86-mario', (
+          'x86-alex',
+          'x86-zgb',
+      ), (
+          'x86-alex_he',
+          'x86-zgb_he',
+      )
+  )
+
+  # sandybridge chipset boards
+  _AddGroupConfig(
+      'sandybridge', 'parrot', (
+          'lumpy',
+          'butterfly',
+          'stumpy',
+      )
+  )
+
+  # ivybridge chipset boards
+  _AddGroupConfig(
+      'ivybridge', 'stout', (
+          'link',
+      ), (
+          'parrot_ivb',
+      )
+  )
+
+  # slippy-based haswell boards
+  # TODO(davidjames): Combine slippy and beltino into haswell canary, once we've
+  # optimized our builders more.
+  # slippy itself is deprecated in favor of the below boards, so we don't bother
+  # building it.
+  # TODO(dnj): Re-add peppy canary once builders are allocated.
+  _AddGroupConfig(
+      'slippy', 'peppy', (
+          'falco',
+          'leon',
+          'wolf',
+      ), (
+          'falco_li',
+      )
+  )
+
+  # beltino-based haswell boards
+  # beltino itself is deprecated in favor of the below boards, so we don't
+  # bother building it.
+
+  _AddGroupConfig(
+      'beltino-a', 'panther', (
+          'mccloud',
+      )
+  )
+
+  _AddGroupConfig(
+      'beltino-b', 'monroe', (
+          'tricky',
+          'zako',
+      )
+  )
+
+  # rambi-based boards
+  _AddGroupConfig(
+      'rambi-a', 'rambi', (
+          'clapper',
+          'enguarde',
+          'expresso',
+      )
+  )
+
+  _AddGroupConfig(
+      'rambi-b', 'glimmer', (
+          'gnawty',
+          'kip',
+          'quawks',
+      )
+  )
+
+  _AddGroupConfig(
+      'rambi-c', 'squawks', (
+          'swanky',
+          'winky',
+          'candy',
+      )
+  )
+
+  _AddGroupConfig(
+      'rambi-d', 'banjo', (
+          'ninja',
+          'sumo',
+      ),
+  )
+
+  _AddGroupConfig(
+      'rambi-e', 'orco', (
+          'heli',
+          'wizpig',
+      ),
+  )
+
+  # daisy-based boards
+  _AddGroupConfig(
+      'daisy', 'daisy', (
+          'daisy_spring',
+          'daisy_skate',
+      ),
+  )
+
+  # peach-based boards
+  _AddGroupConfig(
+      'peach', 'peach_pit', (
+          'peach_pi',
+      )
+  )
+
+  # nyan-based boards
+  _AddGroupConfig(
+      'nyan', 'nyan', (
+          'nyan_big',
+          'nyan_blaze',
+          'nyan_kitty',
+      )
+  )
+
+  # auron-based boards
+  _AddGroupConfig(
+      'auron', 'auron', (
+          'auron_yuna',
+          'auron_paine',
+      )
+  )
+
+  _AddGroupConfig(
+      'auron-b', 'lulu', (
+          'gandof',
+          'buddy',
+      ),
+  )
+
+  # veyron-based boards
+  _AddGroupConfig(
+      'veyron', 'veyron_pinky', (
+          'veyron_jerry',
+          'veyron_mighty',
+          'veyron_speedy'
+      ),
+  )
+
+  _AddGroupConfig(
+      'veyron-b', 'veyron_gus', (
+          'veyron_jaq',
+          'veyron_minnie',
+          'veyron_rialto',
+      ),
+  )
+
+  _AddGroupConfig(
+      'veyron-c', 'veyron_brain', (
+          'veyron_danger',
+          'veyron_thea',
+          'veyron_shark',
+      ),
+      important=False,
+  )
+
+  _AddGroupConfig(
+      'veyron-d', 'veyron_mickey', (
+          'veyron_romy',
+      ),
+  )
+
+  # jecht-based boards
+  _AddGroupConfig(
+      'jecht', 'jecht', (
+          'guado',
+          'tidus',
+          'rikku',
+      )
+  )
+
+  # strago-based boards
+  _AddGroupConfig(
+      'strago', 'strago', (
+          'cyan',
+          'celes',
+          'ultima',
+      ),
+      important=False,
+  )
+
+  # oak-based boards
+  _AddGroupConfig(
+      'oak', 'oak', (
+      )
+  )
+
+  # glados-based boards
+  _AddGroupConfig(
+      'glados', 'glados', (
+      ),
+  )
+
+  # storm-based boards
+  _AddGroupConfig(
+      'storm', 'storm', (
+          'arkham',
+          'whirlwind',
+      ),
+      important=False,
+  )
+
+  # kunimitsu-based boards
+  _AddGroupConfig(
+      'kunimitsu', 'kunimitsu', (
+      ),
+  )
+
  # Factory and Firmware releases must inherit from these classes.
+  # Modifications for these release builders should go here.
+
+  # Naming conventions also must be followed. Factory and firmware branches
+  # must end in -factory or -firmware suffixes.
+
+  _factory_release = site_config.AddTemplate(
+      'factory',
+      _release,
+      upload_hw_test_artifacts=False,
+      upload_symbols=False,
+      hw_tests=[],
+      chrome_sdk=False,
+      description='Factory Builds',
+      paygen=False,
+      afdo_use=False,
+  )
+
+  _firmware = config_lib.BuildConfig(
+      no_vmtest_builder,
+      images=[],
+      factory_toolkit=False,
+      packages=['virtual/chromeos-firmware', 'chromeos-base/autotest-all'],
+      usepkg_build_packages=True,
+      sync_chrome=False,
+      build_tests=True,
+      chrome_sdk=False,
+      unittests=False,
+      hw_tests=[],
+      dev_installer_prebuilts=False,
+      upload_hw_test_artifacts=True,
+      upload_symbols=False,
+      useflags=['chromeless_tty'],
+      signer_tests=False,
+      trybot_list=False,
+      paygen=False,
+      image_test=False,
+  )
+
+  _firmware_release = site_config.AddTemplate(
+      'firmware',
+      _release,
+      _firmware,
+      description='Firmware Canary',
+      manifest=constants.DEFAULT_MANIFEST,
+      afdo_use=False,
+  )
+
+  _depthcharge_release = site_config.AddTemplate(
+      'depthcharge-firmware',
+      _firmware_release,
+      useflags=append_useflags(['depthcharge']))
+
+  _depthcharge_full_internal = site_config.AddTemplate(
+      'depthcharge-full-firmware',
+      full,
+      internal,
+      _firmware,
+      useflags=append_useflags(['depthcharge']),
+      description='Firmware Informational',
+  )
+
+  _firmware_boards = frozenset([
+      'auron',
+      'banjo',
+      'bayleybay',
+      'beltino',
+      'butterfly',
+      'candy',
+      'clapper',
+      'cyan',
+      'daisy',
+      'daisy_skate',
+      'daisy_spring',
+      'enguarde',
+      'expresso',
+      'falco',
+      'glimmer',
+      'gnawty',
+      'jecht',
+      'kip',
+      'leon',
+      'link',
+      'lumpy',
+      'monroe',
+      'ninja',
+      'orco',
+      'panther',
+      'parrot',
+      'parry',
+      'peach_pi',
+      'peach_pit',
+      'peppy',
+      'quawks',
+      'rambi',
+      'rikku',
+      'samus',
+      'slippy',
+      'smaug',
+      'squawks',
+      'storm',
+      'stout',
+      'strago',
+      'stumpy',
+      'sumo',
+      'swanky',
+      'winky',
+      'wolf',
+      'x86-mario',
+      'zako',
+  ])
+
+  # Boards that additionally get depthcharge firmware builders
+  # (see _AddFirmwareConfigs below).
+  _x86_depthcharge_firmware_boards = frozenset([
+      'auron',
+      'banjo',
+      'bayleybay',
+      'candy',
+      'clapper',
+      'cyan',
+      'enguarde',
+      'expresso',
+      'glados',
+      'glimmer',
+      'gnawty',
+      'heli',
+      'jecht',
+      'kip',
+      'kunimitsu',
+      'leon',
+      'link',
+      'ninja',
+      'orco',
+      'parry',
+      'quawks',
+      'rambi',
+      'rikku',
+      'samus',
+      'squawks',
+      'strago',
+      'sumo',
+      'swanky',
+      'winky',
+      'zako',
+  ])
+
+
+  def _AddFirmwareConfigs():
+    """Add x86 and arm firmware configs.
+
+    Creates a '<board>-firmware' release config for every board in
+    _firmware_boards, plus '<board>-depthcharge-firmware' and
+    '<board>-depthcharge-full-firmware' configs for every board in
+    _x86_depthcharge_firmware_boards.
+    """
+    for board in _firmware_boards:
+      site_config.AddConfig(
+          _firmware_release,
+          '%s-%s' % (board, config_lib.CONFIG_TYPE_FIRMWARE),
+          _base_configs[board],
+          no_vmtest_builder,
+      )
+
+    for board in _x86_depthcharge_firmware_boards:
+      site_config.AddConfig(
+          _depthcharge_release,
+          '%s-%s-%s' % (board, 'depthcharge', config_lib.CONFIG_TYPE_FIRMWARE),
+          _base_configs[board],
+          no_vmtest_builder,
+      )
+      site_config.AddConfig(
+          _depthcharge_full_internal,
+          '%s-%s-%s-%s' % (board, 'depthcharge', config_lib.CONFIG_TYPE_FULL,
+                           config_lib.CONFIG_TYPE_FIRMWARE),
+          _base_configs[board],
+          no_vmtest_builder,
+      )
+
+  _AddFirmwareConfigs()
+
+
+  # This is an example factory branch configuration.
+  # Modify it to match your factory branch.
+  site_config.AddConfig(
+      _factory_release, 'x86-mario-factory',
+      boards=['x86-mario'],
+  )
+
+  # Template for '<board>-payloads' trybot configs; instantiated per board by
+  # _AddPayloadConfigs below.
+  _payloads = site_config.AddTemplate(
+      'payloads',
+      internal,
+      no_vmtest_builder,
+      no_unittest_builder,
+      no_hwtest_builder,
+      build_type=constants.PAYLOADS_TYPE,
+      builder_class_name='release_builders.GeneratePayloadsBuilder',
+      description='Regenerate release payloads.',
+
+      # Sync to the code used to do the build the first time.
+      manifest_version=True,
+
+      # This is the actual work we want to do.
+      paygen=True,
+
+      upload_hw_test_artifacts=False,
+  )
+
+  def _AddPayloadConfigs():
+    """Create <board>-payloads configs for all payload generating boards.
+
+    We create a config named 'board-payloads' for every board which has a
+    config with 'paygen' True. The idea is that we have a build that generates
+    payloads, we need to have a tryjob to re-attempt them on failure.
+    """
+    payload_boards = set()
+
+    def _search_config_and_children(search_config):
+      # If paygen is enabled, add its boards to our list of payload boards.
+      if search_config['paygen']:
+        for board in search_config['boards']:
+          payload_boards.add(board)
+
+      # Recurse on any child configs.
+      for child in search_config['child_configs']:
+        _search_config_and_children(child)
+
+    # Search all configs for boards that generate payloads.
+    for _, search_config in site_config.iteritems():
+      _search_config_and_children(search_config)
+
+    # Generate a payloads trybot config for every board that generates payloads.
+    for board in payload_boards:
+      name = '%s-payloads' % board
+      site_config.AddConfig(_payloads, name, boards=[board])
+
+  _AddPayloadConfigs()
+
+  # Add special builders to help with cbuildbot development/testing.
+  site_config.Add(
+      'sync-test-cbuildbot',
+      no_hwtest_builder,
+      boards=[],
+      builder_class_name='test_builders.ManifestVersionedSyncBuilder',
+      chroot_replace=True,
+  )
+
+  def _SetupWaterfalls():
+    for name, c in site_config.iteritems():
+      if not c.get('active_waterfall'):
+        c['active_waterfall'] = GetDefaultWaterfall(c)
+
+    # Apply manual configs.
+    for waterfall, names in _waterfall_config_map.iteritems():
+      for name in names:
+        site_config[name]['active_waterfall'] = waterfall
+
+  _SetupWaterfalls()
+
+
+  def _InsertHwTestsOverrideDefaults(build):
+    """Insert default hw_tests values for a given build.
+
+    Also updates child builds.
+
+    Args:
+      build: BuildConfig instance to modify in place.
+    """
+    # Recurse into children first so every config in the tree is covered.
+    for child in build['child_configs']:
+      _InsertHwTestsOverrideDefaults(child)
+
+    if build['hw_tests_override'] is not None:
+      # Explicitly set, no need to insert defaults.
+      return
+
+    if not build['hw_tests']:
+      # No production hw_tests: fall back to the default trybot suite.
+      build['hw_tests_override'] = HWTestList.DefaultList(
+          num=constants.HWTEST_TRYBOT_NUM, pool=constants.HWTEST_TRYBOT_POOL,
+          file_bugs=False)
+    else:
+      # Copy over base tests.
+      build['hw_tests_override'] = [copy.copy(x) for x in build['hw_tests']]
+
+      # Adjust for manual test environment.
+      for hw_config in build['hw_tests_override']:
+        hw_config.num = constants.HWTEST_TRYBOT_NUM
+        hw_config.pool = constants.HWTEST_TRYBOT_POOL
+        hw_config.file_bugs = False
+        hw_config.priority = constants.HWTEST_DEFAULT_PRIORITY
+
+    # TODO: Fix full_release_test.py/AUTest on trybots, crbug.com/390828.
+    build['hw_tests_override'] = [
+        hw_config for hw_config in build['hw_tests_override']
+        if hw_config.suite != constants.HWTEST_AU_SUITE]
+
+  for build in site_config.itervalues():
+    _InsertHwTestsOverrideDefaults(build)
+
+  return site_config
diff --git a/cbuildbot/chromeos_config_unittest b/cbuildbot/chromeos_config_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/chromeos_config_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/chromeos_config_unittest.py b/cbuildbot/chromeos_config_unittest.py
new file mode 100644
index 0000000..82f4c78
--- /dev/null
+++ b/cbuildbot/chromeos_config_unittest.py
@@ -0,0 +1,824 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for config."""
+
+from __future__ import print_function
+
+import mock
+import re
+import cPickle
+
+from chromite.cbuildbot import builders
+from chromite.cbuildbot import chromeos_config
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import config_lib_unittest
+from chromite.cbuildbot import constants
+from chromite.cbuildbot.builders import generic_builders
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+from chromite.lib import osutils
+
+# pylint: disable=protected-access
+
+CHROMIUM_WATCHING_URL = (
+    'http://src.chromium.org/chrome/trunk/tools/build/masters/'
+    'master.chromium.chromiumos/master_chromiumos_cros_cfg.py'
+)
+
+
+class GenerateChromeosConfigTestBase(cros_test_lib.TestCase):
+  """Base class for tests of chromeos_config."""
+
+  def setUp(self):
+    # Generate the full config once per test; subclasses read self.all_configs.
+    self.all_configs = chromeos_config.GetConfig()
+
+
+class ConfigDumpTest(GenerateChromeosConfigTestBase):
+  """Tests related to config_dump.json & chromeos_config.py"""
+
+  def testDump(self):
+    """Make sure the json & config are kept in sync"""
+    new_dump = self.all_configs.SaveConfigToString()
+    old_dump = osutils.ReadFile(constants.CHROMEOS_CONFIG_FILE).rstrip()
+
+    self.assertTrue(
+        new_dump == old_dump, 'config_dump.json does not match the '
+        'configs defined in chromeos_config.py. Run '
+        'bin/cbuildbot_view_config --update_config')
+
+  def testSaveLoadReload(self):
+    """Make sure that loading and reloading the config is a no-op."""
+    dump = self.all_configs.SaveConfigToString()
+    loaded = config_lib.LoadConfigFromString(dump)
+    self.assertEqual(self.all_configs, loaded)
+
+
+class ConfigPickleTest(GenerateChromeosConfigTestBase):
+  """Test that a config object is pickleable."""
+
+  def testPickle(self):
+    # Round-trip a representative config through cPickle and verify the
+    # fields we care about survive.
+    bc1 = self.all_configs['x86-mario-paladin']
+    bc2 = cPickle.loads(cPickle.dumps(bc1))
+
+    self.assertEquals(bc1.boards, bc2.boards)
+    self.assertEquals(bc1.name, bc2.name)
+
+
+class ConfigClassTest(GenerateChromeosConfigTestBase):
+  """Tests of the config class itself."""
+
+  def testAppendUseflags(self):
+    # append_useflags merges flags across derivations; a later '-flag' or
+    # 'flag' replaces the earlier opposite-signed entry.
+    base_config = config_lib.BuildConfig()
+    inherited_config_1 = base_config.derive(
+        useflags=chromeos_config.append_useflags(
+            ['foo', 'bar', '-baz']))
+    inherited_config_2 = inherited_config_1.derive(
+        useflags=chromeos_config.append_useflags(['-bar', 'baz']))
+    self.assertEqual(inherited_config_1.useflags, ['-baz', 'bar', 'foo'])
+    self.assertEqual(inherited_config_2.useflags, ['-bar', 'baz', 'foo'])
+
+
+class CBuildBotTest(GenerateChromeosConfigTestBase):
+  """General tests of chromeos_config."""
+
+  def testConfigsKeysMismatch(self):
+    """Verify that all configs contain exactly the default keys.
+
+    This checks for misspelled keys, or keys that are somehow removed.
+    """
+    expected_keys = set(self.all_configs.GetDefault().iterkeys())
+    for build_name, config in self.all_configs.iteritems():
+      config_keys = set(config.keys())
+
+      extra_keys = config_keys.difference(expected_keys)
+      self.assertFalse(extra_keys, ('Config %s has extra values %s' %
+                                    (build_name, list(extra_keys))))
+
+      missing_keys = expected_keys.difference(config_keys)
+      self.assertFalse(missing_keys, ('Config %s is missing values %s' %
+                                      (build_name, list(missing_keys))))
+
+  def testConfigsHaveName(self):
+    """Configs must have names set, matching their dictionary key."""
+    for build_name, config in self.all_configs.iteritems():
+      self.assertTrue(build_name == config['name'])
+
+  def testConfigUseflags(self):
+    """Useflags must be lists.
+
+    Strings are interpreted as arrays of characters for this, which is not
+    useful.
+    """
+    for build_name, config in self.all_configs.iteritems():
+      useflags = config.get('useflags')
+      if not useflags is None:
+        self.assertTrue(
+            isinstance(useflags, list),
+            'Config %s: useflags should be a list.' % build_name)
+
+  def testBoards(self):
+    """Verify 'boards' is explicitly set for every config."""
+    for build_name, config in self.all_configs.iteritems():
+      self.assertTrue(isinstance(config['boards'], (tuple, list)),
+                      "Config %s doesn't have a list of boards." % build_name)
+      self.assertEqual(len(set(config['boards'])), len(config['boards']),
+                       'Config %s has duplicate boards.' % build_name)
+      # SDK and refresh-packages builders require at least one board; all
+      # other builders should be single-board (or boardless).
+      if config['builder_class_name'] in (
+          'sdk_builders.ChrootSdkBuilder',
+          'misc_builders.RefreshPackagesBuilder'):
+        self.assertTrue(len(config['boards']) >= 1,
+                        'Config %s requires 1 or more boards.' % build_name)
+      else:
+        # TODO: Switch to assert after wificell-pre-cq is fixed.
+        if not len(config['boards']) <= 1:
+          print('Config %s should have <= 1 board.' % build_name)
+        # self.assertTrue(len(config['boards']) <= 1,
+        #                 'Config %s should have <= 1 board.' % build_name)
+
+  def testOverlaySettings(self):
+    """Verify overlays and push_overlays have legal values."""
+    for build_name, config in self.all_configs.iteritems():
+      overlays = config['overlays']
+      push_overlays = config['push_overlays']
+
+      self.assertTrue(overlays in [None, 'public', 'private', 'both'],
+                      'Config %s: has unexpected overlays value.' % build_name)
+      self.assertTrue(
+          push_overlays in [None, 'public', 'private', 'both'],
+          'Config %s: has unexpected push_overlays value.' % build_name)
+
+      if overlays == None:
+        subset = [None]
+      elif overlays == 'public':
+        subset = [None, 'public']
+      elif overlays == 'private':
+        subset = [None, 'private']
+      elif overlays == 'both':
+        subset = [None, 'public', 'private', 'both']
+
+      self.assertTrue(
+          push_overlays in subset,
+          ('Config %s: push_overlays should be a subset of overlays.' %
+           build_name))
+
+  def testOverlayMaster(self):
+    """Verify that only one master is pushing uprevs for each overlay."""
+    # Maps push_overlays value -> name of the master config pushing it.
+    masters = {}
+    for build_name, config in self.all_configs.iteritems():
+      overlays = config['overlays']
+      push_overlays = config['push_overlays']
+      # Branch builders are exempt; only ToT masters are checked.
+      if (overlays and push_overlays and config['uprev'] and config['master']
+          and not config['branch']):
+        other_master = masters.get(push_overlays)
+        err_msg = 'Found two masters for push_overlays=%s: %s and %s'
+        self.assertFalse(
+            other_master, err_msg % (push_overlays, build_name, other_master))
+        masters[push_overlays] = build_name
+
+    # A 'both' master subsumes public and private; no other master may exist.
+    if 'both' in masters:
+      self.assertEquals(len(masters), 1, 'Found too many masters.')
+
+  def testChromeRev(self):
+    """Verify chrome_rev has an expected value"""
+    for build_name, config in self.all_configs.iteritems():
+      self.assertTrue(
+          config['chrome_rev'] in constants.VALID_CHROME_REVISIONS + [None],
+          'Config %s: has unexpected chrome_rev value.' % build_name)
+      # CHROME_REV_LOCAL is never valid in checked-in configs.
+      self.assertFalse(
+          config['chrome_rev'] == constants.CHROME_REV_LOCAL,
+          'Config %s: has unexpected chrome_rev_local value.' % build_name)
+      if config['chrome_rev']:
+        self.assertTrue(
+            config_lib.IsPFQType(config['build_type']),
+            'Config %s: has chrome_rev but is not a PFQ.' % build_name)
+
+  def testValidVMTestType(self):
+    """Verify vm_tests has an expected value"""
+    for build_name, config in self.all_configs.iteritems():
+      if config['vm_tests'] is None:
+        continue
+      for test_type in config['vm_tests']:
+        self.assertTrue(
+            test_type in constants.VALID_VM_TEST_TYPES,
+            'Config %s: has unexpected vm test type value.' % build_name)
+
+  def testImageTestMustHaveBaseImage(self):
+    """Verify image_test build is only enabled with 'base' in images."""
+    for build_name, config in self.all_configs.iteritems():
+      if config.get('image_test', False):
+        self.assertTrue(
+            'base' in config['images'],
+            'Build %s runs image_test but does not have base image' %
+            build_name)
+
+  def testBuildType(self):
+    """Verifies that all configs use valid build types."""
+    for build_name, config in self.all_configs.iteritems():
+      # For builders that have explicit classes, this check doesn't make sense.
+      if config['builder_class_name']:
+        continue
+      self.assertIn(config['build_type'], constants.VALID_BUILD_TYPES,
+                    'Config %s: has unexpected build_type value.' % build_name)
+
+  def testGCCGitHash(self):
+    """Verifies that gcc_githash is not set without setting latest_toolchain."""
+    for build_name, config in self.all_configs.iteritems():
+      if config['gcc_githash']:
+        self.assertTrue(
+            config['latest_toolchain'],
+            'Config %s: has gcc_githash but not latest_toolchain.' % build_name)
+
+  def testBuildToRun(self):
+    """Verify we don't try to run tests without building them."""
+    for build_name, config in self.all_configs.iteritems():
+      # The '-build_tests' USE flag disables test building, so vm_tests
+      # cannot be run alongside it.
+      self.assertFalse(
+          isinstance(config['useflags'], list) and
+          '-build_tests' in config['useflags'] and config['vm_tests'],
+          'Config %s: has vm_tests and use -build_tests.' % build_name)
+
+  def testSyncToChromeSdk(self):
+    """Verify none of the configs build chrome sdk but don't sync chrome."""
+    for build_name, config in self.all_configs.iteritems():
+      if config['sync_chrome'] is not None and not config['sync_chrome']:
+        self.assertFalse(
+            config['chrome_sdk'],
+            'Config %s: has chrome_sdk but not sync_chrome.' % build_name)
+
+  def testOverrideVmTestsOnly(self):
+    """VM/unit tests listed should also be supported."""
+    for build_name, config in self.all_configs.iteritems():
+      if config.vm_tests_override is not None:
+        # Every regular vm test must also appear in the override list.
+        for test in config.vm_tests:
+          self.assertIn(
+              test, config.vm_tests_override,
+              'Config %s: has %s VM test, not in override (%s, %s).' % \
+              (build_name, test, config.vm_tests, config.vm_tests_override))
+
+  def testHWTestsIFFArchivingHWTestArtifacts(self):
+    """Make sure all configs upload artifacts that need them for hw testing."""
+    for build_name, config in self.all_configs.iteritems():
+      if config['hw_tests']:
+        self.assertTrue(
+            config['upload_hw_test_artifacts'],
+            "%s is trying to run hw tests without uploading payloads." %
+            build_name)
+
+  def testValidUnifiedMasterConfig(self):
+    """Make sure any unified master configurations are valid."""
+    for build_name, config in self.all_configs.iteritems():
+      error = 'Unified config for %s has invalid values' % build_name
+      # Unified masters must be internal and must rev both overlays.
+      if config['master']:
+        self.assertTrue(
+            config['internal'] and config['manifest_version'], error)
+      elif not config['master'] and config['manifest_version']:
+        # Unified slaves can rev either public or both depending on whether
+        # they are internal or not.
+        if not config['internal']:
+          self.assertEqual(config['overlays'], constants.PUBLIC_OVERLAYS, error)
+        elif config_lib.IsCQType(config['build_type']):
+          self.assertEqual(config['overlays'], constants.BOTH_OVERLAYS, error)
+
+  def testGetSlaves(self):
+    """Make sure every master has a sane list of slaves"""
+    for build_name, config in self.all_configs.iteritems():
+      if config.master:
+        configs = self.all_configs.GetSlavesForMaster(config)
+        # repr() comparison catches duplicate slave configs.
+        self.assertEqual(
+            len(map(repr, configs)), len(set(map(repr, configs))),
+            'Duplicate board in slaves of %s will cause upload prebuilts'
+            ' failures' % build_name)
+
+        # Our logic for calculating what slaves have completed their critical
+        # stages will break if the master is considered a slave of itself,
+        # because db.GetSlaveStages(...) doesn't include master stages.
+        if config.build_type == constants.PALADIN_TYPE:
+          self.assertEquals(
+              config.boards, [],
+              'Master paladin %s cannot have boards.' % build_name)
+          self.assertNotIn(
+              build_name, [x.name for x in configs],
+              'Master paladin %s cannot be a slave of itself.' % build_name)
+
+  def testGetSlavesOnTrybot(self):
+    """Make sure every master has a sane list of slaves"""
+    # On a remote trybot a master should report no slaves at all.
+    mock_options = mock.Mock()
+    mock_options.remote_trybot = True
+    for _, config in self.all_configs.iteritems():
+      if config['master']:
+        configs = self.all_configs.GetSlavesForMaster(config, mock_options)
+        self.assertEqual([], configs)
+
+  def testFactoryFirmwareValidity(self):
+    """Ensures that firmware/factory branches have at least 1 valid name."""
+    tracking_branch = git.GetChromiteTrackingBranch()
+    for branch in ['firmware', 'factory']:
+      # Only enforced when chromite itself is on a firmware/factory branch.
+      if tracking_branch.startswith(branch):
+        saw_config_for_branch = False
+        for build_name in self.all_configs:
+          if build_name.endswith('-%s' % branch):
+            self.assertFalse('release' in build_name,
+                             'Factory|Firmware release builders should not '
+                             'contain release in their name.')
+            saw_config_for_branch = True
+
+        self.assertTrue(
+            saw_config_for_branch, 'No config found for %s branch. '
+            'As this is the %s branch, all release configs that are being used '
+            'must end in %s.' % (branch, tracking_branch, branch))
+
+  def testBuildTests(self):
+    """Verify that we don't try to use tests without building them."""
+
+    for build_name, config in self.all_configs.iteritems():
+      if not config['build_tests']:
+        for flag in ('factory_toolkit', 'vm_tests', 'hw_tests'):
+          self.assertFalse(
+              config[flag],
+              'Config %s set %s without build_tests.' % (build_name, flag))
+
+  def testAFDOInBackground(self):
+    """Verify that we don't try to build or use AFDO data in the background."""
+    for build_name, config in self.all_configs.iteritems():
+      if config.build_packages_in_background:
+        # It is unsupported to use the build_packages_in_background flags with
+        # the afdo_generate or afdo_generate_min config options.
+        msg = 'Config %s uses build_packages_in_background with afdo_%s'
+        self.assertFalse(config.afdo_generate, msg % (build_name, 'generate'))
+        self.assertFalse(config.afdo_generate_min, msg % (build_name,
+                                                          'generate_min'))
+
+  def testReleaseGroupInBackground(self):
+    """Verify build_packages_in_background settings for release groups.
+
+    For each release group, the first builder should be set to run in the
+    foreground (to build binary packages), and the remainder of the builders
+    should be set to run in parallel (to install the binary packages.)
+    """
+    for build_name, config in self.all_configs.iteritems():
+      if build_name.endswith('-release-group'):
+        # The group container itself is foreground-only.
+        msg = 'Config %s should not build_packages_in_background'
+        self.assertFalse(config.build_packages_in_background, msg % build_name)
+
+        self.assertTrue(
+            config.child_configs,
+            'Config %s should have child configs' % build_name)
+        first_config = config.child_configs[0]
+        msg = 'Primary config for %s should not build_packages_in_background'
+        self.assertFalse(first_config.build_packages_in_background,
+                         msg % build_name)
+
+        msg = 'Child config %s for %s should build_packages_in_background'
+        for child_config in config.child_configs[1:]:
+          self.assertTrue(child_config.build_packages_in_background,
+                          msg % (child_config.name, build_name))
+
+  def testAFDOSameInChildConfigs(self):
+    """Verify that 'afdo_use' is the same for all children in a group."""
+    msg = ('Child config %s for %s should have same value for afdo_use '
+           'as other children')
+    for build_name, config in self.all_configs.iteritems():
+      if build_name.endswith('-group'):
+        # First child's value becomes the reference for the rest.
+        prev_value = None
+        self.assertTrue(config.child_configs,
+                        'Config %s should have child configs' % build_name)
+        for child_config in config.child_configs:
+          if prev_value is None:
+            prev_value = child_config.afdo_use
+          else:
+            self.assertEqual(child_config.afdo_use, prev_value,
+                             msg % (child_config.name, build_name))
+
+  def testReleaseAFDOConfigs(self):
+    """Verify that <board>-release-afdo config have generate and use children.
+
+    These configs should have a 'generate' and a 'use' child config. Also,
+    any 'generate' and 'use' configs should be children of a release-afdo
+    config.
+    """
+    msg = 'Config %s should have %s as a parent'
+    parent_suffix = config_lib.CONFIG_TYPE_RELEASE_AFDO
+    generate_suffix = '%s-generate' % parent_suffix
+    use_suffix = '%s-use' % parent_suffix
+    for build_name, config in self.all_configs.iteritems():
+      if build_name.endswith(parent_suffix):
+        self.assertEqual(
+            len(config.child_configs), 2,
+            'Config %s should have 2 child configs' % build_name)
+        for child_config in config.child_configs:
+          child_name = child_config.name
+          self.assertTrue(child_name.endswith(generate_suffix) or
+                          child_name.endswith(use_suffix),
+                          'Config %s has wrong %s child' %
+                          (build_name, child_config))
+      # Conversely, generate/use configs must have a release-afdo parent.
+      if build_name.endswith(generate_suffix):
+        parent_config_name = build_name.replace(generate_suffix,
+                                                parent_suffix)
+        self.assertTrue(parent_config_name in self.all_configs,
+                        msg % (build_name, parent_config_name))
+      if build_name.endswith(use_suffix):
+        parent_config_name = build_name.replace(use_suffix,
+                                                parent_suffix)
+        self.assertTrue(parent_config_name in self.all_configs,
+                        msg % (build_name, parent_config_name))
+
+  def testNoGrandChildConfigs(self):
+    """Verify that no child configs have a child config."""
+    for build_name, config in self.all_configs.iteritems():
+      for child_config in config.child_configs:
+        for grandchild_config in child_config.child_configs:
+          self.fail('Config %s has grandchild %s' % (build_name,
+                                                     grandchild_config.name))
+
+  def testUseChromeLKGMImpliesInternal(self):
+    """Currently use_chrome_lkgm refers only to internal manifests."""
+    for build_name, config in self.all_configs.iteritems():
+      if config['use_chrome_lkgm']:
+        self.assertTrue(
+            config['internal'],
+            'Chrome lkgm currently only works with an internal manifest: %s' % (
+                build_name,))
+
+  def _HasValidSuffix(self, config_name, config_types):
+    """Given a config_name, see if it has a suffix in config_types.
+
+    Args:
+      config_name: Name of config to compare.
+      config_types: A tuple/list of config suffixes.
+
+    Returns:
+      True, if the config has a suffix matching one of the types.
+    """
+    for config_type in config_types:
+      if config_name.endswith('-' + config_type) or config_name == config_type:
+        return True
+
+    return False
+
+  def testNonOverlappingConfigTypes(self):
+    """Test that a config can only match one build suffix."""
+    # This test belongs in config_lib_unittest, except nobody else cares.
+    for config_type in config_lib.CONFIG_TYPE_DUMP_ORDER:
+      # A type must not be a valid suffix of any of the other types.
+      trimmed_configs = list(config_lib.CONFIG_TYPE_DUMP_ORDER)
+      trimmed_configs.remove(config_type)
+      self.assertFalse(self._HasValidSuffix(config_type, trimmed_configs))
+
+  def testConfigTypesComplete(self):
+    """Verify CONFIG_TYPE_DUMP_ORDER contains all valid config types."""
+    for config_name in self.all_configs:
+      self.assertTrue(
+          self._HasValidSuffix(config_name, config_lib.CONFIG_TYPE_DUMP_ORDER),
+          '%s did not match any types in %s' %
+          (config_name, 'config_lib.CONFIG_TYPE_DUMP_ORDER'))
+
+  def testCantBeBothTypesOfLKGM(self):
+    """Using lkgm and chrome_lkgm doesn't make sense."""
+    for config in self.all_configs.values():
+      self.assertFalse(config['use_lkgm'] and config['use_chrome_lkgm'])
+
+  def testNoDuplicateSlavePrebuilts(self):
+    """Test that no two same-board paladin slaves upload prebuilts."""
+    for cfg in self.all_configs.values():
+      if cfg['build_type'] == constants.PALADIN_TYPE and cfg['master']:
+        slaves = self.all_configs.GetSlavesForMaster(cfg)
+        prebuilt_slaves = [s for s in slaves if s['prebuilts']]
+        # Dictionary from board name to builder name that uploads prebuilt
+        prebuilt_slave_boards = {}
+        for slave in prebuilt_slaves:
+          for board in slave['boards']:
+            self.assertFalse(prebuilt_slave_boards.has_key(board),
+                             'Configs %s and %s both upload prebuilts for '
+                             'board %s.' % (prebuilt_slave_boards.get(board),
+                                            slave['name'],
+                                            board))
+            prebuilt_slave_boards[board] = slave['name']
+
+  def testNoDuplicateWaterfallNames(self):
+    """Tests that no two configs specify same waterfall name."""
+    waterfall_names = set()
+    for config in self.all_configs.values():
+      wn = config['buildbot_waterfall_name']
+      if wn is not None:
+        self.assertNotIn(wn, waterfall_names,
+                         'Duplicate waterfall name %s.' % wn)
+        waterfall_names.add(wn)
+
+  def testCantBeBothTypesOfAFDO(self):
+    """Using afdo_generate and afdo_use together doesn't work."""
+    # The three AFDO flags are mutually exclusive, pairwise.
+    for config in self.all_configs.values():
+      self.assertFalse(config['afdo_use'] and config['afdo_generate'])
+      self.assertFalse(config['afdo_use'] and config['afdo_generate_min'])
+      self.assertFalse(config['afdo_generate'] and config['afdo_generate_min'])
+
+  def testValidPrebuilts(self):
+    """Verify all builders have valid prebuilt values."""
+    for build_name, config in self.all_configs.iteritems():
+      msg = 'Config %s: has unexpected prebuilts value.' % build_name
+      valid_values = (False, constants.PRIVATE, constants.PUBLIC)
+      self.assertTrue(config['prebuilts'] in valid_values, msg)
+
+  def testInternalPrebuilts(self):
+    """Verify internal builders (sans Chrome PFQ) don't make public prebuilts."""
+    for build_name, config in self.all_configs.iteritems():
+      if (config['internal'] and
+          config['build_type'] != constants.CHROME_PFQ_TYPE):
+        msg = 'Config %s is internal but has public prebuilts.' % build_name
+        self.assertNotEqual(config['prebuilts'], constants.PUBLIC, msg)
+
+  def testValidHWTestPriority(self):
+    """Verify that hw test priority is valid."""
+    for build_name, config in self.all_configs.iteritems():
+      for test_config in config['hw_tests']:
+        self.assertTrue(
+            test_config.priority in constants.HWTEST_VALID_PRIORITIES,
+            '%s has an invalid hwtest priority.' % build_name)
+
+  def testAllBoardsExist(self):
+    """Verifies that all config boards are in _all_boards."""
+    for build_name, config in self.all_configs.iteritems():
+      for board in config['boards']:
+        self.assertIn(board, chromeos_config._all_boards,
+                      'Config %s has unknown board %s.' %
+                      (build_name, board))
+
+  # NOTE(review): 'Dependancies' is a typo for 'Dependencies' in these two
+  # method names; kept as-is since renaming test methods can break
+  # test-selection tooling that references them by name.
+  def testPushImagePaygenDependancies(self):
+    """Paygen requires PushImage."""
+    for build_name, config in self.all_configs.iteritems():
+
+      # paygen can't complete without push_image, except for payloads
+      # where --channel arguments meet the requirements.
+      if config['paygen']:
+        self.assertTrue(config['push_image'] or
+                        config['build_type'] == constants.PAYLOADS_TYPE,
+                        '%s has paygen without push_image' % build_name)
+
+  def testPaygenTestDependancies(self):
+    """paygen testing requires upload_hw_test_artifacts."""
+    for build_name, config in self.all_configs.iteritems():
+
+      # This requirement doesn't apply to payloads builds. Payloads are
+      # using artifacts from a previous build.
+      if build_name.endswith('-payloads'):
+        continue
+
+      if config['paygen'] and not config['paygen_skip_testing']:
+        self.assertTrue(config['upload_hw_test_artifacts'],
+                        '%s is not upload_hw_test_artifacts, but also not'
+                        ' paygen_skip_testing' % build_name)
+
+  def testPayloadImageIsBuilt(self):
+    """Verify payload_image is a non-recovery image the config builds."""
+    for build_name, config in self.all_configs.iteritems():
+      if config.payload_image is not None:
+        self.assertNotEqual('recovery', config.payload_image,
+                            '%s wants to generate payloads from recovery '
+                            'images, which is not allowed.' % build_name)
+        self.assertIn(config.payload_image, config.images,
+                      '%s builds payloads from %s, which is not in images '
+                      'list %s' % (build_name, config.payload_image,
+                                   config.images))
+
+  def testBuildPackagesForRecoveryImage(self):
+    """Tests that we build the packages required for recovery image."""
+    for build_name, config in self.all_configs.iteritems():
+      if 'recovery' in config.images:
+        if not config.packages:
+          # No packages are specified. Defaults to build all packages.
+          continue
+
+        # An explicit package list must include the initramfs package the
+        # recovery image is created from.
+        self.assertIn('chromeos-base/chromeos-initramfs',
+                      config.packages,
+                      '%s does not build chromeos-initramfs, which is required '
+                      'for creating the recovery image' % build_name)
+
+  def testBuildRecoveryImageFlags(self):
+    """Ensure the right flags are disabled when building the recovery image."""
+    # These flags need the recovery image to exist in order to work.
+    incompatible_flags = ['paygen', 'signer_tests']
+    for build_name, config in self.all_configs.iteritems():
+      for flag in incompatible_flags:
+        # Payloads-type builds are exempt: they reuse prior-build artifacts.
+        if config[flag] and config.build_type != constants.PAYLOADS_TYPE:
+          self.assertIn('recovery', config.images,
+                        '%s does not build the recovery image, which is '
+                        'incompatible with %s=True' % (build_name, flag))
+
+  def testBuildBaseImageForRecoveryImage(self):
+    """Tests that we build the base image required for the recovery image."""
+    for build_name, config in self.all_configs.iteritems():
+      if 'recovery' in config.images:
+        self.assertIn('base', config.images,
+                      '%s does not build the base image, which is required for '
+                      'building the recovery image' % build_name)
+
+  def testChildConfigsNotImportantInReleaseGroup(self):
+    """Verify that configs in an important group are not important."""
+    msg = ('Child config %s for %s should not be important because %s is '
+           'already important')
+    for build_name, config in self.all_configs.iteritems():
+      # Importance is carried by the release group itself; children must
+      # not duplicate it.
+      if build_name.endswith('-release-group') and config['important']:
+        for child_config in config.child_configs:
+          self.assertFalse(child_config.important,
+                           msg % (child_config.name, build_name, build_name))
+
+  def testFullCQBuilderDoNotRunHWTest(self):
+    """Full CQ configs should not run HWTest."""
+    msg = ('%s should not be a full builder and run HWTest for '
+           'performance reasons')
+    for build_name, config in self.all_configs.iteritems():
+      # A paladin may be chrome_binhost_only or run hw_tests, but not both.
+      if config.build_type == constants.PALADIN_TYPE:
+        self.assertFalse(config.chrome_binhost_only and config.hw_tests,
+                         msg % build_name)
+
+  def testExternalConfigsDoNotUseInternalFeatures(self):
+    """External configs should not use chrome_internal, or official.xml."""
+    msg = ('%s is not internal, so should not use chrome_internal, or an '
+           'internal manifest')
+    for build_name, config in self.all_configs.iteritems():
+      if not config['internal']:
+        self.assertFalse('chrome_internal' in config['useflags'],
+                         msg % build_name)
+        # The official manifest is internal-only as well.
+        self.assertNotEqual(config.get('manifest'),
+                            constants.OFFICIAL_MANIFEST,
+                            msg % build_name)
+
+  def testNoShadowedUseflags(self):
+    """Configs should not have both useflags x and -x."""
+    msg = ('%s contains useflag %s and -%s.')
+    for build_name, config in self.all_configs.iteritems():
+      useflag_set = set(config['useflags'])
+      for flag in useflag_set:
+        # For every enabled flag, its negation must be absent.
+        if not flag.startswith('-'):
+          self.assertFalse('-' + flag in useflag_set,
+                           msg % (build_name, flag, flag))
+
+  def testHealthCheckEmails(self):
+    """Configs should only have valid email addresses or aliases"""
+    msg = ('%s contains an invalid tree alias or email address: %s')
+    for build_name, config in self.all_configs.iteritems():
+      health_alert_recipients = config['health_alert_recipients']
+      for recipient in health_alert_recipients:
+        # Loose sanity check: 'something@domain.tld', or a known sheriff
+        # alias from SHERIFF_TYPE_TO_URL.
+        self.assertTrue(re.match(r'[^@]+@[^@]+\.[^@]+', recipient) or
+                        recipient in constants.SHERIFF_TYPE_TO_URL.keys(),
+                        msg % (build_name, recipient))
+
+  def testCheckBuilderClass(self):
+    """Verify builder_class_name is a valid value."""
+    for build_name, config in self.all_configs.iteritems():
+      builder_class_name = config['builder_class_name']
+      # Configs without an explicit builder class are exempt.
+      if builder_class_name is None:
+        continue
+
+      # The named class must resolve and derive from the generic Builder.
+      cls = builders.GetBuilderClass(builder_class_name)
+      self.assertTrue(issubclass(cls, generic_builders.Builder),
+                      msg=('config %s has a broken builder_class_name' %
+                           build_name))
+
+  def testDistinctBoardSets(self):
+    """Verify that distinct board sets are distinct."""
+    # Every board should be in exactly one of the distinct board sets.
+    for board in chromeos_config._all_boards:
+      found = False
+      for s in chromeos_config._distinct_board_sets:
+        if board in s:
+          if found:
+            assert False, '%s in multiple board sets.' % board
+          else:
+            found = True
+      if not found:
+        assert False, '%s in no board sets' % board
+    # Conversely, no set may contain a board that isn't in _all_boards.
+    for s in chromeos_config._distinct_board_sets:
+      for board in s - chromeos_config._all_boards:
+        assert False, ('%s in _distinct_board_sets but not in _all_boards' %
+                       board)
+
+
+class OverrideForTrybotTest(GenerateChromeosConfigTestBase):
+  """Test config override functionality."""
+
+  def testVmTestOverride(self):
+    """Verify that vm_tests override for trybots pay heed to original config."""
+    mock_options = mock.Mock()
+    old = self.all_configs['x86-mario-paladin']
+    new = config_lib.OverrideConfigForTrybot(old, mock_options)
+    self.assertEquals(new['vm_tests'], [constants.SMOKE_SUITE_TEST_TYPE,
+                                        constants.SIMPLE_AU_TEST_TYPE,
+                                        constants.CROS_VM_TEST_TYPE])
+
+    # Don't override vm tests for arm boards.
+    old = self.all_configs['daisy-paladin']
+    new = config_lib.OverrideConfigForTrybot(old, mock_options)
+    self.assertEquals(new['vm_tests'], old['vm_tests'])
+
+    # Don't override vm tests for brillo boards.
+    old = self.all_configs['storm-paladin']
+    new = config_lib.OverrideConfigForTrybot(old, mock_options)
+    self.assertEquals(new['vm_tests'], old['vm_tests'])
+
+  def testWaterfallManualConfigIsValid(self):
+    """Verify the correctness of the manual waterfall configuration."""
+    all_build_names = set(self.all_configs.iterkeys())
+    redundant = set()
+    seen = set()
+    waterfall_iter = chromeos_config._waterfall_config_map.iteritems()
+    for waterfall, names in waterfall_iter:
+      for build_name in names:
+        # Every build in the configuration map must be valid.
+        self.assertTrue(build_name in all_build_names,
+                        "Invalid build name in manual waterfall config: %s" % (
+                            build_name,))
+        # No build should appear in multiple waterfalls.
+        self.assertFalse(build_name in seen,
+                         "Duplicate manual config for board: %s" % (
+                             build_name,))
+        seen.add(build_name)
+
+        # The manual configuration must be applied and override any default
+        # configuration.
+        config = self.all_configs[build_name]
+        self.assertEqual(config['active_waterfall'], waterfall,
+                         "Manual waterfall membership is not in the "
+                         "configuration for: %s" % (build_name,))
+
+
+        # Record manual entries that merely restate the default assignment.
+        default_waterfall = chromeos_config.GetDefaultWaterfall(config)
+        if config['active_waterfall'] == default_waterfall:
+          redundant.add(build_name)
+
+    # No configurations should be redundant with defaults.
+    self.assertFalse(redundant,
+                     "Manual waterfall membership in "
+                     "`_waterfall_config_map` is redundant for these "
+                     "configs: %s" % (sorted(redundant),))
+
+  def testNoDuplicateCanaryBuildersOnWaterfall(self):
+    """Canary builders (including children) must be unique per waterfall."""
+    seen = {}
+    for config in self.all_configs.itervalues():
+      waterfall = config['active_waterfall']
+      btype = config['build_type']
+      if not (waterfall and config_lib.IsCanaryType(btype)):
+        continue
+
+      # Walk this config and all of its child configs (depth-first) and
+      # check each name against the set already seen on this waterfall.
+      waterfall_seen = seen.setdefault(waterfall, set())
+      stack = [config]
+      while stack:
+        current_config = stack.pop()
+        self.assertNotIn(current_config['name'], waterfall_seen,
+                         "Multiple builders for '%s' on '%s' waterfall" % (
+                             current_config['name'], waterfall))
+        waterfall_seen.add(current_config['name'])
+        stack += current_config['child_configs']
+
+  def testBinhostTest(self):
+    """Builders with the binhost_test setting shouldn't have boards."""
+    for config in self.all_configs.values():
+      if config.binhost_test:
+        self.assertEqual(config.boards, [])
+
+
+class TemplateTest(GenerateChromeosConfigTestBase):
+  """Tests for templates."""
+
+  def testTemplatesUsed(self):
+    """Test that all templates are used."""
+    # None covers configs that use no template at all.
+    templates_used = set(cfg['_template'] for cfg in self.all_configs.values())
+    templates = set([None] + self.all_configs.GetTemplates().keys())
+    self.assertEqual(templates, templates_used)
+
+  def testConfigNamesMatchTemplate(self):
+    """Test that all configs have names that match their templates."""
+    for name, config in self.all_configs.iteritems():
+      template = config._template
+      if template:
+        child_configs = config.child_configs
+        if not child_configs:
+          msg = '%s should end with %s to match its template'
+          self.assertTrue(name.endswith(template), msg % (name, template))
+        else:
+          # For grouped configs, check the first child's name instead.
+          msg = 'Child config of %s has name that does not match its template'
+          self.assertTrue(child_configs[0].name.endswith(template),
+                          msg % name)
+
+      for other in self.all_configs.GetTemplates():
+        if name.endswith(other) and other != template:
+          if template:
+            msg = '%s has more specific template: %s' % (name, other)
+            self.assertGreater(len(template), len(other), msg)
+          else:
+            # assertFalse on a non-empty name always fails; this is an
+            # unconditional self.fail(msg) in disguise.
+            msg = '%s should have %s as template' % (name, other)
+            self.assertFalse(name, msg)
+
+
+class SiteInterfaceTest(GenerateChromeosConfigTestBase):
+  """Test enforcing site parameters for a chromeos SiteConfig."""
+
+  def testAssertSiteParameters(self):
+    """Test that a chromeos SiteConfig contains the necessary parameters."""
+    # Check that our config contains site-independent parameters.
+    self.assertTrue(
+        config_lib_unittest.AssertSiteIndependentParameters(self.all_configs))
+
+    # Enumerate the necessary chromeos site parameter keys.
+    chromeos_params = config_lib.DefaultSiteParameters().keys()
+
+    # Check that our config contains all chromeos specific site parameters.
+    # (Superset is fine; only missing keys fail.)
+    site_params = self.all_configs.params
+    self.assertTrue(all([x in site_params for x in chromeos_params]))
diff --git a/cbuildbot/chromeos_version_test.sh b/cbuildbot/chromeos_version_test.sh
new file mode 100755
index 0000000..ebd6801
--- /dev/null
+++ b/cbuildbot/chromeos_version_test.sh
@@ -0,0 +1,55 @@
+#!/bin/sh
+
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# ChromeOS version information
+#
+# This file is usually sourced by other build scripts, but can be run
+# directly to see what it would do.
+#
+# Version numbering scheme is much like Chrome's, with the addition of
+# double-incrementing branch number so trunk is always odd.
+
+#############################################################################
+# SET VERSION NUMBERS
+#############################################################################
+# Major/minor versions.
+# Primarily for product marketing.
+export CHROMEOS_VERSION_MAJOR=0
+export CHROMEOS_VERSION_MINOR=13
+
+# Branch number.
+# Increment by 1 in a new release branch.
+# Increment by 2 in trunk after making a release branch.
+# Does not reset on a major/minor change (always increases).
+# (Trunk is always odd; branches are always even).
+export CHROMEOS_VERSION_BRANCH=507
+
+# Patch number.
+# Increment by 1 each release on a branch.
+# Reset to 0 when increasing branch number.
+export CHROMEOS_VERSION_PATCH=87
+
+# Official builds must set CHROMEOS_OFFICIAL=1.
+if [ ${CHROMEOS_OFFICIAL:-0} -ne 1 ] && [ "${USER}" != "chrome-bot" ]; then
+  # For developer builds, overwrite CHROMEOS_VERSION_PATCH with a date string
+  # for use by auto-updater.
+  export CHROMEOS_VERSION_PATCH=$(date +%Y_%m_%d_%H%M)
+fi
+
+# Version string. Not indented, to appease bash.
+export CHROMEOS_VERSION_STRING=\
+"${CHROMEOS_VERSION_MAJOR}.${CHROMEOS_VERSION_MINOR}"\
+".${CHROMEOS_VERSION_BRANCH}.${CHROMEOS_VERSION_PATCH}"
+
+# Set CHROME values (Used for releases) to pass to chromeos-chrome-bin ebuild
+# URL to chrome archive
+export CHROME_BASE=
+# export CHROME_VERSION from incoming value or NULL and let ebuild default
+export CHROME_VERSION="$CHROME_VERSION"
+
+# Print (and remember) version info.
+echo "ChromeOS version information:"
+env | egrep '^CHROMEOS_VERSION|CHROME_' | sed 's/^/    /'
diff --git a/cbuildbot/chroot_lib.py b/cbuildbot/chroot_lib.py
new file mode 100644
index 0000000..970fc2f
--- /dev/null
+++ b/cbuildbot/chroot_lib.py
@@ -0,0 +1,95 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions for managing chroots.
+
+Currently this just contains functions for reusing chroots for incremental
+building.
+"""
+
+from __future__ import print_function
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import sudo
+
+
+# Path, relative to the chroot root, of the file recording the manifest
+# version the chroot was built from.
+CHROOT_VERSION_FILE = 'etc/cros_manifest_version'
+
+
+class ChrootManager(object):
+  """Class for managing chroots and chroot versions."""
+
+  def __init__(self, build_root):
+    """Constructor.
+
+    Args:
+      build_root: The root of the checkout.
+    """
+    self._build_root = build_root
+
+  def _ChrootVersionPath(self, chroot=None):
+    """Get the path to the chroot version file for |chroot|.
+
+    Args:
+      chroot: Path to chroot. Defaults to 'chroot' under build root.
+
+    Returns:
+      The path to the chroot version file.
+    """
+    if chroot is None:
+      chroot = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR)
+    return os.path.join(chroot, CHROOT_VERSION_FILE)
+
+  def GetChrootVersion(self, chroot=None):
+    """Get the version of the checkout used to create |chroot|.
+
+    Args:
+      chroot: Path to chroot. Defaults to 'chroot' under build root.
+
+    Returns:
+      The version of Chrome OS used to build |chroot|. E.g. 6394.0.0-rc3.
+      If the chroot does not exist, or there is no version file, returns None.
+    """
+    chroot_version_file = self._ChrootVersionPath(chroot)
+    if not os.path.exists(chroot_version_file):
+      return None
+    return osutils.ReadFile(chroot_version_file).strip()
+
+  def EnsureChrootAtVersion(self, version):
+    """Ensure the current chroot is at version |version|.
+
+    If our chroot has version, use it. Otherwise, blow away the chroot.
+
+    Args:
+      version: Version of the chroot to look for. E.g. 6394.0.0-rc3
+    """
+    chroot = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR)
+    if version and self.GetChrootVersion(chroot) == version:
+      logging.PrintBuildbotStepText('(Using existing chroot)')
+    else:
+      # Version mismatch (or no version requested): delete the whole chroot
+      # so it gets rebuilt from scratch.
+      logging.PrintBuildbotStepText('(Using fresh chroot)')
+      osutils.RmDir(chroot, ignore_missing=True, sudo=True)
+
+  def ClearChrootVersion(self, chroot=None):
+    """Clear the version in the specified |chroot|.
+
+    Args:
+      chroot: Path to chroot. Defaults to 'chroot' under build root.
+    """
+    chroot_version_file = self._ChrootVersionPath(chroot)
+    # NOTE(review): RmDir is invoked on a file path here; presumably
+    # osutils.RmDir with sudo=True handles non-directories — confirm.
+    osutils.RmDir(chroot_version_file, ignore_missing=True, sudo=True)
+
+  def SetChrootVersion(self, version, chroot=None):
+    """Update the version file in the chroot to |version|.
+
+    Args:
+      version: Version to use. E.g. 6394.0.0-rc3
+      chroot: Path to chroot. Defaults to 'chroot' under build root.
+    """
+    chroot_version_file = self._ChrootVersionPath(chroot)
+    # Only write if the chroot's etc/ directory already exists; a missing
+    # chroot is silently left untouched.
+    if os.path.exists(os.path.dirname(chroot_version_file)):
+      sudo.SetFileContents(chroot_version_file, version)
diff --git a/cbuildbot/chroot_lib_unittest b/cbuildbot/chroot_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/chroot_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/chroot_lib_unittest.py b/cbuildbot/chroot_lib_unittest.py
new file mode 100644
index 0000000..f40476f
--- /dev/null
+++ b/cbuildbot/chroot_lib_unittest.py
@@ -0,0 +1,59 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for chroot management functions."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import chroot_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+
+
+class TestChrootManager(cros_test_lib.TempDirTestCase):
+  """Class that tests the ChrootManager."""
+
+  sudo_cleanup = True
+
+  def setUp(self):
+    self.chroot_manager = chroot_lib.ChrootManager(self.tempdir)
+
+  def testGetChrootVersionWithNoChroot(self):
+    """If there's no chroot, GetChrootVersion returns None."""
+    # 'foo' is a nonexistent chroot path, so no version file exists.
+    self.assertIsNone(self.chroot_manager.GetChrootVersion('foo'))
+
+  def testSetChrootVersionWithNoChroot(self):
+    """If there's no chroot, SetChrootVersion does nothing."""
+    self.chroot_manager.SetChrootVersion('foo')
+    self.assertIsNone(self.chroot_manager.GetChrootVersion())
+
+  def testSetChrootVersionWithChroot(self):
+    """SetChrootVersion sets the chroot version."""
+    osutils.SafeMakedirs(os.path.join(self.tempdir, 'chroot', 'etc'))
+    self.chroot_manager.SetChrootVersion('foo')
+    self.assertEquals('foo', self.chroot_manager.GetChrootVersion())
+
+  def testClearChrootVersion(self):
+    """ClearChrootVersion removes a previously-set chroot version."""
+    osutils.SafeMakedirs(os.path.join(self.tempdir, 'chroot', 'etc'))
+    self.chroot_manager.SetChrootVersion('foo')
+    self.assertEquals('foo', self.chroot_manager.GetChrootVersion())
+    self.chroot_manager.ClearChrootVersion()
+    self.assertIsNone(self.chroot_manager.GetChrootVersion())
+
+  def testUseExistingChroot(self):
+    """Tests that EnsureChrootAtVersion succeeds with valid chroot."""
+    chroot = os.path.join(self.tempdir, 'chroot')
+    osutils.SafeMakedirs(os.path.join(chroot, 'etc'))
+    self.chroot_manager.SetChrootVersion('foo')
+    self.chroot_manager.EnsureChrootAtVersion('foo')
+    self.assertEquals(self.chroot_manager.GetChrootVersion(chroot), 'foo')
+
+  def testUseFreshChroot(self):
+    """Tests that EnsureChrootAtVersion succeeds with invalid chroot."""
+    chroot = os.path.join(self.tempdir, 'chroot')
+    # No chroot exists, so EnsureChrootAtVersion falls back to a fresh one
+    # and no version is recorded.
+    self.chroot_manager.EnsureChrootAtVersion('foo')
+    self.assertEquals(self.chroot_manager.GetChrootVersion(chroot), None)
diff --git a/cbuildbot/commands.py b/cbuildbot/commands.py
new file mode 100644
index 0000000..84e6d96
--- /dev/null
+++ b/cbuildbot/commands.py
@@ -0,0 +1,2374 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the various individual commands a builder can run."""
+
+from __future__ import print_function
+
+import base64
+import collections
+import fnmatch
+import glob
+import json
+import multiprocessing
+import os
+import re
+import shutil
+import sys
+import tempfile
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import swarming_lib
+from chromite.cbuildbot import topology
+from chromite.cli.cros.tests import cros_vm_test
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import gob_util
+from chromite.lib import gs
+from chromite.lib import locking
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import path_util
+from chromite.lib import portage_util
+from chromite.lib import retry_util
+from chromite.lib import timeout_util
+from chromite.scripts import pushimage
+
+site_config = config_lib.GetConfig()
+
+
+# Path templates used by the build commands below; %(...)s placeholders are
+# filled in at call sites.
+_PACKAGE_FILE = '%(buildroot)s/src/scripts/cbuildbot_package.list'
+CHROME_KEYWORDS_FILE = ('/build/%(board)s/etc/portage/package.keywords/chrome')
+CHROME_UNMASK_FILE = ('/build/%(board)s/etc/portage/package.unmask/chrome')
+_CROS_ARCHIVE_URL = 'CROS_ARCHIVE_URL'
+_FACTORY_SHIM = 'factory_shim'
+_AUTOTEST_RPC_CLIENT = ('/b/build_internal/scripts/slave-internal/autotest_rpc/'
+                        'autotest_rpc_client.py')
+_AUTOTEST_RPC_HOSTNAME = 'master2'
+_LOCAL_BUILD_FLAGS = ['--nousepkg', '--reuse_pkgs_from_local_boards']
+UPLOADED_LIST_FILENAME = 'UPLOADED'
+STATEFUL_FILE = 'stateful.tgz'
+# For sorting through VM test results.
+_TEST_REPORT_FILENAME = 'test_report.log'
+_TEST_PASSED = 'PASSED'
+_TEST_FAILED = 'FAILED'
+# For swarming proxy
+_SWARMING_ADDITIONAL_TIMEOUT = 60 * 60
+_DEFAULT_HWTEST_TIMEOUT_MINS = 1440
+_SWARMING_EXPIRATION = 20 * 60
+# Paths inside the autotest server environment.
+_RUN_SUITE_PATH = '/usr/local/autotest/site_utils/run_suite.py'
+_ABORT_SUITE_PATH = '/usr/local/autotest/site_utils/abort_suite.py'
+
+
+# =========================== Command Helpers =================================
+
+def RunBuildScript(buildroot, cmd, chromite_cmd=False, **kwargs):
+  """Run a build script, wrapping exceptions as needed.
+
+  This wraps RunCommand(cmd, cwd=buildroot, **kwargs), adding extra logic to
+  help determine the cause of command failures.
+    - If a package fails to build, a PackageBuildFailure exception is thrown,
+      which lists exactly which packages failed to build.
+    - If the command fails for a different reason, a BuildScriptFailure
+      exception is thrown.
+
+  We detect what packages failed to build by creating a temporary status file,
+  and passing that status file to parallel_emerge via the
+  PARALLEL_EMERGE_STATUS_FILE variable.
+
+  Args:
+    buildroot: The root of the build directory.
+    cmd: The command to run.
+    chromite_cmd: Whether the command should be evaluated relative to the
+      chromite/bin subdir of the |buildroot|.
+    kwargs: Optional args passed to RunCommand; see RunCommand for specifics.
+      In addition, if 'sudo' kwarg is True, SudoRunCommand will be used.
+
+  Returns:
+    The result of the underlying RunCommand/SudoRunCommand call on success.
+
+  Raises:
+    failures_lib.PackageBuildFailure: if specific packages failed to build.
+    failures_lib.BuildScriptFailure: for any other command failure.
+  """
+  assert not kwargs.get('shell', False), 'Cannot execute shell commands'
+  kwargs.setdefault('cwd', buildroot)
+  enter_chroot = kwargs.get('enter_chroot', False)
+  sudo = kwargs.pop('sudo', False)
+
+  if chromite_cmd:
+    cmd = cmd[:]
+    cmd[0] = os.path.join(buildroot, constants.CHROMITE_BIN_SUBDIR, cmd[0])
+    if enter_chroot:
+      cmd[0] = path_util.ToChrootPath(cmd[0])
+
+  # If we are entering the chroot, create status file for tracking what
+  # packages failed to build.
+  chroot_tmp = os.path.join(buildroot, 'chroot', 'tmp')
+  status_file = None
+  with cros_build_lib.ContextManagerStack() as stack:
+    if enter_chroot and os.path.exists(chroot_tmp):
+      kwargs['extra_env'] = (kwargs.get('extra_env') or {}).copy()
+      status_file = stack.Add(tempfile.NamedTemporaryFile, dir=chroot_tmp)
+      kwargs['extra_env'][constants.PARALLEL_EMERGE_STATUS_FILE_ENVVAR] = \
+          path_util.ToChrootPath(status_file.name)
+    runcmd = cros_build_lib.RunCommand
+    if sudo:
+      runcmd = cros_build_lib.SudoRunCommand
+    try:
+      return runcmd(cmd, **kwargs)
+    except cros_build_lib.RunCommandError as ex:
+      # Print the original exception.
+      logging.error('\n%s', ex)
+
+      # Check whether a specific package failed. If so, wrap the exception
+      # appropriately. These failures are usually caused by a recent CL, so we
+      # don't ever treat these failures as flaky.
+      if status_file is not None:
+        status_file.seek(0)
+        failed_packages = status_file.read().split()
+        if failed_packages:
+          raise failures_lib.PackageBuildFailure(ex, cmd[0], failed_packages)
+
+      # Looks like a generic failure. Raise a BuildScriptFailure.
+      raise failures_lib.BuildScriptFailure(ex, cmd[0])
+
+
+def ValidateClobber(buildroot):
+  """Do due diligence if user wants to clobber buildroot.
+
+  Args:
+    buildroot: buildroot that's potentially clobbered.
+
+  Returns:
+    True if the clobber is ok.
+  """
+  # Refuse outright if the clobber would delete this very checkout or
+  # the whole filesystem.
+  cwd = os.path.dirname(os.path.realpath(__file__))
+  if cwd.startswith(buildroot):
+    cros_build_lib.Die('You are trying to clobber this chromite checkout!')
+
+  if buildroot == '/':
+    cros_build_lib.Die('Refusing to clobber your system!')
+
+  # An existing buildroot needs interactive confirmation; a missing one is
+  # trivially safe to "clobber".
+  if os.path.exists(buildroot):
+    return cros_build_lib.BooleanPrompt(default=False)
+  return True
+
+
+# =========================== Main Commands ===================================
+
+
+def BuildRootGitCleanup(buildroot):
+  """Put buildroot onto manifest branch. Delete branches created on last run.
+
+  Args:
+    buildroot: buildroot to clean up.
+  """
+  lock_path = os.path.join(buildroot, '.clean_lock')
+  deleted_objdirs = multiprocessing.Event()
+
+  def RunCleanupCommands(project, cwd):
+    # Hold a read lock so many projects can clean concurrently; it is
+    # upgraded to a write lock below before deleting shared state.
+    with locking.FileLock(lock_path, verbose=False).read_lock() as lock:
+      # Calculate where the git repository is stored.
+      relpath = os.path.relpath(cwd, buildroot)
+      projects_dir = os.path.join(buildroot, '.repo', 'projects')
+      project_objects_dir = os.path.join(buildroot, '.repo', 'project-objects')
+      repo_git_store = '%s.git' % os.path.join(projects_dir, relpath)
+      repo_obj_store = '%s.git' % os.path.join(project_objects_dir, project)
+
+      try:
+        if os.path.isdir(cwd):
+          git.CleanAndDetachHead(cwd)
+          git.GarbageCollection(cwd)
+      except cros_build_lib.RunCommandError as e:
+        result = e.result
+        logging.PrintBuildbotStepWarnings()
+        logging.warning('\n%s', result.error)
+
+        # If there's no repository corruption, just delete the index.
+        corrupted = git.IsGitRepositoryCorrupted(cwd)
+        lock.write_lock()
+        logging.warning('Deleting %s because %s failed', cwd, result.cmd)
+        osutils.RmDir(cwd, ignore_missing=True)
+        if corrupted:
+          # Looks like the object dir is corrupted. Delete the whole repository.
+          deleted_objdirs.set()
+          for store in (repo_git_store, repo_obj_store):
+            logging.warning('Deleting %s as well', store)
+            osutils.RmDir(store, ignore_missing=True)
+
+      # Delete all branches created by cbuildbot.
+      if os.path.isdir(repo_git_store):
+        cmd = ['branch', '-D'] + list(constants.CREATED_BRANCHES)
+        git.RunGit(repo_git_store, cmd, error_code_ok=True)
+
+      if os.path.isdir(cwd):
+        # Above we deleted refs/heads/<branch> for each created branch, now we
+        # need to delete the bare ref <branch> if it was created somehow.
+        for ref in constants.CREATED_BRANCHES:
+          git.RunGit(cwd, ['update-ref', '-d', ref])
+
+
+  # Cleanup all of the directories.
+  dirs = [[attrs['name'], os.path.join(buildroot, attrs['path'])] for attrs in
+          git.ManifestCheckout.Cached(buildroot).ListCheckouts()]
+  parallel.RunTasksInProcessPool(RunCleanupCommands, dirs)
+
+  # repo shares git object directories amongst multiple project paths. If the
+  # first pass deleted an object dir for a project path, then other repositories
+  # (project paths) of that same project may now be broken. Do a second pass to
+  # clean them up as well.
+  if deleted_objdirs.is_set():
+    parallel.RunTasksInProcessPool(RunCleanupCommands, dirs)
+
+
+def CleanUpMountPoints(buildroot):
+  """Cleans up any stale mount points from previous runs.
+
+  Args:
+    buildroot: Directory whose nested mount points should be unmounted.
+  """
+  # Scrape it from /proc/mounts since it's easily accessible;
+  # additionally, unmount in reverse order of what's listed there
+  # rather than trying a reverse sorting; it's possible for
+  # mount /z /foon
+  # mount /foon/blah -o loop /a
+  # which reverse sorting cannot handle.
+  buildroot = os.path.realpath(buildroot).rstrip('/') + '/'
+  mounts = [mtab.destination for mtab in osutils.IterateMountPoints() if
+            mtab.destination.startswith(buildroot)]
+
+  # Lazy unmount tolerates mount points that are still busy.
+  for mount_pt in reversed(mounts):
+    osutils.UmountDir(mount_pt, lazy=True, cleanup=False)
+
+
+def WipeOldOutput(buildroot):
+  """Wipes out build output directory.
+
+  Args:
+    buildroot: Root directory where build occurs.
+  """
+  image_dir = os.path.join(buildroot, 'src', 'build', 'images')
+  osutils.RmDir(image_dir, ignore_missing=True, sudo=True)
+
+
+def MakeChroot(buildroot, replace, use_sdk, chrome_root=None, extra_env=None):
+  """Wrapper around make_chroot.
+
+  Args:
+    buildroot: The buildroot of the current build.
+    replace: If True, replace any existing chroot ('--replace').
+    use_sdk: If True, create the chroot from the prebuilt SDK ('--create');
+      otherwise bootstrap it ('--bootstrap').
+    chrome_root: The directory where chrome is stored.
+    extra_env: A dictionary of environment variables to set.
+  """
+  cmd = ['cros_sdk', '--buildbot-log-version']
+  cmd.append('--create' if use_sdk else '--bootstrap')
+
+  if replace:
+    cmd.append('--replace')
+
+  if chrome_root:
+    cmd.append('--chrome_root=%s' % chrome_root)
+
+  RunBuildScript(buildroot, cmd, chromite_cmd=True, extra_env=extra_env)
+
+
+def RunChrootUpgradeHooks(buildroot, chrome_root=None, extra_env=None):
+  """Run the chroot upgrade hooks in the chroot.
+
+  Args:
+    buildroot: Root directory where build occurs.
+    chrome_root: The directory where chrome is stored.
+    extra_env: A dictionary of environment variables to set.
+  """
+  chroot_args = []
+  if chrome_root:
+    chroot_args.append('--chrome_root=%s' % chrome_root)
+
+  # The version hooks must run inside the chroot they upgrade.
+  RunBuildScript(buildroot, ['./run_chroot_version_hooks'], enter_chroot=True,
+                 chroot_args=chroot_args, extra_env=extra_env)
+
+
+def RefreshPackageStatus(buildroot, boards, debug):
+  """Wrapper around refresh_package_status
+
+  Args:
+    buildroot: The buildroot of the current build.
+    boards: Boards to refresh; the 'amd64-host' pseudo-board is skipped.
+    debug: If True, upload to the test spreadsheet rather than the real one.
+  """
+  # First run check_gdata_token to validate or refresh auth token.
+  cmd = ['check_gdata_token']
+  RunBuildScript(buildroot, cmd, chromite_cmd=True)
+
+  # Prepare refresh_package_status command to update the package spreadsheet.
+  cmd = ['refresh_package_status']
+
+  # Skip the host board if present.
+  board = ':'.join([b for b in boards if b != 'amd64-host'])
+  cmd.append('--board=%s' % board)
+
+  # Upload to the test spreadsheet only when in debug mode.
+  if debug:
+    cmd.append('--test-spreadsheet')
+
+  # Actually run prepared refresh_package_status command.
+  RunBuildScript(buildroot, cmd, chromite_cmd=True, enter_chroot=True)
+
+  # Disabling the auto-filing of Tracker issues for now - crbug.com/334260.
+  #SyncPackageStatus(buildroot, debug)
+
+
+def SyncPackageStatus(buildroot, debug):
+  """Wrapper around sync_package_status.
+
+  Args:
+    buildroot: The buildroot of the current build.
+    debug: If True, run with --pretend against the test spreadsheet.
+  """
+  # Run sync_package_status to create Tracker issues for outdated
+  # packages.  At the moment, this runs only for groups that have opted in.
+  basecmd = ['sync_package_status']
+  if debug:
+    basecmd.extend(['--pretend', '--test-spreadsheet'])
+
+  cmdargslist = [['--team=build'],
+                 ['--team=kernel', '--default-owner=arscott']]
+
+  for cmdargs in cmdargslist:
+    cmd = basecmd + cmdargs
+    RunBuildScript(buildroot, cmd, chromite_cmd=True, enter_chroot=True)
+
+
+def SetSharedUserPassword(buildroot, password):
+  """Wrapper around set_shared_user_password.sh
+
+  Args:
+    buildroot: The buildroot of the current build.
+    password: Password to set; if None, the stored password file is removed
+      instead.
+  """
+  if password is not None:
+    cmd = ['./set_shared_user_password.sh', password]
+    RunBuildScript(buildroot, cmd, enter_chroot=True)
+  else:
+    passwd_file = os.path.join(buildroot, 'chroot/etc/shared_user_passwd.txt')
+    osutils.SafeUnlink(passwd_file, sudo=True)
+
+
def UpdateChroot(buildroot, usepkg, toolchain_boards=None, extra_env=None):
  """Wrapper around update_chroot.

  Args:
    buildroot: The buildroot of the current build.
    usepkg: Whether to use binary packages when setting up the toolchain.
    toolchain_boards: List of boards to always include.
    extra_env: A dictionary of environmental variables to set during generation.
  """
  cmd = ['./update_chroot']
  if not usepkg:
    cmd.append('--nousepkg')
  if toolchain_boards:
    cmd += ['--toolchain_boards', ','.join(toolchain_boards)]
  RunBuildScript(buildroot, cmd, extra_env=extra_env, enter_chroot=True)
+
+
def SetupBoard(buildroot, board, usepkg, chrome_binhost_only=False,
               extra_env=None, force=False, profile=None, chroot_upgrade=True):
  """Wrapper around setup_board.

  Args:
    buildroot: The buildroot of the current build.
    board: The board to set up.
    usepkg: Whether to use binary packages when setting up the board.
    chrome_binhost_only: If set, only use binary packages on the board for
      Chrome itself.
    extra_env: A dictionary of environmental variables to set during generation.
    force: Whether to remove the board prior to setting it up.
    profile: The profile to use with this board.
    chroot_upgrade: Whether to update the chroot. If the chroot is already up to
      date, you can specify chroot_upgrade=False.
  """
  cmd = ['./setup_board', '--board=%s' % board,
         '--accept_licenses=@CHROMEOS']

  # emerge's dependency calculation isn't fast, so let callers skip the
  # chroot upgrade when they know the system is already up to date.
  if not chroot_upgrade:
    cmd.append('--skip_chroot_upgrade')
  if profile:
    cmd.append('--profile=%s' % profile)
  if not usepkg:
    cmd += _LOCAL_BUILD_FLAGS
  if chrome_binhost_only:
    cmd.append('--chrome_binhost_only')
  if force:
    cmd.append('--force')

  RunBuildScript(buildroot, cmd, extra_env=extra_env, enter_chroot=True)
+
+
class MissingBinpkg(failures_lib.InfrastructureFailure):
  """Raised when an essential binary package (binpkg) is missing.

  Classified as an infrastructure failure since the absence of an expected
  prebuilt points at the builder setup rather than the change under test.
  """
+
+
def VerifyBinpkg(buildroot, board, pkg, packages, extra_env=None):
  """Verify that an appropriate binary package exists for |pkg|.

  Using the depgraph from |packages|, check to see if |pkg| would be pulled in
  as a binary or from source.  If |pkg| isn't installed at all, then ignore it.

  Args:
    buildroot: The buildroot of the current build.
    board: The board to set up.
    pkg: The package to look for.
    packages: The list of packages that get installed on |board|.
    extra_env: A dictionary of environmental variables to set.

  Raises:
    If the package is found and is built from source, raise MissingBinpkg.
    If the package is not found, or it is installed from a binpkg, do nothing.
  """
  emerge_cmd = ['emerge-%s' % board, '-pegNvq', '--with-bdeps=y',
                '--color=n'] + list(packages)
  result = RunBuildScript(buildroot, emerge_cmd, capture_output=True,
                          enter_chroot=True, extra_env=extra_env)
  # emerge prefixes each install line with [ebuild] or [binary]; an
  # [ebuild] entry means the package would be compiled from source.
  match = re.search(r'^\[(ebuild|binary).*%s' % re.escape(pkg),
                    result.output, re.MULTILINE)
  if match and match.group(1) == 'ebuild':
    logging.info('(output):\n%s', result.output)
    raise MissingBinpkg('Cannot find prebuilts for %s on %s' % (pkg, board))
+
+
def RunBinhostTest(buildroot, incremental=True):
  """Test prebuilts for all boards, making sure everybody gets Chrome prebuilts.

  Args:
    buildroot: The buildroot of the current build.
    incremental: If True, run the incremental compatibility test.
  """
  cmd = ['../cbuildbot/binhost_test', '--log-level=debug']
  if not incremental:
    # Non incremental tests are listed in a special test suite.
    cmd.append('NoIncremental')
  RunBuildScript(buildroot, cmd, chromite_cmd=True, enter_chroot=True)
+
+
def RunBranchUtilTest(buildroot, version):
  """Tests that branch-util works at the given manifest version.

  Args:
    buildroot: The buildroot of the current build.
    version: The manifest version to branch from.
  """
  # Use a throwaway buildroot so the test cannot disturb the real one.
  with osutils.TempDir() as tempdir:
    RunBuildScript(buildroot, [
        'cbuildbot',
        'branch-util',
        '--local',
        '--skip-remote-push',
        '--branch-name', 'test_branch',
        '--version', version,
        '--buildroot', tempdir,
        '--no-buildbot-tags',
    ], chromite_cmd=True)
+
+
def UpdateBinhostJson(buildroot):
  """Regenerate the binhost JSON file via the update_binhost_json script.

  The previous docstring was copy-pasted from RunBinhostTest and described
  the wrong behavior; this wrapper only runs ../cbuildbot/update_binhost_json.

  Args:
    buildroot: The buildroot of the current build.
  """
  cmd = ['../cbuildbot/update_binhost_json']
  RunBuildScript(buildroot, cmd, chromite_cmd=True, enter_chroot=True)
+
+
def Build(buildroot, board, build_autotest, usepkg, chrome_binhost_only,
          packages=(), skip_chroot_upgrade=True, noworkon=False,
          extra_env=None, chrome_root=None):
  """Wrapper around build_packages.

  Args:
    buildroot: The buildroot of the current build.
    board: The board to set up.
    build_autotest: Whether to build autotest-related packages.
    usepkg: Whether to use binary packages.
    chrome_binhost_only: If set, only use binary packages on the board for
      Chrome itself.
    packages: Tuple of specific packages we want to build. If empty,
      build_packages will calculate a list of packages automatically.
    skip_chroot_upgrade: Whether to skip the chroot update. If the chroot is
      not yet up to date, you should specify skip_chroot_upgrade=False.
    noworkon: If set, don't force-build workon packages.
    extra_env: A dictionary of environmental variables to set during generation.
    chrome_root: The directory where chrome is stored.
  """
  cmd = ['./build_packages', '--board=%s' % board,
         '--accept_licenses=@CHROMEOS', '--withdebugsymbols']

  # Map each boolean option onto its build_packages flag.
  if not build_autotest:
    cmd.append('--nowithautotest')
  if skip_chroot_upgrade:
    cmd.append('--skip_chroot_upgrade')
  if not usepkg:
    cmd += _LOCAL_BUILD_FLAGS
  if chrome_binhost_only:
    cmd.append('--chrome_binhost_only')
  if noworkon:
    cmd.append('--noworkon')
  cmd += list(packages)

  chroot_args = ['--chrome_root=%s' % chrome_root] if chrome_root else []
  RunBuildScript(buildroot, cmd, extra_env=extra_env, chroot_args=chroot_args,
                 enter_chroot=True)
+
+
# Firmware version strings extracted from the updater shellball; either
# field may be None when the corresponding version is unavailable.
FirmwareVersions = collections.namedtuple('FirmwareVersions', ['main', 'ec'])
+
+
def GetFirmwareVersions(buildroot, board):
  """Extract version information from the firmware updater, if one exists.

  Args:
    buildroot: The buildroot of the current build.
    board: The board the firmware is for.

  Returns:
    A FirmwareVersions namedtuple of (main fw version, ec fw version).
    Each element will either be set to the string output by the firmware
    updater shellball, or None if there is no firmware updater.
  """
  updater = os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR,
                         cros_build_lib.GetSysroot(board).lstrip(os.path.sep),
                         'usr', 'sbin', 'chromeos-firmwareupdate')
  if not os.path.isfile(updater):
    return FirmwareVersions(None, None)
  updater = path_util.ToChrootPath(updater)

  result = cros_build_lib.RunCommand([updater, '-V'], enter_chroot=True,
                                     capture_output=True, log_output=True,
                                     cwd=buildroot)
  main = re.search(r'BIOS version:\s*(?P<version>.*)', result.output)
  ec = re.search(r'EC version:\s*(?P<version>.*)', result.output)
  # Return a FirmwareVersions on this path too; previously this returned a
  # bare tuple while the missing-updater path returned the namedtuple.
  # FirmwareVersions is a tuple subclass, so callers unpacking a 2-tuple
  # are unaffected.
  return FirmwareVersions(main.group('version') if main else None,
                          ec.group('version') if ec else None)
+
+
def BuildImage(buildroot, board, images_to_build, version=None,
               rootfs_verification=True, extra_env=None, disk_layout=None):
  """Wrapper around build_image.

  Args:
    buildroot: The buildroot of the current build.
    board: The board the image is built for.
    images_to_build: List of image types to build; defaults to ['base'] when
      empty.
    version: Optional version string passed to build_image (empty if None).
    rootfs_verification: Whether rootfs verification stays enabled.
    extra_env: A dictionary of environmental variables to set during generation.
    disk_layout: Optional disk layout name to pass to build_image.
  """
  cmd = ['./build_image', '--board=%s' % board, '--replace',
         '--version=%s' % (version or '')]

  if not rootfs_verification:
    cmd.append('--noenable_rootfs_verification')
  if disk_layout:
    cmd.append('--disk_layout=%s' % disk_layout)

  # Default to the base image if no image types were requested.
  cmd += images_to_build or ['base']

  RunBuildScript(buildroot, cmd, extra_env=extra_env, enter_chroot=True)
+
+
def GenerateAuZip(buildroot, image_dir, extra_env=None):
  """Run the script which generates au-generator.zip.

  Args:
    buildroot: The buildroot of the current build.
    image_dir: The directory in which to store au-generator.zip.
    extra_env: A dictionary of environmental variables to set during generation.

  Raises:
    failures_lib.BuildScriptFailure if the called script fails.
  """
  cmd = ['./build_library/generate_au_zip.py', '-o',
         path_util.ToChrootPath(image_dir)]
  RunBuildScript(buildroot, cmd, extra_env=extra_env, enter_chroot=True)
+
+
def TestAuZip(buildroot, image_dir, extra_env=None):
  """Run the script which validates an au-generator.zip.

  Args:
    buildroot: The buildroot of the current build.
    image_dir: The directory in which to find au-generator.zip.
    extra_env: A dictionary of environmental variables to set during generation.

  Raises:
    failures_lib.BuildScriptFailure if the test script fails.
  """
  RunBuildScript(buildroot, ['./build_library/test_au_zip.py', '-o', image_dir],
                 cwd=constants.CROSUTILS_DIR, extra_env=extra_env)
+
+
def BuildVMImageForTesting(buildroot, board, extra_env=None):
  """Wrapper around image_to_vm.sh, run with the --test_image flag.

  Args:
    buildroot: The buildroot of the current build.
    board: The board the image was built for.
    extra_env: A dictionary of environmental variables to set.
  """
  RunBuildScript(buildroot,
                 ['./image_to_vm.sh', '--board=%s' % board, '--test_image'],
                 extra_env=extra_env, enter_chroot=True)
+
+
def RunTestImage(buildroot, board, image_dir, results_dir):
  """Executes test_image on the produced image in |image_dir|.

  The "test_image" script will be run as root in chroot. Running the script as
  root will allow the tests to read normally-forbidden files such as those
  owned by root. Running tests inside the chroot allows us to control
  dependencies better.

  Args:
    buildroot: The buildroot of the current build.
    board: The board the image was built for.
    image_dir: The directory in which to find the image.
    results_dir: The directory to store result files.

  Raises:
    failures_lib.BuildScriptFailure if the test script fails.
  """
  cmd = [
      'test_image',
      '--board', board,
      '--test_results_root', path_util.ToChrootPath(results_dir),
      path_util.ToChrootPath(image_dir),
  ]
  # sudo=True: root access lets the tests read normally-forbidden files.
  RunBuildScript(buildroot, cmd, enter_chroot=True, chromite_cmd=True,
                 sudo=True)
+
+
def RunSignerTests(buildroot, board):
  """Wrapper around security_test_image for |board|.

  Args:
    buildroot: The buildroot of the current build.
    board: The board whose image should be security-tested.
  """
  RunBuildScript(buildroot, ['./security_test_image', '--board=%s' % board],
                 enter_chroot=True)
+
+
def RunUnitTests(buildroot, board, blacklist=None, extra_env=None):
  """Wrapper around cros_run_unit_tests.

  Args:
    buildroot: The buildroot of the current build.
    board: The board to run unit tests for.
    blacklist: Optional list of package names to skip.
    extra_env: A dictionary of environmental variables to set.
  """
  cmd = ['cros_run_unit_tests', '--board=%s' % board]
  if blacklist:
    cmd.append('--blacklist_packages=%s' % ' '.join(blacklist))
  RunBuildScript(buildroot, cmd, chromite_cmd=True, enter_chroot=True,
                 extra_env=extra_env or {})
+
+
def RunTestSuite(buildroot, board, image_path, results_dir, test_type,
                 whitelist_chrome_crashes, archive_dir, ssh_private_key=None):
  """Runs the test harness suite."""
  results_dir_in_chroot = os.path.join(buildroot, 'chroot',
                                       results_dir.lstrip('/'))
  osutils.RmDir(results_dir_in_chroot, ignore_missing=True)

  if test_type not in constants.VALID_VM_TEST_TYPES:
    raise AssertionError('Unrecognized test type %r' % test_type)

  dut_type = 'gce' if test_type == constants.GCE_VM_TEST_TYPE else 'vm'
  cmd = ['bin/ctest',
         '--board=%s' % board,
         '--type=%s' % dut_type,
         '--no_graphics',
         '--target_image=%s' % image_path,
         '--test_results_root=%s' % results_dir_in_chroot]

  if test_type == constants.FULL_AU_TEST_TYPE:
    cmd.append('--archive_dir=%s' % archive_dir)
  elif test_type in (constants.SMOKE_SUITE_TEST_TYPE,
                     constants.GCE_VM_TEST_TYPE):
    cmd += ['--only_verify', '--suite=smoke']
  elif test_type == constants.TELEMETRY_SUITE_TEST_TYPE:
    cmd += ['--only_verify', '--suite=telemetry_unit']
  else:
    cmd.append('--quick_update')

  if whitelist_chrome_crashes:
    cmd.append('--whitelist_chrome_crashes')
  if ssh_private_key is not None:
    cmd.append('--ssh_private_key=%s' % ssh_private_key)

  cwd = os.path.join(buildroot, 'src', 'scripts')
  result = cros_build_lib.RunCommand(cmd, cwd=cwd, error_code_ok=True)
  if result.returncode:
    # Record the failing command so archived results explain themselves.
    if os.path.exists(results_dir_in_chroot):
      error = '%s exited with code %d' % (' '.join(cmd), result.returncode)
      with open(results_dir_in_chroot + '/failed_test_command', 'w') as failed:
        failed.write(error)

    raise failures_lib.TestFailure(
        '** VMTests failed with code %d **' % result.returncode)
+
+
def RunDevModeTest(buildroot, board, image_dir):
  """Runs the dev mode testing script to verify dev-mode scripts work.

  Args:
    buildroot: The buildroot of the current build.
    board: The board the image was built for.
    image_dir: The directory in which to find the test image.
  """
  crostestutils = os.path.join(buildroot, 'src', 'platform', 'crostestutils')
  test_script = os.path.join(crostestutils, 'devmode-test/devinstall_test.py')
  image_path = os.path.join(image_dir, constants.TEST_IMAGE_BIN)
  cros_build_lib.RunCommand([test_script, '--verbose', board, image_path])
+
+
def RunCrosVMTest(board, image_dir):
  """Runs cros_vm_test script to verify cros commands work.

  Args:
    board: The board the image was built for.
    image_dir: The directory in which to find the test image.
  """
  image_path = os.path.join(image_dir, constants.TEST_IMAGE_BIN)
  cros_vm_test.CrosVMTest(board, image_path).Run()
+
+
def ListFailedTests(results_path):
  """Returns a list of failed tests.

  Parse the test report logs from autotest to find failed tests.

  Args:
    results_path: Path to the directory of test results.

  Returns:
    A lists of (test_name, relative/path/to/failed/tests)
  """
  # TODO: we don't have to parse the log to find failed tests once
  # crbug.com/350520 is fixed.
  reports = []
  for path, _, filenames in os.walk(results_path):
    reports.extend([os.path.join(path, x) for x in filenames
                    if x == _TEST_REPORT_FILENAME])

  # The regexes are loop-invariant; compile them once instead of per report.
  failed_re = re.compile(r'([\./\w-]*)\s*\[\s*(\S+?)\s*\]')
  test_name_re = re.compile(r'results-[\d]+?-([\.\w_]*)')

  failed_tests = []
  # Use a set for O(1) membership tests; the old list made the dedup check
  # O(n) per failed line.
  processed_tests = set()
  for report in reports:
    logging.info('Parsing test report %s', report)
    # Format used in the report:
    #   /path/to/base/dir/test_harness/all/SimpleTestUpdateAndVerify/ \
    #     2_autotest_tests/results-01-security_OpenSSLBlacklist [  FAILED  ]
    #   /path/to/base/dir/test_harness/all/SimpleTestUpdateAndVerify/ \
    #     2_autotest_tests/results-01-security_OpenSSLBlacklist/ \
    #     security_OpenBlacklist [  FAILED  ]
    with open(report) as f:
      for line in f:
        r = failed_re.search(line)
        if r and r.group(2) == _TEST_FAILED:
          # Process only failed tests.
          file_path = r.group(1)
          match = test_name_re.search(file_path)
          if match:
            test_name = match.group(1)
          else:
            # If no match is found (due to format change or other
            # reasons), simply use the last component of file_path.
            test_name = os.path.basename(file_path)

          # A test may have subtests. We don't want to list all subtests.
          if test_name not in processed_tests:
            base_dirname = os.path.basename(results_path)
            # Get the relative path from the test_results directory. Note
            # that file_path is a chroot path, while results_path is a
            # non-chroot path, so we cannot use os.path.relpath directly.
            rel_path = file_path.split(base_dirname)[1].lstrip(os.path.sep)
            failed_tests.append((test_name, rel_path))
            processed_tests.add(test_name)

  return failed_tests
+
+
def GetTestResultsDir(buildroot, test_results_dir):
  """Returns the test results directory located in chroot.

  Args:
    buildroot: Root directory where build occurs.
    test_results_dir: Path from buildroot/chroot to find test results.
      This must a subdir of /tmp.
  """
  return os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR,
                      test_results_dir.lstrip('/'))
+
+
def ArchiveTestResults(results_path, archive_dir):
  """Archives the test results to |archive_dir|.

  Args:
    results_path: Path to test results.
    archive_dir: Local directory to archive to.
  """
  # Make everything world-readable/writable before copying it out.
  cros_build_lib.SudoRunCommand(['chmod', '-R', 'a+rw', results_path],
                                print_cmd=False)
  if os.path.exists(archive_dir):
    osutils.RmDir(archive_dir)

  def _ShouldIgnore(dirname, file_list):
    # VM disk and memory images are excluded here and archived separately
    # via ArchiveVMFiles.  Symlinks are skipped because gsutil hangs on
    # broken ones.
    ignored = []
    for name in file_list:
      if (name.startswith(constants.VM_DISK_PREFIX) or
          name.startswith(constants.VM_MEM_PREFIX) or
          os.path.islink(os.path.join(dirname, name))):
        ignored.append(name)
    return ignored

  shutil.copytree(results_path, archive_dir, symlinks=False,
                  ignore=_ShouldIgnore)
+
+
def BuildAndArchiveTestResultsTarball(src_dir, buildroot):
  """Create a compressed tarball of test results.

  Args:
    src_dir: The directory containing the test results.
    buildroot: Build root directory.

  Returns:
    The name of the tarball.
  """
  tarball = '%s.tgz' % src_dir.rstrip(os.path.sep)
  cros_build_lib.CreateTarball(
      tarball, src_dir, compression=cros_build_lib.COMP_GZIP,
      chroot=os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR))
  return os.path.basename(tarball)
+
+
def ArchiveVMFiles(buildroot, test_results_dir, archive_path):
  """Archives the VM memory and disk images into tarballs.

  There may be multiple tests (e.g. SimpleTestUpdate and
  SimpleTestUpdateAndVerify), and multiple files for each test (one
  for the VM disk, and one for the VM memory). We create a separate
  tar file for each of these files, so that each can be downloaded
  independently.

  Args:
    buildroot: Build root directory.
    test_results_dir: Path from buildroot/chroot to find test results.
      This must a subdir of /tmp.
    archive_path: Directory the tarballs should be written to.

  Returns:
    The paths to the tarballs.
  """
  images_dir = os.path.join(buildroot, 'chroot', test_results_dir.lstrip('/'))
  images = []
  for path, _, filenames in os.walk(images_dir):
    for prefix in (constants.VM_DISK_PREFIX, constants.VM_MEM_PREFIX):
      images += [os.path.join(path, name)
                 for name in fnmatch.filter(filenames, prefix + '*')]

  tar_files = []
  for image_path in images:
    rel_path = os.path.relpath(image_path, images_dir)
    tarball_path = os.path.join(archive_path,
                                "%s.tar" % rel_path.replace('/', '_'))
    # Note that tar will chdir to the image's parent directory, so that the
    # image file is at the top-level of the tar file.
    cros_build_lib.CreateTarball(tarball_path,
                                 os.path.dirname(image_path),
                                 compression=cros_build_lib.COMP_BZIP2,
                                 inputs=[os.path.basename(image_path)])
    tar_files.append(tarball_path)
  return tar_files
+
+
# SetFailureType(SuiteTimedOut, TimeoutError): a raw timeout from waiting on
# the lab is re-raised as a SuiteTimedOut failure.
@failures_lib.SetFailureType(failures_lib.SuiteTimedOut,
                             timeout_util.TimeoutError)
def RunHWTestSuite(build, suite, board, pool=None, num=None, file_bugs=None,
                   wait_for_results=None, priority=None, timeout_mins=None,
                   retry=None, max_retries=None,
                   minimum_duts=0, suite_min_duts=0,
                   offload_failures_only=None, debug=True, subsystems=None):
  """Run the test suite in the Autotest lab.

  Args:
    build: The build is described as the bot_id and the build version.
      e.g. x86-mario-release/R18-1655.0.0-a1-b1584.
    suite: Name of the Autotest suite.
    board: The board the test suite should be scheduled against.
    pool: The pool of machines we should use to run the hw tests on.
    num: Maximum number of devices to use when scheduling tests in the
         hardware test lab.
    file_bugs: File bugs on test failures for this suite run.
    wait_for_results: If True, wait for autotest results before returning.
    priority: Priority of this suite run.
    timeout_mins: Timeout in minutes for the suite job and its sub-jobs.
    retry: If True, will enable job-level retry. Only works when
           wait_for_results is True.
    max_retries: Integer, maximum job retries allowed at suite level.
                 None for no max.
    minimum_duts: The minimum number of DUTs should be available in lab for the
                  suite job to be created. If it's set to 0, the check will be
                  skipped.
    suite_min_duts: Preferred minimum duts, lab will prioritize on getting
                    such many duts even if the suite is competing with
                    a suite that has higher priority.
    offload_failures_only: Only offload failed tests to Google Storage.
    debug: Whether we are in debug mode.
    subsystems: A set of subsystems that the relevant changes affect, for
                testing purposes.

  Raises:
    failures_lib.SwarmingProxyFailure: On swarming client/proxy failures.
    failures_lib.TestWarning: When the suite passed with a warning code.
    failures_lib.TestLabFailure: On lab infrastructure issues.
    failures_lib.SuiteTimedOut: When the suite timed out before completion.
    failures_lib.BoardNotAvailable: When the board was not available in lab.
    failures_lib.TestFailure: On any other non-zero suite result.
  """
  try:
    cmd = [_RUN_SUITE_PATH]
    cmd += _GetRunSuiteArgs(build, suite, board, pool, num, file_bugs,
                            wait_for_results, priority, timeout_mins, retry,
                            max_retries, minimum_duts, suite_min_duts,
                            offload_failures_only, subsystems)
    swarming_args = _CreateSwarmingArgs(build, suite, timeout_mins)
    HWTestCreateAndWait(cmd, swarming_args, debug)
  except cros_build_lib.RunCommandError as e:
    result = e.result
    if not result.task_summary_json:
      # swarming client has failed.
      logging.error('No task summary json generated, output:%s', result.output)
      raise failures_lib.SwarmingProxyFailure(
          '** Failed to fullfill request with proxy server, code(%d) **'
          % result.returncode)
    elif result.task_summary_json['shards'][0]['internal_failure']:
      logging.error('Encountered swarming internal error:\n'
                    'stdout: \n%s\n'
                    'summary json content:\n%s',
                    result.output, str(result.task_summary_json))
      raise failures_lib.SwarmingProxyFailure(
          '** Failed to fullfill request with proxy server, code(%d) **'
          % result.returncode)
    else:
      logging.debug('swarming info: name: %s, bot_id: %s, created_ts: %s',
                    result.task_summary_json['shards'][0]['name'],
                    result.task_summary_json['shards'][0]['bot_id'],
                    result.task_summary_json['shards'][0]['created_ts'])
      # Relay the proxied run_suite output to our own stdout.
      for output in result.task_summary_json['shards'][0]['outputs']:
        sys.stdout.write(output)
      sys.stdout.flush()
      # swarming client has submitted task and returned task information.
      # Map run_suite's exit codes (relayed through swarming) to failures.
      lab_warning_codes = (2,)
      infra_error_codes = (3,)
      timeout_codes = (4,)
      board_not_available_codes = (5,)
      proxy_failure_codes = (241,)

      if result.returncode in lab_warning_codes:
        raise failures_lib.TestWarning('** Suite passed with a warning code **')
      elif (result.returncode in infra_error_codes or
            result.returncode in proxy_failure_codes):
        raise failures_lib.TestLabFailure(
            '** HWTest did not complete due to infrastructure issues '
            '(code %d) **' % result.returncode)
      elif result.returncode in timeout_codes:
        raise failures_lib.SuiteTimedOut(
            '** Suite timed out before completion **')
      elif result.returncode in board_not_available_codes:
        raise failures_lib.BoardNotAvailable(
            '** Board was not availble in the lab **')
      elif result.returncode != 0:
        raise failures_lib.TestFailure(
            '** HWTest failed (code %d) **' % result.returncode)
+
+
+# pylint: disable=docstring-missing-args
+def _GetRunSuiteArgs(build, suite, board, pool=None, num=None,
+                     file_bugs=None, wait_for_results=None,
+                     priority=None, timeout_mins=None,
+                     retry=None, max_retries=None, minimum_duts=0,
+                     suite_min_duts=0, offload_failures_only=None,
+                     subsystems=None):
+  """Get a list of args for run_suite.
+
+  Args:
+    See RunHWTestSuite.
+
+  Returns:
+    A list of args for run_suite
+  """
+  args = ['--build', build, '--board', board]
+
+  if subsystems:
+    args += ['--suite_name', 'suite_attr_wrapper']
+  else:
+    args += ['--suite_name', suite]
+
+  # Add optional arguments to command, if present.
+  if pool is not None:
+    args += ['--pool', pool]
+
+  if num is not None:
+    args += ['--num', str(num)]
+
+  if file_bugs is not None:
+    args += ['--file_bugs', str(file_bugs)]
+
+  if wait_for_results is not None:
+    args += ['--no_wait', str(not wait_for_results)]
+
+  if priority is not None:
+    args += ['--priority', priority]
+
+  if timeout_mins is not None:
+    args += ['--timeout_mins', str(timeout_mins)]
+
+  if retry is not None:
+    args += ['--retry', str(retry)]
+
+  if max_retries is not None:
+    args += ['--max_retries', str(max_retries)]
+
+  if minimum_duts != 0:
+    args += ['--minimum_duts', str(minimum_duts)]
+
+  if suite_min_duts != 0:
+    args += ['--suite_min_duts', str(suite_min_duts)]
+
+  if offload_failures_only is not None:
+    args += ['--offload_failures_only', str(offload_failures_only)]
+
+  if subsystems:
+    subsystem_attr = ['subsystem:%s' % x for x in subsystems]
+    subsystems_attr_str = ' or '.join(subsystem_attr)
+
+    if suite != 'suite_attr_wrapper':
+      if type(suite) is str:
+        suite_attr_str = 'suite:%s' % suite
+      else:
+        suite_attr_str = ' or '.join(['suite:%s' % x for x in suite])
+
+      attr_value = '(%s) and (%s)' % (suite_attr_str, subsystems_attr_str)
+    else:
+      attr_value = subsystems_attr_str
+
+    suite_args_dict = repr({'attr_filter' : attr_value})
+    args += ['--suite_args', suite_args_dict]
+
+  return args
+
+
# pylint: disable=docstring-missing-args
def _CreateSwarmingArgs(build, suite, timeout_mins=None):
  """Create args for swarming client.

  Args:
    build: Name of the build, will be part of the swarming task name.
    suite: Name of the suite, will be part of the swarming task name.
    timeout_mins: run_suite timeout mins, will be used to figure out
                  timeouts for swarming task.

  Returns:
    A dictionary of args for swarming client.
  """
  # Pad the run_suite timeout so the swarming task outlives the suite run.
  timeout_secs = ((timeout_mins or _DEFAULT_HWTEST_TIMEOUT_MINS) * 60 +
                  _SWARMING_ADDITIONAL_TIMEOUT)

  return {
      'swarming_server': topology.topology.get(
          topology.SWARMING_PROXY_HOST_KEY),
      'task_name': '-'.join([build, suite]),
      'dimension': ('os', 'Linux'),
      'print_status_updates': True,
      'timeout_secs': timeout_secs,
      'io_timeout_secs': timeout_secs,
      'hard_timeout_secs': timeout_secs,
      'expiration_secs': _SWARMING_EXPIRATION}
+
+
def HWTestCreateAndWait(cmd, swarming_args, debug=False):
  """Start and wait on HWTest suite in the lab.

  This method first runs a command to create the suite, then runs a second
  command to wait for the suite result.  Since we are using the swarming
  client, which continuously sends requests to the swarming server to poll
  task results, there is no need to retry on any network related failures.

  Args:
    cmd: Proxied run_suite command.
    swarming_args: A dictionary of args to be passed to RunSwarmingCommand.
    debug: If True, log the command rather than running it.
  """
  # Start the suite ('-c' presumably puts run_suite in create-only mode --
  # confirm against the run_suite script).
  start_cmd = list(cmd) + ['-c']

  if debug:
    logging.info('RunHWTestSuite would run: %s',
                 cros_build_lib.CmdToStr(start_cmd))
  else:
    result = swarming_lib.RunSwarmingCommand(
        start_cmd, capture_output=True, combine_stdout_stderr=True,
        **swarming_args)
    # If the command succeeds, result.task_summary_json
    # should have the right content.  Relay its output to our stdout.
    for output in result.task_summary_json['shards'][0]['outputs']:
      sys.stdout.write(output)
    sys.stdout.flush()
    # Pull the suite job id out of the creation output; if it is absent,
    # there is nothing to wait on and we return silently.
    m = re.search(r'Created suite job:.*object_id=(?P<job_id>\d*)',
                  result.output)
    if m:
      job_id = m.group('job_id')
      # Wait on the suite ('-m <job_id>' monitors the created job).
      wait_cmd = list(cmd) + ['-m', str(job_id)]
      result = swarming_lib.RunSwarmingCommand(
          wait_cmd, capture_output=True, combine_stdout_stderr=True,
          **swarming_args)
      for output in result.task_summary_json['shards'][0]['outputs']:
        sys.stdout.write(output)
      sys.stdout.flush()
+
+
def AbortHWTests(config_type_or_name, version, debug, suite=''):
  """Abort the specified hardware tests for the given bot(s).

  Args:
    config_type_or_name: Either the name of the builder (e.g. link-paladin) or
                         the config type if you want to abort all HWTests for
                         that config (e.g. config_lib.CONFIG_TYPE_FULL).
    version: The version of the current build. E.g. R18-1655.0.0-rc1
    debug: Whether we are in debug mode.
    suite: Name of the Autotest suite. If empty, abort all suites.
  """
  # Abort all jobs for the given config and version.
  # Example for a specific config: link-paladin/R35-5542.0.0-rc1
  # Example for a config type: paladin/R35-5542.0.0-rc1
  substr = '%s/%s' % (config_type_or_name, version)
  try:
    cmd = [_ABORT_SUITE_PATH, '-i', substr, '-s', suite]
    swarming_args = {
        'swarming_server': topology.topology.get(
            topology.SWARMING_PROXY_HOST_KEY),
        'task_name': '-'.join(['abort', substr, suite]),
        'dimension': ('os', 'Linux'),
        'print_status_updates': True,
        'expiration_secs': _SWARMING_EXPIRATION}
    if debug:
      logging.info('AbortHWTests would run the cmd via '
                   'swarming, cmd: %s, swarming_args: %s',
                   cros_build_lib.CmdToStr(cmd), str(swarming_args))
    else:
      swarming_lib.RunSwarmingCommand(cmd, **swarming_args)
  except cros_build_lib.RunCommandError:
    # Aborting is best-effort; log and continue rather than failing the build.
    logging.warning('AbortHWTests failed', exc_info=True)
+
+
def GenerateStackTraces(buildroot, board, test_results_dir,
                        archive_dir, got_symbols):
  """Generates stack traces for minidumps and asan logs in |test_results_dir|.

  Walks |test_results_dir|; each *.dmp file is symbolized via
  minidump_stackwalk, anything else is treated as an asan log and
  symbolized/demangled. Each processed log is written next to the original
  as <name>.txt and then copied into |archive_dir| via ArchiveFile.

  Args:
    buildroot: Root directory where build occurs.
    board: Name of the board being worked on.
    test_results_dir: Directory of the test results.
    archive_dir: Local directory for archiving.
    got_symbols: True if breakpad symbols have been generated.

  Returns:
    List of stack trace file names.
  """
  stack_trace_filenames = []
  # Flag the buildbot step failure only once, no matter how many asan logs
  # are found.
  asan_log_signaled = False

  board_path = cros_build_lib.GetSysroot(board=board)
  symbol_dir = os.path.join(board_path, 'usr', 'lib', 'debug', 'breakpad')
  for curr_dir, _subdirs, files in os.walk(test_results_dir):
    for curr_file in files:
      full_file_path = os.path.join(curr_dir, curr_file)
      processed_file_path = '%s.txt' % full_file_path

      # Distinguish whether the current file is a minidump or asan_log.
      if curr_file.endswith('.dmp'):
        # Skip crash files that were purposely generated or if
        # breakpad symbols are absent.
        if not got_symbols or curr_file.find('crasher_nobreakpad') == 0:
          continue
        # Process the minidump from within chroot.
        minidump = path_util.ToChrootPath(full_file_path)
        cwd = os.path.join(buildroot, 'src', 'scripts')
        cros_build_lib.RunCommand(
            ['minidump_stackwalk', minidump, symbol_dir], cwd=cwd,
            enter_chroot=True, error_code_ok=True, redirect_stderr=True,
            debug_level=logging.DEBUG, log_stdout_to_file=processed_file_path)
      # Process asan log.
      else:
        # Prepend '/chrome/$board' path to the stack trace in log.
        log_content = ''
        with open(full_file_path) as f:
          for line in f:
            # Stack frame line example to be matched here:
            #    #0 0x721d1831 (/opt/google/chrome/chrome+0xb837831)
            stackline_match = re.search(r'^ *#[0-9]* 0x.* \(', line)
            if stackline_match:
              frame_end = stackline_match.span()[1]
              line = line[:frame_end] + board_path + line[frame_end:]
            log_content += line
        # Symbolize and demangle it.
        raw = cros_build_lib.RunCommand(
            ['asan_symbolize.py'], input=log_content, enter_chroot=True,
            debug_level=logging.DEBUG, capture_output=True,
            extra_env={'LLVM_SYMBOLIZER_PATH' : '/usr/bin/llvm-symbolizer'})
        cros_build_lib.RunCommand(['c++filt'],
                                  input=raw.output, debug_level=logging.DEBUG,
                                  cwd=buildroot, redirect_stderr=True,
                                  log_stdout_to_file=processed_file_path)
        # Break the bot if asan_log found. This is because some asan
        # crashes may not fail any test so the bot stays green.
        # Ex: crbug.com/167497
        if not asan_log_signaled:
          asan_log_signaled = True
          logging.error('Asan crash occurred. See asan_logs in Artifacts.')
          logging.PrintBuildbotStepFailure()

      # Append the processed file to archive.
      filename = ArchiveFile(processed_file_path, archive_dir)
      stack_trace_filenames.append(filename)

  return stack_trace_filenames
+
+
@failures_lib.SetFailureType(failures_lib.BuilderFailure)
def ArchiveFile(file_to_archive, archive_dir):
  """Copies |file_to_archive| into |archive_dir|, if one is given.

  Args:
    file_to_archive: Full path to file to archive.
    archive_dir: Local directory for archiving.

  Returns:
    The base name of the archived file.
  """
  base_name = os.path.basename(file_to_archive)
  if archive_dir:
    destination = os.path.join(archive_dir, base_name)
    shutil.copy(file_to_archive, destination)
    # Make the archived copy world-readable.
    os.chmod(destination, 0o644)

  return base_name
+
+
class ChromeIsPinnedUprevError(failures_lib.InfrastructureFailure):
  """Raised when we try to uprev while chrome is pinned."""

  def __init__(self, new_chrome_atom):
    """Initialize a ChromeIsPinnedUprevError.

    Args:
      new_chrome_atom: The chrome atom that we failed to
                       uprev to, due to chrome being pinned.
    """
    # Fixed typo in the original message ('Failed up uprev').
    msg = ('Failed to uprev to chrome version %s as chrome was pinned.' %
           new_chrome_atom)
    super(ChromeIsPinnedUprevError, self).__init__(msg)
    self.new_chrome_atom = new_chrome_atom
+
+
def MarkChromeAsStable(buildroot,
                       tracking_branch,
                       chrome_rev,
                       boards,
                       chrome_version=None):
  """Returns the portage atom for the revved chrome ebuild - see man emerge.

  Runs cros_mark_chrome_as_stable, unmasks the resulting atom for each board
  when not uprevving to the latest Chrome, and sanity-checks that the atom
  can actually be emerged.

  Args:
    buildroot: Root directory where build occurs.
    tracking_branch: Branch for the revved ebuild to track (passed through
      to cros_mark_chrome_as_stable).
    chrome_rev: The type of Chrome revision requested (compared against
      constants.CHROME_REV_LATEST below).
    boards: List of boards to unmask/sanity-check the new ebuild for.
    chrome_version: Optional explicit Chrome version to force.

  Returns:
    The portage atom of the revved chrome ebuild, or None if there was
    nothing to rev.

  Raises:
    ChromeIsPinnedUprevError: If the revved atom cannot be emerged because
      Chrome is pinned to an older version.
  """
  cwd = os.path.join(buildroot, 'src', 'scripts')
  extra_env = None
  chroot_args = None

  command = ['../../chromite/bin/cros_mark_chrome_as_stable',
             '--tracking_branch=%s' % tracking_branch]
  if boards:
    command.append('--boards=%s' % ':'.join(boards))
  if chrome_version:
    command.append('--force_version=%s' % chrome_version)

  portage_atom_string = cros_build_lib.RunCommand(
      command + [chrome_rev],
      cwd=cwd,
      redirect_stdout=True,
      enter_chroot=True,
      chroot_args=chroot_args,
      extra_env=extra_env).output.rstrip()
  chrome_atom = None
  if portage_atom_string:
    # The atom is on the last output line, after an '=' separator.
    chrome_atom = portage_atom_string.splitlines()[-1].partition('=')[-1]
  if not chrome_atom:
    logging.info('Found nothing to rev.')
    return None

  for board in boards:
    # If we're using a version of Chrome other than the latest one, we need
    # to unmask it manually.
    if chrome_rev != constants.CHROME_REV_LATEST:
      # NOTE: the original code had a dead assignment of keywords_file here
      # that was immediately shadowed by the loop variable; removed.
      for keywords_file in (CHROME_KEYWORDS_FILE % {'board': board},
                            CHROME_UNMASK_FILE % {'board': board}):
        cros_build_lib.SudoRunCommand(
            ['mkdir', '-p', os.path.dirname(keywords_file)],
            enter_chroot=True, cwd=cwd)
        cros_build_lib.SudoRunCommand(
            ['tee', keywords_file], input='=%s\n' % chrome_atom,
            enter_chroot=True, cwd=cwd)

    # Sanity check: We should always be able to merge the version of
    # Chrome we just unmasked.
    try:
      cros_build_lib.RunCommand(
          ['emerge-%s' % board, '-p', '--quiet', '=%s' % chrome_atom],
          enter_chroot=True, combine_stdout_stderr=True, capture_output=True)
    except cros_build_lib.RunCommandError:
      logging.error('Cannot emerge-%s =%s\nIs Chrome pinned to an older '
                    'version?' % (board, chrome_atom))
      raise ChromeIsPinnedUprevError(chrome_atom)

  return chrome_atom
+
+
def CleanupChromeKeywordsFile(boards, buildroot):
  """Cleans chrome uprev artifact if it exists."""
  for board in boards:
    # The keywords file path is chroot-relative; prepend the host-side
    # chroot location to remove it from outside the chroot.
    chroot_relative_path = CHROME_KEYWORDS_FILE % {'board': board}
    host_path = '%s/chroot%s' % (buildroot, chroot_relative_path)
    if os.path.exists(host_path):
      cros_build_lib.SudoRunCommand(['rm', '-f', host_path])
+
+
def UprevPackages(buildroot, boards, overlays):
  """Uprevs non-browser chromium os packages that have changed."""
  cmd = [
      'cros_mark_as_stable',
      '--all',
      '--boards=%s' % ':'.join(boards),
      '--overlays=%s' % ':'.join(overlays),
      '--drop_file=%s' % (_PACKAGE_FILE % {'buildroot': buildroot}),
      'commit',
  ]
  RunBuildScript(buildroot, cmd, chromite_cmd=True)
+
+
def UprevPush(buildroot, overlays, dryrun):
  """Pushes uprev changes to the main line."""
  cmd = ['cros_mark_as_stable',
         '--srcroot=%s' % os.path.join(buildroot, 'src'),
         '--overlays=%s' % ':'.join(overlays)]
  if dryrun:
    cmd += ['--dryrun']
  cmd += ['push']
  RunBuildScript(buildroot, cmd, chromite_cmd=True)
+
+
def ExtractDependencies(buildroot, packages, board=None, useflags=None,
                        cpe_format=False, raw_cmd_result=False):
  """Extracts dependencies for |packages|.

  Args:
    buildroot: The root directory where the build occurs.
    packages: A list of packages for which to extract dependencies.
    board: Board type that was built on this machine.
    useflags: A list of useflags for this build.
    cpe_format: Set output format to CPE-only JSON; otherwise,
      output traditional deps.
    raw_cmd_result: If set True, returns the CommandResult object.
      Otherwise, returns the dependencies as a dictionary.

  Returns:
    Returns the CommandResult object if |raw_cmd_result| is set; returns
    the dependencies in a dictionary otherwise.
  """
  cmd = ['cros_extract_deps']
  if board:
    cmd += ['--board', board]
  if cpe_format:
    cmd += ['--format=cpe']
  else:
    cmd += ['--format=deps']
  cmd += packages
  env = {}
  if useflags:
    # USE flags are passed via the environment, not argv.
    env['USE'] = ' '.join(useflags)

  if raw_cmd_result:
    return RunBuildScript(
        buildroot, cmd, enter_chroot=True, chromite_cmd=True,
        capture_output=True, extra_env=env)

  # The stdout of cros_extract_deps may contain undesirable
  # output. Avoid that by instructing the script to explicitly dump
  # the deps into a file.
  # The temp file lives under <buildroot>/chroot/tmp so the in-chroot
  # script can write to it via its chroot-relative path; we then read
  # the JSON back from the host side.
  with tempfile.NamedTemporaryFile(
      dir=os.path.join(buildroot, 'chroot', 'tmp')) as f:
    cmd += ['--output-path', path_util.ToChrootPath(f.name)]
    RunBuildScript(buildroot, cmd, enter_chroot=True,
                   chromite_cmd=True, capture_output=True, extra_env=env)
    return json.loads(f.read())
+
+
def GenerateCPEExport(buildroot, board, useflags=None):
  """Generate CPE export.

  Thin wrapper over ExtractDependencies that requests CPE-format output
  for the whole OS target and keeps the raw command result.

  Args:
    buildroot: The root directory where the build occurs.
    board: Board type that was built on this machine.
    useflags: A list of useflags for this build.

  Returns:
    A CommandResult object with the results of running the CPE
    export command.
  """
  return ExtractDependencies(
      buildroot, ['virtual/target-os'],
      board=board, useflags=useflags,
      cpe_format=True, raw_cmd_result=True)
+
+
def GenerateBreakpadSymbols(buildroot, board, debug):
  """Generate breakpad symbols.

  Args:
    buildroot: The root directory where the build occurs.
    board: Board type that was built on this machine.
    debug: Include extra debugging output.
  """
  # We don't care about firmware symbols.
  # See http://crbug.com/213670.
  exclude_dirs = ['firmware']

  # Use half the available cores, but always at least one job.
  jobs = max([1, multiprocessing.cpu_count() / 2])
  cmd = ['cros_generate_breakpad_symbols',
         '--board=%s' % board,
         '--jobs=%s' % str(jobs)]
  cmd.extend('--exclude-dir=%s' % d for d in exclude_dirs)
  if debug:
    cmd.append('--debug')
  RunBuildScript(buildroot, cmd, enter_chroot=True, chromite_cmd=True)
+
+
def GenerateDebugTarball(buildroot, board, archive_path, gdb_symbols):
  """Generates a debug tarball in the archive_dir.

  Args:
    buildroot: The root directory where the build occurs.
    board: Board type that was built on this machine
    archive_path: Directory where tarball should be stored.
    gdb_symbols: Include *.debug files for debugging core files with gdb.

  Returns:
    The filename of the created debug tarball.
  """
  # This must run as root because some of the symbols are only readable
  # by root.
  chroot = os.path.join(buildroot, 'chroot')
  board_dir = os.path.join(chroot, 'build', board, 'usr', 'lib')
  debug_tgz = os.path.join(archive_path, 'debug.tgz')

  if gdb_symbols:
    # Everything under debug/ except autotest build output and tests.
    inputs = ['debug']
    extra_args = ['--exclude',
                  os.path.join('debug', constants.AUTOTEST_BUILD_PATH),
                  '--exclude', 'debug/tests']
  else:
    inputs = ['debug/breakpad']
    extra_args = None

  cros_build_lib.CreateTarball(
      debug_tgz, board_dir, sudo=True, compression=cros_build_lib.COMP_GZIP,
      chroot=chroot, inputs=inputs, extra_args=extra_args)

  # Fix permissions and ownership on debug tarball.
  cros_build_lib.SudoRunCommand(['chown', str(os.getuid()), debug_tgz])
  os.chmod(debug_tgz, 0o644)

  return os.path.basename(debug_tgz)
+
+
def GenerateHtmlIndex(index, files, url_base=None, head=None, tail=None):
  """Generate a simple index.html file given a set of filenames

  Args:
    index: The file to write the html index to.
    files: The list of files to create the index of.  If a string, then it
           may be a path to a file (with one file per line), or a directory
           (which will be listed).
    url_base: The URL to prefix to all elements (otherwise they'll be relative).
    head: All the content before the listing.  '<html><body>' if not specified.
    tail: All the content after the listing.  '</body></html>' if not specified.
  """
  def GenLink(target, name=None):
    # An explicitly empty display name suppresses the entry entirely.
    if name == '':
      return ''
    return ('<li><a href="%s%s">%s</a></li>'
            % (url_base, target, name if name else target))

  # Accept a directory (list its contents) or a file (one entry per line).
  if isinstance(files, (unicode, str)):
    if os.path.isdir(files):
      files = os.listdir(files)
    else:
      files = osutils.ReadFile(files).splitlines()
  url_base = url_base + '/' if url_base else ''

  head = head or '<html><body>'
  tail = tail or '</body></html>'

  # Each entry may be 'target|name'. The '.' and '..' entries are pulled
  # out so they always appear first, in that order.
  dot, dot_dot = ('.',), ('..',)
  links = []
  for entry in sorted(set(files)):
    fields = entry.split('|')
    if fields[0] == '.':
      dot = fields
    elif fields[0] == '..':
      dot_dot = fields
    else:
      links.append(GenLink(*fields))
  links[:0] = [GenLink(*dot), GenLink(*dot_dot)]

  html = head + '<ul>' + '\n'.join(links) + '</ul>' + tail
  osutils.WriteFile(index, html)
+
+
@failures_lib.SetFailureType(failures_lib.GSUploadFailure)
def _UploadPathToGS(local_path, upload_urls, debug, timeout, acl=None):
  """Upload |local_path| to Google Storage.

  Args:
    local_path: Local path to upload.
    upload_urls: Iterable of GS locations to upload to.
    debug: Whether we are in debug mode (uploads become dry runs).
    timeout: Timeout in seconds applied to each individual upload.
    acl: Canned gsutil acl to use.
  """
  gs_context = gs.GSContext(acl=acl, dry_run=debug)
  for upload_url in upload_urls:
    with timeout_util.Timeout(timeout):
      gs_context.CopyInto(local_path, upload_url, parallel=True,
                          recursive=True)
+
+
@failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
def UploadArchivedFile(archive_dir, upload_urls, filename, debug,
                       update_list=False, timeout=2 * 60 * 60, acl=None):
  """Uploads |filename| in |archive_dir| to Google Storage.

  Args:
    archive_dir: Path to the archive directory.
    upload_urls: Iterable of GS locations to upload to.
    debug: Whether we are in debug mode.
    filename: Name of the file to upload.
    update_list: Flag to update the list of uploaded files.
    timeout: Timeout in seconds.
    acl: Canned gsutil acl to use.
  """
  # Upload the file itself.
  _UploadPathToGS(os.path.join(archive_dir, filename), upload_urls, debug,
                  timeout, acl=acl)

  if not update_list:
    return

  # Append |filename| to the local list of uploaded files and archive
  # the list to Google Storage. As long as the |filename| string is
  # less than PIPE_BUF (> 512 bytes), the append is atomic.
  uploaded_file_path = os.path.join(archive_dir, UPLOADED_LIST_FILENAME)
  osutils.WriteFile(uploaded_file_path, filename + '\n', mode='a')
  _UploadPathToGS(uploaded_file_path, upload_urls, debug, timeout)
+
+
def UploadSymbols(buildroot, board, official, cnt, failed_list):
  """Upload debug symbols for this build."""
  cmd = ['upload_symbols', '--yes', '--board', board,
         '--root', os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR)]
  if failed_list is not None:
    cmd.extend(['--failed-list', str(failed_list)])
  if official:
    cmd.extend(['--official_build'])
  if cnt is not None:
    cmd.extend(['--upload-limit', str(cnt)])

  # We don't want to import upload_symbols directly because it uses the
  # swarming module which itself imports a _lot_ of stuff.  It has also
  # been known to hang.  We want to keep cbuildbot isolated & robust.
  result = RunBuildScript(buildroot, cmd, chromite_cmd=True,
                          error_code_ok=True)
  if result.returncode:
    # TODO(davidjames): Convert this to a fatal error.
    # See http://crbug.com/212437
    logging.PrintBuildbotStepWarnings()
+
+
def PushImages(board, archive_url, dryrun, profile, sign_types=()):
  """Push the generated image to the release bucket for signing."""
  # Log the equivalent command for debugging purposes.
  log_cmd = ['pushimage', '--board=%s' % board]
  if dryrun:
    log_cmd += ['-n']
  if profile:
    log_cmd += ['--profile=%s' % profile]
  if sign_types:
    log_cmd += ['--sign-types=%s' % ' '.join(sign_types)]
  log_cmd += [archive_url]
  logging.info('Running: %s' % cros_build_lib.CmdToStr(log_cmd))

  try:
    return pushimage.PushImage(archive_url, board, profile=profile,
                               sign_types=sign_types, dry_run=dryrun)
  except pushimage.PushError as e:
    # Surface the failure on the buildbot step, but hand back the
    # partial results carried in the exception.
    logging.PrintBuildbotStepFailure()
    return e.args[1]
+
+
def BuildFactoryInstallImage(buildroot, board, extra_env):
  """Build a factory install image.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine
    extra_env: Flags to be added to the environment for the new process.

  Returns:
    The basename of the symlink created for the image.
  """
  # build_attempt=3 ensures this image uses a different output directory
  # from our regular image and the factory test image.
  alias = _FACTORY_SHIM
  cmd = [
      './build_image',
      '--board=%s' % board,
      '--replace',
      '--symlink=%s' % alias,
      '--build_attempt=3',
      'factory_install',
  ]
  RunBuildScript(buildroot, cmd, extra_env=extra_env, capture_output=True,
                 enter_chroot=True)
  return alias
+
+
def MakeNetboot(buildroot, board, image_dir):
  """Build a netboot image.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine.
    image_dir: Directory containing factory install shim.
  """
  cmd = [
      './make_netboot.sh',
      '--board=%s' % board,
      '--image_dir=%s' % path_util.ToChrootPath(image_dir),
  ]
  RunBuildScript(buildroot, cmd, capture_output=True, enter_chroot=True)
+
+
def MakeFactoryToolkit(buildroot, board, output_dir, version=None):
  """Build a factory toolkit.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine.
    output_dir: Directory for the resulting factory toolkit.
    version: Version string to be included in ID string.
  """
  cmd = [
      './make_factory_toolkit.sh',
      '--board=%s' % board,
      '--output_dir=%s' % path_util.ToChrootPath(output_dir),
  ]
  if version is not None:
    cmd += ['--version', version]
  RunBuildScript(buildroot, cmd, capture_output=True, enter_chroot=True)
+
+
def BuildRecoveryImage(buildroot, board, image_dir, extra_env):
  """Build a recovery image.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine.
    image_dir: Directory containing base image.
    extra_env: Flags to be added to the environment for the new process.
  """
  base_image = os.path.join(image_dir, constants.BASE_IMAGE_BIN)
  # mod_image_for_recovery leaves behind some artifacts in the source
  # directory that we don't care about, so do the work in a throwaway
  # directory. It must live under |image_dir| to be chroot accessible.
  with osutils.TempDir(base_dir=image_dir) as tempdir:
    tmp_base_image = os.path.join(tempdir, constants.BASE_IMAGE_BIN)
    tmp_recovery_image = os.path.join(tempdir, constants.RECOVERY_IMAGE_BIN)

    # Copy (not symlink) the base image, since the image building scripts
    # follow symlinks by design and would touch the original.
    shutil.copyfile(base_image, tmp_base_image)
    cmd = ['./mod_image_for_recovery.sh',
           '--board=%s' % board,
           '--image=%s' % path_util.ToChrootPath(tmp_base_image)]
    RunBuildScript(buildroot, cmd, extra_env=extra_env, capture_output=True,
                   enter_chroot=True)
    shutil.move(tmp_recovery_image, image_dir)
+
+
def BuildTarball(buildroot, input_list, tarball_output, cwd=None,
                 compressed=True, **kwargs):
  """Tars and zips files and directories from input_list to tarball_output.

  Args:
    buildroot: Root directory where build occurs.
    input_list: A list of files and directories to be archived.
    tarball_output: Path of output tar archive file.
    cwd: Current working directory when tar command is executed.
    compressed: Whether or not the tarball should be compressed with pbzip2.
    **kwargs: Keyword arguments to pass to CreateTarball.

  Returns:
    Return value of cros_build_lib.CreateTarball.
  """
  if compressed:
    # Compressing requires running inside the chroot (for pbzip2).
    compressor = cros_build_lib.COMP_BZIP2
    chroot = os.path.join(buildroot, 'chroot')
  else:
    compressor = cros_build_lib.COMP_NONE
    chroot = None
  return cros_build_lib.CreateTarball(
      tarball_output, cwd, compression=compressor, chroot=chroot,
      inputs=input_list, **kwargs)
+
+
def FindFilesWithPattern(pattern, target='./', cwd=os.curdir, exclude_dirs=()):
  """Search the root directory recursively for matching filenames.

  Args:
    pattern: the pattern used to match the filenames.
    target: the target directory to search.
    cwd: current working directory.
    exclude_dirs: Directories to not include when searching.

  Returns:
    A list of paths of the matched files.
  """
  # Backup the current working directory before changing it.
  old_cwd = os.getcwd()
  os.chdir(cwd)

  try:
    matches = []
    for directory, _, filenames in os.walk(target):
      # Skip anything under an excluded directory prefix.
      if any(directory.startswith(e) for e in exclude_dirs):
        continue
      for filename in fnmatch.filter(filenames, pattern):
        matches.append(os.path.join(directory, filename))
  finally:
    # Always restore the working directory — the original version leaked
    # the chdir if os.walk or matching raised, leaving the whole process
    # in an unexpected cwd.
    os.chdir(old_cwd)

  return matches
+
def BuildAUTestTarball(buildroot, board, work_dir, version, archive_url):
  """Tar up the au test artifacts into the tarball_dir.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine.
    work_dir: Location for doing work.
    version: Basic version of the build i.e. 3289.23.0.
    archive_url: GS directory where we uploaded payloads.

  Returns:
    Path to the generated au_control.tar.bz2 tarball.
  """
  au_test_tarball = os.path.join(work_dir, 'au_control.tar.bz2')

  cwd = os.path.join(buildroot, 'src', 'third_party', 'autotest', 'files')
  control_files_subdir = os.path.join('autotest', 'au_control_files')

  autotest_dir = os.path.join(work_dir, control_files_subdir)
  os.makedirs(autotest_dir)

  # Get basic version without R*.
  basic_version = re.search(r'R[0-9]+-([0-9][\w.]+)', version).group(1)

  # Pass in the python paths to the libs full release test needs.
  env_dict = dict(
      chromite_path=buildroot,
      devserver_path=os.path.join(buildroot, 'src', 'platform', 'dev'))

  python_path = '%(chromite_path)s:%(devserver_path)s' % env_dict
  cmd = ['site_utils/autoupdate/full_release_test.py',
         '--npo', '--nmo', '--dump',
         '--dump_dir', autotest_dir, '--archive_url', archive_url,
         basic_version, board, '--log=debug']

  gs_context_dir = os.path.dirname(gs.GSContext.GetDefaultGSUtilBin())
  # Always operate on a copy. The original code aliased os.environ when
  # gsutil was already on PATH and then mutated PYTHONPATH in place,
  # leaking the change into this process's environment on every call.
  run_env = os.environ.copy()
  if gs_context_dir not in run_env['PATH']:
    run_env['PATH'] += ':%s' % gs_context_dir

  run_env.setdefault('PYTHONPATH', '')
  run_env['PYTHONPATH'] += ':%s' % python_path

  cros_build_lib.RunCommand(cmd, env=run_env, cwd=cwd)
  BuildTarball(buildroot, [control_files_subdir], au_test_tarball, cwd=work_dir)
  return au_test_tarball
+
+
def BuildAutotestControlFilesTarball(buildroot, cwd, tarball_dir):
  """Tar up the autotest control files.

  Args:
    buildroot: Root directory where build occurs.
    cwd: Current working directory.
    tarball_dir: Location for storing autotest tarball.

  Returns:
    Path of the partial autotest control files tarball.
  """
  output_tarball = os.path.join(tarball_dir, 'control_files.tar')
  # Collect the control files under autotest/, skipping the test suites
  # (those get their own tarball).
  control_files = FindFilesWithPattern('control*', target='autotest', cwd=cwd,
                                       exclude_dirs=['autotest/test_suites'])
  BuildTarball(buildroot, control_files, output_tarball, cwd=cwd,
               compressed=False)
  return output_tarball
+
+
def BuildAutotestPackagesTarball(buildroot, cwd, tarball_dir):
  """Tar up the autotest packages.

  Args:
    buildroot: Root directory where build occurs.
    cwd: Current working directory.
    tarball_dir: Location for storing autotest tarball.

  Returns:
    Path of the partial autotest packages tarball.
  """
  packages_tarball = os.path.join(tarball_dir, 'autotest_packages.tar')
  BuildTarball(buildroot, ['autotest/packages'], packages_tarball, cwd=cwd,
               compressed=False)
  return packages_tarball
+
+
def BuildAutotestTestSuitesTarball(buildroot, cwd, tarball_dir):
  """Tar up the autotest test suite control files.

  Args:
    buildroot: Root directory where build occurs.
    cwd: Current working directory.
    tarball_dir: Location for storing autotest tarball.

  Returns:
    Path of the autotest test suites tarball.
  """
  suites_tarball = os.path.join(tarball_dir, 'test_suites.tar.bz2')
  BuildTarball(buildroot, ['autotest/test_suites'], suites_tarball, cwd=cwd)
  return suites_tarball
+
+
def BuildAutotestServerPackageTarball(buildroot, cwd, tarball_dir):
  """Tar up the autotest files required by the server package.

  Args:
    buildroot: Root directory where build occurs.
    cwd: Current working directory.
    tarball_dir: Location for storing autotest tarballs.

  Returns:
    The path of the autotest server package tarball.
  """
  tarball = os.path.join(tarball_dir, 'autotest_server_package.tar.bz2')
  # Everything under autotest/ except package payloads and client tests,
  # which aren't needed server-side.
  server_files = FindFilesWithPattern(
      '*', target='autotest', cwd=cwd,
      exclude_dirs=('autotest/packages', 'autotest/client/deps/',
                    'autotest/client/tests', 'autotest/client/site_tests'))
  BuildTarball(buildroot, server_files, tarball, cwd=cwd, error_code_ok=True)
  return tarball
+
+
def BuildFullAutotestTarball(buildroot, board, tarball_dir):
  """Tar up the full autotest directory into image_dir.

  Args:
    buildroot: Root directory where build occurs.
    board: Board type that was built on this machine.
    tarball_dir: Location for storing autotest tarballs.

  Returns:
    A tuple the path of the full autotest tarball.
  """
  tarball = os.path.join(tarball_dir, 'autotest.tar.bz2')
  autotest_parent = os.path.abspath(
      os.path.join(buildroot, 'chroot', 'build', board,
                   constants.AUTOTEST_BUILD_PATH, '..'))
  result = BuildTarball(buildroot, ['autotest'], tarball, cwd=autotest_parent,
                        error_code_ok=True)

  # Emerging the autotest package to the factory test image while this is
  # running modifies the timestamp on /build/autotest/server by
  # adding a tmp directory underneath it.
  # When tar spots this, it flags this and returns
  # status code 1. The tarball is still OK, although there might be a few
  # unneeded (and garbled) tmp files. If tar fails in a different way, it'll
  # return an error code other than 1.
  # TODO: Fix the autotest ebuild. See http://crbug.com/237537
  if result.returncode not in (0, 1):
    raise Exception('Autotest tarball creation failed with exit code %s'
                    % (result.returncode))

  return tarball
+
+
def BuildImageZip(archive_dir, image_dir):
  """Build image.zip in archive_dir from contents of image_dir.

  Exclude the dev image from the zipfile.

  Args:
    archive_dir: Directory to store image.zip.
    image_dir: Directory to zip up.

  Returns:
    The basename of the zipfile.
  """
  filename = 'image.zip'
  zip_path = os.path.join(archive_dir, filename)
  cros_build_lib.RunCommand(['zip', zip_path, '-r', '.'], cwd=image_dir,
                            capture_output=True)
  return filename
+
+
def BuildStandaloneArchive(archive_dir, image_dir, artifact_info):
  """Create a compressed archive from the specified image information.

  The artifact info is derived from a JSON file in the board overlay. It
  should be in the following format:
  {
  "artifacts": [
    { artifact },
    { artifact },
    ...
  ]
  }
  Each artifact can contain the following keys:
  input - Required. A list of paths and globs that expands to
      the list of files to archive.
  output - the name of the archive to be created. If omitted,
      it will default to the first filename, stripped of
      extensions, plus the appropriate .tar.gz or other suffix.
  archive - "tar" or "zip". If omitted, files will be uploaded
      directly, without being archived together.
  compress - a value cros_build_lib.CompressionStrToType knows about. Only
      useful for tar. If omitted, an uncompressed tar will be created.

  Args:
    archive_dir: Directory to store image zip.
    image_dir: Base path for all inputs.
    artifact_info: Extended archive configuration dictionary containing:
      - paths - required, list of files to archive.
      - output, archive & compress entries from the JSON file.

  Returns:
    A list of base names: the single archive created, or, when no
    'archive' key is present, the input paths unchanged.

  Raises:
    A ValueError if the compression or archive values are unknown.
    A KeyError if a required field is missing from artifact_info.
  """
  if 'archive' not in artifact_info:
    # Nothing to do, just return the list as-is.
    return artifact_info['paths']

  inputs = artifact_info['paths']
  archive = artifact_info['archive']
  compress = artifact_info.get('compress')
  compress_type = cros_build_lib.CompressionStrToType(compress)
  if compress_type is None:
    raise ValueError('unknown compression type: %s' % compress)

  # If the output is fixed, use that. Otherwise, construct it
  # from the name of the first archived file, stripping extensions.
  filename = artifact_info.get(
      'output', '%s.%s' % (os.path.splitext(inputs[0])[0], archive))
  if archive == 'tar':
    # Add the .compress extension if we don't have a fixed name.
    if 'output' not in artifact_info and compress:
      filename = "%s.%s" % (filename, compress)
    # Keep xz fast: favor speed over compression ratio.
    extra_env = {'XZ_OPT': '-1'}
    cros_build_lib.CreateTarball(
        os.path.join(archive_dir, filename), image_dir,
        inputs=inputs, compression=compress_type, extra_env=extra_env)
  elif archive == 'zip':
    cros_build_lib.RunCommand(
        ['zip', os.path.join(archive_dir, filename), '-r'] + inputs,
        cwd=image_dir, capture_output=True)
  else:
    raise ValueError('unknown archive type: %s' % archive)

  return [filename]
+
+
def BuildStrippedPackagesTarball(buildroot, board, package_globs, archive_dir):
  """Builds a tarball containing stripped packages.

  Args:
    buildroot: Root directory where build occurs.
    board: The board for which packages should be tarred up.
    package_globs: List of package search patterns. Each pattern is used to
        search for packages via `equery list`.
    archive_dir: The directory to drop the tarball in.

  Returns:
    The file name of the output tarball, None if no package found.
  """
  chroot_path = os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR)
  stripped_pkg_dir = os.path.join(chroot_path, 'build', board,
                                  'stripped-packages')
  tarball_paths = []
  for pattern in package_globs:
    for cpv in portage_util.FindPackageNameMatches(pattern, board):
      pkg = '%s/%s' % (cpv.category, cpv.pv)
      cros_build_lib.RunCommand(['strip_package', '--board', board, pkg],
                                cwd=buildroot, enter_chroot=True)
      # Find the stripped package.
      files = glob.glob(os.path.join(stripped_pkg_dir, pkg) + '.*')
      if not files:
        raise AssertionError('Silent failure to strip binary %s? '
                             'Failed to find stripped files at %s.' %
                             (pkg, os.path.join(stripped_pkg_dir, pkg)))
      if len(files) > 1:
        logging.PrintBuildbotStepWarnings()
        logging.warning('Expected one stripped package for %s, found %d',
                        pkg, len(files))
      # Pick the lexicographically last match (e.g. highest version suffix).
      tarball_paths.append(os.path.abspath(sorted(files)[-1]))

  if not tarball_paths:
    # tar barfs on an empty list of files, so skip tarring completely.
    return None

  tarball_output = os.path.join(archive_dir, 'stripped-packages.tar')
  BuildTarball(buildroot, tarball_paths, tarball_output, compressed=False)
  return os.path.basename(tarball_output)
+
+
def BuildGceTarball(archive_dir, image_dir, image):
  """Builds a tarball that can be converted into a GCE image.

  GCE has some very specific requirements about the format of VM
  images. The full list can be found at
  https://cloud.google.com/compute/docs/tutorials/building-images#requirements

  Args:
    archive_dir: Directory to store the output tarball.
    image_dir: Directory where raw disk file can be found.
    image: Name of raw disk file.

  Returns:
    The file name of the output tarball.
  """
  output_file = os.path.join(archive_dir, constants.ImageBinToGceTar(image))
  with osutils.TempDir() as staging:
    # The image inside the tarball must be named 'disk.raw'.  Symlink it
    # into the staging dir and have tar follow the link (--dereference)
    # instead of copying the (potentially huge) file.
    os.symlink(os.path.join(image_dir, image),
               os.path.join(staging, 'disk.raw'))
    cros_build_lib.CreateTarball(
        output_file, staging, inputs=['disk.raw'],
        compression=cros_build_lib.COMP_GZIP, extra_args=['--dereference'])
  return os.path.basename(output_file)
+
+
def BuildFirmwareArchive(buildroot, board, archive_dir):
  """Build firmware_from_source.tar.bz2 in archive_dir from build root.

  Args:
    buildroot: Root directory where build occurs.
    board: Board name of build target.
    archive_dir: Directory to store output file.

  Returns:
    The basename of the archived file, or None if the target board does
    not have firmware from source.
  """
  firmware_root = os.path.join(buildroot, 'chroot', 'build', board, 'firmware')
  # Gather the firmware root's top-level entries, relative to the root so
  # the tarball contents are not prefixed with host paths.
  sources = []
  for path in glob.iglob(os.path.join(firmware_root, '*')):
    sources.append(os.path.relpath(path, firmware_root))
  if not sources:
    return None

  archive_name = 'firmware_from_source.tar.bz2'
  BuildTarball(buildroot, sources, os.path.join(archive_dir, archive_name),
               cwd=firmware_root)
  return archive_name
+
+
def BuildFactoryZip(buildroot, board, archive_dir, factory_shim_dir,
                    factory_toolkit_dir, version=None):
  """Build factory_image.zip in archive_dir.

  Args:
    buildroot: Root directory where build occurs.
    board: Board name of build target.
    archive_dir: Directory to store factory_image.zip.
    factory_shim_dir: Directory containing factory shim.
    factory_toolkit_dir: Directory containing factory toolkit.
    version: The version string to be included in the factory image.zip.

  Returns:
    The basename of the zipfile.
  """
  filename = 'factory_image.zip'

  # Creates a staging temporary folder.
  temp_dir = tempfile.mkdtemp(prefix='cbuildbot_factory')

  zipfile = os.path.join(archive_dir, filename)
  # 'zip -r <zipfile> .' recurses from temp_dir; the '--include' patterns
  # accumulated below restrict which entries actually land in the archive.
  cmd = ['zip', '-r', zipfile, '.']

  # Rules for archive: { folder: pattern }
  rules = {
      factory_shim_dir:
          ['*factory_install*.bin', '*partition*',
           os.path.join('netboot', '*')],
      factory_toolkit_dir:
          ['*factory_image*.bin', '*partition*', 'install_factory_toolkit.run'],
  }

  # Symlink each source folder into the staging dir; zip walks through the
  # links, filtered by the per-folder patterns.  Missing folders are skipped.
  for folder, patterns in rules.items():
    if not folder or not os.path.exists(folder):
      continue
    basename = os.path.basename(folder)
    target = os.path.join(temp_dir, basename)
    os.symlink(folder, target)
    for pattern in patterns:
      cmd.extend(['--include', os.path.join(basename, pattern)])

  # Everything in /usr/local/factory/bundle gets overlaid into the
  # bundle.
  bundle_src_dir = os.path.join(
      buildroot, 'chroot', 'build', board, 'usr', 'local', 'factory', 'bundle')
  if os.path.exists(bundle_src_dir):
    for f in os.listdir(bundle_src_dir):
      src_path = os.path.join(bundle_src_dir, f)
      os.symlink(src_path, os.path.join(temp_dir, f))
      # Files are included by name; directories by a '<dir>/*' glob.
      cmd.extend(['--include',
                  f if os.path.isfile(src_path) else
                  os.path.join(f, '*')])

  # Add a version file in the zip file.
  if version is not None:
    version_file = os.path.join(temp_dir, 'BUILD_VERSION')
    osutils.WriteFile(version_file, version)
    # NOTE(review): this include pattern is an absolute host path while zip
    # stores entries relative to temp_dir -- confirm BUILD_VERSION actually
    # ends up in the archive.
    cmd.extend(['--include', version_file])

  cros_build_lib.RunCommand(cmd, cwd=temp_dir, capture_output=True)
  osutils.RmDir(temp_dir)
  return filename
+
+
def ArchiveHWQual(buildroot, hwqual_name, archive_dir, image_dir):
  """Create a hwqual tarball in archive_dir.

  Args:
    buildroot: Root directory where build occurs.
    hwqual_name: Name for tarball.
    archive_dir: Local directory for hwqual tarball.
    image_dir: Directory containing test image.

  Returns:
    The filename of the produced tarball, '<hwqual_name>.tar.bz2'.
  """
  archive_script = os.path.join(buildroot, 'src', 'scripts', 'archive_hwqual')
  cros_build_lib.RunCommand(
      [archive_script,
       '--from', archive_dir,
       '--image_dir', image_dir,
       '--ssh_private_key', os.path.join(image_dir,
                                         constants.TEST_KEY_PRIVATE),
       '--output_tag', hwqual_name],
      capture_output=True)
  return '%s.tar.bz2' % hwqual_name
+
+
def CreateTestRoot(build_root):
  """Returns a temporary directory for test results in chroot.

  Args:
    build_root: Root of the checkout that contains the 'chroot' directory.

  Returns:
    The path inside the chroot rather than whole path.
  """
  chroot = os.path.join(build_root, 'chroot')
  # Create the results directory under the chroot's /tmp.
  results_dir = tempfile.mkdtemp(prefix='cbuildbot',
                                 dir=os.path.join(chroot, 'tmp'))
  # Re-root the path so it is valid when seen from inside the chroot.
  return os.path.sep + os.path.relpath(results_dir, start=chroot)
+
+
def GeneratePayloads(build_root, target_image_path, archive_dir, full=False,
                     delta=False, stateful=False):
  """Generates the payloads for hw testing.

  Args:
    build_root: The root of the chromium os checkout.
    target_image_path: The path to the image to generate payloads to.
    archive_dir: Where to store payloads we generated.
    full: Generate full payloads.
    delta: Generate delta payloads.
    stateful: Generate stateful payload.
  """
  real_target = os.path.realpath(target_image_path)
  # The path to the target should look something like this:
  # .../link/R37-5952.0.2014_06_12_2302-a1/chromiumos_test_image.bin
  board, os_version = real_target.split('/')[-3:-1]
  prefix = 'chromeos'
  suffix = 'dev.bin'

  cwd = os.path.join(build_root, 'src', 'scripts')
  path = path_util.ToChrootPath(
      os.path.join(build_root, 'src', 'platform', 'dev', 'host'))
  chroot_dir = os.path.join(build_root, 'chroot')
  chroot_tmp = os.path.join(chroot_dir, 'tmp')
  chroot_target = path_util.ToChrootPath(target_image_path)

  with osutils.TempDir(base_dir=chroot_tmp,
                       prefix='generate_payloads') as temp_dir:
    # Strip the chroot prefix so the same directory is addressable from
    # commands running inside the chroot.
    chroot_temp_dir = temp_dir.replace(chroot_dir, '', 1)

    cmd = [
        os.path.join(path, 'cros_generate_update_payload'),
        '--image', chroot_target,
        '--output', os.path.join(chroot_temp_dir, 'update.gz')
    ]
    if full:
      cros_build_lib.RunCommand(cmd, enter_chroot=True, cwd=cwd)
      name = '_'.join([prefix, os_version, board, 'full', suffix])
      # Names for full payloads look something like this:
      # chromeos_R37-5952.0.2014_06_12_2302-a1_link_full_dev.bin
      shutil.move(os.path.join(temp_dir, 'update.gz'),
                  os.path.join(archive_dir, name))

    # The target image serves as its own source, producing a same-version
    # delta payload (hence os_version appearing twice in the name below).
    cmd.extend(['--src_image', chroot_target])
    if delta:
      cros_build_lib.RunCommand(cmd, enter_chroot=True, cwd=cwd)
      # Names for delta payloads look something like this:
      # chromeos_R37-5952.0.2014_06_12_2302-a1_R37-
      # 5952.0.2014_06_12_2302-a1_link_delta_dev.bin
      name = '_'.join([prefix, os_version, os_version, board, 'delta', suffix])
      shutil.move(os.path.join(temp_dir, 'update.gz'),
                  os.path.join(archive_dir, name))

    if stateful:
      cmd = [
          os.path.join(path, 'cros_generate_stateful_update_payload'),
          '--image', chroot_target,
          '--output', chroot_temp_dir
      ]
      cros_build_lib.RunCommand(cmd, enter_chroot=True, cwd=cwd)
      # STATEFUL_FILE is a module-level constant naming the stateful
      # payload produced by the script above.
      shutil.move(os.path.join(temp_dir, STATEFUL_FILE),
                  os.path.join(archive_dir, STATEFUL_FILE))
+
+
def GetChromeLKGM(revision):
  """Returns the ChromeOS LKGM from Chrome given the git revision.

  Args:
    revision: A Chrome git revision or ref; falls back to the master
        branch when empty/None.

  Returns:
    The LKGM version string with surrounding whitespace stripped.
  """
  url_path = '%s/+/%s/%s?format=text' % (
      constants.CHROMIUM_SRC_PROJECT,
      revision or 'refs/heads/master',
      constants.PATH_TO_CHROME_LKGM)
  # Gitiles serves file contents base64-encoded when format=text.
  response = gob_util.FetchUrl(site_config.params.EXTERNAL_GOB_HOST, url_path)
  return base64.b64decode(response.read()).strip()
+
+
def SyncChrome(build_root, chrome_root, useflags, tag=None, revision=None):
  """Sync chrome.

  Args:
    build_root: The root of the chromium os checkout.
    chrome_root: The directory where chrome is stored.
    useflags: Array of use flags.
    tag: If supplied, the Chrome tag to sync.
    revision: If supplied, the Chrome revision to sync.
  """
  # --reset tells sync_chrome to blow away local changes and to feel
  # free to delete any directories that get in the way of syncing. This
  # is needed for unattended operation.
  cmd = [os.path.join(build_root, 'chromite', 'bin', 'sync_chrome'),
         '--reset']
  if constants.USE_CHROME_INTERNAL in useflags:
    cmd.append('--internal')
  if tag is not None:
    cmd += ['--tag', tag]
  if revision is not None:
    cmd += ['--revision', revision]
  cmd.append(chrome_root)
  # Syncing is flaky enough over the network that we retry.
  retry_util.RunCommandWithRetries(constants.SYNC_RETRIES, cmd, cwd=build_root)
+
+
def PatchChrome(chrome_root, patch, subdir):
  """Apply a patch to Chrome.

  Args:
    chrome_root: The directory where chrome is stored.
    patch: Rietveld issue number to apply.
    subdir: Subdirectory to apply patch in.
  """
  work_dir = os.path.join(chrome_root, subdir)
  cros_build_lib.RunCommand(['apply_issue', '-i', patch], cwd=work_dir)
+
+
class ChromeSDK(object):
  """Wrapper for the 'cros chrome-sdk' command."""

  DEFAULT_JOBS = 24
  DEFAULT_JOBS_GOMA = 500

  def __init__(self, cwd, board, extra_args=None, chrome_src=None, goma=False,
               debug_log=True, cache_dir=None, target_tc=None,
               toolchain_url=None):
    """Initialization.

    Args:
      cwd: Where to invoke 'cros chrome-sdk'.
      board: The board to run chrome-sdk for.
      extra_args: Extra args to pass in on the command line.
      chrome_src: Path to pass in with --chrome-src.
      goma: If True, run using goma.
      debug_log: If set, run with debug log-level.
      cache_dir: Specify non-default cache directory.
      target_tc: Override target toolchain.
      toolchain_url: Override toolchain url pattern.
    """
    self.cwd = cwd
    self.board = board
    self.extra_args = extra_args or []
    if chrome_src:
      self.extra_args += ['--chrome-src', chrome_src]
    self.goma = goma
    if not self.goma:
      self.extra_args.append('--nogoma')
    self.debug_log = debug_log
    self.cache_dir = cache_dir
    self.target_tc = target_tc
    self.toolchain_url = toolchain_url

  def _GetDefaultTargets(self):
    """Get the default chrome targets to build."""
    targets = ['chrome', 'chrome_sandbox']

    use_flags = portage_util.GetInstalledPackageUseFlags(constants.CHROME_CP,
                                                         self.board)
    # Also build nacl_helper when chromeos-chrome was built with USE=nacl.
    if 'nacl' in use_flags.get(constants.CHROME_CP, []):
      targets += ['nacl_helper']

    return targets

  def Run(self, cmd, extra_args=None):
    """Run a command inside the chrome-sdk context.

    Args:
      cmd: The command (argv list) to run after the '--' separator.
      extra_args: Additional one-off args for 'cros chrome-sdk'.
    """
    cros_cmd = ['cros']
    if self.debug_log:
      cros_cmd += ['--log-level', 'debug']
    if self.cache_dir:
      cros_cmd += ['--cache-dir', self.cache_dir]
    # Collect the toolchain overrides into a per-call local list.  The
    # previous code appended them to self.extra_args, so calling Run()
    # more than once kept duplicating --target-tc/--toolchain-url flags.
    sdk_args = list(self.extra_args)
    if self.target_tc:
      sdk_args += ['--target-tc', self.target_tc]
    if self.toolchain_url:
      sdk_args += ['--toolchain-url', self.toolchain_url]
    cros_cmd += ['chrome-sdk', '--board', self.board] + sdk_args
    cros_cmd += (extra_args or []) + ['--'] + cmd
    cros_build_lib.RunCommand(cros_cmd, cwd=self.cwd)

  def Ninja(self, jobs=None, debug=False, targets=None):
    """Run 'ninja' inside a chrome-sdk context.

    Args:
      jobs: The number of -j jobs to run.
      debug: Whether to do a Debug build (defaults to Release).
      targets: The targets to compile.
    """
    if jobs is None:
      jobs = self.DEFAULT_JOBS_GOMA if self.goma else self.DEFAULT_JOBS
    if targets is None:
      targets = self._GetDefaultTargets()
    flavor = 'Debug' if debug else 'Release'
    cmd = ['ninja', '-C', 'out_%s/%s' % (self.board, flavor), '-j', str(jobs)]
    self.Run(cmd + list(targets))
diff --git a/cbuildbot/commands_unittest b/cbuildbot/commands_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/commands_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/commands_unittest.py b/cbuildbot/commands_unittest.py
new file mode 100644
index 0000000..da89b7b
--- /dev/null
+++ b/cbuildbot/commands_unittest.py
@@ -0,0 +1,1022 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for commands."""
+
+from __future__ import print_function
+
+import base64
+import mock
+import os
+from StringIO import StringIO
+from os.path import join as pathjoin
+from os.path import abspath as abspath
+
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import swarming_lib
+from chromite.cbuildbot import topology
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import gob_util
+from chromite.lib import osutils
+from chromite.lib import partial_mock
+from chromite.lib import path_util
+from chromite.lib import portage_util
+from chromite.scripts import pushimage
+
+
+site_config = config_lib.GetConfig()
+
+
class RunBuildScriptTest(cros_test_lib.TempDirTestCase):
  """Test RunBuildScript in a variety of cases."""

  def _assertRunBuildScript(self, in_chroot=False, error=None, raises=None,
                            **kwargs):
    """Test the RunBuildScript function.

    Args:
      in_chroot: Whether to enter the chroot or not.
      error: error result message to simulate.
      raises: If the command should fail, the exception to be raised.
      kwargs: Extra kwargs passed to RunBuildScript.
    """
    # Write specified error message to status file.
    def WriteError(_cmd, extra_env=None, **_kwargs):
      if extra_env is not None and error is not None:
        status_file = extra_env[constants.PARALLEL_EMERGE_STATUS_FILE_ENVVAR]
        osutils.WriteFile(status_file, error)

    buildroot = self.tempdir
    osutils.SafeMakedirs(os.path.join(buildroot, '.repo'))
    # A chroot tmp dir is only needed when a status file will be written.
    if error is not None:
      osutils.SafeMakedirs(os.path.join(buildroot, 'chroot', 'tmp'))

    # Run the command, throwing an exception if it fails.
    with cros_build_lib_unittest.RunCommandMock() as m:
      cmd = ['example', 'command']
      sudo_cmd = ['sudo', '--'] + cmd
      returncode = 1 if raises else 0
      # Register both the plain and sudo-wrapped variants so sudo=True
      # invocations also hit the mock.
      m.AddCmdResult(cmd, returncode=returncode, side_effect=WriteError)
      m.AddCmdResult(sudo_cmd, returncode=returncode, side_effect=WriteError)
      with mock.patch.object(path_util, 'ToChrootPath',
                             side_effect=lambda x: x):
        with cros_test_lib.LoggingCapturer():
          # If the script failed, the exception should be raised and printed.
          if raises:
            self.assertRaises(raises, commands.RunBuildScript, buildroot,
                              cmd, enter_chroot=in_chroot, **kwargs)
          else:
            commands.RunBuildScript(buildroot, cmd, enter_chroot=in_chroot,
                                    **kwargs)

  def testSuccessOutsideChroot(self):
    """Test executing a command outside the chroot."""
    self._assertRunBuildScript()

  def testSuccessInsideChrootWithoutTempdir(self):
    """Test executing a command inside a chroot without a tmp dir."""
    self._assertRunBuildScript(in_chroot=True)

  def testSuccessInsideChrootWithTempdir(self):
    """Test executing a command inside a chroot with a tmp dir."""
    self._assertRunBuildScript(in_chroot=True, error='')

  def testFailureOutsideChroot(self):
    """Test a command failure outside the chroot."""
    self._assertRunBuildScript(raises=failures_lib.BuildScriptFailure)

  def testFailureInsideChrootWithoutTempdir(self):
    """Test a command failure inside the chroot without a temp directory."""
    self._assertRunBuildScript(in_chroot=True,
                               raises=failures_lib.BuildScriptFailure)

  def testFailureInsideChrootWithTempdir(self):
    """Test a command failure inside the chroot with a temp directory."""
    self._assertRunBuildScript(in_chroot=True, error='',
                               raises=failures_lib.BuildScriptFailure)

  def testPackageBuildFailure(self):
    """Test detecting a package build failure."""
    self._assertRunBuildScript(in_chroot=True, error=constants.CHROME_CP,
                               raises=failures_lib.PackageBuildFailure)

  def testSuccessWithSudo(self):
    """Test a command run with sudo."""
    self._assertRunBuildScript(in_chroot=False, sudo=True)
    self._assertRunBuildScript(in_chroot=True, sudo=True)
+
+
class RunTestSuiteTest(cros_build_lib_unittest.RunCommandTempDirTestCase):
  """Test RunTestSuite functionality."""

  TEST_BOARD = 'x86-generic'
  BUILD_ROOT = '/fake/root'

  def _RunTestSuite(self, test_type):
    """Invoke commands.RunTestSuite with fixed arguments.

    Args:
      test_type: The test type constant to exercise; each test below checks
          which command-line flags it maps to.
    """
    commands.RunTestSuite(self.BUILD_ROOT, self.TEST_BOARD, self.tempdir,
                          '/tmp/taco', archive_dir='/fake/root',
                          whitelist_chrome_crashes=False,
                          test_type=test_type)

  def testFull(self):
    """Test running FULL config."""
    self._RunTestSuite(constants.FULL_AU_TEST_TYPE)
    self.assertCommandContains(['--quick'], expected=False)
    self.assertCommandContains(['--only_verify'], expected=False)

  def testSimple(self):
    """Test SIMPLE config."""
    self._RunTestSuite(constants.SIMPLE_AU_TEST_TYPE)
    self.assertCommandContains(['--quick_update'])

  def testSmoke(self):
    """Test SMOKE config."""
    self._RunTestSuite(constants.SMOKE_SUITE_TEST_TYPE)
    self.assertCommandContains(['--only_verify'])

  def testGceVmTestType(self):
    """Test GCE_VM_TEST_TYPE."""
    self._RunTestSuite(constants.GCE_VM_TEST_TYPE)
    self.assertCommandContains(['--only_verify'])
    self.assertCommandContains(['--type=gce'])
    self.assertCommandContains(['--suite=smoke'])
+
+
class ChromeSDKTest(cros_build_lib_unittest.RunCommandTempDirTestCase):
  """Basic tests for ChromeSDK commands with RunCommand mocked out."""
  BOARD = 'daisy_foo'
  EXTRA_ARGS = ('--monkey', 'banana')
  EXTRA_ARGS2 = ('--donkey', 'kong')
  CHROME_SRC = 'chrome_src'
  CMD = ['bar', 'baz']
  CWD = 'fooey'

  def setUp(self):
    self.inst = commands.ChromeSDK(self.CWD, self.BOARD)

  def testRunCommand(self):
    """Test that running a command is possible."""
    self.inst.Run(self.CMD)
    self.assertCommandContains([self.BOARD] + self.CMD, cwd=self.CWD)

  def testRunCommandKwargs(self):
    """Exercise optional arguments."""
    custom_inst = commands.ChromeSDK(
        self.CWD, self.BOARD, extra_args=list(self.EXTRA_ARGS),
        chrome_src=self.CHROME_SRC, debug_log=True)
    custom_inst.Run(self.CMD, list(self.EXTRA_ARGS2))
    self.assertCommandContains(['debug', self.BOARD] + list(self.EXTRA_ARGS) +
                               list(self.EXTRA_ARGS2) + self.CMD, cwd=self.CWD)

  def testNinjaWithNaclUseFlag(self):
    """Test that running ninja is possible.

    Verify that nacl_helper is built when the 'nacl' USE flag is specified
    for chromeos-base/chromeos-chrome.
    """
    self.rc.AddCmdResult(partial_mock.In('qlist-%s' % self.BOARD),
                         output='%s ninja nacl gold' % constants.CHROME_CP)
    # Ninja()'s first positional parameter is `jobs`, not a board name.
    # Passing self.BOARD here used to inject '-j daisy_foo' into the
    # command, which also made the [self.BOARD] assertion below pass for
    # the wrong reason.
    self.inst.Ninja()
    self.assertCommandContains([self.BOARD], cwd=self.CWD)
    self.assertCommandContains(['nacl_helper'])

  def testNinjaWithoutNaclUseFlag(self):
    """Test that running ninja is possible.

    Verify that nacl_helper is not built when no 'nacl' USE flag is specified
    for chromeos-base/chromeos-chrome.
    """
    self.rc.AddCmdResult(partial_mock.In('qlist-%s' % self.BOARD),
                         output='%s' % constants.CHROME_CP)
    # See testNinjaWithNaclUseFlag: no positional args, so the default
    # job count is used.
    self.inst.Ninja()
    self.assertCommandContains([self.BOARD], cwd=self.CWD)
    self.assertCommandContains(['nacl_helper'], expected=False)
+
+
+class HWLabCommandsTest(cros_build_lib_unittest.RunCommandTestCase,
+                        cros_test_lib.OutputTestCase,
+                        cros_test_lib.MockTempDirTestCase):
+  """Test commands related to HWLab tests that are runing via swarming proxy."""
+
+  # pylint: disable=protected-access
+  JOB_ID_OUTPUT = '''
+Autotest instance: cautotest
+02-23-2015 [06:26:51] Submitted create_suite_job rpc
+02-23-2015 [06:26:53] Created suite job: http://cautotest.corp.google.com/afe/#tab_id=view_job&object_id=26960110
+@@@STEP_LINK@Suite created@http://cautotest.corp.google.com/afe/#tab_id=view_job&object_id=26960110@@@
+'''
+  WAIT_OUTPUT = '''
+The suite job has another 3:09:50.012887 till timeout.
+The suite job has another 2:39:39.789250 till timeout.
+'''
+  SWARMING_TIMEOUT_DEFAULT = str(
+      commands._DEFAULT_HWTEST_TIMEOUT_MINS * 60 +
+      commands._SWARMING_ADDITIONAL_TIMEOUT)
+  SWARMING_EXPIRATION = str(commands._SWARMING_EXPIRATION)
+
+
+  def setUp(self):
+    self._build = 'test-build'
+    self._board = 'test-board'
+    self._suite = 'test-suite'
+    self._pool = 'test-pool'
+    self._num = 42
+    self._file_bugs = True
+    self._wait_for_results = False
+    self._priority = 'test-priority'
+    self._timeout_mins = 23
+    self._retry = False
+    self._max_retries = 3
+    self._minimum_duts = 2
+    self._suite_min_duts = 2
+    self.create_cmd = None
+    self.wait_cmd = None
+    self.temp_json_path = os.path.join(self.tempdir, 'temp_summary.json')
+    topology.FetchTopologyFromCIDB(None)
+
+  def RunHWTestSuite(self, *args, **kwargs):
+    """Run the hardware test suite, printing logs to stdout."""
+    kwargs.setdefault('debug', False)
+    with cros_test_lib.LoggingCapturer() as logs:
+      try:
+        commands.RunHWTestSuite(self._build, self._suite, self._board,
+                                *args, **kwargs)
+      finally:
+        print(logs.messages)
+
+  def SetCmdResults(self, create_return_code=0, wait_return_code=0, args=(),
+                    swarming_timeout_secs=SWARMING_TIMEOUT_DEFAULT,
+                    swarming_io_timeout_secs=SWARMING_TIMEOUT_DEFAULT,
+                    swarming_hard_timeout_secs=SWARMING_TIMEOUT_DEFAULT,
+                    swarming_expiration_secs=SWARMING_EXPIRATION):
+    """Set the expected results from the specified commands.
+
+    Args:
+      create_return_code: Return code from create command.
+      wait_return_code: Return code from wait command.
+      args: Additional args to pass to create and wait commands.
+      swarming_timeout_secs: swarming client timeout.
+      swarming_io_timeout_secs: swarming client io timeout.
+      swarming_hard_timeout_secs: swarming client hard timeout.
+      swarming_expiration_secs: swarming task expiration.
+    """
+    base_cmd = [swarming_lib._SWARMING_PROXY_CLIENT, 'run',
+                '--swarming', topology.topology.get(
+                    topology.SWARMING_PROXY_HOST_KEY),
+                '--task-summary-json', self.temp_json_path,
+                '--raw-cmd',
+                '--task-name', 'test-build-test-suite',
+                '--dimension', 'os', 'Linux',
+                '--print-status-updates',
+                '--timeout', swarming_timeout_secs,
+                '--io-timeout', swarming_io_timeout_secs,
+                '--hard-timeout', swarming_hard_timeout_secs,
+                '--expiration', swarming_expiration_secs,
+                '--', commands._RUN_SUITE_PATH,
+                '--build', 'test-build', '--board', 'test-board',
+                '--suite_name', 'test-suite'] + list(args)
+    self.create_cmd = base_cmd + ['-c']
+    self.wait_cmd = base_cmd + ['-m', '26960110']
+    create_results = iter([
+        self.rc.CmdResult(returncode=create_return_code,
+                          output=self.JOB_ID_OUTPUT,
+                          error=''),
+    ])
+    self.rc.AddCmdResult(
+        self.create_cmd,
+        side_effect=lambda *args, **kwargs: create_results.next(),
+    )
+
+    wait_results = iter([
+        self.rc.CmdResult(returncode=wait_return_code,
+                          output=self.WAIT_OUTPUT,
+                          error=''),
+    ])
+    self.rc.AddCmdResult(
+        self.wait_cmd,
+        side_effect=lambda *args, **kwargs: wait_results.next(),
+    )
+
+  def PatchJson(self, task_outputs):
+    """Mock out the code that loads from json.
+
+    Args:
+      task_outputs: A list of tuple, the first element is the value of 'outputs'
+                    field in the json dictionary, the second is a boolean
+                    indicating whether there is an internal failure.
+                    ('some output', True)
+    """
+    orig_func = commands._CreateSwarmingArgs
+
+    def replacement(*args, **kargs):
+      swarming_args = orig_func(*args, **kargs)
+      swarming_args['temp_json_path'] = self.temp_json_path
+      return swarming_args
+
+    self.PatchObject(commands, '_CreateSwarmingArgs', side_effect=replacement)
+
+    if task_outputs:
+      return_values = []
+      for s in task_outputs:
+        j = {'shards':[{'name': 'fake_name', 'bot_id': 'chromeos-server990',
+                        'created_ts': '2015-06-12 12:00:00',
+                        'internal_failure': s[1],
+                        'outputs': [s[0]]}]}
+        return_values.append(j)
+      return_values_iter = iter(return_values)
+      self.PatchObject(swarming_lib.SwarmingCommandResult, 'LoadJsonSummary',
+                       side_effect=lambda json_file: return_values_iter.next())
+    else:
+      self.PatchObject(swarming_lib.SwarmingCommandResult, 'LoadJsonSummary',
+                       return_value=None)
+
+  def testRunHWTestSuiteMinimal(self):
+    """Test RunHWTestSuite without optional arguments."""
+    self.SetCmdResults()
+    self.PatchJson([(self.JOB_ID_OUTPUT, False), (self.WAIT_OUTPUT, False)])
+
+    with self.OutputCapturer() as output:
+      self.RunHWTestSuite()
+    self.assertCommandCalled(self.create_cmd, capture_output=True,
+                             combine_stdout_stderr=True)
+    self.assertCommandCalled(self.wait_cmd, capture_output=True,
+                             combine_stdout_stderr=True)
+    self.assertIn(self.JOB_ID_OUTPUT, '\n'.join(output.GetStdoutLines()))
+    self.assertIn(self.WAIT_OUTPUT, '\n'.join(output.GetStdoutLines()))
+
+  def testRunHWTestSuiteMaximal(self):
+    """Test RunHWTestSuite with all arguments."""
+    swarming_timeout = str(self._timeout_mins * 60 +
+                           commands._SWARMING_ADDITIONAL_TIMEOUT)
+    self.SetCmdResults(
+        args=[
+            '--pool', 'test-pool', '--num', '42',
+            '--file_bugs', 'True', '--no_wait', 'True',
+            '--priority', 'test-priority', '--timeout_mins', '23',
+            '--retry', 'False', '--max_retries', '3', '--minimum_duts', '2',
+            '--suite_min_duts', '2'
+        ],
+        swarming_timeout_secs=swarming_timeout,
+        swarming_io_timeout_secs=swarming_timeout,
+        swarming_hard_timeout_secs=swarming_timeout)
+
+    self.PatchJson([(self.JOB_ID_OUTPUT, False), (self.WAIT_OUTPUT, False)])
+    with self.OutputCapturer() as output:
+      self.RunHWTestSuite(self._pool, self._num, self._file_bugs,
+                          self._wait_for_results, self._priority,
+                          self._timeout_mins, self._retry,
+                          self._max_retries,
+                          self._minimum_duts, self._suite_min_duts)
+    self.assertCommandCalled(self.create_cmd, capture_output=True,
+                             combine_stdout_stderr=True)
+    self.assertCommandCalled(self.wait_cmd, capture_output=True,
+                             combine_stdout_stderr=True)
+    self.assertIn(self.WAIT_OUTPUT, '\n'.join(output.GetStdoutLines()))
+    self.assertIn(self.JOB_ID_OUTPUT, '\n'.join(output.GetStdoutLines()))
+
+  def testRunHWTestSuiteFailure(self):
+    """Test RunHWTestSuite when ERROR is returned."""
+    self.PatchJson([(self.JOB_ID_OUTPUT, False)])
+    self.rc.SetDefaultCmdResult(returncode=1, output=self.JOB_ID_OUTPUT)
+    with self.OutputCapturer():
+      self.assertRaises(failures_lib.TestFailure, self.RunHWTestSuite)
+
+  def testRunHWTestSuiteTimedOut(self):
+    """Test RunHWTestSuite when SUITE_TIMEOUT is returned."""
+    self.PatchJson([(self.JOB_ID_OUTPUT, False)])
+    self.rc.SetDefaultCmdResult(returncode=4, output=self.JOB_ID_OUTPUT)
+    with self.OutputCapturer():
+      self.assertRaises(failures_lib.SuiteTimedOut, self.RunHWTestSuite)
+
+  def testRunHWTestSuiteInfraFail(self):
+    """Test RunHWTestSuite when INFRA_FAILURE is returned."""
+    self.PatchJson([(self.JOB_ID_OUTPUT, False)])
+    self.rc.SetDefaultCmdResult(returncode=3, output=self.JOB_ID_OUTPUT)
+    with self.OutputCapturer():
+      self.assertRaises(failures_lib.TestLabFailure, self.RunHWTestSuite)
+
+  def testRunHWTestBoardNotAvailable(self):
+    """Test RunHWTestSuite when BOARD_NOT_AVAILABLE is returned."""
+    self.PatchJson([(self.JOB_ID_OUTPUT, False)])
+    self.rc.SetDefaultCmdResult(returncode=5, output=self.JOB_ID_OUTPUT)
+    with self.OutputCapturer():
+      self.assertRaises(failures_lib.BoardNotAvailable, self.RunHWTestSuite)
+
+  def testRunHWTestTestWarning(self):
+    """Test RunHWTestSuite when WARNING is returned."""
+    self.PatchJson([(self.JOB_ID_OUTPUT, False)])
+    self.rc.SetDefaultCmdResult(returncode=2, output=self.JOB_ID_OUTPUT)
+    with self.OutputCapturer():
+      self.assertRaises(failures_lib.TestWarning, self.RunHWTestSuite)
+
+  def testRunHWTestTestSwarmingClientNoSummaryFile(self):
+    """Test RunHWTestSuite when no summary file is generated."""
+    unknown_failure = 'Unknown failure'
+    self.PatchJson(task_outputs=[])
+    self.rc.SetDefaultCmdResult(returncode=1, output=unknown_failure)
+    with self.OutputCapturer() as output:
+      self.assertRaises(failures_lib.SwarmingProxyFailure, self.RunHWTestSuite)
+      self.assertIn(unknown_failure, '\n'.join(output.GetStdoutLines()))
+
+  def testRunHWTestTestSwarmingClientInternalFailure(self):
+    """Test RunHWTestSuite when the summary reports an internal failure."""
+    unknown_failure = 'Unknown failure'
+    self.PatchJson(task_outputs=[(self.JOB_ID_OUTPUT, True)])
+    self.rc.SetDefaultCmdResult(returncode=1, output=unknown_failure)
+    with self.OutputCapturer() as output:
+      self.assertRaises(failures_lib.SwarmingProxyFailure, self.RunHWTestSuite)
+      self.assertIn(unknown_failure, '\n'.join(output.GetStdoutLines()))
+      self.assertIn('summary json content', '\n'.join(output.GetStdoutLines()))
+
+  def testGetRunSuiteArgsWithSubsystems(self):
+    """Test _GetRunSuiteArgs when subsystems is specified."""
+    result_1 = commands._GetRunSuiteArgs(build=self._build,
+                                         suite=self._suite,
+                                         board=self._board,
+                                         subsystems=['light'])
+    expected_1 = ['--build', self._build,
+                  '--board', self._board,
+                  '--suite_name', 'suite_attr_wrapper',
+                  '--suite_args',
+                  ("{'attr_filter': '(suite:%s) and (subsystem:light)'}" %
+                   self._suite)]
+    # Test with multiple subsystems.
+    result_2 = commands._GetRunSuiteArgs(build=self._build,
+                                         suite=self._suite,
+                                         board=self._board,
+                                         subsystems=['light', 'power'])
+    expected_2 = ['--build', self._build,
+                  '--board', self._board,
+                  '--suite_name', 'suite_attr_wrapper',
+                  '--suite_args',
+                  ("{'attr_filter': '(suite:%s) and (subsystem:light or "
+                   "subsystem:power)'}" % self._suite)]
+
+    self.assertEqual(result_1, expected_1)
+    self.assertEqual(result_2, expected_2)
+
+
+class CBuildBotTest(cros_build_lib_unittest.RunCommandTempDirTestCase):
+  """Test general cbuildbot command methods."""
+
+  def setUp(self):
+    self._board = 'test-board'
+    self._buildroot = self.tempdir
+    self._overlays = ['%s/src/third_party/chromiumos-overlay' % self._buildroot]
+    self._chroot = os.path.join(self._buildroot, 'chroot')
+    os.makedirs(os.path.join(self._buildroot, '.repo'))
+
+  def testGenerateStackTraces(self):
+    """Test if we can generate stack traces for minidumps."""
+    os.makedirs(os.path.join(self._chroot, 'tmp'))
+    dump_file = os.path.join(self._chroot, 'tmp', 'test.dmp')
+    dump_file_dir, dump_file_name = os.path.split(dump_file)
+    ret = [(dump_file_dir, [''], [dump_file_name])]
+    with mock.patch('os.walk', return_value=ret):
+      test_results_dir = os.path.join(self.tempdir, 'test_results')
+      commands.GenerateStackTraces(self._buildroot, self._board,
+                                   test_results_dir, self.tempdir, True)
+      self.assertCommandContains(['minidump_stackwalk'])
+
+  def testUprevAllPackages(self):
+    """Test if we get None in revisions.pfq indicating Full Builds."""
+    commands.UprevPackages(self._buildroot, [self._board], self._overlays)
+    self.assertCommandContains(['--boards=%s' % self._board, 'commit'])
+
+  def testVerifyBinpkgMissing(self):
+    """Test case where binpkg is missing."""
+    self.rc.AddCmdResult(
+        partial_mock.ListRegex(r'emerge'),
+        output='\n[ebuild] %s' % constants.CHROME_CP)
+    self.assertRaises(
+        commands.MissingBinpkg, commands.VerifyBinpkg,
+        self._buildroot, self._board, constants.CHROME_CP, packages=())
+
+  def testVerifyBinpkgPresent(self):
+    """Test case where binpkg is present."""
+    self.rc.AddCmdResult(
+        partial_mock.ListRegex(r'emerge'),
+        output='\n[binary] %s' % constants.CHROME_CP)
+    commands.VerifyBinpkg(self._buildroot, self._board, constants.CHROME_CP,
+                          packages=())
+
+  def testVerifyChromeNotInstalled(self):
+    """Test case where Chrome is not installed at all."""
+    commands.VerifyBinpkg(self._buildroot, self._board, constants.CHROME_CP,
+                          packages=())
+
+  def testBuild(self, default=False, **kwargs):
+    """Base case where Build is called with minimal options."""
+    kwargs.setdefault('build_autotest', default)
+    kwargs.setdefault('usepkg', default)
+    kwargs.setdefault('chrome_binhost_only', default)
+    kwargs.setdefault('skip_chroot_upgrade', default)
+    commands.Build(buildroot=self._buildroot, board='x86-generic', **kwargs)
+    self.assertCommandContains(['./build_packages'])
+
+  def testGetFirmwareVersions(self):
+    self.rc.SetDefaultCmdResult(output='''
+
+flashrom(8): a273d7fd6663c665176159496bc014ff */build/nyan/usr/sbin/flashrom
+             ELF 32-bit LSB executable, ARM, EABI5 version 1 (SYSV), statically linked, for GNU/Linux 2.6.16, BuildID[sha1]=61d8a9676e433414fb0e22fa819b55be86329e44, stripped
+
+
+BIOS image:   4aba4c07a65b7bf82d72d8ed892f5dc5 */build/nyan/tmp/portage/chromeos-base/chromeos-firmware-nyan-0.0.1-r20/work/chromeos-firmware-nyan-0.0.1/.dist/nyan_fw_5771.10.0.tbz2/image.bin
+BIOS version: Google_Nyan.5771.10.0
+EC image:     7b6bb5035fa8101b41c954bce5250dae */build/nyan/tmp/portage/chromeos-base/chromeos-firmware-nyan-0.0.1-r20/work/chromeos-firmware-nyan-0.0.1/.dist/nyan_ec_5771.10.0.tbz2/ec.bin
+EC version:   nyan_v1.1.1782-23f1337
+
+Package Content:
+d7124c9a2680ff57f1c7d6521ac5ef8c *./mosys
+ad9520c70add670d8f2770a2a3c4115a *./gbb_utility
+7b6bb5035fa8101b41c954bce5250dae *./ec.bin
+a273d7fd6663c665176159496bc014ff *./flashrom
+d149f6413749ca6a0edddd52926f95ca *./dump_fmap
+5bfe13d9b7fef1dfd9d3dac185f94994 *./crossystem
+3c3a99346d1ca1273cbcd86c104851ff *./shflags
+4aba4c07a65b7bf82d72d8ed892f5dc5 *./bios.bin
+2a484f3e107bf27a4d1068e03e74803c *./common.sh
+995a97518f90541d37c3f57a336d37db *./vpd
+b9270e726180af1ed59077d1ab2fc688 *./crosfw.sh
+f6b0b80d5f2d9a2fb41ebb6e2cee7ad8 *./updater4.sh
+4363fcfd6849b2ab1a7320b1c98a11f2 *./crosutil.sh
+''')
+    build_sbin = os.path.join(self._buildroot, constants.DEFAULT_CHROOT_DIR,
+                              'build', self._board, 'usr', 'sbin')
+    osutils.Touch(os.path.join(build_sbin, 'chromeos-firmwareupdate'),
+                  makedirs=True)
+    result = commands.GetFirmwareVersions(self._buildroot, self._board)
+    versions = ('Google_Nyan.5771.10.0', 'nyan_v1.1.1782-23f1337')
+    self.assertEquals(result, versions)
+
+  def testBuildMaximum(self):
+    """Case where Build is called with all options (except extra_env)."""
+    self.testBuild(default=True)
+
+  def testBuildWithEnv(self):
+    """Case where Build is called with a custom environment."""
+    extra_env = {'A': 'Av', 'B': 'Bv'}
+    self.testBuild(extra_env=extra_env)
+    self.assertCommandContains(['./build_packages'], extra_env=extra_env)
+
+  def testGenerateSymbols(self):
+    """Test GenerateBreakpadSymbols Command."""
+    commands.GenerateBreakpadSymbols(self.tempdir, self._board, False)
+    self.assertCommandContains(['--board=%s' % self._board])
+
+  def testUploadSymbols(self, official=False, cnt=None):
+    """Test UploadSymbols Command."""
+    commands.UploadSymbols(self.tempdir, self._board, official, cnt, None)
+    self.assertCommandContains(['--board', self._board])
+    self.assertCommandContains(['--official_build'], expected=official)
+    self.assertCommandContains(['--upload-limit'], expected=cnt is not None)
+    self.assertCommandContains(['--failed-list'], expected=False)
+
+  def testOfficialUploadSymbols(self):
+    """Test uploading symbols for official builds"""
+    self.testUploadSymbols(official=True)
+
+  def testLimitUploadSymbols(self):
+    """Test uploading a limited number of symbols"""
+    self.testUploadSymbols(cnt=10)
+
+  def testFailedUploadSymbols(self):
+    """Test when uploading fails"""
+    self.rc.SetDefaultCmdResult(returncode=1, error='i am sad')
+    # This should not throw an exception.
+    commands.UploadSymbols(self.tempdir, self._board, None, None, None)
+
+  def testPushImages(self):
+    """Test PushImages Command."""
+    m = self.PatchObject(pushimage, 'PushImage')
+    commands.PushImages(self._board, 'gs://foo/R34-1234.0.0', False, None)
+    self.assertEqual(m.call_count, 1)
+
+  def testBuildImage(self):
+    """Test Basic BuildImage Command."""
+    commands.BuildImage(self._buildroot, self._board, None)
+    self.assertCommandContains(['./build_image'])
+
+  def testGenerateAuZip(self):
+    """Test Basic generate_au_zip Command."""
+    with mock.patch.object(path_util, 'ToChrootPath',
+                           side_effect=lambda x: x):
+      commands.GenerateAuZip(self._buildroot, '/tmp/taco', None)
+    self.assertCommandContains(['./build_library/generate_au_zip.py'])
+
+  def testTestAuZip(self):
+    """Test Basic test_au_zip Command."""
+    commands.TestAuZip(self._buildroot, '/tmp/taco', None)
+    self.assertCommandContains(['./build_library/test_au_zip.py'])
+
+  def testCompleteBuildImage(self):
+    """Test Complete BuildImage Command."""
+    images_to_build = ['bob', 'carol', 'ted', 'alice']
+    commands.BuildImage(
+        self._buildroot, self._board, images_to_build,
+        rootfs_verification=False, extra_env={'LOVE': 'free'},
+        disk_layout='2+2', version='1969')
+    self.assertCommandContains(['./build_image'])
+
+  def _TestChromeLKGM(self, chrome_revision):
+    """Helper method for testing the GetChromeLKGM method."""
+    chrome_lkgm = '3322.0.0'
+    url = '%s/+/%s/%s?format=text' % (
+        constants.CHROMIUM_SRC_PROJECT,
+        chrome_revision or 'refs/heads/master',
+        constants.PATH_TO_CHROME_LKGM)
+    with mock.patch.object(
+        gob_util, 'FetchUrl',
+        return_value=StringIO(base64.b64encode(chrome_lkgm))) as patcher:
+      self.assertEqual(chrome_lkgm, commands.GetChromeLKGM(chrome_revision))
+      patcher.assert_called_with(site_config.params.EXTERNAL_GOB_HOST, url)
+
+  def testChromeLKGM(self):
+    """Verifies that we can get the chrome lkgm without a chrome revision."""
+    self._TestChromeLKGM(None)
+
+  def testChromeLKGMWithRevision(self):
+    """Verifies that we can get the chrome lkgm with a chrome revision."""
+    self._TestChromeLKGM('deadbeef' * 5)
+
+  def testAbortHWTests(self):
+    """Verifies that HWTests are aborted for a specific non-CQ config."""
+    topology.FetchTopologyFromCIDB(None)
+    commands.AbortHWTests('my_config', 'my_version', debug=False)
+    self.assertCommandContains(['-i', 'my_config/my_version'])
+
+
+class BuildTarballTests(cros_build_lib_unittest.RunCommandTempDirTestCase):
+  """Tests related to building tarball artifacts."""
+
+  def setUp(self):
+    self._buildroot = os.path.join(self.tempdir, 'buildroot')
+    os.makedirs(self._buildroot)
+    self._board = 'test-board'
+    self._cwd = os.path.abspath(
+        os.path.join(self._buildroot, 'chroot', 'build', self._board,
+                     constants.AUTOTEST_BUILD_PATH, '..'))
+    self._tarball_dir = self.tempdir
+
+  def testBuildAUTestTarball(self):
+    """Tests that our call to generate an au test tarball is correct."""
+    archive_url = 'gs://mytest/path/version'
+    with mock.patch.object(commands, 'BuildTarball') as m:
+      tarball_path = commands.BuildAUTestTarball(
+          self._buildroot, self._board, self._tarball_dir, 'R26-3928.0.0',
+          archive_url)
+      m.assert_called_once_with(self._buildroot, ['autotest/au_control_files'],
+                                os.path.join(self._tarball_dir,
+                                             'au_control.tar.bz2'),
+                                cwd=self._tarball_dir)
+
+      self.assertEquals(os.path.join(self._tarball_dir, 'au_control.tar.bz2'),
+                        tarball_path)
+
+    # Full release test with partial args defined.
+    self.assertCommandContains(['site_utils/autoupdate/full_release_test.py',
+                                '--archive_url', archive_url, '3928.0.0',
+                                self._board])
+
+  def testBuildFullAutotestTarball(self):
+    """Tests that our call to generate the full autotest tarball is correct."""
+    with mock.patch.object(commands, 'BuildTarball') as m:
+      m.return_value.returncode = 0
+      commands.BuildFullAutotestTarball(self._buildroot, self._board,
+                                        self._tarball_dir)
+      m.assert_called_once_with(self._buildroot, ['autotest'],
+                                os.path.join(self._tarball_dir,
+                                             'autotest.tar.bz2'),
+                                cwd=self._cwd, error_code_ok=True)
+
+  def testBuildAutotestPackagesTarball(self):
+    """Tests that generating the autotest packages tarball is correct."""
+    with mock.patch.object(commands, 'BuildTarball') as m:
+      commands.BuildAutotestPackagesTarball(self._buildroot, self._cwd,
+                                            self._tarball_dir)
+      m.assert_called_once_with(self._buildroot, ['autotest/packages'],
+                                os.path.join(self._tarball_dir,
+                                             'autotest_packages.tar'),
+                                cwd=self._cwd, compressed=False)
+
+  def testBuildAutotestControlFilesTarball(self):
+    """Tests that generating the autotest control files tarball is correct."""
+    control_file_list = ['autotest/client/site_tests/testA/control',
+                         'autotest/server/site_tests/testB/control']
+    with mock.patch.object(commands, 'FindFilesWithPattern') as find_mock:
+      find_mock.return_value = control_file_list
+      with mock.patch.object(commands, 'BuildTarball') as tar_mock:
+        commands.BuildAutotestControlFilesTarball(self._buildroot, self._cwd,
+                                                  self._tarball_dir)
+        tar_mock.assert_called_once_with(self._buildroot, control_file_list,
+                                         os.path.join(self._tarball_dir,
+                                                      'control_files.tar'),
+                                         cwd=self._cwd, compressed=False)
+
+  def testBuildAutotestServerPackageTarball(self):
+    """Tests that generating the autotest server package tarball is correct."""
+    control_file_list = ['autotest/server/site_tests/testA/control',
+                         'autotest/server/site_tests/testB/control']
+    self.PatchObject(commands, 'FindFilesWithPattern',
+                     return_value=control_file_list)
+    tar_mock = self.PatchObject(commands, 'BuildTarball')
+    commands.BuildAutotestServerPackageTarball(self._buildroot, self._cwd,
+                                               self._tarball_dir)
+    tar_mock.assert_called_once_with(
+        self._buildroot, control_file_list,
+        os.path.join(self._tarball_dir, 'autotest_server_package.tar.bz2'),
+        cwd=self._cwd, error_code_ok=True)
+
+  def testBuildStrippedPackagesArchive(self):
+    """Test generation of stripped package tarball using globs."""
+    package_globs = ['chromeos-base/chromeos-chrome', 'sys-kernel/*kernel*']
+    self.PatchObject(
+        portage_util, 'FindPackageNameMatches',
+        side_effect=[
+            [portage_util.SplitCPV('chromeos-base/chrome-1-r0')],
+            [portage_util.SplitCPV('sys-kernel/kernel-1-r0'),
+             portage_util.SplitCPV('sys-kernel/kernel-2-r0')]])
+    # Drop "stripped packages".
+    pkg_dir = pathjoin(self._buildroot, 'chroot', 'build', 'test-board',
+                       'stripped-packages')
+    osutils.Touch(pathjoin(pkg_dir, 'chromeos-base', 'chrome-1-r0.tbz2'),
+                  makedirs=True)
+    sys_kernel = pathjoin(pkg_dir, 'sys-kernel')
+    osutils.Touch(pathjoin(sys_kernel, 'kernel-1-r0.tbz2'), makedirs=True)
+    osutils.Touch(pathjoin(sys_kernel, 'kernel-1-r01.tbz2'), makedirs=True)
+    osutils.Touch(pathjoin(sys_kernel, 'kernel-2-r0.tbz1'), makedirs=True)
+    osutils.Touch(pathjoin(sys_kernel, 'kernel-2-r0.tbz2'), makedirs=True)
+    stripped_files_list = [
+        abspath(pathjoin(pkg_dir, 'chromeos-base', 'chrome-1-r0.tbz2')),
+        abspath(pathjoin(pkg_dir, 'sys-kernel', 'kernel-1-r0.tbz2')),
+        abspath(pathjoin(pkg_dir, 'sys-kernel', 'kernel-2-r0.tbz2'))]
+
+    tar_mock = self.PatchObject(commands, 'BuildTarball')
+    self.PatchObject(cros_build_lib, 'RunCommand')
+    commands.BuildStrippedPackagesTarball(self._buildroot,
+                                          'test-board',
+                                          package_globs,
+                                          self.tempdir)
+    tar_mock.assert_called_once_with(
+        self._buildroot, stripped_files_list,
+        pathjoin(self.tempdir, 'stripped-packages.tar'),
+        compressed=False)
+
+
+class UnmockedTests(cros_test_lib.TempDirTestCase):
+  """Test cases which really run tests, instead of using mocks."""
+
+  def testListFaliedTests(self):
+    """Tests if we can list failed tests."""
+    test_report_1 = """
+/tmp/taco/taste_tests/all/results-01-has_salsa              [  PASSED  ]
+/tmp/taco/taste_tests/all/results-01-has_salsa/has_salsa    [  PASSED  ]
+/tmp/taco/taste_tests/all/results-02-has_cheese             [  FAILED  ]
+/tmp/taco/taste_tests/all/results-02-has_cheese/has_cheese  [  FAILED  ]
+/tmp/taco/taste_tests/all/results-02-has_cheese/has_cheese   FAIL: No cheese.
+"""
+    test_report_2 = """
+/tmp/taco/verify_tests/all/results-01-has_salsa              [  PASSED  ]
+/tmp/taco/verify_tests/all/results-01-has_salsa/has_salsa    [  PASSED  ]
+/tmp/taco/verify_tests/all/results-02-has_cheese             [  PASSED  ]
+/tmp/taco/verify_tests/all/results-02-has_cheese/has_cheese  [  PASSED  ]
+"""
+    results_path = os.path.join(self.tempdir, 'tmp/taco')
+    os.makedirs(results_path)
+    # Create two reports with the same content to test that we don't
+    # list the same test twice.
+    osutils.WriteFile(
+        os.path.join(results_path, 'taste_tests', 'all', 'test_report.log'),
+        test_report_1, makedirs=True)
+    osutils.WriteFile(
+        os.path.join(results_path, 'taste_tests', 'failed', 'test_report.log'),
+        test_report_1, makedirs=True)
+    osutils.WriteFile(
+        os.path.join(results_path, 'verify_tests', 'all', 'test_report.log'),
+        test_report_2, makedirs=True)
+
+    self.assertEquals(
+        commands.ListFailedTests(results_path),
+        [('has_cheese', 'taste_tests/all/results-02-has_cheese')])
+
+  def testArchiveTestResults(self):
+    """Test if we can archive a test results dir."""
+    test_results_dir = 'tmp/taco'
+    results_path = os.path.join(self.tempdir, 'chroot', test_results_dir)
+    archive_dir = os.path.join(self.tempdir, 'archived_taco')
+    os.makedirs(results_path)
+    os.makedirs(archive_dir)
+    # File that should be archived.
+    osutils.Touch(os.path.join(results_path, 'foo.txt'))
+    # Files that should be ignored.
+    osutils.Touch(os.path.join(results_path,
+                               'chromiumos_qemu_disk.bin.foo'))
+    os.symlink('/src/foo', os.path.join(results_path, 'taco_link'))
+    commands.ArchiveTestResults(results_path, archive_dir)
+    self.assertExists(os.path.join(archive_dir, 'foo.txt'))
+    self.assertNotExists(
+        os.path.join(archive_dir, 'chromiumos_qemu_disk.bin.foo'))
+    self.assertNotExists(os.path.join(archive_dir, 'taco_link'))
+
+  def testBuildFirmwareArchive(self):
+    """Verifies that firmware archiver includes proper files"""
+    # Assorted set of file names, some of which are supposed to be included in
+    # the archive.
+    fw_files = (
+        'dts/emeraldlake2.dts',
+        'image-link.rw.bin',
+        'nv_image-link.bin',
+        'pci8086,0166.rom',
+        'seabios.cbfs',
+        'u-boot.elf',
+        'u-boot_netboot.bin',
+        'updater-link.rw.sh',
+        'x86-memtest',
+    )
+    # Files which should be included in the archive.
+    fw_archived_files = fw_files + ('dts/',)
+    board = 'link'
+    fw_test_root = os.path.join(self.tempdir, os.path.basename(__file__))
+    fw_files_root = os.path.join(fw_test_root,
+                                 'chroot/build/%s/firmware' % board)
+    # Generate a representative set of files produced by a typical build.
+    cros_test_lib.CreateOnDiskHierarchy(fw_files_root, fw_files)
+    # Create an archive from the simulated firmware directory
+    tarball = os.path.join(
+        fw_test_root,
+        commands.BuildFirmwareArchive(fw_test_root, board, fw_test_root))
+    # Verify the tarball contents.
+    cros_test_lib.VerifyTarball(tarball, fw_archived_files)
+
+  def findFilesWithPatternExpectedResults(self, root, files):
+    """Generate the expected results for testFindFilesWithPattern"""
+    return [os.path.join(root, f) for f in files]
+
+  def testFindFilesWithPattern(self):
+    """Verifies FindFilesWithPattern searches and excludes files properly"""
+    search_files = (
+        'file1',
+        'test1',
+        'file2',
+        'dir1/file1',
+        'dir1/test1',
+        'dir2/file2',
+    )
+    search_files_root = os.path.join(self.tempdir, 'FindFilesWithPatternTest')
+    cros_test_lib.CreateOnDiskHierarchy(search_files_root, search_files)
+    find_all = commands.FindFilesWithPattern('*', target=search_files_root)
+    expected_find_all = self.findFilesWithPatternExpectedResults(
+        search_files_root, search_files)
+    self.assertEquals(set(find_all), set(expected_find_all))
+    find_test_files = commands.FindFilesWithPattern('test*',
+                                                    target=search_files_root)
+    find_test_expected = self.findFilesWithPatternExpectedResults(
+        search_files_root, ['test1', 'dir1/test1'])
+    self.assertEquals(set(find_test_files), set(find_test_expected))
+    find_exclude = commands.FindFilesWithPattern(
+        '*', target=search_files_root,
+        exclude_dirs=(os.path.join(search_files_root, 'dir1'),))
+    find_exclude_expected = self.findFilesWithPatternExpectedResults(
+        search_files_root, ['file1', 'test1', 'file2', 'dir2/file2'])
+    self.assertEquals(set(find_exclude), set(find_exclude_expected))
+
+  def testGenerateHtmlIndexTuple(self):
+    """Verifies GenerateHtmlIndex gives us something sane (input: tuple)"""
+    index = os.path.join(self.tempdir, 'index.html')
+    files = ('file1', 'monkey tree', 'flying phone',)
+    commands.GenerateHtmlIndex(index, files)
+    html = osutils.ReadFile(index)
+    for f in files:
+      # TODO(build): Use assertIn w/python-2.7.
+      self.assertTrue('>%s</a>' % f in html)
+
+  def testGenerateHtmlIndexTupleDupe(self):
+    """Verifies GenerateHtmlIndex gives us something unique (input: tuple)"""
+    index = os.path.join(self.tempdir, 'index.html')
+    files = ('file1', 'file1', 'file1',)
+    commands.GenerateHtmlIndex(index, files)
+    html = osutils.ReadFile(index)
+    self.assertEqual(html.count('>file1</a>'), 1)
+
+  def testGenerateHtmlIndexTuplePretty(self):
+    """Verifies GenerateHtmlIndex gives us something pretty (input: tuple)"""
+    index = os.path.join(self.tempdir, 'index.html')
+    files = ('..|up', 'f.txt|MY FILE', 'm.log|MONKEY', 'b.bin|Yander',)
+    commands.GenerateHtmlIndex(index, files)
+    html = osutils.ReadFile(index)
+    for f in files:
+      a = f.split('|')
+      # TODO(build): Use assertIn w/python-2.7.
+      self.assertTrue('href="%s"' % a[0] in html)
+      self.assertTrue('>%s</a>' % a[1] in html)
+
+  def testGenerateHtmlIndexDir(self):
+    """Verifies GenerateHtmlIndex gives us something sane (input: dir)"""
+    index = os.path.join(self.tempdir, 'index.html')
+    files = ('a', 'b b b', 'c', 'dalsdkjfasdlkf',)
+    simple_dir = os.path.join(self.tempdir, 'dir')
+    for f in files:
+      osutils.Touch(os.path.join(simple_dir, f), makedirs=True)
+    commands.GenerateHtmlIndex(index, files)
+    html = osutils.ReadFile(index)
+    for f in files:
+      # TODO(build): Use assertIn w/python-2.7.
+      self.assertTrue('>%s</a>' % f in html)
+
+  def testGenerateHtmlIndexFile(self):
+    """Verifies GenerateHtmlIndex gives us something sane (input: file)"""
+    index = os.path.join(self.tempdir, 'index.html')
+    files = ('a.tgz', 'b b b.txt', 'c', 'dalsdkjfasdlkf',)
+    filelist = os.path.join(self.tempdir, 'listing')
+    osutils.WriteFile(filelist, '\n'.join(files))
+    commands.GenerateHtmlIndex(index, filelist)
+    html = osutils.ReadFile(index)
+    for f in files:
+      # TODO(build): Use assertIn w/python-2.7.
+      self.assertTrue('>%s</a>' % f in html)
+
+  def testArchiveGeneration(self):
+    """Verifies BuildStandaloneImageArchive produces correct archives"""
+    image_dir = os.path.join(self.tempdir, 'inputs')
+    archive_dir = os.path.join(self.tempdir, 'outputs')
+    files = ('a.bin', 'aa', 'b b b', 'c', 'dalsdkjfasdlkf',)
+    osutils.SafeMakedirs(image_dir)
+    osutils.SafeMakedirs(archive_dir)
+    for f in files:
+      osutils.Touch(os.path.join(image_dir, f))
+
+    # Check specifying tar functionality.
+    artifact = {'paths': ['a.bin'], 'output': 'a.tar.gz', 'archive': 'tar',
+                'compress':'gz'}
+    path = commands.BuildStandaloneArchive(archive_dir, image_dir, artifact)
+    self.assertEquals(path, ['a.tar.gz'])
+    cros_test_lib.VerifyTarball(os.path.join(archive_dir, path[0]),
+                                ['a.bin'])
+
+    # Check multiple input files.
+    artifact = {'paths': ['a.bin', 'aa'], 'output': 'aa.tar.gz',
+                'archive': 'tar', 'compress': 'gz'}
+    path = commands.BuildStandaloneArchive(archive_dir, image_dir, artifact)
+    self.assertEquals(path, ['aa.tar.gz'])
+    cros_test_lib.VerifyTarball(os.path.join(archive_dir, path[0]),
+                                ['a.bin', 'aa'])
+
+    # Check zip functionality.
+    artifact = {'paths': ['a.bin'], 'archive': 'zip'}
+    path = commands.BuildStandaloneArchive(archive_dir, image_dir, artifact)
+    self.assertEquals(path, ['a.zip'])
+    self.assertExists(os.path.join(archive_dir, path[0]))
+
+  def testGceTarballGeneration(self):
+    """Verifies BuildGceTarball produces correct archives"""
+    image_dir = os.path.join(self.tempdir, 'inputs')
+    archive_dir = os.path.join(self.tempdir, 'outputs')
+    image = constants.TEST_IMAGE_BIN
+    output = constants.TEST_IMAGE_GCE_TAR
+
+    osutils.SafeMakedirs(image_dir)
+    osutils.SafeMakedirs(archive_dir)
+    osutils.Touch(os.path.join(image_dir, image))
+
+    output_tar = commands.BuildGceTarball(archive_dir, image_dir, image)
+    self.assertEquals(output, output_tar)
+
+    output_path = os.path.join(archive_dir, output_tar)
+    self.assertExists(output_path)
+
+    # GCE expects the tarball to be in a particular format.
+    cros_test_lib.VerifyTarball(output_path, ['disk.raw'])
+
+
+class ImageTestCommandsTest(cros_build_lib_unittest.RunCommandTestCase):
+  """Test commands related to ImageTest tests."""
+
+  def setUp(self):
+    self._build = 'test-build'
+    self._board = 'test-board'
+    self._image_dir = 'image-dir'
+    self._result_dir = 'result-dir'
+    self.PatchObject(path_util, 'ToChrootPath',
+                     side_effect=lambda x: x)
+
+  def testRunTestImage(self):
+    """Verifies RunTestImage calls into test-image script properly."""
+    commands.RunTestImage(self._build, self._board, self._image_dir,
+                          self._result_dir)
+    self.assertCommandContains(
+        [
+            'sudo', '--',
+            os.path.join(self._build, 'chromite', 'bin', 'test_image'),
+            '--board', self._board,
+            '--test_results_root',
+            path_util.ToChrootPath(self._result_dir),
+            path_util.ToChrootPath(self._image_dir),
+        ],
+        enter_chroot=True,
+    )
diff --git a/cbuildbot/config_dump.json b/cbuildbot/config_dump.json
new file mode 100644
index 0000000..83d8cb7
--- /dev/null
+++ b/cbuildbot/config_dump.json
@@ -0,0 +1,24128 @@
+{
+    "_default": {
+        "_template": null,
+        "active_waterfall": null,
+        "afdo_generate": false,
+        "afdo_generate_min": false,
+        "afdo_update_ebuild": false,
+        "afdo_use": false,
+        "archive": true,
+        "archive_build_debug": false,
+        "binhost_base_url": null,
+        "binhost_bucket": null,
+        "binhost_key": null,
+        "binhost_test": false,
+        "board_replace": false,
+        "boards": null,
+        "branch": false,
+        "branch_util_test": false,
+        "build_before_patching": false,
+        "build_packages_in_background": false,
+        "build_tests": true,
+        "build_type": "pfq",
+        "buildbot_waterfall_name": null,
+        "builder_class_name": null,
+        "child_configs": [],
+        "chrome_binhost_only": false,
+        "chrome_rev": null,
+        "chrome_sdk": false,
+        "chrome_sdk_build_chrome": true,
+        "chrome_sdk_goma": false,
+        "chromeos_official": false,
+        "chroot_replace": true,
+        "compilecheck": false,
+        "cpe_export": true,
+        "create_delta_sysroot": false,
+        "critical_for_chrome": false,
+        "debug_symbols": true,
+        "description": null,
+        "dev_installer_prebuilts": false,
+        "dev_manifest": "default.xml",
+        "disk_layout": null,
+        "do_not_apply_cq_patches": false,
+        "doc": null,
+        "factory": true,
+        "factory_install_netboot": true,
+        "factory_toolkit": true,
+        "gcc_githash": null,
+        "git_sync": false,
+        "grouped": false,
+        "gs_path": "default",
+        "health_alert_recipients": [],
+        "health_threshold": 0,
+        "hw_tests": [],
+        "hw_tests_override": null,
+        "hwqual": false,
+        "image_test": false,
+        "images": [
+            "test"
+        ],
+        "important": false,
+        "internal": false,
+        "latest_toolchain": false,
+        "lkgm_manifest": "LKGM/lkgm.xml",
+        "manifest": "default.xml",
+        "manifest_branch": null,
+        "manifest_repo_url": "https://chromium.googlesource.com/chromiumos/manifest",
+        "manifest_version": false,
+        "master": false,
+        "name": null,
+        "overlays": "public",
+        "packages": [],
+        "paygen": false,
+        "paygen_skip_delta_payloads": false,
+        "paygen_skip_testing": false,
+        "payload_image": null,
+        "postsync_patch": true,
+        "postsync_reexec": true,
+        "pre_cq": false,
+        "prebuilts": false,
+        "profile": null,
+        "push_image": false,
+        "push_overlays": null,
+        "rootfs_verification": true,
+        "sanity_check_slaves": null,
+        "separate_debug_symbols": true,
+        "shared_user_password": null,
+        "signer_tests": false,
+        "sync_chrome": null,
+        "trybot_list": false,
+        "unittest_blacklist": [],
+        "unittests": true,
+        "upload_gce_images": false,
+        "upload_hw_test_artifacts": true,
+        "upload_standalone_images": true,
+        "upload_stripped_packages": [
+            "chromeos-base/chromeos-chrome",
+            "sys-kernel/*kernel*"
+        ],
+        "upload_symbols": false,
+        "uprev": true,
+        "use_chrome_lkgm": false,
+        "use_lkgm": false,
+        "use_sdk": true,
+        "useflags": [],
+        "usepkg_build_packages": true,
+        "usepkg_toolchain": true,
+        "vm_test_runs": 1,
+        "vm_tests": [
+            "smoke_suite",
+            "pfq_suite"
+        ],
+        "vm_tests_override": null
+    },
+    "_site_params": {
+        "AOSP_GERRIT_HOST": "android-review.googlesource.com",
+        "AOSP_GERRIT_INSTANCE": "android-review",
+        "AOSP_GERRIT_URL": "https://android-review.googlesource.com",
+        "AOSP_GOB_HOST": "android.googlesource.com",
+        "AOSP_GOB_INSTANCE": "android",
+        "AOSP_GOB_URL": "https://android.googlesource.com",
+        "AOSP_REMOTE": "aosp",
+        "BRANCHABLE_PROJECTS": {
+            "cros": "chromiumos/(.+)",
+            "cros-internal": "chromeos/(.+)",
+            "kayle-cros-internal": "chromeos/(.+)"
+        },
+        "CHANGE_PREFIX": {
+            "cros": "",
+            "cros-internal": "*"
+        },
+        "CHROME_REMOTE": "chrome",
+        "CHROMIUM_REMOTE": "chromium",
+        "CROS_REMOTES": {
+            "aosp": "https://android.googlesource.com",
+            "cros": "https://chromium.googlesource.com",
+            "cros-internal": "https://chrome-internal.googlesource.com",
+            "kayle-cros-internal": "https://chrome-internal.googlesource.com",
+            "weave": "https://weave.googlesource.com"
+        },
+        "EXTERNAL_MANIFEST_VERSIONS_PATH": "manifest-versions",
+        "EXTERNAL_REMOTES": [
+            "cros",
+            "chromium"
+        ],
+        "GERRIT_HOSTS": {
+            "aosp": "android-review.googlesource.com",
+            "cros": "chromium-review.googlesource.com",
+            "cros-internal": "chrome-internal-review.googlesource.com",
+            "weave": "weave-review.googlesource.com"
+        },
+        "GIT_REMOTES": {
+            "aosp": "https://android.googlesource.com",
+            "chrome": "https://chrome-internal.googlesource.com",
+            "chromium": "https://chromium.googlesource.com",
+            "cros": "https://chromium.googlesource.com",
+            "cros-internal": "https://chrome-internal.googlesource.com",
+            "kayle-cros-internal": "https://chrome-internal.googlesource.com",
+            "weave": "https://weave.googlesource.com"
+        },
+        "GOB_REMOTES": {
+            "chrome-internal": "cros-internal",
+            "chromium": "cros"
+        },
+        "INTERNAL_MANIFEST_VERSIONS_PATH": "manifest-versions-internal",
+        "KAYLE_INTERNAL_REMOTE": "kayle-cros-internal",
+        "MANIFEST_INT_PROJECT": "chromeos/manifest-internal",
+        "MANIFEST_INT_URL": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "MANIFEST_PROJECT": "chromiumos/manifest",
+        "MANIFEST_PROJECTS": [
+            "chromiumos/manifest",
+            "chromeos/manifest-internal"
+        ],
+        "MANIFEST_URL": "https://chromium.googlesource.com/chromiumos/manifest",
+        "MANIFEST_VERSIONS_GOB_URL": "https://chromium.googlesource.com/chromiumos/manifest-versions",
+        "MANIFEST_VERSIONS_GOB_URL_TEST": "https://chromium.googlesource.com/chromiumos/manifest-versions-test",
+        "MANIFEST_VERSIONS_GS_URL": "gs://chromeos-manifest-versions",
+        "MANIFEST_VERSIONS_INT_GOB_URL": "https://chrome-internal.googlesource.com/chromeos/manifest-versions",
+        "MANIFEST_VERSIONS_INT_GOB_URL_TEST": "https://chrome-internal.googlesource.com/chromeos/manifest-versions-test",
+        "WEAVE_GERRIT_HOST": "weave-review.googlesource.com",
+        "WEAVE_GERRIT_INSTANCE": "weave-review",
+        "WEAVE_GERRIT_URL": "https://weave-review.googlesource.com",
+        "WEAVE_GOB_HOST": "weave.googlesource.com",
+        "WEAVE_GOB_INSTANCE": "weave",
+        "WEAVE_GOB_URL": "https://weave.googlesource.com",
+        "WEAVE_REMOTE": "weave"
+    },
+    "_templates": {
+        "asan": {
+            "_template": "asan",
+            "disk_layout": "2gb-rootfs",
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-ASAN",
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "profile": "asan",
+            "vm_tests": [
+                "smoke_suite"
+            ],
+            "vm_tests_override": null
+        },
+        "chrome-perf": {
+            "_template": "chrome-perf",
+            "build_type": "chrome",
+            "chrome_rev": "tot",
+            "chrome_sdk": false,
+            "chromeos_official": true,
+            "description": "Chrome Performance test bot",
+            "hw_tests": [
+                "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": true,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"chromeperf\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"perf_v2\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 5400,\n    \"warn_only\": false\n}"
+            ],
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "important": false,
+            "internal": true,
+            "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+            "manifest_version": false,
+            "overlays": "both",
+            "unittests": false,
+            "uprev": false,
+            "use_chrome_lkgm": true,
+            "use_lkgm": false,
+            "useflags": [
+                "-cros-debug",
+                "chrome_internal"
+            ],
+            "vm_tests": [],
+            "vm_tests_override": null
+        },
+        "chrome-pfq": {
+            "_template": "chrome-pfq",
+            "build_type": "chrome",
+            "chrome_rev": "latest_release",
+            "chrome_sdk": true,
+            "chromeos_official": true,
+            "description": "Preflight Chrome Uprev & Build (internal)",
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "important": true,
+            "internal": true,
+            "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+            "manifest_version": true,
+            "overlays": "both",
+            "prebuilts": "private",
+            "uprev": false,
+            "useflags": [
+                "chrome_internal"
+            ],
+            "vm_tests": [
+                "smoke_suite",
+                "pfq_suite"
+            ],
+            "vm_tests_override": null
+        },
+        "chrome-pfq-informational": {
+            "_template": "chrome-pfq-informational",
+            "build_type": "chrome",
+            "chrome_rev": "tot",
+            "chrome_sdk": false,
+            "chromeos_official": true,
+            "description": "Informational Chrome Uprev & Build (internal)",
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "important": false,
+            "internal": true,
+            "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+            "manifest_version": false,
+            "overlays": "both",
+            "uprev": false,
+            "use_lkgm": true,
+            "useflags": [
+                "chrome_internal"
+            ],
+            "vm_tests": [
+                "smoke_suite",
+                "pfq_suite"
+            ],
+            "vm_tests_override": null
+        },
+        "chromium-pfq": {
+            "_template": "chromium-pfq",
+            "build_type": "chrome",
+            "chrome_rev": "latest_release",
+            "chrome_sdk": true,
+            "description": "Preflight Chromium Uprev & Build (public)",
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "important": true,
+            "manifest_version": true,
+            "overlays": "public",
+            "uprev": false,
+            "vm_tests": [
+                "smoke_suite",
+                "pfq_suite"
+            ],
+            "vm_tests_override": null
+        },
+        "chromium-pfq-informational": {
+            "_template": "chromium-pfq-informational",
+            "build_type": "chrome",
+            "chrome_rev": "tot",
+            "chrome_sdk": false,
+            "description": "Informational Chromium Uprev & Build (public)",
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "important": false,
+            "manifest_version": false,
+            "overlays": "public",
+            "uprev": false,
+            "use_lkgm": true,
+            "vm_tests": [
+                "smoke_suite",
+                "pfq_suite"
+            ],
+            "vm_tests_override": null
+        },
+        "compile-only-pre-cq": {
+            "_template": "compile-only-pre-cq",
+            "archive": false,
+            "build_packages_in_background": true,
+            "build_type": "binary",
+            "chrome_sdk": false,
+            "chrome_sdk_build_chrome": false,
+            "chroot_replace": true,
+            "compilecheck": true,
+            "cpe_export": false,
+            "debug_symbols": false,
+            "description": "Verifies compilation only",
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Pre-CQ",
+            "health_alert_recipients": [
+                "chromeos-infra-eng@grotations.appspotmail.com"
+            ],
+            "health_threshold": 3,
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "image_test": true,
+            "images": [
+                "base",
+                "test"
+            ],
+            "important": true,
+            "manifest_version": true,
+            "overlays": "public",
+            "pre_cq": true,
+            "prebuilts": false,
+            "trybot_list": true,
+            "unittests": false,
+            "upload_standalone_images": false,
+            "vm_tests": [],
+            "vm_tests_override": null
+        },
+        "depthcharge-firmware": {
+            "_template": "depthcharge-firmware",
+            "afdo_use": false,
+            "archive_build_debug": true,
+            "binhost_base_url": "https://commondatastorage.googleapis.com/chromeos-dev-installer",
+            "binhost_bucket": "gs://chromeos-dev-installer",
+            "binhost_key": "RELEASE_BINHOST",
+            "build_tests": true,
+            "build_type": "canary",
+            "chrome_sdk": false,
+            "chromeos_official": true,
+            "description": "Firmware Canary",
+            "dev_installer_prebuilts": false,
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Canaries",
+            "factory_toolkit": false,
+            "git_sync": false,
+            "hw_tests": [],
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "hwqual": true,
+            "image_test": false,
+            "images": [],
+            "internal": true,
+            "manifest": "default.xml",
+            "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+            "manifest_version": true,
+            "overlays": "both",
+            "packages": [
+                "virtual/chromeos-firmware",
+                "chromeos-base/autotest-all"
+            ],
+            "paygen": false,
+            "push_image": true,
+            "signer_tests": false,
+            "sync_chrome": false,
+            "trybot_list": false,
+            "unittests": false,
+            "upload_hw_test_artifacts": true,
+            "upload_symbols": false,
+            "useflags": [
+                "chromeless_tty",
+                "depthcharge"
+            ],
+            "usepkg_build_packages": true,
+            "vm_tests": [],
+            "vm_tests_override": null
+        },
+        "depthcharge-full-firmware": {
+            "_template": "depthcharge-full-firmware",
+            "archive_build_debug": true,
+            "build_tests": true,
+            "build_type": "full",
+            "chrome_sdk": false,
+            "description": "Firmware Informational",
+            "dev_installer_prebuilts": false,
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Continuous",
+            "factory_toolkit": false,
+            "git_sync": true,
+            "hw_tests": [],
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "image_test": false,
+            "images": [],
+            "internal": true,
+            "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+            "overlays": "both",
+            "packages": [
+                "virtual/chromeos-firmware",
+                "chromeos-base/autotest-all"
+            ],
+            "paygen": false,
+            "signer_tests": false,
+            "sync_chrome": false,
+            "trybot_list": false,
+            "unittests": false,
+            "upload_hw_test_artifacts": true,
+            "upload_symbols": false,
+            "useflags": [
+                "chromeless_tty",
+                "depthcharge"
+            ],
+            "usepkg_build_packages": true,
+            "vm_tests": [],
+            "vm_tests_override": null
+        },
+        "factory": {
+            "_template": "factory",
+            "afdo_use": false,
+            "archive_build_debug": true,
+            "binhost_base_url": "https://commondatastorage.googleapis.com/chromeos-dev-installer",
+            "binhost_bucket": "gs://chromeos-dev-installer",
+            "binhost_key": "RELEASE_BINHOST",
+            "build_tests": true,
+            "build_type": "canary",
+            "chrome_sdk": false,
+            "chromeos_official": true,
+            "description": "Factory Builds",
+            "dev_installer_prebuilts": true,
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Canaries",
+            "git_sync": false,
+            "hw_tests": [],
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "hwqual": true,
+            "image_test": true,
+            "images": [
+                "base",
+                "recovery",
+                "test",
+                "factory_install"
+            ],
+            "internal": true,
+            "manifest": "official.xml",
+            "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+            "manifest_version": true,
+            "overlays": "both",
+            "paygen": false,
+            "push_image": true,
+            "signer_tests": true,
+            "trybot_list": true,
+            "upload_hw_test_artifacts": false,
+            "upload_symbols": false,
+            "useflags": [
+                "-cros-debug",
+                "chrome_internal"
+            ],
+            "usepkg_build_packages": false,
+            "vm_tests": [
+                "smoke_suite",
+                "dev_mode_test",
+                "cros_vm_test"
+            ]
+        },
+        "firmware": {
+            "_template": "firmware",
+            "afdo_use": false,
+            "archive_build_debug": true,
+            "binhost_base_url": "https://commondatastorage.googleapis.com/chromeos-dev-installer",
+            "binhost_bucket": "gs://chromeos-dev-installer",
+            "binhost_key": "RELEASE_BINHOST",
+            "build_tests": true,
+            "build_type": "canary",
+            "chrome_sdk": false,
+            "chromeos_official": true,
+            "description": "Firmware Canary",
+            "dev_installer_prebuilts": false,
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Canaries",
+            "factory_toolkit": false,
+            "git_sync": false,
+            "hw_tests": [],
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "hwqual": true,
+            "image_test": false,
+            "images": [],
+            "internal": true,
+            "manifest": "default.xml",
+            "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+            "manifest_version": true,
+            "overlays": "both",
+            "packages": [
+                "virtual/chromeos-firmware",
+                "chromeos-base/autotest-all"
+            ],
+            "paygen": false,
+            "push_image": true,
+            "signer_tests": false,
+            "sync_chrome": false,
+            "trybot_list": false,
+            "unittests": false,
+            "upload_hw_test_artifacts": true,
+            "upload_symbols": false,
+            "useflags": [
+                "chromeless_tty"
+            ],
+            "usepkg_build_packages": true,
+            "vm_tests": [],
+            "vm_tests_override": null
+        },
+        "full": {
+            "_template": "full",
+            "archive_build_debug": true,
+            "build_type": "full",
+            "chrome_sdk": true,
+            "description": "Full Builds",
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Continuous",
+            "git_sync": true,
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "image_test": true,
+            "images": [
+                "base",
+                "recovery",
+                "test",
+                "factory_install"
+            ],
+            "trybot_list": true,
+            "usepkg_build_packages": false
+        },
+        "incremental": {
+            "_template": "incremental",
+            "build_type": "binary",
+            "chroot_replace": false,
+            "description": "Incremental Builds",
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Continuous",
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "overlays": "public",
+            "uprev": false
+        },
+        "llvm": {
+            "_template": "llvm",
+            "description": "Build with LLVM",
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "profile": "llvm"
+        },
+        "moblab-release": {
+            "_template": "moblab-release",
+            "afdo_use": false,
+            "archive_build_debug": true,
+            "binhost_base_url": "https://commondatastorage.googleapis.com/chromeos-dev-installer",
+            "binhost_bucket": "gs://chromeos-dev-installer",
+            "binhost_key": "RELEASE_BINHOST",
+            "build_tests": true,
+            "build_type": "canary",
+            "chrome_sdk": true,
+            "chromeos_official": true,
+            "description": "Moblab release builders",
+            "dev_installer_prebuilts": true,
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Canaries",
+            "git_sync": false,
+            "hw_tests": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"moblab\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": true\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": true\n}"
+            ],
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "hwqual": true,
+            "image_test": true,
+            "images": [
+                "base",
+                "recovery",
+                "test"
+            ],
+            "important": false,
+            "internal": true,
+            "manifest": "official.xml",
+            "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+            "manifest_version": true,
+            "overlays": "both",
+            "paygen": true,
+            "paygen_skip_delta_payloads": true,
+            "paygen_skip_testing": true,
+            "push_image": true,
+            "signer_tests": false,
+            "trybot_list": true,
+            "upload_symbols": true,
+            "useflags": [
+                "-cros-debug",
+                "chrome_internal"
+            ],
+            "usepkg_build_packages": false,
+            "vm_tests": [
+                "smoke_suite",
+                "dev_mode_test",
+                "cros_vm_test"
+            ]
+        },
+        "no-vmtest-pre-cq": {
+            "_template": "no-vmtest-pre-cq",
+            "archive": false,
+            "build_packages_in_background": true,
+            "build_type": "binary",
+            "chrome_sdk": false,
+            "chrome_sdk_build_chrome": false,
+            "chroot_replace": true,
+            "cpe_export": false,
+            "debug_symbols": false,
+            "description": "Verifies compilation, building an image, and unit tests if supported.",
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Pre-CQ",
+            "health_alert_recipients": [
+                "chromeos-infra-eng@grotations.appspotmail.com"
+            ],
+            "health_threshold": 3,
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "image_test": true,
+            "images": [
+                "base",
+                "test"
+            ],
+            "important": true,
+            "manifest_version": true,
+            "overlays": "public",
+            "pre_cq": true,
+            "prebuilts": false,
+            "trybot_list": true,
+            "upload_standalone_images": false,
+            "vm_tests": [],
+            "vm_tests_override": null
+        },
+        "paladin": {
+            "_template": "paladin",
+            "build_type": "paladin",
+            "chrome_sdk": true,
+            "chrome_sdk_build_chrome": false,
+            "chroot_replace": false,
+            "description": "Commit Queue",
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-CQ",
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "image_test": true,
+            "images": [
+                "base",
+                "test"
+            ],
+            "important": true,
+            "manifest_version": true,
+            "overlays": "public",
+            "prebuilts": "public",
+            "trybot_list": true,
+            "upload_standalone_images": false,
+            "vm_tests": [
+                "smoke_suite"
+            ],
+            "vm_tests_override": [
+                "smoke_suite",
+                "pfq_suite",
+                "cros_vm_test"
+            ]
+        },
+        "payloads": {
+            "_template": "payloads",
+            "build_type": "payloads",
+            "builder_class_name": "release_builders.GeneratePayloadsBuilder",
+            "description": "Regenerate release payloads.",
+            "hw_tests": [],
+            "hw_tests_override": [],
+            "internal": true,
+            "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+            "manifest_version": true,
+            "overlays": "both",
+            "paygen": true,
+            "unittests": false,
+            "upload_hw_test_artifacts": false,
+            "vm_tests": [],
+            "vm_tests_override": null
+        },
+        "pre-cq": {
+            "_template": "pre-cq",
+            "archive": false,
+            "build_packages_in_background": true,
+            "build_type": "binary",
+            "chrome_sdk": false,
+            "chrome_sdk_build_chrome": false,
+            "chroot_replace": true,
+            "cpe_export": false,
+            "debug_symbols": false,
+            "description": "Verifies compilation, building an image, and vm/unit tests if supported.",
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Pre-CQ",
+            "health_alert_recipients": [
+                "chromeos-infra-eng@grotations.appspotmail.com"
+            ],
+            "health_threshold": 3,
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "image_test": true,
+            "images": [
+                "base",
+                "test"
+            ],
+            "important": true,
+            "manifest_version": true,
+            "overlays": "public",
+            "pre_cq": true,
+            "prebuilts": false,
+            "trybot_list": true,
+            "upload_standalone_images": false,
+            "vm_tests": [
+                "smoke_suite"
+            ],
+            "vm_tests_override": null
+        },
+        "pre-flight-branch": {
+            "_template": "pre-flight-branch",
+            "active_waterfall": "chromeos_release",
+            "branch": true,
+            "build_type": "pfq",
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+            "important": true,
+            "internal": true,
+            "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+            "manifest_version": true,
+            "overlays": "both",
+            "prebuilts": "private",
+            "sync_chrome": true,
+            "trybot_list": false,
+            "uprev": true,
+            "useflags": [
+                "chrome_internal"
+            ],
+            "vm_tests": [
+                "smoke_suite",
+                "pfq_suite"
+            ],
+            "vm_tests_override": [
+                "smoke_suite",
+                "pfq_suite",
+                "cros_vm_test"
+            ]
+        },
+        "release": {
+            "_template": "release",
+            "afdo_use": true,
+            "archive_build_debug": true,
+            "binhost_base_url": "https://commondatastorage.googleapis.com/chromeos-dev-installer",
+            "binhost_bucket": "gs://chromeos-dev-installer",
+            "binhost_key": "RELEASE_BINHOST",
+            "build_tests": true,
+            "build_type": "canary",
+            "chrome_sdk": true,
+            "chromeos_official": true,
+            "description": "Release Builds (canary) (internal)",
+            "dev_installer_prebuilts": true,
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Canaries",
+            "git_sync": false,
+            "hw_tests": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": 10,\n    \"minimum_duts\": 1,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"sanity\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 6,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 2,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 6,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 4,\n    \"num\": 2,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 4,\n    \"num\": 2,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "hwqual": true,
+            "image_test": true,
+            "images": [
+                "base",
+                "recovery",
+                "test",
+                "factory_install"
+            ],
+            "internal": true,
+            "manifest": "official.xml",
+            "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+            "manifest_version": true,
+            "overlays": "both",
+            "paygen": true,
+            "push_image": true,
+            "signer_tests": true,
+            "trybot_list": true,
+            "upload_symbols": true,
+            "useflags": [
+                "-cros-debug",
+                "chrome_internal"
+            ],
+            "usepkg_build_packages": false,
+            "vm_tests": [
+                "smoke_suite",
+                "dev_mode_test",
+                "cros_vm_test"
+            ]
+        },
+        "release-afdo-generate": {
+            "_template": "release-afdo-generate",
+            "afdo_generate_min": true,
+            "afdo_update_ebuild": true,
+            "afdo_use": false,
+            "archive_build_debug": true,
+            "binhost_base_url": "https://commondatastorage.googleapis.com/chromeos-dev-installer",
+            "binhost_bucket": "gs://chromeos-dev-installer",
+            "binhost_key": "RELEASE_BINHOST",
+            "build_tests": true,
+            "build_type": "canary",
+            "chrome_sdk": true,
+            "chromeos_official": true,
+            "description": "Release Builds (canary) (internal)",
+            "dev_installer_prebuilts": false,
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Canaries",
+            "git_sync": false,
+            "hw_tests": [
+                "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": true,\n    \"suite\": \"AFDO_record\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 5400,\n    \"warn_only\": true\n}"
+            ],
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"DEFAULT\",\n    \"retry\": true,\n    \"suite\": \"AFDO_record\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 5400,\n    \"warn_only\": true\n}"
+            ],
+            "hwqual": true,
+            "image_test": true,
+            "images": [
+                "base",
+                "recovery",
+                "test",
+                "factory_install"
+            ],
+            "internal": true,
+            "manifest": "official.xml",
+            "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+            "manifest_version": true,
+            "overlays": "both",
+            "paygen": false,
+            "push_image": false,
+            "signer_tests": true,
+            "trybot_list": false,
+            "upload_symbols": true,
+            "useflags": [
+                "-cros-debug",
+                "chrome_internal"
+            ],
+            "usepkg_build_packages": false,
+            "vm_tests": [
+                "smoke_suite",
+                "dev_mode_test",
+                "cros_vm_test"
+            ]
+        },
+        "release-afdo-use": {
+            "_template": "release-afdo-use",
+            "afdo_use": true,
+            "archive_build_debug": true,
+            "binhost_base_url": "https://commondatastorage.googleapis.com/chromeos-dev-installer",
+            "binhost_bucket": "gs://chromeos-dev-installer",
+            "binhost_key": "RELEASE_BINHOST",
+            "build_tests": true,
+            "build_type": "canary",
+            "chrome_sdk": true,
+            "chromeos_official": true,
+            "description": "Release Builds (canary) (internal)",
+            "dev_installer_prebuilts": false,
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Canaries",
+            "git_sync": false,
+            "hw_tests": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 4,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": false,\n    \"suite\": \"perf_v2\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}"
+            ],
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "hwqual": true,
+            "image_test": true,
+            "images": [
+                "base",
+                "recovery",
+                "test",
+                "factory_install"
+            ],
+            "internal": true,
+            "manifest": "official.xml",
+            "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+            "manifest_version": true,
+            "overlays": "both",
+            "paygen": false,
+            "push_image": false,
+            "signer_tests": true,
+            "trybot_list": false,
+            "upload_symbols": true,
+            "useflags": [
+                "-cros-debug",
+                "chrome_internal"
+            ],
+            "usepkg_build_packages": false,
+            "vm_tests": [
+                "smoke_suite",
+                "dev_mode_test",
+                "cros_vm_test"
+            ]
+        },
+        "telemetry": {
+            "_template": "telemetry",
+            "build_type": "binary",
+            "description": "Telemetry Builds",
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "overlays": "public",
+            "uprev": false,
+            "vm_tests": [
+                "telemetry_suite"
+            ]
+        },
+        "test-ap": {
+            "_template": "test-ap",
+            "description": "WiFi AP images used in testing",
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "internal": true,
+            "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+            "overlays": "both",
+            "profile": "testbed-ap",
+            "vm_tests": []
+        },
+        "tot-asan-informational": {
+            "_template": "tot-asan-informational",
+            "build_type": "chrome",
+            "chrome_rev": "tot",
+            "chrome_sdk": false,
+            "description": "Build TOT Chrome with Address Sanitizer (Clang)",
+            "disk_layout": "2gb-rootfs",
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-ASAN",
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"au\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+                "{\n    \"async\": true,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"PostBuild\",\n    \"retry\": false,\n    \"suite\": \"bvt-perbuild\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "important": false,
+            "manifest_version": false,
+            "overlays": "public",
+            "profile": "asan",
+            "uprev": false,
+            "use_lkgm": true,
+            "vm_tests": [
+                "smoke_suite"
+            ],
+            "vm_tests_override": null
+        },
+        "wificell-pre-cq": {
+            "_template": "wificell-pre-cq",
+            "archive": true,
+            "build_packages_in_background": true,
+            "build_type": "binary",
+            "chrome_sdk": false,
+            "chrome_sdk_build_chrome": false,
+            "chroot_replace": true,
+            "cpe_export": false,
+            "debug_symbols": false,
+            "description": "WiFi tests acting as pre-cq for WiFi related changes",
+            "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Pre-CQ",
+            "health_alert_recipients": [
+                "chromeos-infra-eng@grotations.appspotmail.com"
+            ],
+            "health_threshold": 3,
+            "hw_tests": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 1,\n    \"num\": 6,\n    \"offload_failures_only\": false,\n    \"pool\": \"wificell-pre-cq\",\n    \"priority\": \"DEFAULT\",\n    \"retry\": false,\n    \"suite\": \"wificell-pre-cq\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "hw_tests_override": [
+                "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": null,\n    \"minimum_duts\": 1,\n    \"num\": 6,\n    \"offload_failures_only\": false,\n    \"pool\": \"wificell-pre-cq\",\n    \"priority\": \"DEFAULT\",\n    \"retry\": false,\n    \"suite\": \"wificell-pre-cq\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+            ],
+            "image_test": false,
+            "images": [
+                "base",
+                "test"
+            ],
+            "important": true,
+            "manifest_version": true,
+            "overlays": "public",
+            "pre_cq": true,
+            "prebuilts": false,
+            "trybot_list": true,
+            "unittests": false,
+            "upload_standalone_images": false,
+            "vm_tests": [
+                "smoke_suite"
+            ],
+            "vm_tests_override": null
+        }
+    },
+    "alex-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "x86-alex"
+        ],
+        "manifest": "official.xml"
+    },
+    "amd64-generic-asan": {
+        "_template": "asan",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "amd64-generic"
+        ],
+        "build_type": "binary",
+        "chroot_replace": false,
+        "description": "Build with Address Sanitizer (Clang)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Continuous",
+        "trybot_list": true,
+        "uprev": false
+    },
+    "amd64-generic-asan-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "amd64-generic"
+        ],
+        "description": "Paladin build with Address Sanitizer (Clang)",
+        "disk_layout": "2gb-rootfs",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-ASAN",
+        "important": false,
+        "profile": "asan",
+        "vm_tests_override": null
+    },
+    "amd64-generic-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "amd64-generic"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "disk_layout": "2gb-rootfs",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public"
+    },
+    "amd64-generic-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "amd64-generic"
+        ]
+    },
+    "amd64-generic-full": {
+        "_template": "full",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "amd64-generic"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "amd64-generic-incremental": {
+        "_template": "incremental",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "amd64-generic"
+        ],
+        "vm_tests": []
+    },
+    "amd64-generic-llvm": {
+        "_template": "llvm",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "amd64-generic"
+        ],
+        "build_type": "binary",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Continuous",
+        "trybot_list": true,
+        "uprev": false
+    },
+    "amd64-generic-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "amd64-generic"
+        ]
+    },
+    "amd64-generic-nowithdebug-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "amd64-generic"
+        ],
+        "description": "Commit Queue (internal, nowithdebug)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": false,
+        "useflags": [
+            "-cros-debug",
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "amd64-generic-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "amd64-generic"
+        ]
+    },
+    "amd64-generic-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "amd64-generic"
+        ]
+    },
+    "amd64-generic-telem-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "amd64-generic"
+        ],
+        "description": "Telemetry Builds",
+        "vm_tests": [
+            "telemetry_suite"
+        ]
+    },
+    "amd64-generic-telemetry": {
+        "_template": "telemetry",
+        "boards": [
+            "amd64-generic"
+        ]
+    },
+    "amd64-generic-tot-asan-informational": {
+        "_template": "tot-asan-informational",
+        "boards": [
+            "amd64-generic"
+        ]
+    },
+    "amd64-generic-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "amd64-generic"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "amd64-generic_freon-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "amd64-generic_freon"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "disk_layout": "2gb-rootfs",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "vm_tests": []
+    },
+    "amd64-generic_freon-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "amd64-generic_freon"
+        ]
+    },
+    "amd64-generic_freon-full": {
+        "_template": "full",
+        "boards": [
+            "amd64-generic_freon"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "amd64-generic_freon-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "amd64-generic_freon"
+        ]
+    },
+    "amd64-generic_freon-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "amd64-generic_freon"
+        ],
+        "important": false,
+        "vm_tests": []
+    },
+    "amd64-generic_freon-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "amd64-generic_freon"
+        ]
+    },
+    "amd64-generic_freon-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "amd64-generic_freon"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "arkham-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "arkham"
+        ],
+        "chrome_sdk": false,
+        "important": false,
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "arkham-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "arkham"
+        ],
+        "chrome_sdk": false,
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "arkham-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "arkham"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "arkham-full": {
+        "_template": "full",
+        "boards": [
+            "arkham"
+        ],
+        "chrome_sdk": false,
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "arkham-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "arkham"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "arkham-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "arkham"
+        ],
+        "chrome_sdk": false,
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "arkham-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "arkham"
+        ]
+    },
+    "arkham-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "arkham"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "arkham-release": {
+        "_template": "release",
+        "afdo_use": false,
+        "boards": [
+            "arkham"
+        ],
+        "chrome_sdk": false,
+        "dev_installer_prebuilts": false,
+        "hw_tests": [],
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "arkham-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "arkham"
+        ],
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "arkham-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "arkham"
+        ],
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "arm-generic-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "arm-generic"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "arm-generic-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "arm-generic"
+        ]
+    },
+    "arm-generic-full": {
+        "_template": "full",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "arm-generic"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "arm-generic-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "arm-generic"
+        ]
+    },
+    "arm-generic-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "arm-generic"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "arm-generic-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "arm-generic"
+        ],
+        "vm_tests": []
+    },
+    "arm-generic-telemetry": {
+        "_template": "telemetry",
+        "boards": [
+            "arm-generic"
+        ],
+        "vm_tests": []
+    },
+    "arm-generic-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "arm-generic"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "arm-generic_freon-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "arm-generic_freon"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "arm-generic_freon-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "arm-generic_freon"
+        ]
+    },
+    "arm-generic_freon-full": {
+        "_template": "full",
+        "boards": [
+            "arm-generic_freon"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "arm-generic_freon-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "arm-generic_freon"
+        ]
+    },
+    "arm-generic_freon-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "arm-generic_freon"
+        ],
+        "important": false,
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "arm-generic_freon-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "arm-generic_freon"
+        ],
+        "vm_tests": []
+    },
+    "arm-generic_freon-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "arm-generic_freon"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "arm64-generic-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "arm64-generic"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "arm64-generic-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "arm64-generic"
+        ]
+    },
+    "arm64-generic-full": {
+        "_template": "full",
+        "boards": [
+            "arm64-generic"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "arm64-generic-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "arm64-generic"
+        ]
+    },
+    "arm64-generic-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "arm64-generic"
+        ],
+        "important": false,
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "arm64-generic-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "arm64-generic"
+        ],
+        "vm_tests": []
+    },
+    "arm64-generic-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "arm64-generic"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "auron-b-full-group": {
+        "_template": "full",
+        "boards": [
+            "lulu"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "lulu"
+                ],
+                "grouped": true,
+                "name": "lulu-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "gandof"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "gandof-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "buddy"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "buddy-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: lulu, gandof, buddy)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "auron-b-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "lulu"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "lulu"
+                ],
+                "grouped": true,
+                "name": "lulu-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "gandof"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "gandof-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "buddy"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "buddy-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: lulu, gandof, buddy)",
+        "important": true
+    },
+    "auron-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "auron"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "auron-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "auron"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "auron-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "auron"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "auron-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "auron"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "auron-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "auron"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "auron-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "auron"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "auron-full": {
+        "_template": "full",
+        "boards": [
+            "auron"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "auron-full-group": {
+        "_template": "full",
+        "boards": [
+            "auron"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "auron"
+                ],
+                "grouped": true,
+                "name": "auron-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "auron_yuna"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "auron_yuna-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "auron_paine"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "auron_paine-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: auron, auron_yuna, auron_paine)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "auron-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "auron"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "auron-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "auron"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "auron-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "auron"
+        ]
+    },
+    "auron-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "auron"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "auron-release": {
+        "_template": "release",
+        "boards": [
+            "auron"
+        ]
+    },
+    "auron-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "auron"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "auron"
+                ],
+                "grouped": true,
+                "name": "auron-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "auron"
+                ],
+                "grouped": true,
+                "name": "auron-release-afdo-use"
+            }
+        ]
+    },
+    "auron-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "auron"
+        ]
+    },
+    "auron-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "auron"
+        ]
+    },
+    "auron-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "auron"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "auron"
+                ],
+                "grouped": true,
+                "name": "auron-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "auron_yuna"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "auron_yuna-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "auron_paine"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "auron_paine-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: auron, auron_yuna, auron_paine)",
+        "important": true
+    },
+    "auron-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "auron"
+        ],
+        "manifest": "official.xml"
+    },
+    "auron-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "auron"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "auron_paine-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "auron_paine"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "auron_paine-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "auron_paine"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "auron_paine-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "auron_paine"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "auron_paine-full": {
+        "_template": "full",
+        "boards": [
+            "auron_paine"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "auron_paine-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "auron_paine"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "auron_paine-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "auron_paine"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "auron_paine-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "auron_paine"
+        ]
+    },
+    "auron_paine-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "auron_paine"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "auron_paine-release": {
+        "_template": "release",
+        "boards": [
+            "auron_paine"
+        ]
+    },
+    "auron_paine-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "auron_paine"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "auron_paine"
+                ],
+                "grouped": true,
+                "name": "auron_paine-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "auron_paine"
+                ],
+                "grouped": true,
+                "name": "auron_paine-release-afdo-use"
+            }
+        ]
+    },
+    "auron_paine-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "auron_paine"
+        ]
+    },
+    "auron_paine-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "auron_paine"
+        ]
+    },
+    "auron_paine-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "auron_paine"
+        ],
+        "manifest": "official.xml"
+    },
+    "auron_paine-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "auron_paine"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "auron_yuna-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "auron_yuna"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "auron_yuna-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "auron_yuna"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "auron_yuna-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "auron_yuna"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "auron_yuna-full": {
+        "_template": "full",
+        "boards": [
+            "auron_yuna"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "auron_yuna-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "auron_yuna"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "auron_yuna-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "auron_yuna"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "auron_yuna-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "auron_yuna"
+        ]
+    },
+    "auron_yuna-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "auron_yuna"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "auron_yuna-release": {
+        "_template": "release",
+        "boards": [
+            "auron_yuna"
+        ]
+    },
+    "auron_yuna-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "auron_yuna"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "auron_yuna"
+                ],
+                "grouped": true,
+                "name": "auron_yuna-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "auron_yuna"
+                ],
+                "grouped": true,
+                "name": "auron_yuna-release-afdo-use"
+            }
+        ]
+    },
+    "auron_yuna-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "auron_yuna"
+        ]
+    },
+    "auron_yuna-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "auron_yuna"
+        ]
+    },
+    "auron_yuna-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "auron_yuna"
+        ],
+        "manifest": "official.xml"
+    },
+    "auron_yuna-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "auron_yuna"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "banjo-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "banjo"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "banjo-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "banjo"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "banjo-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "banjo"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "banjo-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "banjo"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "banjo-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "banjo"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "banjo-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "banjo"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "banjo-full": {
+        "_template": "full",
+        "boards": [
+            "banjo"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "banjo-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "banjo"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "banjo-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "banjo"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "banjo-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "banjo"
+        ]
+    },
+    "banjo-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "banjo"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "banjo-release": {
+        "_template": "release",
+        "boards": [
+            "banjo"
+        ]
+    },
+    "banjo-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "banjo"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "banjo"
+                ],
+                "grouped": true,
+                "name": "banjo-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "banjo"
+                ],
+                "grouped": true,
+                "name": "banjo-release-afdo-use"
+            }
+        ]
+    },
+    "banjo-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "banjo"
+        ]
+    },
+    "banjo-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "banjo"
+        ]
+    },
+    "banjo-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "banjo"
+        ],
+        "manifest": "official.xml"
+    },
+    "banjo-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "banjo"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "bayleybay-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "bayleybay"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "bayleybay-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "bayleybay"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "bayleybay-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "bayleybay"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "bayleybay-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "bayleybay"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "bayleybay-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "bayleybay"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "bayleybay-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "bayleybay"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "bayleybay-full": {
+        "_template": "full",
+        "boards": [
+            "bayleybay"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "bayleybay-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "bayleybay"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "bayleybay-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "bayleybay"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "bayleybay-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "bayleybay"
+        ]
+    },
+    "bayleybay-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "bayleybay"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "bayleybay-release": {
+        "_template": "release",
+        "boards": [
+            "bayleybay"
+        ],
+        "hw_tests": [],
+        "unittests": false,
+        "vm_tests": []
+    },
+    "bayleybay-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "bayleybay"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "bayleybay"
+                ],
+                "grouped": true,
+                "name": "bayleybay-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "bayleybay"
+                ],
+                "grouped": true,
+                "name": "bayleybay-release-afdo-use"
+            }
+        ]
+    },
+    "bayleybay-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "bayleybay"
+        ]
+    },
+    "bayleybay-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "bayleybay"
+        ]
+    },
+    "bayleybay-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "bayleybay"
+        ],
+        "manifest": "official.xml"
+    },
+    "bayleybay-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "bayleybay"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "beaglebone-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "beaglebone"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "beaglebone-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "beaglebone"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "beaglebone-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "beaglebone"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "beaglebone-full": {
+        "_template": "full",
+        "boards": [
+            "beaglebone"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "beaglebone-incremental": {
+        "_template": "incremental",
+        "boards": [
+            "beaglebone"
+        ],
+        "description": "Incremental Beaglebone Builder",
+        "rootfs_verification": false,
+        "sync_chrome": false,
+        "trybot_list": true,
+        "vm_tests": []
+    },
+    "beaglebone-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "beaglebone"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "beaglebone-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "beaglebone"
+        ],
+        "chrome_sdk": false,
+        "description": "Commit Queue (internal)",
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "rootfs_verification": false,
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "beaglebone-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "beaglebone"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "beaglebone-release": {
+        "_template": "release",
+        "afdo_use": false,
+        "boards": [
+            "beaglebone"
+        ],
+        "chrome_sdk": false,
+        "dev_installer_prebuilts": false,
+        "hw_tests": [],
+        "image_test": false,
+        "images": [
+            "base",
+            "test"
+        ],
+        "paygen": false,
+        "rootfs_verification": false,
+        "signer_tests": false,
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "beaglebone-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "beaglebone"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "beaglebone"
+                ],
+                "grouped": true,
+                "name": "beaglebone-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "beaglebone"
+                ],
+                "grouped": true,
+                "name": "beaglebone-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "beaglebone-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "beaglebone"
+        ],
+        "vm_tests": []
+    },
+    "beaglebone-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "beaglebone"
+        ],
+        "vm_tests": []
+    },
+    "beaglebone-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "afdo_use": false,
+        "boards": [
+            "beaglebone"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "afdo_use": false,
+                "boards": [
+                    "beaglebone"
+                ],
+                "chrome_sdk": false,
+                "dev_installer_prebuilts": false,
+                "grouped": true,
+                "hw_tests": [],
+                "image_test": false,
+                "images": [
+                    "base",
+                    "test"
+                ],
+                "name": "beaglebone-release",
+                "paygen": false,
+                "rootfs_verification": false,
+                "signer_tests": false,
+                "sync_chrome": false,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "afdo_use": false,
+                "boards": [
+                    "beaglebone_servo"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "dev_installer_prebuilts": false,
+                "grouped": true,
+                "hw_tests": [],
+                "image_test": false,
+                "images": [
+                    "base",
+                    "test"
+                ],
+                "name": "beaglebone_servo-release",
+                "paygen": false,
+                "payload_image": "base",
+                "rootfs_verification": false,
+                "signer_tests": false,
+                "sync_chrome": false,
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "chrome_sdk": false,
+        "dev_installer_prebuilts": false,
+        "hw_tests": [],
+        "image_test": false,
+        "images": [
+            "base",
+            "test"
+        ],
+        "important": true,
+        "paygen": false,
+        "rootfs_verification": false,
+        "signer_tests": false,
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "beaglebone-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "beaglebone"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "beaglebone-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "beaglebone"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "beaglebone_servo-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "beaglebone_servo"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "beaglebone_servo-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "beaglebone_servo"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "beaglebone_servo-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "beaglebone_servo"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "beaglebone_servo-full": {
+        "_template": "full",
+        "boards": [
+            "beaglebone_servo"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "beaglebone_servo-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "beaglebone_servo"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "beaglebone_servo-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "beaglebone_servo"
+        ],
+        "chrome_sdk": false,
+        "description": "Commit Queue (internal)",
+        "image_test": false,
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "rootfs_verification": false,
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "beaglebone_servo-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "beaglebone_servo"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "beaglebone_servo-release": {
+        "_template": "release",
+        "afdo_use": false,
+        "boards": [
+            "beaglebone_servo"
+        ],
+        "chrome_sdk": false,
+        "dev_installer_prebuilts": false,
+        "hw_tests": [],
+        "image_test": false,
+        "images": [
+            "base",
+            "test"
+        ],
+        "paygen": false,
+        "payload_image": "base",
+        "rootfs_verification": false,
+        "signer_tests": false,
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "beaglebone_servo-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "beaglebone_servo"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "beaglebone_servo"
+                ],
+                "grouped": true,
+                "name": "beaglebone_servo-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "beaglebone_servo"
+                ],
+                "grouped": true,
+                "name": "beaglebone_servo-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "beaglebone_servo-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "beaglebone_servo"
+        ],
+        "vm_tests": []
+    },
+    "beaglebone_servo-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "beaglebone_servo"
+        ],
+        "vm_tests": []
+    },
+    "beaglebone_servo-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "beaglebone_servo"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "beaglebone_servo-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "beaglebone_servo"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "beltino-a-full-group": {
+        "_template": "full",
+        "boards": [
+            "panther"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "panther"
+                ],
+                "grouped": true,
+                "name": "panther-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "mccloud"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "mccloud-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: panther, mccloud)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "beltino-a-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "panther"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "panther"
+                ],
+                "grouped": true,
+                "name": "panther-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "mccloud"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "mccloud-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: panther, mccloud)",
+        "important": true
+    },
+    "beltino-b-full-group": {
+        "_template": "full",
+        "boards": [
+            "monroe"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "monroe"
+                ],
+                "grouped": true,
+                "name": "monroe-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "tricky"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "tricky-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "zako"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "zako-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: monroe, tricky, zako)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "beltino-b-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "monroe"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "monroe"
+                ],
+                "grouped": true,
+                "name": "monroe-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "tricky"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "tricky-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "zako"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "zako-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: monroe, tricky, zako)",
+        "important": true
+    },
+    "beltino-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "beltino"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "beltino-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "beltino"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "beltino-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "beltino"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "beltino-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "beltino"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "beltino-full": {
+        "_template": "full",
+        "boards": [
+            "beltino"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "beltino-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "beltino"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "beltino-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "beltino"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "beltino-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "beltino"
+        ]
+    },
+    "beltino-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "beltino"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "beltino-release": {
+        "_template": "release",
+        "boards": [
+            "beltino"
+        ],
+        "hw_tests": [],
+        "vm_tests": []
+    },
+    "beltino-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "beltino"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "beltino"
+                ],
+                "grouped": true,
+                "name": "beltino-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "beltino"
+                ],
+                "grouped": true,
+                "name": "beltino-release-afdo-use"
+            }
+        ]
+    },
+    "beltino-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "beltino"
+        ]
+    },
+    "beltino-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "beltino"
+        ]
+    },
+    "beltino-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "beltino"
+        ],
+        "manifest": "official.xml"
+    },
+    "beltino-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "beltino"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "binhost-pre-cq": {
+        "_template": "pre-cq",
+        "binhost_test": true,
+        "boards": [],
+        "description": "Verifies compilation, building an image, and unit tests if supported.",
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "vm_tests": []
+    },
+    "bobcat-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "bobcat"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "profile": "minimal"
+    },
+    "bobcat-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "bobcat"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "profile": "minimal",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "bobcat-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "bobcat"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "profile": "minimal",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "bobcat-full": {
+        "_template": "full",
+        "boards": [
+            "bobcat"
+        ],
+        "prebuilts": "public",
+        "profile": "minimal",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "bobcat-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "bobcat"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "profile": "minimal",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "bobcat-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "bobcat"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "profile": "minimal",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "bobcat-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "bobcat"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "profile": "minimal",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "bobcat-release": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "bobcat"
+        ],
+        "hw_tests": [],
+        "paygen": false,
+        "profile": "minimal",
+        "signer_tests": false
+    },
+    "bobcat-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "bobcat"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "bobcat"
+                ],
+                "grouped": true,
+                "name": "bobcat-release-afdo-generate",
+                "profile": "minimal"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "bobcat"
+                ],
+                "grouped": true,
+                "name": "bobcat-release-afdo-use",
+                "profile": "minimal"
+            }
+        ],
+        "profile": "minimal"
+    },
+    "bobcat-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "bobcat"
+        ],
+        "profile": "minimal"
+    },
+    "bobcat-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "bobcat"
+        ],
+        "profile": "minimal"
+    },
+    "bobcat-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "bobcat"
+        ],
+        "manifest": "official.xml",
+        "profile": "minimal"
+    },
+    "bobcat-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "bobcat"
+        ],
+        "profile": "minimal",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "branch-util": {
+        "boards": [],
+        "build_type": null,
+        "builder_class_name": "release_builders.CreateBranchBuilder",
+        "chrome_sdk": true,
+        "chrome_sdk_build_chrome": false,
+        "chroot_replace": false,
+        "description": "Used for creating/deleting branches (TPMs only)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-CQ",
+        "hw_tests_override": [],
+        "image_test": true,
+        "images": [
+            "base",
+            "test"
+        ],
+        "important": true,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "manifest_version": true,
+        "overlays": "both",
+        "postsync_patch": false,
+        "postsync_reexec": false,
+        "prebuilts": "private",
+        "trybot_list": true,
+        "upload_standalone_images": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "buddy-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "buddy"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "buddy-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "buddy"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "buddy-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "buddy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "buddy-full": {
+        "_template": "full",
+        "boards": [
+            "buddy"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "buddy-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "buddy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "buddy-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "buddy"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "buddy-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "buddy"
+        ]
+    },
+    "buddy-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "buddy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "buddy-release": {
+        "_template": "release",
+        "boards": [
+            "buddy"
+        ]
+    },
+    "buddy-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "buddy"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "buddy"
+                ],
+                "grouped": true,
+                "name": "buddy-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "buddy"
+                ],
+                "grouped": true,
+                "name": "buddy-release-afdo-use"
+            }
+        ]
+    },
+    "buddy-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "buddy"
+        ]
+    },
+    "buddy-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "buddy"
+        ]
+    },
+    "buddy-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "buddy"
+        ],
+        "manifest": "official.xml"
+    },
+    "buddy-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "buddy"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "butterfly-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "butterfly"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "butterfly-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "butterfly"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "butterfly-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "butterfly"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "butterfly-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "butterfly"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "butterfly-full": {
+        "_template": "full",
+        "boards": [
+            "butterfly"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "butterfly-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "butterfly"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "butterfly-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "butterfly"
+        ],
+        "chroot_replace": true,
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "butterfly-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "butterfly"
+        ]
+    },
+    "butterfly-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "butterfly"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "butterfly-release": {
+        "_template": "release",
+        "boards": [
+            "butterfly"
+        ]
+    },
+    "butterfly-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "butterfly"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "butterfly"
+                ],
+                "grouped": true,
+                "name": "butterfly-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "butterfly"
+                ],
+                "grouped": true,
+                "name": "butterfly-release-afdo-use"
+            }
+        ]
+    },
+    "butterfly-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "butterfly"
+        ]
+    },
+    "butterfly-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "butterfly"
+        ]
+    },
+    "butterfly-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "butterfly"
+        ],
+        "manifest": "official.xml"
+    },
+    "butterfly-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "butterfly"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "candy-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "candy"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "candy-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "candy"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "candy-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "candy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "candy-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "candy"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "candy-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "candy"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "candy-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "candy"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "candy-full": {
+        "_template": "full",
+        "boards": [
+            "candy"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "candy-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "candy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "candy-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "candy"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "candy-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "candy"
+        ]
+    },
+    "candy-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "candy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "candy-release": {
+        "_template": "release",
+        "boards": [
+            "candy"
+        ]
+    },
+    "candy-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "candy"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "candy"
+                ],
+                "grouped": true,
+                "name": "candy-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "candy"
+                ],
+                "grouped": true,
+                "name": "candy-release-afdo-use"
+            }
+        ]
+    },
+    "candy-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "candy"
+        ]
+    },
+    "candy-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "candy"
+        ]
+    },
+    "candy-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "candy"
+        ],
+        "manifest": "official.xml"
+    },
+    "candy-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "candy"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "celes-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "celes"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "celes-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "celes"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "celes-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "celes"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "celes-full": {
+        "_template": "full",
+        "boards": [
+            "celes"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "celes-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "celes"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "celes-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "celes"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "celes-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "celes"
+        ]
+    },
+    "celes-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "celes"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "celes-release": {
+        "_template": "release",
+        "boards": [
+            "celes"
+        ]
+    },
+    "celes-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "celes"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "celes"
+                ],
+                "grouped": true,
+                "name": "celes-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "celes"
+                ],
+                "grouped": true,
+                "name": "celes-release-afdo-use"
+            }
+        ]
+    },
+    "celes-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "celes"
+        ]
+    },
+    "celes-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "celes"
+        ]
+    },
+    "celes-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "celes"
+        ],
+        "manifest": "official.xml"
+    },
+    "celes-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "celes"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "chromiumos-sdk": {
+        "active_waterfall": "chromiumos",
+        "archive_build_debug": true,
+        "boards": [
+            "x86-generic",
+            "arm-generic",
+            "amd64-generic"
+        ],
+        "build_type": "chroot",
+        "builder_class_name": "sdk_builders.ChrootSdkBuilder",
+        "chrome_sdk": true,
+        "description": "Build the SDK and all the cross-compilers",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Continuous",
+        "git_sync": true,
+        "hw_tests_override": [],
+        "image_test": true,
+        "images": [
+            "base",
+            "recovery",
+            "test",
+            "factory_install"
+        ],
+        "prebuilts": "public",
+        "trybot_list": true,
+        "use_sdk": false,
+        "usepkg_build_packages": false
+    },
+    "cid-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "cid"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "cid-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "cid"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "cid-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "cid"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "cid-full": {
+        "_template": "full",
+        "boards": [
+            "cid"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "cid-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "cid"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "cid-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "cid"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "cid-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "cid"
+        ]
+    },
+    "cid-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "cid"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "cid-release": {
+        "_template": "release",
+        "boards": [
+            "cid"
+        ]
+    },
+    "cid-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "cid"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "cid"
+                ],
+                "grouped": true,
+                "name": "cid-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "cid"
+                ],
+                "grouped": true,
+                "name": "cid-release-afdo-use"
+            }
+        ]
+    },
+    "cid-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "cid"
+        ]
+    },
+    "cid-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "cid"
+        ]
+    },
+    "cid-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "cid"
+        ],
+        "manifest": "official.xml"
+    },
+    "cid-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "cid"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "clapper-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "clapper"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "clapper-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "clapper"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "clapper-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "clapper"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "clapper-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "clapper"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "clapper-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "clapper"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "clapper-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "clapper"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "clapper-full": {
+        "_template": "full",
+        "boards": [
+            "clapper"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "clapper-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "clapper"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "clapper-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "clapper"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "clapper-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "clapper"
+        ]
+    },
+    "clapper-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "clapper"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "clapper-release": {
+        "_template": "release",
+        "boards": [
+            "clapper"
+        ]
+    },
+    "clapper-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "clapper"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "clapper"
+                ],
+                "grouped": true,
+                "name": "clapper-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "clapper"
+                ],
+                "grouped": true,
+                "name": "clapper-release-afdo-use"
+            }
+        ]
+    },
+    "clapper-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "clapper"
+        ]
+    },
+    "clapper-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "clapper"
+        ]
+    },
+    "clapper-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "clapper"
+        ],
+        "manifest": "official.xml"
+    },
+    "clapper-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "clapper"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "cranky-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "cranky"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "cranky-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "cranky"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "cranky-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "cranky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "cranky-full": {
+        "_template": "full",
+        "boards": [
+            "cranky"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "cranky-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "cranky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "cranky-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "cranky"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "cranky-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "cranky"
+        ]
+    },
+    "cranky-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "cranky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "cranky-release": {
+        "_template": "release",
+        "boards": [
+            "cranky"
+        ]
+    },
+    "cranky-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "cranky"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "cranky"
+                ],
+                "grouped": true,
+                "name": "cranky-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "cranky"
+                ],
+                "grouped": true,
+                "name": "cranky-release-afdo-use"
+            }
+        ]
+    },
+    "cranky-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "cranky"
+        ]
+    },
+    "cranky-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "cranky"
+        ]
+    },
+    "cranky-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "cranky"
+        ],
+        "manifest": "official.xml"
+    },
+    "cranky-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "cranky"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "cyan-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "cyan"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "cyan-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "cyan"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "cyan-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "cyan"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "cyan-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "cyan"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "cyan-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "cyan"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "cyan-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "cyan"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "cyan-full": {
+        "_template": "full",
+        "boards": [
+            "cyan"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "cyan-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "cyan"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "cyan-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "cyan"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "cyan-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "cyan"
+        ]
+    },
+    "cyan-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "cyan"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "cyan-release": {
+        "_template": "release",
+        "boards": [
+            "cyan"
+        ]
+    },
+    "cyan-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "cyan"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "cyan"
+                ],
+                "grouped": true,
+                "name": "cyan-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "cyan"
+                ],
+                "grouped": true,
+                "name": "cyan-release-afdo-use"
+            }
+        ]
+    },
+    "cyan-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "cyan"
+        ]
+    },
+    "cyan-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "cyan"
+        ]
+    },
+    "cyan-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "cyan"
+        ],
+        "manifest": "official.xml"
+    },
+    "cyan-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "cyan"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "daisy-chrome-perf": {
+        "_template": "chrome-perf",
+        "boards": [
+            "daisy"
+        ],
+        "manifest": "official.xml",
+        "trybot_list": true
+    },
+    "daisy-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "daisy"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "daisy-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "daisy"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "daisy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "daisy-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "daisy"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "daisy-full": {
+        "_template": "full",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "daisy"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy-full-group": {
+        "_template": "full",
+        "boards": [
+            "daisy"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "daisy"
+                ],
+                "grouped": true,
+                "name": "daisy-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "daisy_spring"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "daisy_spring-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "daisy_skate"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "daisy_skate-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: daisy, daisy_spring, daisy_skate)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy-incremental": {
+        "_template": "incremental",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "daisy"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "daisy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "daisy-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "daisy"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "daisy-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "daisy"
+        ]
+    },
+    "daisy-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "daisy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy-release": {
+        "_template": "release",
+        "boards": [
+            "daisy"
+        ],
+        "critical_for_chrome": true,
+        "vm_tests": []
+    },
+    "daisy-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "daisy"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "daisy"
+                ],
+                "grouped": true,
+                "name": "daisy-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "daisy"
+                ],
+                "grouped": true,
+                "name": "daisy-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "daisy-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "daisy"
+        ],
+        "vm_tests": []
+    },
+    "daisy-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "daisy"
+        ],
+        "vm_tests": []
+    },
+    "daisy-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "daisy"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "daisy"
+                ],
+                "critical_for_chrome": true,
+                "grouped": true,
+                "name": "daisy-release",
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "daisy_spring"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "daisy_spring-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "daisy_skate"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "daisy_skate-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "critical_for_chrome": true,
+        "description": "Release Builds (canary) (internal); Group config (boards: daisy, daisy_spring, daisy_skate)",
+        "important": true,
+        "vm_tests": []
+    },
+    "daisy-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "daisy"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "daisy-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "daisy"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy-wificell-pre-cq": {
+        "_template": "wificell-pre-cq",
+        "boards": [
+            "daisy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy_skate-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "daisy_skate"
+        ],
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 1,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": false,\n    \"suite\": \"sanity\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": false,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 3,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 3,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "daisy_skate-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "daisy_skate"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy_skate-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "daisy_skate"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "daisy_skate-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "daisy_skate"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "daisy_skate-full": {
+        "_template": "full",
+        "boards": [
+            "daisy_skate"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy_skate-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "daisy_skate"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "daisy_skate-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "daisy_skate"
+        ],
+        "description": "Commit Queue (internal)",
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": true,\n    \"pool\": \"cq\",\n    \"priority\": \"CQ\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "daisy_skate-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "daisy_skate"
+        ]
+    },
+    "daisy_skate-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "daisy_skate"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy_skate-release": {
+        "_template": "release",
+        "boards": [
+            "daisy_skate"
+        ],
+        "vm_tests": []
+    },
+    "daisy_skate-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "daisy_skate"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "daisy_skate"
+                ],
+                "grouped": true,
+                "name": "daisy_skate-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "daisy_skate"
+                ],
+                "grouped": true,
+                "name": "daisy_skate-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "daisy_skate-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "daisy_skate"
+        ],
+        "vm_tests": []
+    },
+    "daisy_skate-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "daisy_skate"
+        ],
+        "vm_tests": []
+    },
+    "daisy_skate-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "daisy_skate"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "daisy_skate-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "daisy_skate"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy_spring-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "daisy_spring"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "daisy_spring-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "daisy_spring"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy_spring-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "daisy_spring"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "daisy_spring-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "daisy_spring"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "daisy_spring-full": {
+        "_template": "full",
+        "boards": [
+            "daisy_spring"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy_spring-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "daisy_spring"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "daisy_spring-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "daisy_spring"
+        ],
+        "chroot_replace": true,
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "daisy_spring-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "daisy_spring"
+        ]
+    },
+    "daisy_spring-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "daisy_spring"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy_spring-release": {
+        "_template": "release",
+        "boards": [
+            "daisy_spring"
+        ],
+        "vm_tests": []
+    },
+    "daisy_spring-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "daisy_spring"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "daisy_spring"
+                ],
+                "grouped": true,
+                "name": "daisy_spring-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "daisy_spring"
+                ],
+                "grouped": true,
+                "name": "daisy_spring-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "daisy_spring-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "daisy_spring"
+        ],
+        "vm_tests": []
+    },
+    "daisy_spring-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "daisy_spring"
+        ],
+        "vm_tests": []
+    },
+    "daisy_spring-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "daisy_spring"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "daisy_spring-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "daisy_spring"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy_winter-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "daisy_winter"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "daisy_winter-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "daisy_winter"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy_winter-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "daisy_winter"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "daisy_winter-full": {
+        "_template": "full",
+        "boards": [
+            "daisy_winter"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "images": [
+            "base",
+            "recovery",
+            "test"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy_winter-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "daisy_winter"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "daisy_winter-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "daisy_winter"
+        ],
+        "description": "Commit Queue (internal)",
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "daisy_winter-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "daisy_winter"
+        ]
+    },
+    "daisy_winter-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "daisy_winter"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "daisy_winter-release": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "daisy_winter"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "images": [
+            "base",
+            "recovery",
+            "test"
+        ],
+        "vm_tests": []
+    },
+    "daisy_winter-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "daisy_winter"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "daisy_winter"
+                ],
+                "factory": false,
+                "factory_install_netboot": false,
+                "factory_toolkit": false,
+                "grouped": true,
+                "name": "daisy_winter-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "daisy_winter"
+                ],
+                "factory": false,
+                "factory_install_netboot": false,
+                "factory_toolkit": false,
+                "grouped": true,
+                "name": "daisy_winter-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "vm_tests": []
+    },
+    "daisy_winter-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "daisy_winter"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "vm_tests": []
+    },
+    "daisy_winter-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "daisy_winter"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "vm_tests": []
+    },
+    "daisy_winter-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "daisy_winter"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "daisy_winter-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "daisy_winter"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "enguarde-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "enguarde"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "enguarde-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "enguarde"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "enguarde-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "enguarde"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "enguarde-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "enguarde"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "enguarde-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "enguarde"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "enguarde-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "enguarde"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "enguarde-full": {
+        "_template": "full",
+        "boards": [
+            "enguarde"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "enguarde-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "enguarde"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "enguarde-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "enguarde"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "enguarde-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "enguarde"
+        ]
+    },
+    "enguarde-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "enguarde"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "enguarde-release": {
+        "_template": "release",
+        "boards": [
+            "enguarde"
+        ]
+    },
+    "enguarde-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "enguarde"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "enguarde"
+                ],
+                "grouped": true,
+                "name": "enguarde-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "enguarde"
+                ],
+                "grouped": true,
+                "name": "enguarde-release-afdo-use"
+            }
+        ]
+    },
+    "enguarde-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "enguarde"
+        ]
+    },
+    "enguarde-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "enguarde"
+        ]
+    },
+    "enguarde-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "enguarde"
+        ],
+        "manifest": "official.xml"
+    },
+    "enguarde-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "enguarde"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "expresso-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "expresso"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "expresso-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "expresso"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "expresso-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "expresso"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "expresso-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "expresso"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "expresso-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "expresso"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "expresso-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "expresso"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "expresso-full": {
+        "_template": "full",
+        "boards": [
+            "expresso"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "expresso-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "expresso"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "expresso-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "expresso"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "expresso-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "expresso"
+        ]
+    },
+    "expresso-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "expresso"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "expresso-release": {
+        "_template": "release",
+        "boards": [
+            "expresso"
+        ]
+    },
+    "expresso-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "expresso"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "expresso"
+                ],
+                "grouped": true,
+                "name": "expresso-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "expresso"
+                ],
+                "grouped": true,
+                "name": "expresso-release-afdo-use"
+            }
+        ]
+    },
+    "expresso-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "expresso"
+        ]
+    },
+    "expresso-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "expresso"
+        ]
+    },
+    "expresso-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "expresso"
+        ],
+        "manifest": "official.xml"
+    },
+    "expresso-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "expresso"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "external-mixed-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "x86-generic"
+        ],
+        "child_configs": [
+            {
+                "_template": "no-vmtest-pre-cq",
+                "boards": [
+                    "x86-generic"
+                ],
+                "grouped": true,
+                "name": "x86-generic-no-vmtest-pre-cq"
+            },
+            {
+                "_template": "no-vmtest-pre-cq",
+                "boards": [
+                    "amd64-generic"
+                ],
+                "grouped": true,
+                "name": "amd64-generic-no-vmtest-pre-cq"
+            }
+        ]
+    },
+    "falco-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "falco"
+        ],
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 1,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": false,\n    \"suite\": \"sanity\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": false,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 3,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 3,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+        ],
+        "manifest": "official.xml"
+    },
+    "falco-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "falco"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "falco-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "falco"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "falco-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "falco"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "falco-full": {
+        "_template": "full",
+        "boards": [
+            "falco"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "falco-full-compile-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "board_replace": true,
+        "boards": [
+            "falco"
+        ],
+        "chrome_binhost_only": true,
+        "chrome_sdk": false,
+        "cpe_export": false,
+        "debug_symbols": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": false,
+        "unittests": false,
+        "upload_hw_test_artifacts": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "falco-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "falco"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "falco-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "falco"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "falco-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "falco"
+        ]
+    },
+    "falco-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "falco"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "falco-release": {
+        "_template": "release",
+        "boards": [
+            "falco"
+        ]
+    },
+    "falco-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "falco"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "falco"
+                ],
+                "grouped": true,
+                "name": "falco-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "falco"
+                ],
+                "grouped": true,
+                "name": "falco-release-afdo-use"
+            }
+        ]
+    },
+    "falco-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "falco"
+        ]
+    },
+    "falco-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "falco"
+        ]
+    },
+    "falco-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "falco"
+        ],
+        "manifest": "official.xml"
+    },
+    "falco-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "falco"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "falco_li-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "falco_li"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "falco_li-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "falco_li"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "falco_li-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "falco_li"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "falco_li-full": {
+        "_template": "full",
+        "boards": [
+            "falco_li"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "falco_li-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "falco_li"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "falco_li-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "falco_li"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "falco_li-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "falco_li"
+        ]
+    },
+    "falco_li-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "falco_li"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "falco_li-release": {
+        "_template": "release",
+        "boards": [
+            "falco_li"
+        ]
+    },
+    "falco_li-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "falco_li"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "falco_li"
+                ],
+                "grouped": true,
+                "name": "falco_li-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "falco_li"
+                ],
+                "grouped": true,
+                "name": "falco_li-release-afdo-use"
+            }
+        ]
+    },
+    "falco_li-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "falco_li"
+        ]
+    },
+    "falco_li-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "falco_li"
+        ]
+    },
+    "falco_li-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "falco_li"
+        ],
+        "manifest": "official.xml"
+    },
+    "falco_li-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "falco_li"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "gandof-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "gandof"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "gandof-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "gandof"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "gandof-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "gandof"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "gandof-full": {
+        "_template": "full",
+        "boards": [
+            "gandof"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "gandof-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "gandof"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "gandof-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "gandof"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "gandof-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "gandof"
+        ]
+    },
+    "gandof-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "gandof"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "gandof-release": {
+        "_template": "release",
+        "boards": [
+            "gandof"
+        ]
+    },
+    "gandof-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "gandof"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "gandof"
+                ],
+                "grouped": true,
+                "name": "gandof-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "gandof"
+                ],
+                "grouped": true,
+                "name": "gandof-release-afdo-use"
+            }
+        ]
+    },
+    "gandof-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "gandof"
+        ]
+    },
+    "gandof-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "gandof"
+        ]
+    },
+    "gandof-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "gandof"
+        ],
+        "manifest": "official.xml"
+    },
+    "gandof-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "gandof"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "gizmo-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "gizmo"
+        ],
+        "chrome_sdk": false,
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "gizmo-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "gizmo"
+        ],
+        "sync_chrome": false
+    },
+    "gizmo-full": {
+        "_template": "full",
+        "boards": [
+            "gizmo"
+        ],
+        "chrome_sdk": false,
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "gizmo-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "gizmo"
+        ],
+        "sync_chrome": false
+    },
+    "gizmo-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "gizmo"
+        ],
+        "chrome_sdk": false,
+        "sync_chrome": false,
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "gizmo-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "gizmo"
+        ],
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "gizmo-release": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "afdo_use": false,
+        "boards": [
+            "gizmo"
+        ],
+        "chrome_sdk": false,
+        "dev_installer_prebuilts": false,
+        "hw_tests": [],
+        "important": true,
+        "paygen": false,
+        "signer_tests": false,
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "gizmo-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "gizmo"
+        ],
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "glados-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "glados"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "glados-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "glados"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "glados-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "glados"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "glados-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "glados"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "glados-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "glados"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "glados-full": {
+        "_template": "full",
+        "boards": [
+            "glados"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "glados-full-group": {
+        "_template": "full",
+        "boards": [
+            "glados"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "glados"
+                ],
+                "grouped": true,
+                "name": "glados-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            }
+        ],
+        "description": "Full Builds; Group config (boards: glados)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "glados-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "glados"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "glados-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "glados"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "glados-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "glados"
+        ]
+    },
+    "glados-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "glados"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "glados-release": {
+        "_template": "release",
+        "boards": [
+            "glados"
+        ]
+    },
+    "glados-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "glados"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "glados"
+                ],
+                "grouped": true,
+                "name": "glados-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "glados"
+                ],
+                "grouped": true,
+                "name": "glados-release-afdo-use"
+            }
+        ]
+    },
+    "glados-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "glados"
+        ]
+    },
+    "glados-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "glados"
+        ]
+    },
+    "glados-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "glados"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "glados"
+                ],
+                "grouped": true,
+                "name": "glados-release"
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: glados)",
+        "important": true
+    },
+    "glados-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "glados"
+        ],
+        "manifest": "official.xml"
+    },
+    "glados-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "glados"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "glimmer-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "glimmer"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "glimmer-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "glimmer"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "glimmer-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "glimmer"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "glimmer-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "glimmer"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "glimmer-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "glimmer"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "glimmer-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "glimmer"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "glimmer-full": {
+        "_template": "full",
+        "boards": [
+            "glimmer"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "glimmer-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "glimmer"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "glimmer-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "glimmer"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "glimmer-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "glimmer"
+        ]
+    },
+    "glimmer-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "glimmer"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "glimmer-release": {
+        "_template": "release",
+        "boards": [
+            "glimmer"
+        ]
+    },
+    "glimmer-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "glimmer"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "glimmer"
+                ],
+                "grouped": true,
+                "name": "glimmer-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "glimmer"
+                ],
+                "grouped": true,
+                "name": "glimmer-release-afdo-use"
+            }
+        ]
+    },
+    "glimmer-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "glimmer"
+        ]
+    },
+    "glimmer-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "glimmer"
+        ]
+    },
+    "glimmer-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "glimmer"
+        ],
+        "manifest": "official.xml"
+    },
+    "glimmer-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "glimmer"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "gnawty-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "gnawty"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "gnawty-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "gnawty"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "gnawty-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "gnawty"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "gnawty-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "gnawty"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "gnawty-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "gnawty"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "gnawty-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "gnawty"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "gnawty-full": {
+        "_template": "full",
+        "boards": [
+            "gnawty"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "gnawty-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "gnawty"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "gnawty-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "gnawty"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "gnawty-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "gnawty"
+        ]
+    },
+    "gnawty-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "gnawty"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "gnawty-release": {
+        "_template": "release",
+        "boards": [
+            "gnawty"
+        ]
+    },
+    "gnawty-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "gnawty"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "gnawty"
+                ],
+                "grouped": true,
+                "name": "gnawty-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "gnawty"
+                ],
+                "grouped": true,
+                "name": "gnawty-release-afdo-use"
+            }
+        ]
+    },
+    "gnawty-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "gnawty"
+        ]
+    },
+    "gnawty-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "gnawty"
+        ]
+    },
+    "gnawty-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "gnawty"
+        ],
+        "manifest": "official.xml"
+    },
+    "gnawty-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "gnawty"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "guado-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "guado"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "guado-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "guado"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "guado-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "guado"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "guado-full": {
+        "_template": "full",
+        "boards": [
+            "guado"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "guado-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "guado"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "guado-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "guado"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "guado-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "guado"
+        ]
+    },
+    "guado-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "guado"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "guado-release": {
+        "_template": "release",
+        "boards": [
+            "guado"
+        ]
+    },
+    "guado-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "guado"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "guado"
+                ],
+                "grouped": true,
+                "name": "guado-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "guado"
+                ],
+                "grouped": true,
+                "name": "guado-release-afdo-use"
+            }
+        ]
+    },
+    "guado-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "guado"
+        ]
+    },
+    "guado-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "guado"
+        ]
+    },
+    "guado-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "guado"
+        ],
+        "manifest": "official.xml"
+    },
+    "guado-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "guado"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "guado_moblab-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "guado_moblab"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "guado_moblab-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "guado_moblab"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "guado_moblab-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "guado_moblab"
+        ],
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "guado_moblab-full": {
+        "_template": "full",
+        "boards": [
+            "guado_moblab"
+        ],
+        "image_test": false,
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "guado_moblab-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "guado_moblab"
+        ],
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "guado_moblab-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "guado_moblab"
+        ],
+        "description": "Commit Queue (internal)",
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"cq\",\n    \"priority\": \"Build\",\n    \"retry\": true,\n    \"suite\": \"moblab_quick\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}"
+        ],
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "guado_moblab-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "guado_moblab"
+        ]
+    },
+    "guado_moblab-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "guado_moblab"
+        ],
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "guado_moblab-release": {
+        "_template": "moblab-release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "guado_moblab"
+        ],
+        "image_test": false,
+        "vm_tests": []
+    },
+    "guado_moblab-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "guado_moblab"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "guado_moblab"
+                ],
+                "grouped": true,
+                "image_test": false,
+                "name": "guado_moblab-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "guado_moblab"
+                ],
+                "grouped": true,
+                "image_test": false,
+                "name": "guado_moblab-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "image_test": false,
+        "vm_tests": []
+    },
+    "guado_moblab-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "guado_moblab"
+        ],
+        "image_test": false,
+        "vm_tests": []
+    },
+    "guado_moblab-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "guado_moblab"
+        ],
+        "image_test": false,
+        "vm_tests": []
+    },
+    "guado_moblab-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "guado_moblab"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "guado_moblab-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "guado_moblab"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "heli-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "heli"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "heli-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "heli"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "heli-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "heli"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "heli-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "heli"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "heli-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "heli"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "heli-full": {
+        "_template": "full",
+        "boards": [
+            "heli"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "heli-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "heli"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "heli-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "heli"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "heli-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "heli"
+        ]
+    },
+    "heli-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "heli"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "heli-release": {
+        "_template": "release",
+        "boards": [
+            "heli"
+        ]
+    },
+    "heli-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "heli"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "heli"
+                ],
+                "grouped": true,
+                "name": "heli-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "heli"
+                ],
+                "grouped": true,
+                "name": "heli-release-afdo-use"
+            }
+        ]
+    },
+    "heli-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "heli"
+        ]
+    },
+    "heli-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "heli"
+        ]
+    },
+    "heli-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "heli"
+        ],
+        "manifest": "official.xml"
+    },
+    "heli-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "heli"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "internal-toolchain-major": {
+        "active_waterfall": "chromeos",
+        "archive_build_debug": true,
+        "boards": [
+            "x86-alex",
+            "stumpy",
+            "daisy",
+            "lakitu"
+        ],
+        "build_type": "chroot",
+        "builder_class_name": "sdk_builders.ChrootSdkBuilder",
+        "chrome_sdk": true,
+        "chromeos_official": true,
+        "description": "Test next major toolchain revision (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Continuous",
+        "gcc_githash": "svn-mirror/google/main",
+        "git_sync": true,
+        "hw_tests_override": [],
+        "image_test": true,
+        "images": [
+            "base",
+            "recovery",
+            "test",
+            "factory_install"
+        ],
+        "internal": true,
+        "latest_toolchain": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "use_sdk": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "usepkg_build_packages": false
+    },
+    "internal-toolchain-minor": {
+        "active_waterfall": "chromeos",
+        "archive_build_debug": true,
+        "boards": [
+            "x86-alex",
+            "stumpy",
+            "daisy",
+            "lakitu"
+        ],
+        "build_type": "chroot",
+        "builder_class_name": "sdk_builders.ChrootSdkBuilder",
+        "chrome_sdk": true,
+        "chromeos_official": true,
+        "description": "Test next minor toolchain revision (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Continuous",
+        "gcc_githash": "svn-mirror/google/gcc-4_9",
+        "git_sync": true,
+        "hw_tests_override": [],
+        "image_test": true,
+        "images": [
+            "base",
+            "recovery",
+            "test",
+            "factory_install"
+        ],
+        "internal": true,
+        "latest_toolchain": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "use_sdk": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "usepkg_build_packages": false
+    },
+    "ivybridge-full-group": {
+        "_template": "full",
+        "boards": [
+            "stout"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "stout"
+                ],
+                "grouped": true,
+                "name": "stout-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "link"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "link-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "parrot_ivb"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "parrot_ivb-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: stout, link, parrot_ivb)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "ivybridge-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "stout"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "stout"
+                ],
+                "grouped": true,
+                "name": "stout-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "link"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "link-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "parrot_ivb"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "parrot_ivb-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: stout, link, parrot_ivb)",
+        "important": true
+    },
+    "jecht-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "jecht"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "jecht-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "jecht"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "jecht-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "jecht"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "jecht-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "jecht"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "jecht-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "jecht"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "jecht-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "jecht"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "jecht-full": {
+        "_template": "full",
+        "boards": [
+            "jecht"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "jecht-full-group": {
+        "_template": "full",
+        "boards": [
+            "jecht"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "jecht"
+                ],
+                "grouped": true,
+                "name": "jecht-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "guado"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "guado-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "tidus"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "tidus-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "rikku"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "rikku-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: jecht, guado, tidus, rikku)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "jecht-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "jecht"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "jecht-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "jecht"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "jecht-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "jecht"
+        ]
+    },
+    "jecht-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "jecht"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "jecht-release": {
+        "_template": "release",
+        "boards": [
+            "jecht"
+        ]
+    },
+    "jecht-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "jecht"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "jecht"
+                ],
+                "grouped": true,
+                "name": "jecht-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "jecht"
+                ],
+                "grouped": true,
+                "name": "jecht-release-afdo-use"
+            }
+        ]
+    },
+    "jecht-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "jecht"
+        ]
+    },
+    "jecht-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "jecht"
+        ]
+    },
+    "jecht-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "jecht"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "jecht"
+                ],
+                "grouped": true,
+                "name": "jecht-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "guado"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "guado-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "tidus"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "tidus-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "rikku"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "rikku-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: jecht, guado, tidus, rikku)",
+        "important": true
+    },
+    "jecht-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "jecht"
+        ],
+        "manifest": "official.xml"
+    },
+    "jecht-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "jecht"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "kayle-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "kayle"
+        ],
+        "chrome_sdk": false,
+        "dev_manifest": "kayle.xml",
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "images": [
+            "base"
+        ],
+        "important": false,
+        "manifest": "kayle.xml",
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "kayle-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "kayle"
+        ],
+        "chrome_sdk": false,
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "dev_manifest": "kayle.xml",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "images": [
+            "base"
+        ],
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "kayle-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "kayle"
+        ],
+        "dev_manifest": "kayle.xml",
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "images": [
+            "base"
+        ],
+        "internal": true,
+        "manifest": "kayle.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "kayle-full": {
+        "_template": "full",
+        "boards": [
+            "kayle"
+        ],
+        "chrome_sdk": false,
+        "dev_manifest": "kayle.xml",
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "images": [
+            "base"
+        ],
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "kayle-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "kayle"
+        ],
+        "dev_manifest": "kayle.xml",
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "images": [
+            "base"
+        ],
+        "internal": true,
+        "manifest": "kayle.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "kayle-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "kayle"
+        ],
+        "chrome_sdk": false,
+        "description": "Commit Queue (internal)",
+        "dev_manifest": "kayle.xml",
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "images": [
+            "base"
+        ],
+        "internal": true,
+        "manifest": "kayle.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "kayle-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "kayle"
+        ],
+        "dev_manifest": "kayle.xml",
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "images": [
+            "base"
+        ],
+        "internal": true,
+        "manifest": "kayle.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "kayle-release": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "afdo_use": false,
+        "boards": [
+            "kayle"
+        ],
+        "chrome_sdk": false,
+        "dev_installer_prebuilts": false,
+        "dev_manifest": "kayle.xml",
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "hw_tests": [],
+        "hwqual": false,
+        "images": [
+            "base"
+        ],
+        "manifest": "kayle.xml",
+        "paygen": false,
+        "signer_tests": false,
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "kayle-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "kayle"
+        ],
+        "dev_manifest": "kayle.xml",
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "images": [
+            "base"
+        ],
+        "manifest": "kayle.xml",
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "kayle-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "kayle"
+        ],
+        "dev_manifest": "kayle.xml",
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "images": [
+            "base"
+        ],
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "kernel-3_14-a-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "x86-generic"
+        ],
+        "child_configs": [
+            {
+                "_template": "no-vmtest-pre-cq",
+                "boards": [
+                    "x86-generic"
+                ],
+                "grouped": true,
+                "name": "x86-generic-no-vmtest-pre-cq"
+            },
+            {
+                "_template": "no-vmtest-pre-cq",
+                "boards": [
+                    "arm-generic"
+                ],
+                "grouped": true,
+                "name": "arm-generic-no-vmtest-pre-cq"
+            }
+        ]
+    },
+    "kernel-3_14-b-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "storm"
+        ],
+        "child_configs": [
+            {
+                "_template": "no-vmtest-pre-cq",
+                "boards": [
+                    "storm"
+                ],
+                "grouped": true,
+                "internal": true,
+                "manifest": "official.xml",
+                "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+                "name": "storm-no-vmtest-pre-cq",
+                "overlays": "both",
+                "sync_chrome": false,
+                "useflags": [
+                    "chrome_internal"
+                ]
+            }
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "kernel-3_14-c-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "child_configs": [
+            {
+                "_template": "no-vmtest-pre-cq",
+                "boards": [
+                    "veyron_pinky"
+                ],
+                "grouped": true,
+                "internal": true,
+                "manifest": "official.xml",
+                "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+                "name": "veyron_pinky-no-vmtest-pre-cq",
+                "overlays": "both",
+                "useflags": [
+                    "chrome_internal"
+                ]
+            },
+            {
+                "_template": "no-vmtest-pre-cq",
+                "boards": [
+                    "rush_ryu"
+                ],
+                "grouped": true,
+                "internal": true,
+                "manifest": "official.xml",
+                "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+                "name": "rush_ryu-no-vmtest-pre-cq",
+                "overlays": "both",
+                "useflags": [
+                    "chrome_internal"
+                ]
+            }
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "kip-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "kip"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "kip-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "kip"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "kip-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "kip"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "kip-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "kip"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "kip-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "kip"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "kip-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "kip"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "kip-full": {
+        "_template": "full",
+        "boards": [
+            "kip"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "kip-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "kip"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "kip-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "kip"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "kip-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "kip"
+        ]
+    },
+    "kip-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "kip"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "kip-release": {
+        "_template": "release",
+        "boards": [
+            "kip"
+        ]
+    },
+    "kip-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "kip"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "kip"
+                ],
+                "grouped": true,
+                "name": "kip-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "kip"
+                ],
+                "grouped": true,
+                "name": "kip-release-afdo-use"
+            }
+        ]
+    },
+    "kip-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "kip"
+        ]
+    },
+    "kip-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "kip"
+        ]
+    },
+    "kip-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "kip"
+        ],
+        "manifest": "official.xml"
+    },
+    "kip-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "kip"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "kunimitsu-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "kunimitsu"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "kunimitsu-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "kunimitsu"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "kunimitsu-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "kunimitsu"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "kunimitsu-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "kunimitsu"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "kunimitsu-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "kunimitsu"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "kunimitsu-full": {
+        "_template": "full",
+        "boards": [
+            "kunimitsu"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "kunimitsu-full-group": {
+        "_template": "full",
+        "boards": [
+            "kunimitsu"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "kunimitsu"
+                ],
+                "grouped": true,
+                "name": "kunimitsu-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            }
+        ],
+        "description": "Full Builds; Group config (boards: kunimitsu)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "kunimitsu-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "kunimitsu"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "kunimitsu-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "kunimitsu"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "kunimitsu-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "kunimitsu"
+        ]
+    },
+    "kunimitsu-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "kunimitsu"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "kunimitsu-release": {
+        "_template": "release",
+        "boards": [
+            "kunimitsu"
+        ]
+    },
+    "kunimitsu-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "kunimitsu"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "kunimitsu"
+                ],
+                "grouped": true,
+                "name": "kunimitsu-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "kunimitsu"
+                ],
+                "grouped": true,
+                "name": "kunimitsu-release-afdo-use"
+            }
+        ]
+    },
+    "kunimitsu-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "kunimitsu"
+        ]
+    },
+    "kunimitsu-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "kunimitsu"
+        ]
+    },
+    "kunimitsu-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "kunimitsu"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "kunimitsu"
+                ],
+                "grouped": true,
+                "name": "kunimitsu-release"
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: kunimitsu)",
+        "important": true
+    },
+    "kunimitsu-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "kunimitsu"
+        ],
+        "manifest": "official.xml"
+    },
+    "kunimitsu-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "kunimitsu"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "lakitu-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "lakitu"
+        ],
+        "chrome_sdk": false,
+        "disk_layout": "base",
+        "important": false,
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "vm_tests": []
+    },
+    "lakitu-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "lakitu"
+        ],
+        "chrome_sdk": false,
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "disk_layout": "base",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "lakitu-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "lakitu"
+        ],
+        "disk_layout": "base",
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lakitu-full": {
+        "_template": "full",
+        "boards": [
+            "lakitu"
+        ],
+        "chrome_sdk": false,
+        "disk_layout": "base",
+        "image_test": false,
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "lakitu-incremental": {
+        "_template": "incremental",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "lakitu"
+        ],
+        "description": "Incremental Builds (internal)",
+        "disk_layout": "base",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [
+            "smoke_suite"
+        ]
+    },
+    "lakitu-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "lakitu"
+        ],
+        "disk_layout": "base",
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lakitu-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "lakitu"
+        ],
+        "chrome_sdk": false,
+        "description": "Commit Queue (internal)",
+        "disk_layout": "base",
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "lakitu-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "lakitu"
+        ]
+    },
+    "lakitu-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "lakitu"
+        ],
+        "disk_layout": "base",
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lakitu-release": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "afdo_use": false,
+        "boards": [
+            "lakitu"
+        ],
+        "chrome_sdk": false,
+        "dev_installer_prebuilts": false,
+        "disk_layout": "base",
+        "hw_tests": [],
+        "hwqual": false,
+        "image_test": false,
+        "important": true,
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "vm_tests": [
+            "smoke_suite"
+        ]
+    },
+    "lakitu-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "lakitu"
+        ],
+        "disk_layout": "base",
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "vm_tests": []
+    },
+    "lakitu-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "lakitu"
+        ],
+        "disk_layout": "base",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "lakitu_mobbuild-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "lakitu_mobbuild"
+        ],
+        "chrome_sdk": false,
+        "disk_layout": "base",
+        "important": false,
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "vm_tests": []
+    },
+    "lakitu_mobbuild-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "lakitu_mobbuild"
+        ],
+        "chrome_sdk": false,
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "disk_layout": "base",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "lakitu_mobbuild-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "lakitu_mobbuild"
+        ],
+        "disk_layout": "base",
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lakitu_mobbuild-full": {
+        "_template": "full",
+        "boards": [
+            "lakitu_mobbuild"
+        ],
+        "chrome_sdk": false,
+        "disk_layout": "base",
+        "image_test": false,
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "lakitu_mobbuild-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "lakitu_mobbuild"
+        ],
+        "disk_layout": "base",
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lakitu_mobbuild-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "lakitu_mobbuild"
+        ],
+        "chrome_sdk": false,
+        "description": "Commit Queue (internal)",
+        "disk_layout": "base",
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "lakitu_mobbuild-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "lakitu_mobbuild"
+        ]
+    },
+    "lakitu_mobbuild-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "lakitu_mobbuild"
+        ],
+        "disk_layout": "base",
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "lakitu_mobbuild-release": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "afdo_use": false,
+        "boards": [
+            "lakitu_mobbuild"
+        ],
+        "chrome_sdk": false,
+        "dev_installer_prebuilts": false,
+        "disk_layout": "base",
+        "hw_tests": [],
+        "hwqual": false,
+        "image_test": false,
+        "important": true,
+        "signer_tests": false,
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "vm_tests": [
+            "smoke_suite"
+        ]
+    },
+    "lakitu_mobbuild-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "lakitu_mobbuild"
+        ],
+        "disk_layout": "base",
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "vm_tests": []
+    },
+    "lakitu_mobbuild-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "lakitu_mobbuild"
+        ],
+        "disk_layout": "base",
+        "sync_chrome": false,
+        "upload_gce_images": true,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "leon-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "leon"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "leon-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "leon"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "leon-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "leon"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "leon-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "leon"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "leon-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "leon"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "leon-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "leon"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "leon-full": {
+        "_template": "full",
+        "boards": [
+            "leon"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "leon-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "leon"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "leon-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "leon"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "leon-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "leon"
+        ]
+    },
+    "leon-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "leon"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "leon-release": {
+        "_template": "release",
+        "boards": [
+            "leon"
+        ]
+    },
+    "leon-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "leon"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "leon"
+                ],
+                "grouped": true,
+                "name": "leon-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "leon"
+                ],
+                "grouped": true,
+                "name": "leon-release-afdo-use"
+            }
+        ]
+    },
+    "leon-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "leon"
+        ]
+    },
+    "leon-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "leon"
+        ]
+    },
+    "leon-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "leon"
+        ],
+        "manifest": "official.xml"
+    },
+    "leon-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "leon"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "link-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "link"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "link-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "link"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "link-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "link"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "link-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "link"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "link-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "link"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "link-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "link"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "link-full": {
+        "_template": "full",
+        "boards": [
+            "link"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "link-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "link"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "link-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "link"
+        ],
+        "description": "Commit Queue (internal)",
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": true,\n    \"pool\": \"cq\",\n    \"priority\": \"CQ\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}",
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": true,\n    \"pool\": \"cq\",\n    \"priority\": \"CQ\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "link-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "link"
+        ]
+    },
+    "link-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "link"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "link-release": {
+        "_template": "release",
+        "boards": [
+            "link"
+        ]
+    },
+    "link-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "link"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "link"
+                ],
+                "grouped": true,
+                "name": "link-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "link"
+                ],
+                "grouped": true,
+                "name": "link-release-afdo-use"
+            }
+        ]
+    },
+    "link-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "link"
+        ]
+    },
+    "link-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "link"
+        ]
+    },
+    "link-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "link"
+        ],
+        "manifest": "official.xml"
+    },
+    "link-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "link"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "lulu-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "lulu"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "lulu-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "lulu"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "lulu-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "lulu"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lulu-full": {
+        "_template": "full",
+        "boards": [
+            "lulu"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "lulu-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "lulu"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lulu-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "lulu"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "lulu-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "lulu"
+        ]
+    },
+    "lulu-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "lulu"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lulu-release": {
+        "_template": "release",
+        "boards": [
+            "lulu"
+        ]
+    },
+    "lulu-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "lulu"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "lulu"
+                ],
+                "grouped": true,
+                "name": "lulu-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "lulu"
+                ],
+                "grouped": true,
+                "name": "lulu-release-afdo-use"
+            }
+        ]
+    },
+    "lulu-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "lulu"
+        ]
+    },
+    "lulu-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "lulu"
+        ]
+    },
+    "lulu-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "lulu"
+        ],
+        "manifest": "official.xml"
+    },
+    "lulu-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "lulu"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "lumpy-chrome-perf": {
+        "_template": "chrome-perf",
+        "boards": [
+            "lumpy"
+        ],
+        "manifest": "official.xml",
+        "trybot_list": true
+    },
+    "lumpy-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "active_waterfall": "chromeos",
+        "afdo_generate": true,
+        "boards": [
+            "lumpy"
+        ],
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": true,\n    \"suite\": \"AFDO_record\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 5400,\n    \"warn_only\": true\n}",
+            "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 1,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": false,\n    \"suite\": \"sanity\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": false,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 3,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 3,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+        ],
+        "manifest": "official.xml"
+    },
+    "lumpy-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "lumpy"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "lumpy-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "lumpy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lumpy-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "lumpy"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "lumpy-full": {
+        "_template": "full",
+        "boards": [
+            "lumpy"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "lumpy-incremental-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "lumpy"
+        ],
+        "build_before_patching": true,
+        "compilecheck": true,
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": false,
+        "unittests": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "lumpy-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "lumpy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lumpy-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "lumpy"
+        ],
+        "description": "Commit Queue (internal)",
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": true,\n    \"pool\": \"cq\",\n    \"priority\": \"CQ\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "lumpy-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "lumpy"
+        ]
+    },
+    "lumpy-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "lumpy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "lumpy-pre-flight-branch": {
+        "_template": "pre-flight-branch",
+        "afdo_generate": true,
+        "afdo_update_ebuild": true,
+        "boards": [
+            "lumpy"
+        ],
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": true,\n    \"suite\": \"AFDO_record\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 5400,\n    \"warn_only\": true\n}"
+        ],
+        "hw_tests_override": [
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 0,\n    \"num\": 3,\n    \"offload_failures_only\": false,\n    \"pool\": \"suites\",\n    \"priority\": \"DEFAULT\",\n    \"retry\": true,\n    \"suite\": \"AFDO_record\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 5400,\n    \"warn_only\": true\n}"
+        ],
+        "master": true,
+        "push_overlays": "both"
+    },
+    "lumpy-release": {
+        "_template": "release",
+        "boards": [
+            "lumpy"
+        ],
+        "critical_for_chrome": true
+    },
+    "lumpy-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "lumpy"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "lumpy"
+                ],
+                "grouped": true,
+                "name": "lumpy-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "lumpy"
+                ],
+                "grouped": true,
+                "name": "lumpy-release-afdo-use"
+            }
+        ]
+    },
+    "lumpy-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "lumpy"
+        ]
+    },
+    "lumpy-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "lumpy"
+        ]
+    },
+    "lumpy-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "lumpy"
+        ],
+        "manifest": "official.xml"
+    },
+    "lumpy-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "lumpy"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "mario-incremental": {
+        "_template": "incremental",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "x86-mario"
+        ],
+        "description": "Incremental Builds (internal)",
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both"
+    },
+    "master-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "active_waterfall": "chromeos",
+        "afdo_update_ebuild": true,
+        "binhost_test": true,
+        "boards": [],
+        "chrome_sdk": false,
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "health_alert_recipients": [
+            "chromeos-infra-eng@grotations.appspotmail.com",
+            "tree",
+            "chrome"
+        ],
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "master": true,
+        "overlays": "both",
+        "prebuilts": "public",
+        "push_overlays": "both"
+    },
+    "master-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "binhost_test": true,
+        "boards": [],
+        "description": "Commit Queue master (all others are slaves)",
+        "health_alert_recipients": [
+            "chromeos-infra-eng@grotations.appspotmail.com",
+            "tree"
+        ],
+        "health_threshold": 3,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "master": true,
+        "overlays": "both",
+        "prebuilts": "private",
+        "push_overlays": "both",
+        "sanity_check_slaves": [
+            "wolf-tot-paladin"
+        ],
+        "trybot_list": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "master-release": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "afdo_use": false,
+        "boards": [],
+        "branch_util_test": true,
+        "chrome_sdk": false,
+        "health_alert_recipients": [
+            "chromeos-infra-eng@grotations.appspotmail.com",
+            "tree"
+        ],
+        "master": true,
+        "sync_chrome": false
+    },
+    "mccloud-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "mccloud"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "mccloud-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "mccloud"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "mccloud-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "mccloud"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "mccloud-full": {
+        "_template": "full",
+        "boards": [
+            "mccloud"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "mccloud-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "mccloud"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "mccloud-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "mccloud"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "mccloud-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "mccloud"
+        ]
+    },
+    "mccloud-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "mccloud"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "mccloud-release": {
+        "_template": "release",
+        "boards": [
+            "mccloud"
+        ]
+    },
+    "mccloud-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "mccloud"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "mccloud"
+                ],
+                "grouped": true,
+                "name": "mccloud-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "mccloud"
+                ],
+                "grouped": true,
+                "name": "mccloud-release-afdo-use"
+            }
+        ]
+    },
+    "mccloud-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "mccloud"
+        ]
+    },
+    "mccloud-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "mccloud"
+        ]
+    },
+    "mccloud-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "mccloud"
+        ],
+        "manifest": "official.xml"
+    },
+    "mccloud-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "mccloud"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "mixed-a-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "daisy_spring"
+        ],
+        "child_configs": [
+            {
+                "_template": "compile-only-pre-cq",
+                "boards": [
+                    "daisy_spring"
+                ],
+                "grouped": true,
+                "internal": true,
+                "manifest": "official.xml",
+                "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+                "name": "daisy_spring-compile-only-pre-cq",
+                "overlays": "both",
+                "useflags": [
+                    "chrome_internal"
+                ]
+            },
+            {
+                "_template": "compile-only-pre-cq",
+                "boards": [
+                    "lumpy"
+                ],
+                "grouped": true,
+                "internal": true,
+                "manifest": "official.xml",
+                "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+                "name": "lumpy-compile-only-pre-cq",
+                "overlays": "both",
+                "useflags": [
+                    "chrome_internal"
+                ]
+            }
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "mixed-b-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "rush_ryu"
+        ],
+        "child_configs": [
+            {
+                "_template": "compile-only-pre-cq",
+                "boards": [
+                    "rush_ryu"
+                ],
+                "grouped": true,
+                "internal": true,
+                "manifest": "official.xml",
+                "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+                "name": "rush_ryu-compile-only-pre-cq",
+                "overlays": "both",
+                "useflags": [
+                    "chrome_internal"
+                ]
+            },
+            {
+                "_template": "compile-only-pre-cq",
+                "boards": [
+                    "samus"
+                ],
+                "grouped": true,
+                "internal": true,
+                "manifest": "official.xml",
+                "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+                "name": "samus-compile-only-pre-cq",
+                "overlays": "both",
+                "useflags": [
+                    "chrome_internal"
+                ]
+            }
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "mixed-c-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "storm"
+        ],
+        "child_configs": [
+            {
+                "_template": "compile-only-pre-cq",
+                "boards": [
+                    "storm"
+                ],
+                "grouped": true,
+                "internal": true,
+                "manifest": "official.xml",
+                "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+                "name": "storm-compile-only-pre-cq",
+                "overlays": "both",
+                "sync_chrome": false,
+                "useflags": [
+                    "chrome_internal"
+                ]
+            }
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "mixed-wificell-pre-cq": {
+        "_template": "wificell-pre-cq",
+        "boards": [
+            "winky"
+        ],
+        "child_configs": [
+            {
+                "_template": "wificell-pre-cq",
+                "boards": [
+                    "winky"
+                ],
+                "grouped": true,
+                "internal": true,
+                "manifest": "official.xml",
+                "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+                "name": "winky-wificell-pre-cq",
+                "overlays": "both",
+                "useflags": [
+                    "chrome_internal"
+                ]
+            },
+            {
+                "_template": "wificell-pre-cq",
+                "boards": [
+                    "veyron_speedy"
+                ],
+                "grouped": true,
+                "internal": true,
+                "manifest": "official.xml",
+                "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+                "name": "veyron_speedy-wificell-pre-cq",
+                "overlays": "both",
+                "useflags": [
+                    "chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "wificell-pre-cq",
+                "boards": [
+                    "veyron_jerry"
+                ],
+                "grouped": true,
+                "internal": true,
+                "manifest": "official.xml",
+                "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+                "name": "veyron_jerry-wificell-pre-cq",
+                "overlays": "both",
+                "useflags": [
+                    "chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "wificell-pre-cq",
+                "boards": [
+                    "daisy"
+                ],
+                "grouped": true,
+                "internal": true,
+                "manifest": "official.xml",
+                "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+                "name": "daisy-wificell-pre-cq",
+                "overlays": "both",
+                "useflags": [
+                    "chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "monroe-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "monroe"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "monroe-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "monroe"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "monroe-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "monroe"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "monroe-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "monroe"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "monroe-full": {
+        "_template": "full",
+        "boards": [
+            "monroe"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "monroe-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "monroe"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "monroe-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "monroe"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "monroe-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "monroe"
+        ]
+    },
+    "monroe-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "monroe"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "monroe-release": {
+        "_template": "release",
+        "boards": [
+            "monroe"
+        ]
+    },
+    "monroe-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "monroe"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "monroe"
+                ],
+                "grouped": true,
+                "name": "monroe-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "monroe"
+                ],
+                "grouped": true,
+                "name": "monroe-release-afdo-use"
+            }
+        ]
+    },
+    "monroe-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "monroe"
+        ]
+    },
+    "monroe-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "monroe"
+        ]
+    },
+    "monroe-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "monroe"
+        ],
+        "manifest": "official.xml"
+    },
+    "monroe-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "monroe"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "ninja-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "ninja"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "ninja-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "ninja"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "ninja-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "ninja"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "ninja-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "ninja"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "ninja-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "ninja"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "ninja-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "ninja"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "ninja-full": {
+        "_template": "full",
+        "boards": [
+            "ninja"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "ninja-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "ninja"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "ninja-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "ninja"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "ninja-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "ninja"
+        ]
+    },
+    "ninja-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "ninja"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "ninja-release": {
+        "_template": "release",
+        "boards": [
+            "ninja"
+        ]
+    },
+    "ninja-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "ninja"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "ninja"
+                ],
+                "grouped": true,
+                "name": "ninja-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "ninja"
+                ],
+                "grouped": true,
+                "name": "ninja-release-afdo-use"
+            }
+        ]
+    },
+    "ninja-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "ninja"
+        ]
+    },
+    "ninja-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "ninja"
+        ]
+    },
+    "ninja-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "ninja"
+        ],
+        "manifest": "official.xml"
+    },
+    "ninja-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "ninja"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "nyan-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "nyan"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "nyan-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "nyan"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "nyan"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "nyan-full": {
+        "_template": "full",
+        "boards": [
+            "nyan"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan-full-compile-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "board_replace": true,
+        "boards": [
+            "nyan"
+        ],
+        "chrome_binhost_only": true,
+        "chrome_sdk": false,
+        "cpe_export": false,
+        "debug_symbols": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": false,
+        "unittests": false,
+        "upload_hw_test_artifacts": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "nyan-full-group": {
+        "_template": "full",
+        "boards": [
+            "nyan"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "nyan"
+                ],
+                "grouped": true,
+                "name": "nyan-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "nyan_big"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "nyan_big-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "nyan_blaze"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "nyan_blaze-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "nyan_kitty"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "nyan_kitty-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: nyan, nyan_big, nyan_blaze, nyan_kitty)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "nyan"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "nyan-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "nyan"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "nyan-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "nyan"
+        ]
+    },
+    "nyan-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "nyan"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan-release": {
+        "_template": "release",
+        "boards": [
+            "nyan"
+        ],
+        "vm_tests": []
+    },
+    "nyan-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "nyan"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "nyan"
+                ],
+                "grouped": true,
+                "name": "nyan-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "nyan"
+                ],
+                "grouped": true,
+                "name": "nyan-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "nyan-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "nyan"
+        ],
+        "vm_tests": []
+    },
+    "nyan-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "nyan"
+        ],
+        "vm_tests": []
+    },
+    "nyan-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "nyan"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "nyan"
+                ],
+                "grouped": true,
+                "name": "nyan-release",
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "nyan_big"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "nyan_big-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "nyan_blaze"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "nyan_blaze-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "nyan_kitty"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "nyan_kitty-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: nyan, nyan_big, nyan_blaze, nyan_kitty)",
+        "important": true,
+        "vm_tests": []
+    },
+    "nyan-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "nyan"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "nyan-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "nyan"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_big-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "nyan_big"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "nyan_big-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "nyan_big"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_big-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "nyan_big"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "nyan_big-full": {
+        "_template": "full",
+        "boards": [
+            "nyan_big"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_big-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "nyan_big"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "nyan_big-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "nyan_big"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "nyan_big-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "nyan_big"
+        ]
+    },
+    "nyan_big-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "nyan_big"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_big-release": {
+        "_template": "release",
+        "boards": [
+            "nyan_big"
+        ],
+        "vm_tests": []
+    },
+    "nyan_big-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "nyan_big"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "nyan_big"
+                ],
+                "grouped": true,
+                "name": "nyan_big-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "nyan_big"
+                ],
+                "grouped": true,
+                "name": "nyan_big-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "nyan_big-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "nyan_big"
+        ],
+        "vm_tests": []
+    },
+    "nyan_big-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "nyan_big"
+        ],
+        "vm_tests": []
+    },
+    "nyan_big-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "nyan_big"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "nyan_big-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "nyan_big"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_blaze-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "nyan_blaze"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "nyan_blaze-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "nyan_blaze"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_blaze-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "nyan_blaze"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "nyan_blaze-full": {
+        "_template": "full",
+        "boards": [
+            "nyan_blaze"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_blaze-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "nyan_blaze"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "nyan_blaze-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "nyan_blaze"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "nyan_blaze-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "nyan_blaze"
+        ]
+    },
+    "nyan_blaze-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "nyan_blaze"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_blaze-release": {
+        "_template": "release",
+        "boards": [
+            "nyan_blaze"
+        ],
+        "vm_tests": []
+    },
+    "nyan_blaze-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "nyan_blaze"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "nyan_blaze"
+                ],
+                "grouped": true,
+                "name": "nyan_blaze-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "nyan_blaze"
+                ],
+                "grouped": true,
+                "name": "nyan_blaze-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "nyan_blaze-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "nyan_blaze"
+        ],
+        "vm_tests": []
+    },
+    "nyan_blaze-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "nyan_blaze"
+        ],
+        "vm_tests": []
+    },
+    "nyan_blaze-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "nyan_blaze"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "nyan_blaze-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "nyan_blaze"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_freon-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "nyan_freon"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "nyan_freon-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "nyan_freon"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_freon-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "nyan_freon"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "nyan_freon-full": {
+        "_template": "full",
+        "boards": [
+            "nyan_freon"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_freon-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "nyan_freon"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "nyan_freon-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "nyan_freon"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "nyan_freon-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "nyan_freon"
+        ]
+    },
+    "nyan_freon-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "nyan_freon"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_freon-release": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "nyan_freon"
+        ],
+        "vm_tests": []
+    },
+    "nyan_freon-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "nyan_freon"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "nyan_freon"
+                ],
+                "grouped": true,
+                "name": "nyan_freon-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "nyan_freon"
+                ],
+                "grouped": true,
+                "name": "nyan_freon-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "nyan_freon-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "nyan_freon"
+        ],
+        "vm_tests": []
+    },
+    "nyan_freon-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "nyan_freon"
+        ],
+        "vm_tests": []
+    },
+    "nyan_freon-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "nyan_freon"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "nyan_freon-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "nyan_freon"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_kitty-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "nyan_kitty"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "nyan_kitty-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "nyan_kitty"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_kitty-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "nyan_kitty"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "nyan_kitty-full": {
+        "_template": "full",
+        "boards": [
+            "nyan_kitty"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_kitty-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "nyan_kitty"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "nyan_kitty-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "nyan_kitty"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "nyan_kitty-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "nyan_kitty"
+        ]
+    },
+    "nyan_kitty-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "nyan_kitty"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "nyan_kitty-release": {
+        "_template": "release",
+        "boards": [
+            "nyan_kitty"
+        ],
+        "vm_tests": []
+    },
+    "nyan_kitty-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "nyan_kitty"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "nyan_kitty"
+                ],
+                "grouped": true,
+                "name": "nyan_kitty-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "nyan_kitty"
+                ],
+                "grouped": true,
+                "name": "nyan_kitty-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "nyan_kitty-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "nyan_kitty"
+        ],
+        "vm_tests": []
+    },
+    "nyan_kitty-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "nyan_kitty"
+        ],
+        "vm_tests": []
+    },
+    "nyan_kitty-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "nyan_kitty"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "nyan_kitty-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "nyan_kitty"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "oak-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "oak"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "oak-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "oak"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "oak-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "oak"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "oak-full": {
+        "_template": "full",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "oak"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "oak-full-group": {
+        "_template": "full",
+        "boards": [
+            "oak"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "oak"
+                ],
+                "grouped": true,
+                "name": "oak-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: oak)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "oak-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "oak"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "oak-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "oak"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "oak-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "oak"
+        ]
+    },
+    "oak-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "oak"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "oak-release": {
+        "_template": "release",
+        "boards": [
+            "oak"
+        ],
+        "vm_tests": []
+    },
+    "oak-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "oak"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "oak"
+                ],
+                "grouped": true,
+                "name": "oak-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "oak"
+                ],
+                "grouped": true,
+                "name": "oak-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "oak-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "oak"
+        ],
+        "vm_tests": []
+    },
+    "oak-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "oak"
+        ],
+        "vm_tests": []
+    },
+    "oak-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "oak"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "oak"
+                ],
+                "grouped": true,
+                "name": "oak-release",
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: oak)",
+        "important": true,
+        "vm_tests": []
+    },
+    "oak-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "oak"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "oak-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "oak"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "orco-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "orco"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "orco-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "orco"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "orco-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "orco"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "orco-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "orco"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "orco-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "orco"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "orco-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "orco"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "orco-full": {
+        "_template": "full",
+        "boards": [
+            "orco"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "orco-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "orco"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "orco-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "orco"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "orco-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "orco"
+        ]
+    },
+    "orco-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "orco"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "orco-release": {
+        "_template": "release",
+        "boards": [
+            "orco"
+        ]
+    },
+    "orco-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "orco"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "orco"
+                ],
+                "grouped": true,
+                "name": "orco-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "orco"
+                ],
+                "grouped": true,
+                "name": "orco-release-afdo-use"
+            }
+        ]
+    },
+    "orco-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "orco"
+        ]
+    },
+    "orco-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "orco"
+        ]
+    },
+    "orco-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "orco"
+        ],
+        "manifest": "official.xml"
+    },
+    "orco-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "orco"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "panther-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "panther"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "panther-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "panther"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "panther-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "panther"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "panther-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "panther"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "panther-full": {
+        "_template": "full",
+        "boards": [
+            "panther"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "panther-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "panther"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "panther-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "panther"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "panther-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "panther"
+        ]
+    },
+    "panther-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "panther"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "panther-release": {
+        "_template": "release",
+        "boards": [
+            "panther"
+        ]
+    },
+    "panther-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "panther"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "panther"
+                ],
+                "grouped": true,
+                "name": "panther-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "panther"
+                ],
+                "grouped": true,
+                "name": "panther-release-afdo-use"
+            }
+        ]
+    },
+    "panther-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "panther"
+        ]
+    },
+    "panther-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "panther"
+        ]
+    },
+    "panther-test-ap": {
+        "_template": "test-ap",
+        "boards": [
+            "panther"
+        ]
+    },
+    "panther-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "panther"
+        ],
+        "manifest": "official.xml"
+    },
+    "panther-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "panther"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "panther_embedded-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "panther_embedded"
+        ],
+        "chrome_sdk": false,
+        "important": false,
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "panther_embedded-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "panther_embedded"
+        ],
+        "chrome_sdk": false,
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "panther_embedded-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "panther_embedded"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "panther_embedded-full": {
+        "_template": "full",
+        "boards": [
+            "panther_embedded"
+        ],
+        "chrome_sdk": false,
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "panther_embedded-minimal-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "panther_embedded"
+        ],
+        "chrome_sdk": false,
+        "profile": "minimal",
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "panther_embedded-minimal-release": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "afdo_use": false,
+        "boards": [
+            "panther_embedded"
+        ],
+        "chrome_sdk": false,
+        "dev_installer_prebuilts": false,
+        "hw_tests": [],
+        "important": true,
+        "paygen": false,
+        "profile": "minimal",
+        "signer_tests": false,
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "panther_embedded-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "panther_embedded"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "panther_embedded-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "panther_embedded"
+        ],
+        "chrome_sdk": false,
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "panther_embedded-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "panther_embedded"
+        ]
+    },
+    "panther_embedded-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "panther_embedded"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "panther_embedded-release": {
+        "_template": "release",
+        "afdo_use": false,
+        "boards": [
+            "panther_embedded"
+        ],
+        "chrome_sdk": false,
+        "dev_installer_prebuilts": false,
+        "hw_tests": [],
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "panther_embedded-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "panther_embedded"
+        ],
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "panther_embedded-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "panther_embedded"
+        ],
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "panther_moblab-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "panther_moblab"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "panther_moblab-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "panther_moblab"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "panther_moblab-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "panther_moblab"
+        ],
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "panther_moblab-full": {
+        "_template": "full",
+        "boards": [
+            "panther_moblab"
+        ],
+        "image_test": false,
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "panther_moblab-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "panther_moblab"
+        ],
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "panther_moblab-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "panther_moblab"
+        ],
+        "description": "Commit Queue (internal)",
+        "image_test": false,
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "panther_moblab-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "panther_moblab"
+        ]
+    },
+    "panther_moblab-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "panther_moblab"
+        ],
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "panther_moblab-release": {
+        "_template": "moblab-release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "panther_moblab"
+        ],
+        "image_test": false,
+        "vm_tests": []
+    },
+    "panther_moblab-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "panther_moblab"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "panther_moblab"
+                ],
+                "grouped": true,
+                "image_test": false,
+                "name": "panther_moblab-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "panther_moblab"
+                ],
+                "grouped": true,
+                "image_test": false,
+                "name": "panther_moblab-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "image_test": false,
+        "vm_tests": []
+    },
+    "panther_moblab-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "panther_moblab"
+        ],
+        "image_test": false,
+        "vm_tests": []
+    },
+    "panther_moblab-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "panther_moblab"
+        ],
+        "image_test": false,
+        "vm_tests": []
+    },
+    "panther_moblab-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "panther_moblab"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "panther_moblab-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "panther_moblab"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "parrot-chrome-perf": {
+        "_template": "chrome-perf",
+        "boards": [
+            "parrot"
+        ],
+        "manifest": "official.xml",
+        "trybot_list": true
+    },
+    "parrot-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "parrot"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "parrot-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "parrot"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "parrot-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "parrot"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "parrot-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "parrot"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "parrot-full": {
+        "_template": "full",
+        "boards": [
+            "parrot"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "parrot-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "parrot"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "parrot-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "parrot"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [
+            "dev_mode_test"
+        ],
+        "vm_tests_override": [
+            "smoke_suite",
+            "pfq_suite",
+            "cros_vm_test",
+            "dev_mode_test"
+        ]
+    },
+    "parrot-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "parrot"
+        ]
+    },
+    "parrot-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "parrot"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "parrot-release": {
+        "_template": "release",
+        "boards": [
+            "parrot"
+        ],
+        "critical_for_chrome": true
+    },
+    "parrot-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "parrot"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "parrot"
+                ],
+                "grouped": true,
+                "name": "parrot-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "parrot"
+                ],
+                "grouped": true,
+                "name": "parrot-release-afdo-use"
+            }
+        ]
+    },
+    "parrot-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "parrot"
+        ]
+    },
+    "parrot-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "parrot"
+        ]
+    },
+    "parrot-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "parrot"
+        ],
+        "manifest": "official.xml"
+    },
+    "parrot-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "parrot"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "parrot_ivb-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "parrot_ivb"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "parrot_ivb-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "parrot_ivb"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "parrot_ivb-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "parrot_ivb"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "parrot_ivb-full": {
+        "_template": "full",
+        "boards": [
+            "parrot_ivb"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "parrot_ivb-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "parrot_ivb"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "parrot_ivb-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "parrot_ivb"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "parrot_ivb-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "parrot_ivb"
+        ]
+    },
+    "parrot_ivb-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "parrot_ivb"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "parrot_ivb-release": {
+        "_template": "release",
+        "boards": [
+            "parrot_ivb"
+        ]
+    },
+    "parrot_ivb-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "parrot_ivb"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "parrot_ivb"
+                ],
+                "grouped": true,
+                "name": "parrot_ivb-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "parrot_ivb"
+                ],
+                "grouped": true,
+                "name": "parrot_ivb-release-afdo-use"
+            }
+        ]
+    },
+    "parrot_ivb-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "parrot_ivb"
+        ]
+    },
+    "parrot_ivb-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "parrot_ivb"
+        ]
+    },
+    "parrot_ivb-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "parrot_ivb"
+        ],
+        "manifest": "official.xml"
+    },
+    "parrot_ivb-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "parrot_ivb"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "parry-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "parry"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "parry-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "parry"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "parry-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "parry"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "parry-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "parry"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "parry-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "parry"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "parry-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "parry"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "parry-full": {
+        "_template": "full",
+        "boards": [
+            "parry"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "parry-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "parry"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "parry-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "parry"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "parry-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "parry"
+        ]
+    },
+    "parry-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "parry"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "parry-release": {
+        "_template": "release",
+        "boards": [
+            "parry"
+        ]
+    },
+    "parry-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "parry"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "parry"
+                ],
+                "grouped": true,
+                "name": "parry-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "parry"
+                ],
+                "grouped": true,
+                "name": "parry-release-afdo-use"
+            }
+        ]
+    },
+    "parry-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "parry"
+        ]
+    },
+    "parry-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "parry"
+        ]
+    },
+    "parry-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "parry"
+        ],
+        "manifest": "official.xml"
+    },
+    "parry-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "parry"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "peach-full-group": {
+        "_template": "full",
+        "boards": [
+            "peach_pit"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "peach_pit"
+                ],
+                "grouped": true,
+                "name": "peach_pit-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "peach_pi"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "peach_pi-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: peach_pit, peach_pi)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "peach-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "peach_pit"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "peach_pit"
+                ],
+                "grouped": true,
+                "name": "peach_pit-release",
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "peach_pi"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "peach_pi-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: peach_pit, peach_pi)",
+        "important": true,
+        "vm_tests": []
+    },
+    "peach_pi-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "peach_pi"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "peach_pi-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "peach_pi"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "peach_pi-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "peach_pi"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "peach_pi-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "peach_pi"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "peach_pi-full": {
+        "_template": "full",
+        "boards": [
+            "peach_pi"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "peach_pi-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "peach_pi"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "peach_pi-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "peach_pi"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "peach_pi-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "peach_pi"
+        ]
+    },
+    "peach_pi-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "peach_pi"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "peach_pi-release": {
+        "_template": "release",
+        "boards": [
+            "peach_pi"
+        ],
+        "vm_tests": []
+    },
+    "peach_pi-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "peach_pi"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "peach_pi"
+                ],
+                "grouped": true,
+                "name": "peach_pi-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "peach_pi"
+                ],
+                "grouped": true,
+                "name": "peach_pi-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "peach_pi-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "peach_pi"
+        ],
+        "vm_tests": []
+    },
+    "peach_pi-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "peach_pi"
+        ],
+        "vm_tests": []
+    },
+    "peach_pi-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "peach_pi"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "peach_pi-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "peach_pi"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "peach_pit-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "peach_pit"
+        ],
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 1,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": false,\n    \"suite\": \"sanity\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": false,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 3,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 3,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "peach_pit-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "peach_pit"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "peach_pit-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "peach_pit"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "peach_pit-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "peach_pit"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "peach_pit-full": {
+        "_template": "full",
+        "boards": [
+            "peach_pit"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "peach_pit-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "peach_pit"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "peach_pit-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "peach_pit"
+        ],
+        "description": "Commit Queue (internal)",
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": true,\n    \"pool\": \"cq\",\n    \"priority\": \"CQ\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "peach_pit-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "peach_pit"
+        ]
+    },
+    "peach_pit-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "peach_pit"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "peach_pit-release": {
+        "_template": "release",
+        "boards": [
+            "peach_pit"
+        ],
+        "vm_tests": []
+    },
+    "peach_pit-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "peach_pit"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "peach_pit"
+                ],
+                "grouped": true,
+                "name": "peach_pit-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "peach_pit"
+                ],
+                "grouped": true,
+                "name": "peach_pit-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "peach_pit-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "peach_pit"
+        ],
+        "vm_tests": []
+    },
+    "peach_pit-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "peach_pit"
+        ],
+        "vm_tests": []
+    },
+    "peach_pit-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "peach_pit"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "peach_pit-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "peach_pit"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "peppy-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "peppy"
+        ],
+        "manifest": "official.xml"
+    },
+    "peppy-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "peppy"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "peppy-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "peppy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "peppy-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "peppy"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "peppy-full": {
+        "_template": "full",
+        "boards": [
+            "peppy"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "peppy-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "peppy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "peppy-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "peppy"
+        ],
+        "description": "Commit Queue (internal)",
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": true,\n    \"pool\": \"cq\",\n    \"priority\": \"CQ\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "peppy-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "peppy"
+        ]
+    },
+    "peppy-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "peppy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "peppy-release": {
+        "_template": "release",
+        "boards": [
+            "peppy"
+        ]
+    },
+    "peppy-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "peppy"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "peppy"
+                ],
+                "grouped": true,
+                "name": "peppy-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "peppy"
+                ],
+                "grouped": true,
+                "name": "peppy-release-afdo-use"
+            }
+        ]
+    },
+    "peppy-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "peppy"
+        ]
+    },
+    "peppy-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "peppy"
+        ]
+    },
+    "peppy-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "peppy"
+        ],
+        "manifest": "official.xml"
+    },
+    "peppy-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "peppy"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "pineview-full-group": {
+        "_template": "full",
+        "boards": [
+            "x86-mario"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "x86-mario"
+                ],
+                "grouped": true,
+                "name": "x86-mario-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "x86-alex"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "x86-alex-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "x86-zgb"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "x86-zgb-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "x86-alex_he"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "x86-alex_he-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "x86-zgb_he"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "x86-zgb_he-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: x86-mario, x86-alex, x86-zgb, x86-alex_he, x86-zgb_he)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "pineview-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "x86-mario"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "x86-mario"
+                ],
+                "grouped": true,
+                "name": "x86-mario-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "x86-alex"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "x86-alex-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "x86-zgb"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "x86-zgb-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "x86-alex_he"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "hw_tests": [],
+                "name": "x86-alex_he-release",
+                "paygen_skip_testing": true,
+                "unittests": null,
+                "upload_hw_test_artifacts": false,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "x86-zgb_he"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "hw_tests": [],
+                "name": "x86-zgb_he-release",
+                "paygen_skip_testing": true,
+                "unittests": null,
+                "upload_hw_test_artifacts": false,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: x86-mario, x86-alex, x86-zgb, x86-alex_he, x86-zgb_he)",
+        "important": true
+    },
+    "pre-cq-launcher": {
+        "active_waterfall": "chromeos",
+        "boards": [],
+        "build_type": "priest",
+        "chrome_sdk": true,
+        "chrome_sdk_build_chrome": false,
+        "chroot_replace": false,
+        "description": "Launcher for Pre-CQ builders",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Pre-CQ",
+        "health_alert_recipients": [
+            "chromeos-infra-eng@grotations.appspotmail.com",
+            "tree"
+        ],
+        "health_threshold": 1,
+        "hw_tests_override": [],
+        "image_test": true,
+        "images": [
+            "base",
+            "test"
+        ],
+        "important": true,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "upload_standalone_images": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "purin-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "purin"
+        ],
+        "chrome_sdk": false,
+        "important": false,
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "purin-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "purin"
+        ],
+        "chrome_sdk": false,
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "purin-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "purin"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "purin-full": {
+        "_template": "full",
+        "boards": [
+            "purin"
+        ],
+        "chrome_sdk": false,
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "purin-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "purin"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "purin-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "purin"
+        ],
+        "chrome_sdk": false,
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "purin-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "purin"
+        ]
+    },
+    "purin-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "purin"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "purin-release": {
+        "_template": "release",
+        "afdo_use": false,
+        "boards": [
+            "purin"
+        ],
+        "chrome_sdk": false,
+        "dev_installer_prebuilts": false,
+        "hw_tests": [],
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "purin-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "purin"
+        ],
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "purin-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "purin"
+        ],
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "quawks-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "quawks"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "quawks-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "quawks"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "quawks-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "quawks"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "quawks-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "quawks"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "quawks-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "quawks"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "quawks-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "quawks"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "quawks-full": {
+        "_template": "full",
+        "boards": [
+            "quawks"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "quawks-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "quawks"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "quawks-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "quawks"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "quawks-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "quawks"
+        ]
+    },
+    "quawks-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "quawks"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "quawks-release": {
+        "_template": "release",
+        "boards": [
+            "quawks"
+        ]
+    },
+    "quawks-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "quawks"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "quawks"
+                ],
+                "grouped": true,
+                "name": "quawks-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "quawks"
+                ],
+                "grouped": true,
+                "name": "quawks-release-afdo-use"
+            }
+        ]
+    },
+    "quawks-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "quawks"
+        ]
+    },
+    "quawks-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "quawks"
+        ]
+    },
+    "quawks-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "quawks"
+        ],
+        "manifest": "official.xml"
+    },
+    "quawks-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "quawks"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "rambi-a-full-group": {
+        "_template": "full",
+        "boards": [
+            "rambi"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "rambi"
+                ],
+                "grouped": true,
+                "name": "rambi-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "clapper"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "clapper-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "enguarde"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "enguarde-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "expresso"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "expresso-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: rambi, clapper, enguarde, expresso)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "rambi-a-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "rambi"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "rambi"
+                ],
+                "grouped": true,
+                "name": "rambi-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "clapper"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "clapper-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "enguarde"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "enguarde-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "expresso"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "expresso-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: rambi, clapper, enguarde, expresso)",
+        "important": true
+    },
+    "rambi-b-full-group": {
+        "_template": "full",
+        "boards": [
+            "glimmer"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "glimmer"
+                ],
+                "grouped": true,
+                "name": "glimmer-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "gnawty"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "gnawty-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "kip"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "kip-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "quawks"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "quawks-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: glimmer, gnawty, kip, quawks)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "rambi-b-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "glimmer"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "glimmer"
+                ],
+                "grouped": true,
+                "name": "glimmer-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "gnawty"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "gnawty-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "kip"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "kip-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "quawks"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "quawks-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: glimmer, gnawty, kip, quawks)",
+        "important": true
+    },
+    "rambi-c-full-group": {
+        "_template": "full",
+        "boards": [
+            "squawks"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "squawks"
+                ],
+                "grouped": true,
+                "name": "squawks-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "swanky"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "swanky-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "winky"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "winky-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "candy"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "candy-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: squawks, swanky, winky, candy)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "rambi-c-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "squawks"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "squawks"
+                ],
+                "grouped": true,
+                "name": "squawks-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "swanky"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "swanky-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "winky"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "winky-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "candy"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "candy-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: squawks, swanky, winky, candy)",
+        "important": true
+    },
+    "rambi-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "rambi"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "rambi-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "rambi"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "rambi-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "rambi"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "rambi-d-full-group": {
+        "_template": "full",
+        "boards": [
+            "banjo"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "banjo"
+                ],
+                "grouped": true,
+                "name": "banjo-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "ninja"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "ninja-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "sumo"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "sumo-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: banjo, ninja, sumo)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "rambi-d-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "banjo"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "banjo"
+                ],
+                "grouped": true,
+                "name": "banjo-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "ninja"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "ninja-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "sumo"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "sumo-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: banjo, ninja, sumo)",
+        "important": true
+    },
+    "rambi-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "rambi"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "rambi-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "rambi"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "rambi-e-full-group": {
+        "_template": "full",
+        "boards": [
+            "orco"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "orco"
+                ],
+                "grouped": true,
+                "name": "orco-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "heli"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "heli-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "wizpig"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "wizpig-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: orco, heli, wizpig)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "rambi-e-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "orco"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "orco"
+                ],
+                "grouped": true,
+                "name": "orco-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "heli"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "heli-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "wizpig"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "wizpig-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: orco, heli, wizpig)",
+        "important": true
+    },
+    "rambi-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "rambi"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "rambi-full": {
+        "_template": "full",
+        "boards": [
+            "rambi"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "rambi-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "rambi"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "rambi-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "rambi"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [
+            "pfq_suite"
+        ]
+    },
+    "rambi-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "rambi"
+        ]
+    },
+    "rambi-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "rambi"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "rambi-release": {
+        "_template": "release",
+        "boards": [
+            "rambi"
+        ]
+    },
+    "rambi-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "rambi"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "rambi"
+                ],
+                "grouped": true,
+                "name": "rambi-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "rambi"
+                ],
+                "grouped": true,
+                "name": "rambi-release-afdo-use"
+            }
+        ]
+    },
+    "rambi-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "rambi"
+        ]
+    },
+    "rambi-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "rambi"
+        ]
+    },
+    "rambi-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "rambi"
+        ],
+        "manifest": "official.xml"
+    },
+    "rambi-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "rambi"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "refresh-packages": {
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "x86-generic",
+            "arm-generic"
+        ],
+        "builder_class_name": "misc_builders.RefreshPackagesBuilder",
+        "description": "Check upstream Gentoo for package updates",
+        "hw_tests_override": [],
+        "vm_tests": []
+    },
+    "rikku-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "rikku"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "rikku-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "rikku"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "rikku-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "rikku"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "rikku-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "rikku"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "rikku-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "rikku"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "rikku-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "rikku"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "rikku-full": {
+        "_template": "full",
+        "boards": [
+            "rikku"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "rikku-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "rikku"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "rikku-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "rikku"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "rikku-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "rikku"
+        ]
+    },
+    "rikku-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "rikku"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "rikku-release": {
+        "_template": "release",
+        "boards": [
+            "rikku"
+        ]
+    },
+    "rikku-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "rikku"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "rikku"
+                ],
+                "grouped": true,
+                "name": "rikku-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "rikku"
+                ],
+                "grouped": true,
+                "name": "rikku-release-afdo-use"
+            }
+        ]
+    },
+    "rikku-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "rikku"
+        ]
+    },
+    "rikku-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "rikku"
+        ]
+    },
+    "rikku-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "rikku"
+        ],
+        "manifest": "official.xml"
+    },
+    "rikku-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "rikku"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "rush-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "rush"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "rush-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "rush"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "rush-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "rush"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "rush-full": {
+        "_template": "full",
+        "boards": [
+            "rush"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "rush-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "rush"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "rush-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "rush"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "rush-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "rush"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "rush-release": {
+        "_template": "release",
+        "boards": [
+            "rush"
+        ],
+        "hw_tests": [],
+        "paygen": false,
+        "signer_tests": false,
+        "vm_tests": []
+    },
+    "rush-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "rush"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "rush"
+                ],
+                "grouped": true,
+                "name": "rush-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "rush"
+                ],
+                "grouped": true,
+                "name": "rush-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "rush-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "rush"
+        ],
+        "vm_tests": []
+    },
+    "rush-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "rush"
+        ],
+        "vm_tests": []
+    },
+    "rush-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "rush"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "rush-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "rush"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "rush_ryu-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "rush_ryu"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "rush_ryu-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "rush_ryu"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "rush_ryu-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "rush_ryu"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "rush_ryu-full": {
+        "_template": "full",
+        "boards": [
+            "rush_ryu"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "rush_ryu-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "rush_ryu"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "rush_ryu-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "rush_ryu"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "rush_ryu-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "rush_ryu"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "rush_ryu-release": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "rush_ryu"
+        ],
+        "dev_installer_prebuilts": false,
+        "hw_tests": [],
+        "images": [
+            "base",
+            "test",
+            "factory_install"
+        ],
+        "paygen": false,
+        "push_image": false,
+        "signer_tests": false,
+        "vm_tests": []
+    },
+    "rush_ryu-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "rush_ryu"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "rush_ryu"
+                ],
+                "grouped": true,
+                "name": "rush_ryu-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "rush_ryu"
+                ],
+                "grouped": true,
+                "name": "rush_ryu-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "rush_ryu-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "rush_ryu"
+        ],
+        "vm_tests": []
+    },
+    "rush_ryu-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "rush_ryu"
+        ],
+        "vm_tests": []
+    },
+    "rush_ryu-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "rush_ryu"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "rush_ryu-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "rush_ryu"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "samus-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "samus"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "samus-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "samus"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "samus-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "samus"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "samus-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "samus"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "samus-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "samus"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "samus-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "samus"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "samus-full": {
+        "_template": "full",
+        "boards": [
+            "samus"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "samus-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "samus"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "samus-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "samus"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "samus-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "samus"
+        ]
+    },
+    "samus-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "samus"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "samus-release": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "samus"
+        ],
+        "important": true
+    },
+    "samus-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "samus"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "samus"
+                ],
+                "grouped": true,
+                "name": "samus-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "samus"
+                ],
+                "grouped": true,
+                "name": "samus-release-afdo-use"
+            }
+        ]
+    },
+    "samus-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "samus"
+        ]
+    },
+    "samus-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "samus"
+        ]
+    },
+    "samus-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "samus"
+        ],
+        "manifest": "official.xml"
+    },
+    "samus-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "samus"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "sandybridge-full-group": {
+        "_template": "full",
+        "boards": [
+            "parrot"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "parrot"
+                ],
+                "grouped": true,
+                "name": "parrot-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "lumpy"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "lumpy-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "butterfly"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "butterfly-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "stumpy"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "stumpy-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: parrot, lumpy, butterfly, stumpy)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "sandybridge-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "parrot"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "parrot"
+                ],
+                "critical_for_chrome": true,
+                "grouped": true,
+                "name": "parrot-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "lumpy"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "critical_for_chrome": true,
+                "grouped": true,
+                "name": "lumpy-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "butterfly"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "butterfly-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "stumpy"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "stumpy-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "critical_for_chrome": true,
+        "description": "Release Builds (canary) (internal); Group config (boards: parrot, lumpy, butterfly, stumpy)",
+        "important": true
+    },
+    "slippy-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "slippy"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "slippy-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "slippy"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "slippy-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "slippy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "slippy-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "slippy"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "slippy-full": {
+        "_template": "full",
+        "boards": [
+            "slippy"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "slippy-full-group": {
+        "_template": "full",
+        "boards": [
+            "peppy"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "peppy"
+                ],
+                "grouped": true,
+                "name": "peppy-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "falco"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "falco-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "leon"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "leon-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "wolf"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "wolf-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "falco_li"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "falco_li-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: peppy, falco, leon, wolf, falco_li)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "slippy-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "slippy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "slippy-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "slippy"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "slippy-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "slippy"
+        ]
+    },
+    "slippy-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "slippy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "slippy-release": {
+        "_template": "release",
+        "boards": [
+            "slippy"
+        ]
+    },
+    "slippy-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "slippy"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "slippy"
+                ],
+                "grouped": true,
+                "name": "slippy-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "slippy"
+                ],
+                "grouped": true,
+                "name": "slippy-release-afdo-use"
+            }
+        ]
+    },
+    "slippy-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "slippy"
+        ]
+    },
+    "slippy-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "slippy"
+        ]
+    },
+    "slippy-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "peppy"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "peppy"
+                ],
+                "grouped": true,
+                "name": "peppy-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "falco"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "falco-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "leon"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "leon-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "wolf"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "wolf-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "falco_li"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "falco_li-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: peppy, falco, leon, wolf, falco_li)",
+        "important": true
+    },
+    "slippy-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "slippy"
+        ],
+        "manifest": "official.xml"
+    },
+    "slippy-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "slippy"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "smaug-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "smaug"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "smaug-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "smaug"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "smaug-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "smaug"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "smaug-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "smaug"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "smaug-full": {
+        "_template": "full",
+        "boards": [
+            "smaug"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "images": [
+            "base",
+            "recovery",
+            "test"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "smaug-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "smaug"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "smaug-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "smaug"
+        ],
+        "description": "Commit Queue (internal)",
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "smaug-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "smaug"
+        ]
+    },
+    "smaug-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "smaug"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "smaug-release": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "smaug"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "images": [
+            "base",
+            "recovery",
+            "test"
+        ],
+        "vm_tests": []
+    },
+    "smaug-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "smaug"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "smaug"
+                ],
+                "factory": false,
+                "factory_install_netboot": false,
+                "factory_toolkit": false,
+                "grouped": true,
+                "name": "smaug-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "smaug"
+                ],
+                "factory": false,
+                "factory_install_netboot": false,
+                "factory_toolkit": false,
+                "grouped": true,
+                "name": "smaug-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "vm_tests": []
+    },
+    "smaug-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "smaug"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "vm_tests": []
+    },
+    "smaug-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "smaug"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "vm_tests": []
+    },
+    "smaug-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "smaug"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "smaug-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "smaug"
+        ],
+        "factory": false,
+        "factory_install_netboot": false,
+        "factory_toolkit": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "squawks-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "squawks"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "squawks-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "squawks"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "squawks-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "squawks"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "squawks-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "squawks"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "squawks-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "squawks"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "squawks-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "squawks"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "squawks-full": {
+        "_template": "full",
+        "boards": [
+            "squawks"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "squawks-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "squawks"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "squawks-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "squawks"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "squawks-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "squawks"
+        ]
+    },
+    "squawks-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "squawks"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "squawks-release": {
+        "_template": "release",
+        "boards": [
+            "squawks"
+        ]
+    },
+    "squawks-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "squawks"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "squawks"
+                ],
+                "grouped": true,
+                "name": "squawks-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "squawks"
+                ],
+                "grouped": true,
+                "name": "squawks-release-afdo-use"
+            }
+        ]
+    },
+    "squawks-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "squawks"
+        ]
+    },
+    "squawks-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "squawks"
+        ]
+    },
+    "squawks-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "squawks"
+        ],
+        "manifest": "official.xml"
+    },
+    "squawks-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "squawks"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "storm-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "storm"
+        ],
+        "chrome_sdk": false,
+        "important": false,
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "storm-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "storm"
+        ],
+        "chrome_sdk": false,
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "storm-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "storm"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "storm-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "storm"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "storm-full": {
+        "_template": "full",
+        "boards": [
+            "storm"
+        ],
+        "chrome_sdk": false,
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "storm-full-group": {
+        "_template": "full",
+        "boards": [
+            "storm"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "storm"
+                ],
+                "chrome_sdk": false,
+                "grouped": true,
+                "name": "storm-full",
+                "prebuilts": "public",
+                "sync_chrome": false,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "arkham"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "arkham-full",
+                "prebuilts": "public",
+                "sync_chrome": false,
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "whirlwind"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "whirlwind-full",
+                "prebuilts": "public",
+                "sync_chrome": false,
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "chrome_sdk": false,
+        "description": "Full Builds; Group config (boards: storm, arkham, whirlwind)",
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "storm-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "storm"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "storm-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "storm"
+        ],
+        "chrome_sdk": false,
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "storm-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "storm"
+        ]
+    },
+    "storm-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "storm"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "storm-release": {
+        "_template": "release",
+        "afdo_use": false,
+        "boards": [
+            "storm"
+        ],
+        "chrome_sdk": false,
+        "dev_installer_prebuilts": false,
+        "hw_tests": [],
+        "paygen_skip_testing": true,
+        "signer_tests": false,
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "storm-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "afdo_use": false,
+        "boards": [
+            "storm"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "afdo_use": false,
+                "boards": [
+                    "storm"
+                ],
+                "chrome_sdk": false,
+                "dev_installer_prebuilts": false,
+                "grouped": true,
+                "hw_tests": [],
+                "name": "storm-release",
+                "paygen_skip_testing": true,
+                "signer_tests": false,
+                "sync_chrome": false,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "afdo_use": false,
+                "boards": [
+                    "arkham"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "dev_installer_prebuilts": false,
+                "grouped": true,
+                "hw_tests": [],
+                "name": "arkham-release",
+                "sync_chrome": false,
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "afdo_use": false,
+                "boards": [
+                    "whirlwind"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "hw_tests": [],
+                "name": "whirlwind-release",
+                "sync_chrome": false,
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "chrome_sdk": false,
+        "description": "Release Builds (canary) (internal); Group config (boards: storm, arkham, whirlwind)",
+        "dev_installer_prebuilts": false,
+        "hw_tests": [],
+        "paygen_skip_testing": true,
+        "signer_tests": false,
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "storm-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "storm"
+        ],
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "storm-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "storm"
+        ],
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "stout-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "stout"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "stout-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "stout"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "stout-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "stout"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "stout-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "stout"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "stout-full": {
+        "_template": "full",
+        "boards": [
+            "stout"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "stout-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "stout"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "stout-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "stout"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [
+            "cros_vm_test"
+        ]
+    },
+    "stout-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "stout"
+        ]
+    },
+    "stout-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "stout"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "stout-release": {
+        "_template": "release",
+        "boards": [
+            "stout"
+        ]
+    },
+    "stout-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "stout"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "stout"
+                ],
+                "grouped": true,
+                "name": "stout-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "stout"
+                ],
+                "grouped": true,
+                "name": "stout-release-afdo-use"
+            }
+        ]
+    },
+    "stout-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "stout"
+        ]
+    },
+    "stout-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "stout"
+        ]
+    },
+    "stout-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "stout"
+        ],
+        "manifest": "official.xml"
+    },
+    "stout-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "stout"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "strago-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "strago"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "strago-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "strago"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "strago-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "strago"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "strago-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "strago"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "strago-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "strago"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "strago-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "strago"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "strago-full": {
+        "_template": "full",
+        "boards": [
+            "strago"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "strago-full-group": {
+        "_template": "full",
+        "boards": [
+            "strago"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "strago"
+                ],
+                "grouped": true,
+                "name": "strago-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ]
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "cyan"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "cyan-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "celes"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "celes-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "ultima"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "ultima-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: strago, cyan, celes, ultima)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "strago-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "strago"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "strago-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "strago"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "strago-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "strago"
+        ]
+    },
+    "strago-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "strago"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "strago-release": {
+        "_template": "release",
+        "boards": [
+            "strago"
+        ]
+    },
+    "strago-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "strago"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "strago"
+                ],
+                "grouped": true,
+                "name": "strago-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "strago"
+                ],
+                "grouped": true,
+                "name": "strago-release-afdo-use"
+            }
+        ]
+    },
+    "strago-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "strago"
+        ]
+    },
+    "strago-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "strago"
+        ]
+    },
+    "strago-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "strago"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "strago"
+                ],
+                "grouped": true,
+                "name": "strago-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "cyan"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "cyan-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "celes"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "celes-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "ultima"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "ultima-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: strago, cyan, celes, ultima)"
+    },
+    "strago-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "strago"
+        ],
+        "manifest": "official.xml"
+    },
+    "strago-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "strago"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "stumpy-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "stumpy"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "stumpy-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "stumpy"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "stumpy-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "stumpy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "stumpy-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "stumpy"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "stumpy-full": {
+        "_template": "full",
+        "boards": [
+            "stumpy"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "stumpy-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "stumpy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "stumpy-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "stumpy"
+        ],
+        "description": "Commit Queue (internal)",
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": true,\n    \"pool\": \"cq\",\n    \"priority\": \"CQ\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "stumpy-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "stumpy"
+        ]
+    },
+    "stumpy-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "stumpy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "stumpy-release": {
+        "_template": "release",
+        "boards": [
+            "stumpy"
+        ]
+    },
+    "stumpy-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "stumpy"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "stumpy"
+                ],
+                "grouped": true,
+                "name": "stumpy-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "stumpy"
+                ],
+                "grouped": true,
+                "name": "stumpy-release-afdo-use"
+            }
+        ]
+    },
+    "stumpy-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "stumpy"
+        ]
+    },
+    "stumpy-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "stumpy"
+        ]
+    },
+    "stumpy-test-ap": {
+        "_template": "test-ap",
+        "boards": [
+            "stumpy"
+        ]
+    },
+    "stumpy-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "stumpy"
+        ],
+        "manifest": "official.xml"
+    },
+    "stumpy-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "stumpy"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "stumpy_moblab-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "stumpy_moblab"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "stumpy_moblab-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "stumpy_moblab"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "stumpy_moblab-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "stumpy_moblab"
+        ],
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "stumpy_moblab-full": {
+        "_template": "full",
+        "boards": [
+            "stumpy_moblab"
+        ],
+        "image_test": false,
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "stumpy_moblab-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "stumpy_moblab"
+        ],
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "stumpy_moblab-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "stumpy_moblab"
+        ],
+        "description": "Commit Queue (internal)",
+        "image_test": false,
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "stumpy_moblab-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "stumpy_moblab"
+        ]
+    },
+    "stumpy_moblab-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "stumpy_moblab"
+        ],
+        "image_test": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "stumpy_moblab-release": {
+        "_template": "moblab-release",
+        "boards": [
+            "stumpy_moblab"
+        ],
+        "image_test": false,
+        "vm_tests": []
+    },
+    "stumpy_moblab-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "stumpy_moblab"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "stumpy_moblab"
+                ],
+                "grouped": true,
+                "image_test": false,
+                "name": "stumpy_moblab-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "stumpy_moblab"
+                ],
+                "grouped": true,
+                "image_test": false,
+                "name": "stumpy_moblab-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "image_test": false,
+        "vm_tests": []
+    },
+    "stumpy_moblab-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "stumpy_moblab"
+        ],
+        "image_test": false,
+        "vm_tests": []
+    },
+    "stumpy_moblab-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "stumpy_moblab"
+        ],
+        "image_test": false,
+        "vm_tests": []
+    },
+    "stumpy_moblab-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "stumpy_moblab"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "stumpy_moblab-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "stumpy_moblab"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "sumo-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "sumo"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "sumo-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "sumo"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "sumo-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "sumo"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "sumo-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "sumo"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "sumo-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "sumo"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "sumo-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "sumo"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "sumo-full": {
+        "_template": "full",
+        "boards": [
+            "sumo"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "sumo-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "sumo"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "sumo-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "sumo"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "sumo-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "sumo"
+        ]
+    },
+    "sumo-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "sumo"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "sumo-release": {
+        "_template": "release",
+        "boards": [
+            "sumo"
+        ]
+    },
+    "sumo-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "sumo"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "sumo"
+                ],
+                "grouped": true,
+                "name": "sumo-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "sumo"
+                ],
+                "grouped": true,
+                "name": "sumo-release-afdo-use"
+            }
+        ]
+    },
+    "sumo-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "sumo"
+        ]
+    },
+    "sumo-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "sumo"
+        ]
+    },
+    "sumo-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "sumo"
+        ],
+        "manifest": "official.xml"
+    },
+    "sumo-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "sumo"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "swanky-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "swanky"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "swanky-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "swanky"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "swanky-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "swanky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "swanky-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "swanky"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "swanky-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "swanky"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "swanky-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "swanky"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "swanky-full": {
+        "_template": "full",
+        "boards": [
+            "swanky"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "swanky-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "swanky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "swanky-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "swanky"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "swanky-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "swanky"
+        ]
+    },
+    "swanky-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "swanky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "swanky-release": {
+        "_template": "release",
+        "boards": [
+            "swanky"
+        ]
+    },
+    "swanky-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "swanky"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "swanky"
+                ],
+                "grouped": true,
+                "name": "swanky-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "swanky"
+                ],
+                "grouped": true,
+                "name": "swanky-release-afdo-use"
+            }
+        ]
+    },
+    "swanky-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "swanky"
+        ]
+    },
+    "swanky-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "swanky"
+        ]
+    },
+    "swanky-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "swanky"
+        ],
+        "manifest": "official.xml"
+    },
+    "swanky-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "swanky"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "sync-test-cbuildbot": {
+        "boards": [],
+        "builder_class_name": "test_builders.ManifestVersionedSyncBuilder",
+        "hw_tests_override": []
+    },
+    "test-ap-group": {
+        "_template": "test-ap",
+        "boards": [
+            "stumpy"
+        ],
+        "child_configs": [
+            {
+                "_template": "test-ap",
+                "boards": [
+                    "stumpy"
+                ],
+                "grouped": true,
+                "name": "stumpy-test-ap"
+            },
+            {
+                "_template": "test-ap",
+                "boards": [
+                    "panther"
+                ],
+                "grouped": true,
+                "name": "panther-test-ap"
+            }
+        ]
+    },
+    "tidus-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "tidus"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "tidus-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "tidus"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "tidus-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "tidus"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "tidus-full": {
+        "_template": "full",
+        "boards": [
+            "tidus"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "tidus-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "tidus"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "tidus-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "tidus"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "tidus-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "tidus"
+        ]
+    },
+    "tidus-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "tidus"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "tidus-release": {
+        "_template": "release",
+        "boards": [
+            "tidus"
+        ]
+    },
+    "tidus-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "tidus"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "tidus"
+                ],
+                "grouped": true,
+                "name": "tidus-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "tidus"
+                ],
+                "grouped": true,
+                "name": "tidus-release-afdo-use"
+            }
+        ]
+    },
+    "tidus-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "tidus"
+        ]
+    },
+    "tidus-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "tidus"
+        ]
+    },
+    "tidus-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "tidus"
+        ],
+        "manifest": "official.xml"
+    },
+    "tidus-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "tidus"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "toolchain-major": {
+        "archive_build_debug": true,
+        "boards": [
+            "x86-generic",
+            "arm-generic",
+            "amd64-generic"
+        ],
+        "build_type": "chroot",
+        "builder_class_name": "sdk_builders.ChrootSdkBuilder",
+        "chrome_sdk": true,
+        "description": "Test next major toolchain revision",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Continuous",
+        "gcc_githash": "svn-mirror/google/main",
+        "git_sync": true,
+        "hw_tests_override": [],
+        "image_test": true,
+        "images": [
+            "base",
+            "recovery",
+            "test",
+            "factory_install"
+        ],
+        "latest_toolchain": true,
+        "use_sdk": false,
+        "usepkg_build_packages": false
+    },
+    "toolchain-minor": {
+        "archive_build_debug": true,
+        "boards": [
+            "x86-generic",
+            "arm-generic",
+            "amd64-generic"
+        ],
+        "build_type": "chroot",
+        "builder_class_name": "sdk_builders.ChrootSdkBuilder",
+        "chrome_sdk": true,
+        "description": "Test next minor toolchain revision",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Continuous",
+        "gcc_githash": "svn-mirror/google/gcc-4_9",
+        "git_sync": true,
+        "hw_tests_override": [],
+        "image_test": true,
+        "images": [
+            "base",
+            "recovery",
+            "test",
+            "factory_install"
+        ],
+        "latest_toolchain": true,
+        "use_sdk": false,
+        "usepkg_build_packages": false
+    },
+    "tricky-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "tricky"
+        ],
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 1,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": false,\n    \"suite\": \"sanity\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": false,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 3,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}",
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": true,\n    \"max_retries\": null,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"PFQ\",\n    \"retry\": false,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 3,\n    \"timeout\": 13200,\n    \"warn_only\": false\n}"
+        ],
+        "manifest": "official.xml"
+    },
+    "tricky-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "tricky"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "tricky-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "tricky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "tricky-full": {
+        "_template": "full",
+        "boards": [
+            "tricky"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "tricky-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "tricky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "tricky-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "tricky"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "tricky-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "tricky"
+        ]
+    },
+    "tricky-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "tricky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "tricky-release": {
+        "_template": "release",
+        "boards": [
+            "tricky"
+        ]
+    },
+    "tricky-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "tricky"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "tricky"
+                ],
+                "grouped": true,
+                "name": "tricky-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "tricky"
+                ],
+                "grouped": true,
+                "name": "tricky-release-afdo-use"
+            }
+        ]
+    },
+    "tricky-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "tricky"
+        ]
+    },
+    "tricky-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "tricky"
+        ]
+    },
+    "tricky-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "tricky"
+        ],
+        "manifest": "official.xml"
+    },
+    "tricky-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "tricky"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "ultima-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "ultima"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "ultima-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "ultima"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "ultima-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "ultima"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "ultima-full": {
+        "_template": "full",
+        "boards": [
+            "ultima"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "ultima-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "ultima"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "ultima-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "ultima"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "ultima-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "ultima"
+        ]
+    },
+    "ultima-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "ultima"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "ultima-release": {
+        "_template": "release",
+        "boards": [
+            "ultima"
+        ]
+    },
+    "ultima-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "ultima"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "ultima"
+                ],
+                "grouped": true,
+                "name": "ultima-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "ultima"
+                ],
+                "grouped": true,
+                "name": "ultima-release-afdo-use"
+            }
+        ]
+    },
+    "ultima-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "ultima"
+        ]
+    },
+    "ultima-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "ultima"
+        ]
+    },
+    "ultima-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "ultima"
+        ],
+        "manifest": "official.xml"
+    },
+    "ultima-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "ultima"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "veyron-b-full-group": {
+        "_template": "full",
+        "boards": [
+            "veyron_gus"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "veyron_gus"
+                ],
+                "grouped": true,
+                "name": "veyron_gus-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "veyron_jaq"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_jaq-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "veyron_minnie"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_minnie-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "veyron_rialto"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_rialto-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: veyron_gus, veyron_jaq, veyron_minnie, veyron_rialto)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron-b-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "veyron_gus"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "veyron_gus"
+                ],
+                "grouped": true,
+                "name": "veyron_gus-release",
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "veyron_jaq"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_jaq-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "veyron_minnie"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_minnie-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "veyron_rialto"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_rialto-release",
+                "sync_chrome": false,
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: veyron_gus, veyron_jaq, veyron_minnie, veyron_rialto)",
+        "important": true,
+        "vm_tests": []
+    },
+    "veyron-c-full-group": {
+        "_template": "full",
+        "boards": [
+            "veyron_brain"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "veyron_brain"
+                ],
+                "grouped": true,
+                "name": "veyron_brain-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "veyron_danger"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_danger-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "veyron_thea"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_thea-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "veyron_shark"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_shark-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: veyron_brain, veyron_danger, veyron_thea, veyron_shark)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron-c-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "veyron_brain"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "veyron_brain"
+                ],
+                "grouped": true,
+                "name": "veyron_brain-release",
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "veyron_danger"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_danger-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "veyron_thea"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_thea-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "veyron_shark"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_shark-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: veyron_brain, veyron_danger, veyron_thea, veyron_shark)",
+        "vm_tests": []
+    },
+    "veyron-d-full-group": {
+        "_template": "full",
+        "boards": [
+            "veyron_mickey"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "veyron_mickey"
+                ],
+                "grouped": true,
+                "name": "veyron_mickey-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "veyron_romy"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_romy-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: veyron_mickey, veyron_romy)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron-d-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "veyron_mickey"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "veyron_mickey"
+                ],
+                "grouped": true,
+                "hw_tests": [],
+                "name": "veyron_mickey-release",
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "veyron_romy"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "hw_tests": [],
+                "name": "veyron_romy-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: veyron_mickey, veyron_romy)",
+        "hw_tests": [],
+        "important": true,
+        "vm_tests": []
+    },
+    "veyron-full-group": {
+        "_template": "full",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "child_configs": [
+            {
+                "_template": "full",
+                "boards": [
+                    "veyron_pinky"
+                ],
+                "grouped": true,
+                "name": "veyron_pinky-full",
+                "prebuilts": "public",
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "veyron_jerry"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_jerry-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "veyron_mighty"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_mighty-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            },
+            {
+                "_template": "full",
+                "boards": [
+                    "veyron_speedy"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_speedy-full",
+                "prebuilts": "public",
+                "unittests": null,
+                "useflags": [
+                    "-chrome_internal"
+                ],
+                "vm_tests": []
+            }
+        ],
+        "description": "Full Builds; Group config (boards: veyron_pinky, veyron_jerry, veyron_mighty, veyron_speedy)",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron-release-group": {
+        "_template": "release",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "veyron_pinky"
+                ],
+                "grouped": true,
+                "name": "veyron_pinky-release",
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "veyron_jerry"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_jerry-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "veyron_mighty"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_mighty-release",
+                "unittests": null,
+                "vm_tests": []
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "veyron_speedy"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "name": "veyron_speedy-release",
+                "unittests": null,
+                "vm_tests": []
+            }
+        ],
+        "description": "Release Builds (canary) (internal); Group config (boards: veyron_pinky, veyron_jerry, veyron_mighty, veyron_speedy)",
+        "important": true,
+        "vm_tests": []
+    },
+    "veyron_brain-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "veyron_brain"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_brain-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "veyron_brain"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_brain-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "veyron_brain"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_brain-full": {
+        "_template": "full",
+        "boards": [
+            "veyron_brain"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_brain-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_brain"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_brain-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "veyron_brain"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "veyron_brain-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "veyron_brain"
+        ]
+    },
+    "veyron_brain-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "veyron_brain"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_brain-release": {
+        "_template": "release",
+        "boards": [
+            "veyron_brain"
+        ],
+        "vm_tests": []
+    },
+    "veyron_brain-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_brain"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "veyron_brain"
+                ],
+                "grouped": true,
+                "name": "veyron_brain-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "veyron_brain"
+                ],
+                "grouped": true,
+                "name": "veyron_brain-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "veyron_brain-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_brain"
+        ],
+        "vm_tests": []
+    },
+    "veyron_brain-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "veyron_brain"
+        ],
+        "vm_tests": []
+    },
+    "veyron_brain-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "veyron_brain"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_brain-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "veyron_brain"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_danger-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "veyron_danger"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_danger-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "veyron_danger"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_danger-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "veyron_danger"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_danger-full": {
+        "_template": "full",
+        "boards": [
+            "veyron_danger"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_danger-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_danger"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_danger-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "veyron_danger"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "veyron_danger-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "veyron_danger"
+        ]
+    },
+    "veyron_danger-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "veyron_danger"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_danger-release": {
+        "_template": "release",
+        "boards": [
+            "veyron_danger"
+        ],
+        "vm_tests": []
+    },
+    "veyron_danger-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_danger"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "veyron_danger"
+                ],
+                "grouped": true,
+                "name": "veyron_danger-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "veyron_danger"
+                ],
+                "grouped": true,
+                "name": "veyron_danger-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "veyron_danger-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_danger"
+        ],
+        "vm_tests": []
+    },
+    "veyron_danger-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "veyron_danger"
+        ],
+        "vm_tests": []
+    },
+    "veyron_danger-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "veyron_danger"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_danger-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "veyron_danger"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_gus-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "veyron_gus"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_gus-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "veyron_gus"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_gus-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "veyron_gus"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_gus-full": {
+        "_template": "full",
+        "boards": [
+            "veyron_gus"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_gus-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_gus"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_gus-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "veyron_gus"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "veyron_gus-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "veyron_gus"
+        ]
+    },
+    "veyron_gus-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "veyron_gus"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_gus-release": {
+        "_template": "release",
+        "boards": [
+            "veyron_gus"
+        ],
+        "vm_tests": []
+    },
+    "veyron_gus-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_gus"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "veyron_gus"
+                ],
+                "grouped": true,
+                "name": "veyron_gus-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "veyron_gus"
+                ],
+                "grouped": true,
+                "name": "veyron_gus-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "veyron_gus-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_gus"
+        ],
+        "vm_tests": []
+    },
+    "veyron_gus-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "veyron_gus"
+        ],
+        "vm_tests": []
+    },
+    "veyron_gus-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "veyron_gus"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_gus-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "veyron_gus"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_jaq-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "veyron_jaq"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_jaq-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "veyron_jaq"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_jaq-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "veyron_jaq"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_jaq-full": {
+        "_template": "full",
+        "boards": [
+            "veyron_jaq"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_jaq-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_jaq"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_jaq-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "veyron_jaq"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "veyron_jaq-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "veyron_jaq"
+        ]
+    },
+    "veyron_jaq-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "veyron_jaq"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_jaq-release": {
+        "_template": "release",
+        "boards": [
+            "veyron_jaq"
+        ],
+        "vm_tests": []
+    },
+    "veyron_jaq-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_jaq"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "veyron_jaq"
+                ],
+                "grouped": true,
+                "name": "veyron_jaq-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "veyron_jaq"
+                ],
+                "grouped": true,
+                "name": "veyron_jaq-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "veyron_jaq-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_jaq"
+        ],
+        "vm_tests": []
+    },
+    "veyron_jaq-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "veyron_jaq"
+        ],
+        "vm_tests": []
+    },
+    "veyron_jaq-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "veyron_jaq"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_jaq-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "veyron_jaq"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_jerry-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "veyron_jerry"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_jerry-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "veyron_jerry"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_jerry-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "veyron_jerry"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_jerry-full": {
+        "_template": "full",
+        "boards": [
+            "veyron_jerry"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_jerry-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_jerry"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_jerry-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "veyron_jerry"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "veyron_jerry-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "veyron_jerry"
+        ]
+    },
+    "veyron_jerry-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "veyron_jerry"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_jerry-release": {
+        "_template": "release",
+        "boards": [
+            "veyron_jerry"
+        ],
+        "vm_tests": []
+    },
+    "veyron_jerry-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_jerry"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "veyron_jerry"
+                ],
+                "grouped": true,
+                "name": "veyron_jerry-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "veyron_jerry"
+                ],
+                "grouped": true,
+                "name": "veyron_jerry-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "veyron_jerry-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_jerry"
+        ],
+        "vm_tests": []
+    },
+    "veyron_jerry-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "veyron_jerry"
+        ],
+        "vm_tests": []
+    },
+    "veyron_jerry-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "veyron_jerry"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_jerry-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "veyron_jerry"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_jerry-wificell-pre-cq": {
+        "_template": "wificell-pre-cq",
+        "boards": [
+            "veyron_jerry"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_mickey-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "veyron_mickey"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_mickey-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "veyron_mickey"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_mickey-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "veyron_mickey"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_mickey-full": {
+        "_template": "full",
+        "boards": [
+            "veyron_mickey"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_mickey-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_mickey"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_mickey-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "veyron_mickey"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "veyron_mickey-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "veyron_mickey"
+        ]
+    },
+    "veyron_mickey-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "veyron_mickey"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_mickey-release": {
+        "_template": "release",
+        "boards": [
+            "veyron_mickey"
+        ],
+        "hw_tests": [],
+        "vm_tests": []
+    },
+    "veyron_mickey-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_mickey"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "veyron_mickey"
+                ],
+                "grouped": true,
+                "name": "veyron_mickey-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "veyron_mickey"
+                ],
+                "grouped": true,
+                "name": "veyron_mickey-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "veyron_mickey-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_mickey"
+        ],
+        "vm_tests": []
+    },
+    "veyron_mickey-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "veyron_mickey"
+        ],
+        "vm_tests": []
+    },
+    "veyron_mickey-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "veyron_mickey"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_mickey-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "veyron_mickey"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_mighty-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "veyron_mighty"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_mighty-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "veyron_mighty"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_mighty-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "veyron_mighty"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_mighty-full": {
+        "_template": "full",
+        "boards": [
+            "veyron_mighty"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_mighty-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_mighty"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_mighty-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "veyron_mighty"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "veyron_mighty-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "veyron_mighty"
+        ]
+    },
+    "veyron_mighty-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "veyron_mighty"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_mighty-release": {
+        "_template": "release",
+        "boards": [
+            "veyron_mighty"
+        ],
+        "vm_tests": []
+    },
+    "veyron_mighty-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_mighty"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "veyron_mighty"
+                ],
+                "grouped": true,
+                "name": "veyron_mighty-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "veyron_mighty"
+                ],
+                "grouped": true,
+                "name": "veyron_mighty-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "veyron_mighty-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_mighty"
+        ],
+        "vm_tests": []
+    },
+    "veyron_mighty-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "veyron_mighty"
+        ],
+        "vm_tests": []
+    },
+    "veyron_mighty-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "veyron_mighty"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_mighty-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "veyron_mighty"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_minnie-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "veyron_minnie"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_minnie-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "veyron_minnie"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_minnie-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "veyron_minnie"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_minnie-full": {
+        "_template": "full",
+        "boards": [
+            "veyron_minnie"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_minnie-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_minnie"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_minnie-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "veyron_minnie"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "veyron_minnie-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "veyron_minnie"
+        ]
+    },
+    "veyron_minnie-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "veyron_minnie"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_minnie-release": {
+        "_template": "release",
+        "boards": [
+            "veyron_minnie"
+        ],
+        "vm_tests": []
+    },
+    "veyron_minnie-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_minnie"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "veyron_minnie"
+                ],
+                "grouped": true,
+                "name": "veyron_minnie-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "veyron_minnie"
+                ],
+                "grouped": true,
+                "name": "veyron_minnie-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "veyron_minnie-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_minnie"
+        ],
+        "vm_tests": []
+    },
+    "veyron_minnie-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "veyron_minnie"
+        ],
+        "vm_tests": []
+    },
+    "veyron_minnie-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "veyron_minnie"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_minnie-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "veyron_minnie"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_pinky-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_pinky-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_pinky-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_pinky-full": {
+        "_template": "full",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_pinky-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_pinky-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "veyron_pinky-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "veyron_pinky"
+        ]
+    },
+    "veyron_pinky-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_pinky-release": {
+        "_template": "release",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "vm_tests": []
+    },
+    "veyron_pinky-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "veyron_pinky"
+                ],
+                "grouped": true,
+                "name": "veyron_pinky-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "veyron_pinky"
+                ],
+                "grouped": true,
+                "name": "veyron_pinky-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "veyron_pinky-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "vm_tests": []
+    },
+    "veyron_pinky-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "vm_tests": []
+    },
+    "veyron_pinky-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_pinky-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "veyron_pinky"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_rialto-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "veyron_rialto"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_rialto-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "veyron_rialto"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_rialto-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "veyron_rialto"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_rialto-full": {
+        "_template": "full",
+        "boards": [
+            "veyron_rialto"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_rialto-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_rialto"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_rialto-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "veyron_rialto"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "veyron_rialto-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "veyron_rialto"
+        ]
+    },
+    "veyron_rialto-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "veyron_rialto"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_rialto-release": {
+        "_template": "release",
+        "boards": [
+            "veyron_rialto"
+        ],
+        "chrome_sdk": false,
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "veyron_rialto-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_rialto"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "veyron_rialto"
+                ],
+                "grouped": true,
+                "name": "veyron_rialto-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "veyron_rialto"
+                ],
+                "grouped": true,
+                "name": "veyron_rialto-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "veyron_rialto-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_rialto"
+        ],
+        "vm_tests": []
+    },
+    "veyron_rialto-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "veyron_rialto"
+        ],
+        "vm_tests": []
+    },
+    "veyron_rialto-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "veyron_rialto"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_rialto-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "veyron_rialto"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_romy-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "veyron_romy"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_romy-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "veyron_romy"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_romy-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "veyron_romy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_romy-full": {
+        "_template": "full",
+        "boards": [
+            "veyron_romy"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_romy-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_romy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_romy-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "veyron_romy"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "veyron_romy-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "veyron_romy"
+        ]
+    },
+    "veyron_romy-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "veyron_romy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_romy-release": {
+        "_template": "release",
+        "boards": [
+            "veyron_romy"
+        ],
+        "hw_tests": [],
+        "vm_tests": []
+    },
+    "veyron_romy-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_romy"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "veyron_romy"
+                ],
+                "grouped": true,
+                "name": "veyron_romy-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "veyron_romy"
+                ],
+                "grouped": true,
+                "name": "veyron_romy-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "veyron_romy-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_romy"
+        ],
+        "vm_tests": []
+    },
+    "veyron_romy-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "veyron_romy"
+        ],
+        "vm_tests": []
+    },
+    "veyron_romy-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "veyron_romy"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_romy-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "veyron_romy"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_shark-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "veyron_shark"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_shark-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "veyron_shark"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_shark-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "veyron_shark"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_shark-full": {
+        "_template": "full",
+        "boards": [
+            "veyron_shark"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_shark-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_shark"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_shark-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "veyron_shark"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "veyron_shark-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "veyron_shark"
+        ]
+    },
+    "veyron_shark-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "veyron_shark"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_shark-release": {
+        "_template": "release",
+        "boards": [
+            "veyron_shark"
+        ],
+        "vm_tests": []
+    },
+    "veyron_shark-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_shark"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "veyron_shark"
+                ],
+                "grouped": true,
+                "name": "veyron_shark-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "veyron_shark"
+                ],
+                "grouped": true,
+                "name": "veyron_shark-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "veyron_shark-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_shark"
+        ],
+        "vm_tests": []
+    },
+    "veyron_shark-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "veyron_shark"
+        ],
+        "vm_tests": []
+    },
+    "veyron_shark-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "veyron_shark"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_shark-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "veyron_shark"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_speedy-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "veyron_speedy"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_speedy-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "veyron_speedy"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_speedy-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "veyron_speedy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_speedy-full": {
+        "_template": "full",
+        "boards": [
+            "veyron_speedy"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_speedy-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_speedy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_speedy-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "veyron_speedy"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "veyron_speedy-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "veyron_speedy"
+        ]
+    },
+    "veyron_speedy-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "veyron_speedy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_speedy-release": {
+        "_template": "release",
+        "boards": [
+            "veyron_speedy"
+        ],
+        "vm_tests": []
+    },
+    "veyron_speedy-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_speedy"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "veyron_speedy"
+                ],
+                "grouped": true,
+                "name": "veyron_speedy-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "veyron_speedy"
+                ],
+                "grouped": true,
+                "name": "veyron_speedy-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "veyron_speedy-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_speedy"
+        ],
+        "vm_tests": []
+    },
+    "veyron_speedy-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "veyron_speedy"
+        ],
+        "vm_tests": []
+    },
+    "veyron_speedy-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "veyron_speedy"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_speedy-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "veyron_speedy"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_speedy-wificell-pre-cq": {
+        "_template": "wificell-pre-cq",
+        "boards": [
+            "veyron_speedy"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_thea-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "veyron_thea"
+        ],
+        "important": false,
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_thea-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "veyron_thea"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_thea-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "veyron_thea"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_thea-full": {
+        "_template": "full",
+        "boards": [
+            "veyron_thea"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_thea-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "veyron_thea"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "veyron_thea-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "veyron_thea"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "veyron_thea-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "veyron_thea"
+        ]
+    },
+    "veyron_thea-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "veyron_thea"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "veyron_thea-release": {
+        "_template": "release",
+        "boards": [
+            "veyron_thea"
+        ],
+        "vm_tests": []
+    },
+    "veyron_thea-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_thea"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "veyron_thea"
+                ],
+                "grouped": true,
+                "name": "veyron_thea-release-afdo-generate",
+                "vm_tests": []
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "veyron_thea"
+                ],
+                "grouped": true,
+                "name": "veyron_thea-release-afdo-use",
+                "vm_tests": []
+            }
+        ],
+        "vm_tests": []
+    },
+    "veyron_thea-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "veyron_thea"
+        ],
+        "vm_tests": []
+    },
+    "veyron_thea-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "veyron_thea"
+        ],
+        "vm_tests": []
+    },
+    "veyron_thea-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "veyron_thea"
+        ],
+        "manifest": "official.xml",
+        "vm_tests": []
+    },
+    "veyron_thea-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "veyron_thea"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "whirlwind-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "whirlwind"
+        ],
+        "chrome_sdk": false,
+        "important": false,
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "whirlwind-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "whirlwind"
+        ],
+        "chrome_sdk": false,
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "whirlwind-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "whirlwind"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "whirlwind-full": {
+        "_template": "full",
+        "boards": [
+            "whirlwind"
+        ],
+        "chrome_sdk": false,
+        "prebuilts": "public",
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "whirlwind-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "whirlwind"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "whirlwind-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "whirlwind"
+        ],
+        "chrome_sdk": false,
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [],
+        "vm_tests_override": null
+    },
+    "whirlwind-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "whirlwind"
+        ]
+    },
+    "whirlwind-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "whirlwind"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "sync_chrome": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "whirlwind-release": {
+        "_template": "release",
+        "afdo_use": false,
+        "boards": [
+            "whirlwind"
+        ],
+        "chrome_sdk": false,
+        "hw_tests": [],
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "whirlwind-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "whirlwind"
+        ],
+        "manifest": "official.xml",
+        "sync_chrome": false,
+        "vm_tests": []
+    },
+    "whirlwind-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "whirlwind"
+        ],
+        "sync_chrome": false,
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "winky-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "winky"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "winky-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "winky"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "winky-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "winky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "winky-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "winky"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "winky-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "winky"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "winky-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "winky"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "winky-full": {
+        "_template": "full",
+        "boards": [
+            "winky"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "winky-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "winky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "winky-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "winky"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "winky-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "winky"
+        ]
+    },
+    "winky-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "winky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "winky-release": {
+        "_template": "release",
+        "boards": [
+            "winky"
+        ]
+    },
+    "winky-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "winky"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "winky"
+                ],
+                "grouped": true,
+                "name": "winky-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "winky"
+                ],
+                "grouped": true,
+                "name": "winky-release-afdo-use"
+            }
+        ]
+    },
+    "winky-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "winky"
+        ]
+    },
+    "winky-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "winky"
+        ]
+    },
+    "winky-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "winky"
+        ],
+        "manifest": "official.xml"
+    },
+    "winky-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "winky"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "winky-wificell-pre-cq": {
+        "_template": "wificell-pre-cq",
+        "boards": [
+            "winky"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "wizpig-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "wizpig"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "wizpig-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "wizpig"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "wizpig-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "wizpig"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "wizpig-full": {
+        "_template": "full",
+        "boards": [
+            "wizpig"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "wizpig-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "wizpig"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "wizpig-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "wizpig"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "wizpig-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "wizpig"
+        ]
+    },
+    "wizpig-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "wizpig"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "wizpig-release": {
+        "_template": "release",
+        "boards": [
+            "wizpig"
+        ]
+    },
+    "wizpig-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "wizpig"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "wizpig"
+                ],
+                "grouped": true,
+                "name": "wizpig-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "wizpig"
+                ],
+                "grouped": true,
+                "name": "wizpig-release-afdo-use"
+            }
+        ]
+    },
+    "wizpig-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "wizpig"
+        ]
+    },
+    "wizpig-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "wizpig"
+        ]
+    },
+    "wizpig-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "wizpig"
+        ],
+        "manifest": "official.xml"
+    },
+    "wizpig-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "wizpig"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "wolf-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "wolf"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "wolf-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "wolf"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "wolf-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "wolf"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "wolf-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "wolf"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "wolf-full": {
+        "_template": "full",
+        "boards": [
+            "wolf"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "wolf-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "wolf"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "wolf-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "wolf"
+        ],
+        "description": "Commit Queue (internal)",
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": true,\n    \"pool\": \"cq\",\n    \"priority\": \"CQ\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "wolf-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "wolf"
+        ]
+    },
+    "wolf-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "wolf"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "wolf-release": {
+        "_template": "release",
+        "boards": [
+            "wolf"
+        ]
+    },
+    "wolf-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "wolf"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "wolf"
+                ],
+                "grouped": true,
+                "name": "wolf-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "wolf"
+                ],
+                "grouped": true,
+                "name": "wolf-release-afdo-use"
+            }
+        ]
+    },
+    "wolf-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "wolf"
+        ]
+    },
+    "wolf-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "wolf"
+        ]
+    },
+    "wolf-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "wolf"
+        ],
+        "manifest": "official.xml"
+    },
+    "wolf-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "wolf"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "wolf-tot-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "wolf"
+        ],
+        "description": "Commit Queue (internal)",
+        "do_not_apply_cq_patches": true,
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": true,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 1,\n    \"num\": 1,\n    \"offload_failures_only\": false,\n    \"pool\": \"bvt\",\n    \"priority\": \"CQ\",\n    \"retry\": true,\n    \"suite\": \"sanity\",\n    \"suite_min_duts\": 1,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}",
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": true,\n    \"pool\": \"bvt\",\n    \"priority\": \"CQ\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 10,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}",
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": true,\n    \"pool\": \"bvt\",\n    \"priority\": \"CQ\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 10,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": false,
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "x32-generic-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "x32-generic"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "usepkg_toolchain": false
+    },
+    "x32-generic-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "x32-generic"
+        ],
+        "usepkg_toolchain": false
+    },
+    "x32-generic-full": {
+        "_template": "full",
+        "boards": [
+            "x32-generic"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "usepkg_toolchain": false
+    },
+    "x32-generic-incremental": {
+        "_template": "incremental",
+        "boards": [
+            "x32-generic"
+        ],
+        "usepkg_toolchain": false,
+        "vm_tests": []
+    },
+    "x32-generic-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "x32-generic"
+        ],
+        "usepkg_toolchain": false
+    },
+    "x32-generic-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "x32-generic"
+        ],
+        "important": false,
+        "usepkg_toolchain": false
+    },
+    "x32-generic-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "x32-generic"
+        ],
+        "usepkg_toolchain": false
+    },
+    "x32-generic-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "x32-generic"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ],
+        "usepkg_toolchain": false
+    },
+    "x86-alex-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "x86-alex"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "x86-alex-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "x86-alex"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-alex-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "x86-alex"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "x86-alex-full": {
+        "_template": "full",
+        "boards": [
+            "x86-alex"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-alex-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "x86-alex"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "x86-alex-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "x86-alex"
+        ],
+        "description": "Commit Queue (internal)",
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": true,\n    \"pool\": \"cq\",\n    \"priority\": \"CQ\",\n    \"retry\": true,\n    \"suite\": \"bvt-cq\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "x86-alex-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "x86-alex"
+        ]
+    },
+    "x86-alex-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "x86-alex"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "x86-alex-release": {
+        "_template": "release",
+        "boards": [
+            "x86-alex"
+        ]
+    },
+    "x86-alex-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "x86-alex"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "x86-alex"
+                ],
+                "grouped": true,
+                "name": "x86-alex-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "x86-alex"
+                ],
+                "grouped": true,
+                "name": "x86-alex-release-afdo-use"
+            }
+        ]
+    },
+    "x86-alex-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "x86-alex"
+        ]
+    },
+    "x86-alex-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "x86-alex"
+        ]
+    },
+    "x86-alex-release-group": {
+        "_template": "release",
+        "boards": [
+            "x86-alex"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "x86-alex"
+                ],
+                "grouped": true,
+                "name": "x86-alex-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "x86-alex_he"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "hw_tests": [],
+                "name": "x86-alex_he-release",
+                "paygen_skip_testing": true,
+                "unittests": null,
+                "upload_hw_test_artifacts": false,
+                "vm_tests": []
+            }
+        ]
+    },
+    "x86-alex-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "x86-alex"
+        ],
+        "manifest": "official.xml"
+    },
+    "x86-alex-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "x86-alex"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-alex_he-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "x86-alex_he"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "x86-alex_he-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "x86-alex_he"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-alex_he-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "x86-alex_he"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "x86-alex_he-full": {
+        "_template": "full",
+        "boards": [
+            "x86-alex_he"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-alex_he-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "x86-alex_he"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "x86-alex_he-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "x86-alex_he"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "x86-alex_he-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "x86-alex_he"
+        ]
+    },
+    "x86-alex_he-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "x86-alex_he"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "x86-alex_he-release": {
+        "_template": "release",
+        "boards": [
+            "x86-alex_he"
+        ],
+        "build_packages_in_background": true,
+        "chrome_sdk": false,
+        "chrome_sdk_build_chrome": false,
+        "hw_tests": [],
+        "paygen_skip_testing": true,
+        "unittests": null,
+        "upload_hw_test_artifacts": false,
+        "vm_tests": []
+    },
+    "x86-alex_he-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "x86-alex_he"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "x86-alex_he"
+                ],
+                "grouped": true,
+                "name": "x86-alex_he-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "x86-alex_he"
+                ],
+                "grouped": true,
+                "name": "x86-alex_he-release-afdo-use"
+            }
+        ]
+    },
+    "x86-alex_he-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "x86-alex_he"
+        ]
+    },
+    "x86-alex_he-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "x86-alex_he"
+        ]
+    },
+    "x86-alex_he-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "x86-alex_he"
+        ],
+        "manifest": "official.xml"
+    },
+    "x86-alex_he-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "x86-alex_he"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-generic-asan": {
+        "_template": "asan",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "x86-generic"
+        ],
+        "build_type": "binary",
+        "description": "Build with Address Sanitizer (Clang)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Continuous",
+        "trybot_list": true,
+        "uprev": false
+    },
+    "x86-generic-asan-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "x86-generic"
+        ],
+        "description": "Paladin build with Address Sanitizer (Clang)",
+        "disk_layout": "2gb-rootfs",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-ASAN",
+        "important": false,
+        "profile": "asan",
+        "vm_tests_override": null
+    },
+    "x86-generic-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "x86-generic"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-generic-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "x86-generic"
+        ]
+    },
+    "x86-generic-full": {
+        "_template": "full",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "x86-generic"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-generic-incremental": {
+        "_template": "incremental",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "x86-generic"
+        ]
+    },
+    "x86-generic-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "x86-generic"
+        ]
+    },
+    "x86-generic-nowithdebug-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "x86-generic"
+        ],
+        "description": "Commit Queue (internal, nowithdebug)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": false,
+        "useflags": [
+            "-cros-debug",
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "x86-generic-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromiumos",
+        "boards": [
+            "x86-generic"
+        ]
+    },
+    "x86-generic-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "x86-generic"
+        ]
+    },
+    "x86-generic-telem-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "x86-generic"
+        ],
+        "description": "Telemetry Builds",
+        "vm_tests": [
+            "telemetry_suite"
+        ]
+    },
+    "x86-generic-telemetry": {
+        "_template": "telemetry",
+        "boards": [
+            "x86-generic"
+        ]
+    },
+    "x86-generic-tot-asan-informational": {
+        "_template": "tot-asan-informational",
+        "boards": [
+            "x86-generic"
+        ]
+    },
+    "x86-generic-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "x86-generic"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-generic_freon-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "x86-generic_freon"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "vm_tests": []
+    },
+    "x86-generic_freon-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "x86-generic_freon"
+        ]
+    },
+    "x86-generic_freon-full": {
+        "_template": "full",
+        "boards": [
+            "x86-generic_freon"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-generic_freon-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "x86-generic_freon"
+        ]
+    },
+    "x86-generic_freon-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "x86-generic_freon"
+        ],
+        "important": false,
+        "vm_tests": []
+    },
+    "x86-generic_freon-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "x86-generic_freon"
+        ]
+    },
+    "x86-generic_freon-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "x86-generic_freon"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-mario-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "x86-mario"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "x86-mario-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "x86-mario"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-mario-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "x86-mario"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "x86-mario-factory": {
+        "_template": "factory",
+        "boards": [
+            "x86-mario"
+        ]
+    },
+    "x86-mario-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "x86-mario"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "x86-mario-full": {
+        "_template": "full",
+        "boards": [
+            "x86-mario"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-mario-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "x86-mario"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "x86-mario-nowithdebug-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "x86-mario"
+        ],
+        "description": "Commit Queue (internal, nowithdebug)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": false,
+        "useflags": [
+            "-cros-debug",
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "x86-mario-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "x86-mario"
+        ],
+        "description": "Commit Queue (internal)",
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": [
+            "pfq_suite"
+        ]
+    },
+    "x86-mario-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "x86-mario"
+        ]
+    },
+    "x86-mario-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "x86-mario"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "x86-mario-release": {
+        "_template": "release",
+        "boards": [
+            "x86-mario"
+        ]
+    },
+    "x86-mario-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "x86-mario"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "x86-mario"
+                ],
+                "grouped": true,
+                "name": "x86-mario-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "x86-mario"
+                ],
+                "grouped": true,
+                "name": "x86-mario-release-afdo-use"
+            }
+        ]
+    },
+    "x86-mario-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "x86-mario"
+        ]
+    },
+    "x86-mario-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "x86-mario"
+        ]
+    },
+    "x86-mario-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "x86-mario"
+        ],
+        "manifest": "official.xml"
+    },
+    "x86-mario-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "x86-mario"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-zgb-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "x86-zgb"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "x86-zgb-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "x86-zgb"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-zgb-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "x86-zgb"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "x86-zgb-full": {
+        "_template": "full",
+        "boards": [
+            "x86-zgb"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-zgb-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "x86-zgb"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "x86-zgb-paladin": {
+        "_template": "paladin",
+        "active_waterfall": "chromeos",
+        "boards": [
+            "x86-zgb"
+        ],
+        "description": "Commit Queue (internal)",
+        "hw_tests": [
+            "{\n    \"async\": false,\n    \"blocking\": false,\n    \"critical\": false,\n    \"file_bugs\": false,\n    \"max_retries\": 10,\n    \"minimum_duts\": 4,\n    \"num\": 6,\n    \"offload_failures_only\": true,\n    \"pool\": \"cq\",\n    \"priority\": \"CQ\",\n    \"retry\": true,\n    \"suite\": \"bvt-inline\",\n    \"suite_min_duts\": 0,\n    \"timeout\": 7200,\n    \"warn_only\": false\n}"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "x86-zgb-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "x86-zgb"
+        ]
+    },
+    "x86-zgb-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "x86-zgb"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "x86-zgb-release": {
+        "_template": "release",
+        "boards": [
+            "x86-zgb"
+        ]
+    },
+    "x86-zgb-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "x86-zgb"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "x86-zgb"
+                ],
+                "grouped": true,
+                "name": "x86-zgb-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "x86-zgb"
+                ],
+                "grouped": true,
+                "name": "x86-zgb-release-afdo-use"
+            }
+        ]
+    },
+    "x86-zgb-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "x86-zgb"
+        ]
+    },
+    "x86-zgb-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "x86-zgb"
+        ]
+    },
+    "x86-zgb-release-group": {
+        "_template": "release",
+        "boards": [
+            "x86-zgb"
+        ],
+        "child_configs": [
+            {
+                "_template": "release",
+                "boards": [
+                    "x86-zgb"
+                ],
+                "grouped": true,
+                "name": "x86-zgb-release"
+            },
+            {
+                "_template": "release",
+                "boards": [
+                    "x86-zgb_he"
+                ],
+                "build_packages_in_background": true,
+                "chrome_sdk": false,
+                "chrome_sdk_build_chrome": false,
+                "grouped": true,
+                "hw_tests": [],
+                "name": "x86-zgb_he-release",
+                "paygen_skip_testing": true,
+                "unittests": null,
+                "upload_hw_test_artifacts": false,
+                "vm_tests": []
+            }
+        ]
+    },
+    "x86-zgb-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "x86-zgb"
+        ],
+        "manifest": "official.xml"
+    },
+    "x86-zgb-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "x86-zgb"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-zgb_he-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "x86-zgb_he"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "x86-zgb_he-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "x86-zgb_he"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-zgb_he-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "x86-zgb_he"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "x86-zgb_he-full": {
+        "_template": "full",
+        "boards": [
+            "x86-zgb_he"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "x86-zgb_he-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "x86-zgb_he"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "x86-zgb_he-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "x86-zgb_he"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "x86-zgb_he-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "x86-zgb_he"
+        ]
+    },
+    "x86-zgb_he-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "x86-zgb_he"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "x86-zgb_he-release": {
+        "_template": "release",
+        "boards": [
+            "x86-zgb_he"
+        ],
+        "build_packages_in_background": true,
+        "chrome_sdk": false,
+        "chrome_sdk_build_chrome": false,
+        "hw_tests": [],
+        "paygen_skip_testing": true,
+        "unittests": null,
+        "upload_hw_test_artifacts": false,
+        "vm_tests": []
+    },
+    "x86-zgb_he-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "x86-zgb_he"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "x86-zgb_he"
+                ],
+                "grouped": true,
+                "name": "x86-zgb_he-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "x86-zgb_he"
+                ],
+                "grouped": true,
+                "name": "x86-zgb_he-release-afdo-use"
+            }
+        ]
+    },
+    "x86-zgb_he-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "x86-zgb_he"
+        ]
+    },
+    "x86-zgb_he-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "x86-zgb_he"
+        ]
+    },
+    "x86-zgb_he-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "x86-zgb_he"
+        ],
+        "manifest": "official.xml"
+    },
+    "x86-zgb_he-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "x86-zgb_he"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "zako-chrome-pfq": {
+        "_template": "chrome-pfq",
+        "boards": [
+            "zako"
+        ],
+        "important": false,
+        "manifest": "official.xml"
+    },
+    "zako-chromium-pfq": {
+        "_template": "chromium-pfq",
+        "boards": [
+            "zako"
+        ],
+        "description": "Preflight Chromium Uprev & Build (internal)",
+        "doc": "http://www.chromium.org/chromium-os/build/builder-overview#TOC-Chrome-PFQ",
+        "important": false,
+        "internal": true,
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "zako-compile-only-pre-cq": {
+        "_template": "compile-only-pre-cq",
+        "boards": [
+            "zako"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "zako-depthcharge-firmware": {
+        "_template": "depthcharge-firmware",
+        "boards": [
+            "zako"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "zako-depthcharge-full-firmware": {
+        "_template": "depthcharge-full-firmware",
+        "boards": [
+            "zako"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty",
+            "depthcharge"
+        ]
+    },
+    "zako-firmware": {
+        "_template": "firmware",
+        "boards": [
+            "zako"
+        ],
+        "manifest": "official.xml",
+        "useflags": [
+            "chrome_internal",
+            "chromeless_tty"
+        ]
+    },
+    "zako-full": {
+        "_template": "full",
+        "boards": [
+            "zako"
+        ],
+        "prebuilts": "public",
+        "useflags": [
+            "-chrome_internal"
+        ]
+    },
+    "zako-no-vmtest-pre-cq": {
+        "_template": "no-vmtest-pre-cq",
+        "boards": [
+            "zako"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "zako-paladin": {
+        "_template": "paladin",
+        "boards": [
+            "zako"
+        ],
+        "description": "Commit Queue (internal)",
+        "important": false,
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "prebuilts": "private",
+        "useflags": [
+            "chrome_internal"
+        ],
+        "vm_tests": []
+    },
+    "zako-payloads": {
+        "_template": "payloads",
+        "boards": [
+            "zako"
+        ]
+    },
+    "zako-pre-cq": {
+        "_template": "pre-cq",
+        "boards": [
+            "zako"
+        ],
+        "internal": true,
+        "manifest": "official.xml",
+        "manifest_repo_url": "https://chrome-internal-review.googlesource.com/chromeos/manifest-internal",
+        "overlays": "both",
+        "useflags": [
+            "chrome_internal"
+        ]
+    },
+    "zako-release": {
+        "_template": "release",
+        "boards": [
+            "zako"
+        ]
+    },
+    "zako-release-afdo": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "zako"
+        ],
+        "child_configs": [
+            {
+                "_template": "release-afdo-generate",
+                "boards": [
+                    "zako"
+                ],
+                "grouped": true,
+                "name": "zako-release-afdo-generate"
+            },
+            {
+                "_template": "release-afdo-use",
+                "boards": [
+                    "zako"
+                ],
+                "grouped": true,
+                "name": "zako-release-afdo-use"
+            }
+        ]
+    },
+    "zako-release-afdo-generate": {
+        "_template": "release-afdo-generate",
+        "boards": [
+            "zako"
+        ]
+    },
+    "zako-release-afdo-use": {
+        "_template": "release-afdo-use",
+        "boards": [
+            "zako"
+        ]
+    },
+    "zako-tot-chrome-pfq-informational": {
+        "_template": "chrome-pfq-informational",
+        "boards": [
+            "zako"
+        ],
+        "manifest": "official.xml"
+    },
+    "zako-tot-chromium-pfq-informational": {
+        "_template": "chromium-pfq-informational",
+        "boards": [
+            "zako"
+        ],
+        "useflags": [
+            "-chrome_internal"
+        ]
+    }
+}
\ No newline at end of file
diff --git a/cbuildbot/config_lib.py b/cbuildbot/config_lib.py
new file mode 100644
index 0000000..b9f71b2
--- /dev/null
+++ b/cbuildbot/config_lib.py
@@ -0,0 +1,1378 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configuration options for various cbuildbot builders."""
+
+from __future__ import print_function
+
+import copy
+import itertools
+import json
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import osutils
+
+
# Sentinel GS path value; expands to gs://chromeos-image-archive/ + bot_id.
GS_PATH_DEFAULT = 'default'

# Contains the valid build config suffixes in the order that they are dumped.
CONFIG_TYPE_PRECQ = 'pre-cq'
CONFIG_TYPE_PALADIN = 'paladin'
CONFIG_TYPE_RELEASE = 'release'
CONFIG_TYPE_FULL = 'full'
CONFIG_TYPE_FIRMWARE = 'firmware'
CONFIG_TYPE_FACTORY = 'factory'
CONFIG_TYPE_RELEASE_AFDO = 'release-afdo'

# Dump order for build config types.
# NOTE: This is only used for unittests... find a better solution?
CONFIG_TYPE_DUMP_ORDER = (
    CONFIG_TYPE_PALADIN,
    CONFIG_TYPE_PRECQ,
    constants.PRE_CQ_LAUNCHER_CONFIG,
    'incremental',
    'telemetry',
    CONFIG_TYPE_FULL,
    'full-group',
    CONFIG_TYPE_RELEASE,
    'release-group',
    'release-afdo',
    'release-afdo-generate',
    'release-afdo-use',
    'sdk',
    'chromium-pfq',
    'chromium-pfq-informational',
    'chrome-perf',
    'chrome-pfq',
    'chrome-pfq-informational',
    'pre-flight-branch',
    CONFIG_TYPE_FACTORY,
    CONFIG_TYPE_FIRMWARE,
    'toolchain-major',
    'toolchain-minor',
    'llvm',
    'asan',
    'asan-informational',
    'refresh-packages',
    'test-ap',
    'test-ap-group',
    constants.BRANCH_UTIL_CONFIG,
    constants.PAYLOADS_TYPE,
    'cbuildbot',
)

# In the Json, this special build config holds the default values for all
# other configs.
DEFAULT_BUILD_CONFIG = '_default'

# We cache the config we load from disk to avoid reparsing.
_CACHED_CONFIG = None
+
+
def IsPFQType(b_type):
  """Returns True if this build type is one of the PFQ flavors."""
  pfq_build_types = (constants.PFQ_TYPE, constants.PALADIN_TYPE,
                     constants.CHROME_PFQ_TYPE)
  return b_type in pfq_build_types
+
+
def IsCQType(b_type):
  """Returns True if this build type is a Commit Queue."""
  return constants.PALADIN_TYPE == b_type
+
+
def IsCanaryType(b_type):
  """Returns True if this build type is a Canary."""
  return constants.CANARY_TYPE == b_type
+
+
def OverrideConfigForTrybot(build_config, options):
  """Apply trybot-specific configuration settings.

  Args:
    build_config: The build configuration dictionary to override.
      The dictionary is not modified.
    options: The options passed on the commandline.

  Returns:
    A build configuration dictionary with the overrides applied.
  """
  # TODO: crbug.com/504653 is about deleting this method fully.

  result = copy.deepcopy(build_config)
  for cfg in [result] + result['child_configs']:
    # Force uprev. This is so patched in changes are always built.
    cfg['uprev'] = True
    if cfg['internal']:
      cfg['overlays'] = constants.BOTH_OVERLAYS

    # Local runs switch to the dev manifest; the manifest that needs
    # elevated access is kept only when it's really needed to build.
    if not options.remote_trybot:
      cfg['manifest'] = cfg['dev_manifest']

    # Trybots never push images, and only payload builds generate payloads.
    cfg['push_image'] = False
    if cfg['build_type'] != constants.PAYLOADS_TYPE:
      cfg['paygen'] = False

    if options.hwtest and cfg['hw_tests_override'] is not None:
      cfg['hw_tests'] = cfg['hw_tests_override']

    # Default to starting with a fresh chroot on remote trybot runs.
    if options.remote_trybot:
      cfg['chroot_replace'] = True

    # In trybots, we want to always run VM tests and all unit tests, so
    # that developers will get better testing for their changes.
    if cfg['vm_tests_override'] is not None:
      cfg['vm_tests'] = cfg['vm_tests_override']

  return result
+
+
class BuildConfig(dict):
  """Dictionary of explicit configuration settings for a cbuildbot config

  Each dictionary entry is in turn a dictionary of config_param->value.

  See _settings for details on known configurations, and their documentation.
  """

  # Sentinel stored as a value to mark a key for removal during derive().
  _delete_key_sentinel = object()

  @classmethod
  def delete_key(cls):
    """Used to remove the given key from inherited config.

    Usage:
      new_config = base_config.derive(foo=delete_key())
    """
    return cls._delete_key_sentinel

  @classmethod
  def delete_keys(cls, keys):
    """Used to remove a set of keys from inherited config.

    Usage:
      new_config = base_config.derive(delete_keys(set_of_keys))
    """
    return dict((key, cls._delete_key_sentinel) for key in keys)

  def __getattr__(self, name):
    """Support attribute-like access to each dict entry."""
    try:
      return self[name]
    except KeyError:
      # dict has no __getattr__, so fall back through __getattribute__ to
      # get the normal AttributeError for genuinely missing attributes.
      return super(BuildConfig, self).__getattribute__(name)

  def GetBotId(self, remote_trybot=False):
    """Get the 'bot id' of a particular bot.

    The bot id names the subdirectory where artifacts are stored in Google
    Storage. Remote trybot runs get a 'trybot-' prefix so they never
    collide with the regular bot of the same name.

    Args:
      remote_trybot: Whether this run is a remote trybot run.
    """
    if remote_trybot:
      return 'trybot-%s' % self.name
    return self.name

  def deepcopy(self):
    """Create a deep copy of this object.

    This is a specialized version of copy.deepcopy() for BuildConfig
    objects. Because we know exactly what value shapes a BuildConfig can
    hold, we avoid generic introspection and get roughly a 10x speedup,
    which matters a lot since this runs constantly while the config
    objects are being set up (it saves seconds off this module's load!).
    """
    clone = BuildConfig(self)

    # Plain lists are the only shallow mutable values; copy each one.
    for key, value in self.iteritems():
      # type() comparison is deliberately used here; it's faster than
      # isinstance() and only exact lists are ever stored.
      if type(value) is list:
        clone[key] = list(value)

    children = clone.get('child_configs')
    if children:
      clone['child_configs'] = [child.deepcopy() for child in children]

    # HWTestConfig objects are mutable, so shallow-copy each entry.
    for key in ('hw_tests', 'hw_tests_override'):
      if clone.get(key):
        clone[key] = [copy.copy(item) for item in clone[key]]

    return clone

  def derive(self, *args, **kwargs):
    """Create a new config derived from this one.

    Note: If an override is callable, it will be called and passed the prior
    value for the given key (or None) to compute the new value.

    Args:
      args: Mapping instances to mixin.
      kwargs: Settings to inject; see _settings for valid values.

    Returns:
      A new _config instance.
    """
    derived = self.deepcopy()

    for overrides in list(args) + [kwargs]:
      for key, value in overrides.iteritems():
        derived[key] = value(derived.get(key)) if callable(value) else value

      # After each override mapping, drop any keys marked for deletion.
      doomed = [key for key, value in derived.iteritems()
                if value is self._delete_key_sentinel]
      for key in doomed:
        derived.pop(key, None)

    return derived
+
+
+class HWTestConfig(object):
+  """Config object for hardware tests suites.
+
+  Members:
+    suite: Name of the test suite to run.
+    timeout: Number of seconds to wait before timing out waiting for
+             results.
+    pool: Pool to use for hw testing.
+    blocking: Suites that set this true run sequentially; each must pass
+              before the next begins.  Tests that set this false run in
+              parallel after all blocking tests have passed.
+    async: Fire-and-forget suite.
+    warn_only: Failure on HW tests warns only (does not generate error).
+    critical: Usually we consider structural failures here as OK.
+    priority:  Priority at which tests in the suite will be scheduled in
+               the hw lab.
+    file_bugs: Should we file bugs if a test fails in a suite run.
+    num: Maximum number of DUTs to use when scheduling tests in the hw lab.
+    minimum_duts: minimum number of DUTs required for testing in the hw lab.
+    retry: Whether we should retry tests that fail in a suite run.
+    max_retries: Integer, maximum job retries allowed at suite level.
+                 None for no max.
+    suite_min_duts: Preferred minimum duts. Lab will prioritize on getting such
+                    number of duts even if the suite is competing with
+                    other suites that have higher priority.
+
+  Some combinations of member settings are invalid:
+    * A suite config may not specify both blocking and async.
+    * A suite config may not specify both retry and async.
+    * A suite config may not specify both warn_only and critical.
+  """
+  # This timeout is larger than it needs to be because of autotest overhead.
+  # TODO(davidjames): Reduce this timeout once http://crbug.com/366141 is fixed.
+  DEFAULT_HW_TEST_TIMEOUT = 60 * 220
+  BRANCHED_HW_TEST_TIMEOUT = 10 * 60 * 60
+
+  def __init__(self, suite, num=constants.HWTEST_DEFAULT_NUM,
+               pool=constants.HWTEST_MACH_POOL, timeout=DEFAULT_HW_TEST_TIMEOUT,
+               async=False, warn_only=False, critical=False, blocking=False,
+               file_bugs=False, priority=constants.HWTEST_BUILD_PRIORITY,
+               retry=True, max_retries=10, minimum_duts=0, suite_min_duts=0,
+               offload_failures_only=False):
+    """Constructor -- see members above."""
+    assert not async or (not blocking and not retry)
+    assert not warn_only or not critical
+    self.suite = suite
+    self.num = num
+    self.pool = pool
+    self.timeout = timeout
+    self.blocking = blocking
+    self.async = async
+    self.warn_only = warn_only
+    self.critical = critical
+    self.file_bugs = file_bugs
+    self.priority = priority
+    self.retry = retry
+    self.max_retries = max_retries
+    self.minimum_duts = minimum_duts
+    self.suite_min_duts = suite_min_duts
+    self.offload_failures_only = offload_failures_only
+
+  def SetBranchedValues(self):
+    """Changes the HW Test timeout/priority values to branched values."""
+    self.timeout = max(HWTestConfig.BRANCHED_HW_TEST_TIMEOUT, self.timeout)
+
+    # Set minimum_duts default to 0, which means that lab will not check the
+    # number of available duts to meet the minimum requirement before creating
+    # a suite job for branched build.
+    self.minimum_duts = 0
+
+    # Only reduce priority if it's lower.
+    new_priority = constants.HWTEST_DEFAULT_PRIORITY
+    if (constants.HWTEST_PRIORITIES_MAP[self.priority] >
+        constants.HWTEST_PRIORITIES_MAP[new_priority]):
+      self.priority = new_priority
+
+  @property
+  def timeout_mins(self):
+    return int(self.timeout / 60)
+
+  def __eq__(self, other):
+    return self.__dict__ == other.__dict__
+
def DefaultSettings():
  """Create the canonical set of default build config values.

  Returns:
    A dict mapping every valid build config setting name to its default
    value. This is the full enumeration of settings: any/all config
    settings must appear (and be documented) here.
  """
  # Enumeration of valid settings; any/all config settings must be in this.
  # All settings must be documented.
  return dict(
      # The name of the template we inherit settings from.
      _template=None,

      # The name of the config.
      name=None,

      # A list of boards to build.
      boards=None,

      # The profile of the variant to set up and build.
      profile=None,

      # This bot pushes changes to the overlays.
      master=False,

      # If False, this flag indicates that the CQ should not check whether
      # this bot passed or failed. Set this to False if you are setting up a
      # new bot. Once the bot is on the waterfall and is consistently green,
      # mark the builder as important=True.
      important=False,

      # An integer. If this builder fails this many times consecutively, send
      # an alert email to the recipients health_alert_recipients. This does
      # not apply to tryjobs. This feature is similar to the ERROR_WATERMARK
      # feature of upload_symbols, and it may make sense to merge the features
      # at some point.
      health_threshold=0,

      # List of email addresses to send health alerts to for this builder. It
      # supports automatic email address lookup for the following sheriff
      # types:
      #     'tree': tree sheriffs
      #     'chrome': chrome gardeners
      health_alert_recipients=[],

      # Whether this is an internal build config.
      internal=False,

      # Whether this is a branched build config. Used for pfq logic.
      branch=False,

      # The name of the manifest to use. E.g., to use the buildtools manifest,
      # specify 'buildtools'.
      manifest=constants.DEFAULT_MANIFEST,

      # The name of the manifest to use if we're building on a local trybot.
      # This should only require elevated access if it's really needed to
      # build this config.
      dev_manifest=constants.DEFAULT_MANIFEST,

      # Applies only to paladin builders. If true, Sync to the manifest
      # without applying any test patches, then do a fresh build in a new
      # chroot. Then, apply the patches and build in the existing chroot.
      build_before_patching=False,

      # Applies only to paladin builders. If True, Sync to the master manifest
      # without applying any of the test patches, rather than running
      # CommitQueueSync. This is basically ToT immediately prior to the
      # current commit queue run.
      do_not_apply_cq_patches=False,

      # Applies only to master builders. List of the names of slave builders
      # to be treated as sanity checkers. If only sanity check builders fail,
      # then the master will ignore the failures. In a CQ run, if any of the
      # sanity check builders fail and other builders fail as well, the master
      # will treat the build as failed, but will not reset the ready bit of
      # the tested patches.
      sanity_check_slaves=None,

      # emerge use flags to use while setting up the board, building packages,
      # making images, etc.
      useflags=[],

      # Set the variable CHROMEOS_OFFICIAL for the build. Known to affect
      # parallel_emerge, cros_set_lsb_release, and chromeos_version.sh. See
      # bug chromium-os:14649
      chromeos_official=False,

      # Use binary packages for building the toolchain. (emerge --getbinpkg)
      usepkg_toolchain=True,

      # Use binary packages for build_packages and setup_board.
      usepkg_build_packages=True,

      # If set, run BuildPackages in the background and allow subsequent
      # stages to run in parallel with this one.
      #
      # For each release group, the first builder should be set to run in the
      # foreground (to build binary packages), and the remainder of the
      # builders should be set to run in parallel (to install the binary
      # packages.)
      build_packages_in_background=False,

      # Only use binaries in build_packages for Chrome itself.
      chrome_binhost_only=False,

      # Does this profile need to sync chrome?  If None, we guess based on
      # other factors.  If True/False, we always do that.
      sync_chrome=None,

      # Use the newest ebuilds for all the toolchain packages.
      latest_toolchain=False,

      # This is only valid when latest_toolchain is True. If you set this to a
      # commit-ish, the gcc ebuild will use it to build the toolchain
      # compiler.
      gcc_githash=None,

      # Wipe and replace the board inside the chroot.
      board_replace=False,

      # Wipe and replace chroot, but not source.
      chroot_replace=True,

      # Uprevs the local ebuilds to build new changes since last stable.
      # build.  If master then also pushes these changes on success. Note that
      # we uprev on just about every bot config because it gives us a more
      # deterministic build system (the tradeoff being that some bots build
      # from source more frequently than if they never did an uprev). This way
      # the release/factory/etc... builders will pick up changes that devs
      # pushed before it runs, but after the correspoding PFQ bot ran (which
      # is what creates+uploads binpkgs).  The incremental bots are about the
      # only ones that don't uprev because they mimic the flow a developer
      # goes through on their own local systems.
      uprev=True,

      # Select what overlays to look at for revving and prebuilts. This can be
      # any constants.VALID_OVERLAYS.
      overlays=constants.PUBLIC_OVERLAYS,

      # Select what overlays to push at. This should be a subset of overlays
      # for the particular builder.  Must be None if not a master.  There
      # should only be one master bot pushing changes to each overlay per
      # branch.
      push_overlays=None,

      # Uprev Chrome, values of 'tot', 'stable_release', or None.
      chrome_rev=None,

      # Exit the builder right after checking compilation.
      # TODO(mtennant): Should be something like "compile_check_only".
      compilecheck=False,

      # Test CLs to verify they're ready for the commit queue.
      pre_cq=False,

      # Runs the tests that the signer would run. This should only be set if
      # 'recovery' is in images.
      signer_tests=False,

      # Runs unittests for packages.
      unittests=True,

      # A list of the packages to blacklist from unittests.
      unittest_blacklist=[],

      # Builds autotest tests.  Must be True if vm_tests is set.
      build_tests=True,

      # Generates AFDO data. Will capture a profile of chrome using a hwtest
      # to run a predetermined set of benchmarks.
      afdo_generate=False,

      # Generates AFDO data, builds the minimum amount of artifacts and
      # assumes a non-distributed builder (i.e.: the whole process in a single
      # builder).
      afdo_generate_min=False,

      # Update the Chrome ebuild with the AFDO profile info.
      afdo_update_ebuild=False,

      # Uses AFDO data. The Chrome build will be optimized using the AFDO
      # profile information found in the chrome ebuild file.
      afdo_use=False,

      # A list of the vm_tests to run by default.
      vm_tests=[constants.SMOKE_SUITE_TEST_TYPE,
                constants.SIMPLE_AU_TEST_TYPE],

      # A list of all VM Tests to use if VM Tests are forced on (--vmtest
      # command line or trybot). None means no override.
      vm_tests_override=None,

      # The number of times to run the VMTest stage. If this is >1, then we
      # will run the stage this many times, stopping if we encounter any
      # failures.
      vm_test_runs=1,

      # A list of HWTestConfig objects to run.
      hw_tests=[],

      # A list of all HW Tests to use if HW Tests are forced on (--hwtest
      # command line or trybot). None means no override.
      hw_tests_override=None,

      # If true, uploads artifacts for hw testing. Upload payloads for test
      # image if the image is built. If not, dev image is used and then base
      # image.
      upload_hw_test_artifacts=True,

      # If true, uploads individual image tarballs.
      upload_standalone_images=True,

      # upload_gce_images -- If true, uploads tarballs that can be used as the
      #                      basis for GCE images.
      upload_gce_images=False,

      # List of patterns for portage packages for which stripped binpackages
      # should be uploaded to GS. The patterns are used to search for packages
      # via `equery list`.
      upload_stripped_packages=[
          # Used by SimpleChrome workflow.
          'chromeos-base/chromeos-chrome',
          'sys-kernel/*kernel*',
      ],

      # Google Storage path to offload files to.
      #   None - No upload
      #   GS_PATH_DEFAULT - 'gs://chromeos-image-archive/' + bot_id
      #   value - Upload to explicit path
      gs_path=GS_PATH_DEFAULT,

      # TODO(sosa): Deprecate binary.
      # Type of builder.  Check constants.VALID_BUILD_TYPES.
      build_type=constants.PFQ_TYPE,

      # The class name used to build this config.  See the modules in
      # cbuildbot / builders/*_builders.py for possible values.  This should
      # be the name in string form -- e.g. "simple_builders.SimpleBuilder" to
      # get the SimpleBuilder class in the simple_builders module.  If not
      # specified, we'll fallback to legacy probing behavior until everyone
      # has been converted (see the scripts/cbuildbot.py file for details).
      builder_class_name=None,

      # List of images we want to build -- see build_image for more details.
      images=['test'],

      # Image from which we will build update payloads.  Must either be None
      # or name one of the images in the 'images' list, above.
      payload_image=None,

      # Whether to build a netboot image.
      factory_install_netboot=True,

      # Whether to build the factory toolkit.
      factory_toolkit=True,

      # Whether to build factory packages in BuildPackages.
      factory=True,

      # Tuple of specific packages we want to build.  Most configs won't
      # specify anything here and instead let build_packages calculate.
      packages=[],

      # Do we push a final release image to chromeos-images.
      push_image=False,

      # Do we upload debug symbols.
      upload_symbols=False,

      # Whether we upload a hwqual tarball.
      hwqual=False,

      # Run a stage that generates release payloads for signed images.
      paygen=False,

      # If the paygen stage runs, generate tests, and schedule auto-tests for
      # them.
      paygen_skip_testing=False,

      # If the paygen stage runs, don't generate any delta payloads. This is
      # only done if deltas are broken for a given board.
      paygen_skip_delta_payloads=False,

      # Run a stage that generates and uploads package CPE information.
      cpe_export=True,

      # Run a stage that generates and uploads debug symbols.
      debug_symbols=True,

      # Do not package the debug symbols in the binary package. The debug
      # symbols will be in an archive with the name cpv.debug.tbz2 in
      # /build/${BOARD}/packages and uploaded with the prebuilt.
      separate_debug_symbols=True,

      # Include *.debug files for debugging core files with gdb in debug.tgz.
      # These are very large. This option only has an effect if debug_symbols
      # and archive are set.
      archive_build_debug=False,

      # Run a stage that archives build and test artifacts for developer
      # consumption.
      archive=True,

      # Git repository URL for our manifests.
      #  https://chromium.googlesource.com/chromiumos/manifest
      #  https://chrome-internal.googlesource.com/chromeos/manifest-internal
      manifest_repo_url=None,

      # Whether we are using the manifest_version repo that stores per-build
      # manifests.
      manifest_version=False,

      # Use a different branch of the project manifest for the build.
      manifest_branch=None,

      # Use the Last Known Good Manifest blessed by Paladin.
      use_lkgm=False,

      # If we use_lkgm -- What is the name of the manifest to look for?
      lkgm_manifest=constants.LKGM_MANIFEST,

      # LKGM for Chrome OS generated for Chrome builds that are blessed from
      # canary runs.
      use_chrome_lkgm=False,

      # True if this build config is critical for the chrome_lkgm decision.
      critical_for_chrome=False,

      # Upload prebuilts for this build. Valid values are PUBLIC, PRIVATE, or
      # False.
      prebuilts=False,

      # Use SDK as opposed to building the chroot from source.
      use_sdk=True,

      # List this config when user runs cbuildbot with --list option without
      # the --all flag.
      trybot_list=False,

      # The description string to print out for config when user runs --list.
      description=None,

      # Boolean that enables parameter --git-sync for upload_prebuilts.
      git_sync=False,

      # A list of the child config groups, if applicable. See the AddGroup
      # method.
      child_configs=[],

      # Set shared user password for "chronos" user in built images. Use
      # "None" (default) to remove the shared user password. Note that test
      # images will always set the password to "test0000".
      shared_user_password=None,

      # Whether this config belongs to a config group.
      grouped=False,

      # layout of build_image resulting image. See
      # scripts/build_library/legacy_disk_layout.json or
      # overlay-<board>/scripts/disk_layout.json for possible values.
      disk_layout=None,

      # If enabled, run the PatchChanges stage.  Enabled by default. Can be
      # overridden by the --nopatch flag.
      postsync_patch=True,

      # Reexec into the buildroot after syncing.  Enabled by default.
      postsync_reexec=True,

      # Create delta sysroot during ArchiveStage. Disabled by default.
      create_delta_sysroot=False,

      # Run the binhost_test stage. Only makes sense for builders that have no
      # boards.
      binhost_test=False,

      # Run the BranchUtilTestStage. Useful for builders that publish new
      # manifest versions that we may later want to branch off of.
      branch_util_test=False,

      # TODO(sosa): Collapse to one option.
      # ========== Dev installer prebuilts options =======================

      # Upload prebuilts for this build to this bucket. If it equals None the
      # default buckets are used.
      binhost_bucket=None,

      # Parameter --key for upload_prebuilts. If it equals None, the default
      # values are used, which depend on the build type.
      binhost_key=None,

      # Parameter --binhost-base-url for upload_prebuilts. If it equals None,
      # the default value is used.
      binhost_base_url=None,

      # Upload dev installer prebuilts.
      dev_installer_prebuilts=False,

      # Enable rootfs verification on the image.
      rootfs_verification=True,

      # Build the Chrome SDK.
      chrome_sdk=False,

      # If chrome_sdk is set to True, this determines whether we attempt to
      # build Chrome itself with the generated SDK.
      chrome_sdk_build_chrome=True,

      # If chrome_sdk is set to True, this determines whether we use goma to
      # build chrome.
      chrome_sdk_goma=False,

      # Run image tests. This should only be set if 'base' is in our list of
      # images.
      image_test=False,

      # ==================================================================
      # The documentation associated with the config.
      doc=None,

      # ==================================================================
      # Hints to Buildbot master UI

      # If set, tells buildbot what name to give to the corresponding builder
      # on its waterfall.
      buildbot_waterfall_name=None,

      # If not None, the name (in constants.CIDB_KNOWN_WATERFALLS) of the
      # waterfall that this target should be active on.
      active_waterfall=None,
  )
+
+
def GerritInstanceParameters(name, instance, defaults=False):
  """Build the site parameter entries for a single Gerrit/GoB instance.

  Args:
    name: Prefix for the generated parameter names, e.g. 'EXTERNAL'.
    instance: The googlesource.com instance name, e.g. 'chromium'.
    defaults: If True, every generated parameter is set to None instead of
              a derived value.

  Returns:
    A dict with the keys <name>_GOB_INSTANCE, <name>_GERRIT_INSTANCE,
    <name>_GOB_HOST, <name>_GERRIT_HOST, <name>_GOB_URL and
    <name>_GERRIT_URL.
  """
  param_names = ['_GOB_INSTANCE', '_GERRIT_INSTANCE', '_GOB_HOST',
                 '_GERRIT_HOST', '_GOB_URL', '_GERRIT_URL']

  if defaults:
    return dict(('%s%s' % (name, x), None) for x in param_names)

  gob_host = '%s.googlesource.com' % instance
  gerrit_host = '%s-review.googlesource.com' % instance

  params = [instance,                   # _GOB_INSTANCE
            '%s-review' % instance,     # _GERRIT_INSTANCE
            gob_host,                   # _GOB_HOST
            gerrit_host,                # _GERRIT_HOST
            'https://%s' % gob_host,    # _GOB_URL
            'https://%s' % gerrit_host]  # _GERRIT_URL

  return dict(('%s%s' % (name, pn), p) for pn, p in zip(param_names, params))
+
+
def DefaultSiteParameters():
  """Create the default site parameter values.

  Returns:
    A dict mapping every valid site parameter name to its default value.
    Note that the update ordering below matters: later entries index into
    default_site_params for values (e.g. EXTERNAL_GERRIT_HOST) that were
    installed by the earlier GerritInstanceParameters() updates.
  """
  # Enumeration of valid site parameters; any/all site parameters must be here.
  # All site parameters should be documented.
  default_site_params = {}

  # Helper variables for defining site parameters.
  gob_host = '%s.googlesource.com'

  external_remote = 'cros'
  internal_remote = 'cros-internal'
  chromium_remote = 'chromium'
  chrome_remote = 'chrome'

  internal_change_prefix = '*'
  external_change_prefix = ''

  # Gerrit instance site parameters.
  default_site_params.update(GOB_HOST=gob_host)
  default_site_params.update(
      GerritInstanceParameters('EXTERNAL', 'chromium'))
  default_site_params.update(
      GerritInstanceParameters('INTERNAL', 'chrome-internal'))
  default_site_params.update(
      GerritInstanceParameters('AOSP', 'android', defaults=True))
  default_site_params.update(
      GerritInstanceParameters('WEAVE', 'weave', defaults=True))

  default_site_params.update(
      # Parameters to define which manifests to use.
      MANIFEST_PROJECT=None,
      MANIFEST_INT_PROJECT=None,
      MANIFEST_PROJECTS=None,
      MANIFEST_URL=None,
      MANIFEST_INT_URL=None,

      # CrOS remotes specified in the manifests.
      EXTERNAL_REMOTE=external_remote,
      INTERNAL_REMOTE=internal_remote,
      GOB_REMOTES=None,
      KAYLE_INTERNAL_REMOTE=None,
      CHROMIUM_REMOTE=None,
      CHROME_REMOTE=None,
      AOSP_REMOTE=None,
      WEAVE_REMOTE=None,

      # Only remotes listed in CROS_REMOTES are considered branchable.
      # CROS_REMOTES and BRANCHABLE_PROJECTS must be kept in sync.
      GERRIT_HOSTS={
          external_remote: default_site_params['EXTERNAL_GERRIT_HOST'],
          internal_remote: default_site_params['INTERNAL_GERRIT_HOST']
      },
      CROS_REMOTES={
          external_remote: default_site_params['EXTERNAL_GOB_URL'],
          internal_remote: default_site_params['INTERNAL_GOB_URL']
      },
      GIT_REMOTES={
          chromium_remote: default_site_params['EXTERNAL_GOB_URL'],
          chrome_remote: default_site_params['INTERNAL_GOB_URL'],
          external_remote: default_site_params['EXTERNAL_GOB_URL'],
          internal_remote: default_site_params['INTERNAL_GOB_URL'],
      },

      # Prefix to distinguish internal and external changes. This is used
      # when a user specifies a patch with "-g", when generating a key for
      # a patch to use in our PatchCache, and when displaying a custom
      # string for the patch.
      INTERNAL_CHANGE_PREFIX=internal_change_prefix,
      EXTERNAL_CHANGE_PREFIX=external_change_prefix,
      CHANGE_PREFIX={
          external_remote: internal_change_prefix,
          internal_remote: external_change_prefix
      },

      # List of remotes that are okay to include in the external manifest.
      EXTERNAL_REMOTES=None,

      # Mapping 'remote name' -> regexp that matches names of repositories on
      # that remote that can be branched when creating CrOS branch.
      # Branching script will actually create a new git ref when branching
      # these projects. It won't attempt to create a git ref for other projects
      # that may be mentioned in a manifest. If a remote is missing from this
      # dictionary, all projects on that remote are considered to not be
      # branchable.
      BRANCHABLE_PROJECTS={
          external_remote: r'chromiumos/(.+)',
          internal_remote: r'chromeos/(.+)'
      },

      # Additional parameters used to filter manifests, create modified
      # manifests, and to branch manifests.
      MANIFEST_VERSIONS_GOB_URL=None,
      MANIFEST_VERSIONS_GOB_URL_TEST=None,
      MANIFEST_VERSIONS_INT_GOB_URL=None,
      MANIFEST_VERSIONS_INT_GOB_URL_TEST=None,
      MANIFEST_VERSIONS_GS_URL=None,

      # Standard directories under buildroot for cloning these repos.
      EXTERNAL_MANIFEST_VERSIONS_PATH=None,
      INTERNAL_MANIFEST_VERSIONS_PATH=None,

      # URL of the repo project.
      REPO_URL='https://chromium.googlesource.com/external/repo'
  )

  return default_site_params
+
+
class SiteParameters(dict):
  """Dictionary of site-wide configuration values for a SiteConfig.

  Entries can be read either as dictionary items or as attributes, i.e.
  params['GOB_HOST'] and params.GOB_HOST are equivalent.
  """

  def __getattr__(self, name):
    """Fall back to dictionary lookup for unknown attributes."""
    try:
      return self[name]
    except KeyError:
      # Not a stored parameter; defer to normal attribute resolution so a
      # standard AttributeError is raised.
      return super(SiteParameters, self).__getattribute__(name)

  @classmethod
  def HideDefaults(cls, site_params):
    """Filter out site parameters that still hold their default value.

    Args:
      site_params: A dictionary of site parameters.

    Returns:
      A dictionary containing only the entries of |site_params| whose
      value differs from the default.
    """
    defaults = DefaultSiteParameters()
    return dict((key, value) for key, value in site_params.iteritems()
                if defaults.get(key) != value)
+
+
+class SiteConfig(dict):
+  """This holds a set of named BuildConfig values."""
+
+  def __init__(self, defaults=None, templates=None, site_params=None):
+    """Init.
+
+    Args:
+      defaults: Dictionary of key value pairs to use as BuildConfig values.
+                All BuildConfig values should be defined here. If None,
+                the DefaultSettings() is used. Most sites should use
+                DefaultSettings(), and then update to add any site specific
+                values needed.
+      templates: Dictionary of template names to partial BuildConfigs
+                 other BuildConfigs can be based on. Mostly used to reduce
+                 verbosity of the config dump file format.
+      site_params: Dictionary of site-wide configuration parameters. Keys
+                   of the site_params dictionary should be strings.
+    """
+    super(SiteConfig, self).__init__()
+    self._defaults = DefaultSettings() if defaults is None else defaults
+    self._templates = {} if templates is None else templates
+    self._site_params = (
+        DefaultSiteParameters() if site_params is None else site_params)
+
+  def GetDefault(self):
+    """Create the cannonical default build configuration."""
+    # Enumeration of valid settings; any/all config settings must be in this.
+    # All settings must be documented.
+    return BuildConfig(**self._defaults)
+
+  def GetTemplates(self):
+    """Create the cannonical default build configuration."""
+    return self._templates
+
+  @property
+  def params(self):
+    """Create the canonical default build configuration."""
+    return SiteParameters(**self._site_params)
+
+  #
+  # Methods for searching a SiteConfig's contents.
+  #
+  def GetBoards(self):
+    """Return an iterable of all boards in the SiteConfig."""
+    return set(itertools.chain.from_iterable(
+        x.boards for x in self.itervalues() if x.boards))
+
+  def FindFullConfigsForBoard(self, board=None):
+    """Returns full builder configs for a board.
+
+    Args:
+      board: The board to match. By default, match all boards.
+
+    Returns:
+      A tuple containing a list of matching external configs and a list of
+      matching internal release configs for a board.
+    """
+    ext_cfgs = []
+    int_cfgs = []
+
+    for name, c in self.iteritems():
+      if c['boards'] and (board is None or board in c['boards']):
+        if (name.endswith('-%s' % CONFIG_TYPE_RELEASE) and
+            c['internal']):
+          int_cfgs.append(c.deepcopy())
+        elif (name.endswith('-%s' % CONFIG_TYPE_FULL) and
+              not c['internal']):
+          ext_cfgs.append(c.deepcopy())
+
+    return ext_cfgs, int_cfgs
+
+  def FindCanonicalConfigForBoard(self, board, allow_internal=True):
+    """Get the canonical cbuildbot builder config for a board."""
+    ext_cfgs, int_cfgs = self.FindFullConfigsForBoard(board)
+    # If both external and internal builds exist for this board, prefer the
+    # internal one unless instructed otherwise.
+    both = (int_cfgs if allow_internal else []) + ext_cfgs
+
+    if not both:
+      raise ValueError('Invalid board specified: %s.' % board)
+    return both[0]
+
+  def GetSlavesForMaster(self, master_config, options=None):
+    """Gets the important slave builds corresponding to this master.
+
+    A slave config is one that matches the master config in build_type,
+    chrome_rev, and branch.  It also must be marked important.  For the
+    full requirements see the logic in code below.
+
+    The master itself is eligible to be a slave (of itself) if it has boards.
+
+    TODO(dgarrett): Replace this with explicit master/slave defitions to make
+    the concept less Chrome OS specific. crbug.com/492382.
+
+    Args:
+      master_config: A build config for a master builder.
+      options: The options passed on the commandline. This argument is optional,
+               and only makes sense when called from cbuildbot.
+
+    Returns:
+      A list of build configs corresponding to the slaves for the master
+        represented by master_config.
+
+    Raises:
+      AssertionError if the given config is not a master config or it does
+        not have a manifest_version.
+    """
+    assert master_config['manifest_version']
+    assert master_config['master']
+
+    slave_configs = []
+    if options is not None and options.remote_trybot:
+      return slave_configs
+
+    # TODO(davidjames): In CIDB the master isn't considered a slave of itself,
+    # so we probably shouldn't consider it a slave here either.
+    for build_config in self.itervalues():
+      if (build_config['important'] and
+          build_config['manifest_version'] and
+          (not build_config['master'] or build_config['boards']) and
+          build_config['build_type'] == master_config['build_type'] and
+          build_config['chrome_rev'] == master_config['chrome_rev'] and
+          build_config['branch'] == master_config['branch']):
+        slave_configs.append(build_config)
+
+    return slave_configs
+
+  #
+  # Methods used when creating a Config programatically.
+  #
+  def Add(self, name, *args, **kwargs):
+    """Add a new BuildConfig to the SiteConfig.
+
+    Example usage:
+      # Creates default build named foo.
+      site_config.Add('foo')
+
+      # Creates default build with board 'foo_board'
+      site_config.Add('foo',
+                      boards=['foo_board'])
+
+      # Creates build based on template_build for 'foo_board'.
+      site_config.Add('foo',
+                      template_build,
+                      boards=['foo_board'])
+
+      # Creates build based on template for 'foo_board'. with mixin.
+      # Inheritance order is default, template, mixin, arguments.
+      site_config.Add('foo',
+                      template_build,
+                      mixin_build_config,
+                      boards=['foo_board'])
+
+    Args:
+      name: The name to label this configuration; this is what cbuildbot
+            would see.
+      args: BuildConfigs to patch into this config. First one (if present) is
+            considered the template. See AddTemplate for help on templates.
+      kwargs: BuildConfig values to explicitly set on this config.
+
+    Returns:
+      The BuildConfig just added to the SiteConfig.
+    """
+    inherits, overrides = args, kwargs
+
+    assert name not in self, '%s already exists.' % (name,)
+    overrides['name'] = name
+
+    # Remember our template, if we have one.
+    if '_template' not in overrides and args and '_template' in args[0]:
+      overrides['_template'] = args[0]['_template']
+
+    if '_template' in overrides:
+      assert overrides['_template'] in self.GetTemplates(), \
+          '%s inherits from non-template' % (name,)
+
+    result = self.GetDefault().derive(*inherits, **overrides)
+
+    self[name] = result
+    return result
+
+  def AddConfig(self, config, name, *args, **kwargs):
+    """Derive and add the config to cbuildbot's usable config targets
+
+    Args:
+      config: BuildConfig to derive the new config from.
+      name: The name to label this configuration; this is what cbuildbot
+            would see.
+      args: See the docstring of derive.
+      kwargs: See the docstring of derive.
+
+    Returns:
+      See the docstring of derive.
+    """
+    inherits, overrides = args, kwargs
+
+    # Overrides 'name' and '_template' so that we consistently use the
+    # provided names and not the names from mix-ins. E.g., If this config
+    # inherits from multiple templates, we only pay attention to the first
+    # one listed. TODO(davidjames): Clean up the inheritance more so that
+    # this isn't needed.
+    overrides['name'] = name
+    overrides['_template'] = config.get('_template')
+    if config:
+      assert overrides['_template'], '%s inherits from non-template' % (name,)
+
+    # Add ourselves into the global dictionary, adding in the defaults.
+    new_config = config.derive(*inherits, **overrides)
+    self[name] = self.GetDefault().derive(config, new_config)
+
+    # Return a BuildConfig object without the defaults, so that other objects
+    # can derive from us without inheriting the defaults.
+    return new_config
+
+  def AddConfigWithoutTemplate(self, name, *args, **kwargs):
+    """Add a config containing only explicitly listed values (no defaults)."""
+    return self.AddConfig(BuildConfig(), name, *args, **kwargs)
+
+  def AddGroup(self, name, *args, **kwargs):
+    """Create a new group of build configurations.
+
+    Args:
+      name: The name to label this configuration; this is what cbuildbot
+            would see.
+      args: Configurations to build in this group. The first config in
+            the group is considered the primary configuration and is used
+            for syncing and creating the chroot.
+      kwargs: Override values to use for the parent config.
+
+    Returns:
+      A new BuildConfig instance.
+    """
+    child_configs = [self.GetDefault().derive(x, grouped=True) for x in args]
+    return self.AddConfig(args[0], name, child_configs=child_configs, **kwargs)
+
+  def SaveConfigToFile(self, config_file):
+    """Save this Config to a Json file.
+
+    Args:
+      config_file: The file to write to.
+    """
+    json_string = self.SaveConfigToString()
+    osutils.WriteFile(config_file, json_string)
+
+  def HideDefaults(self, name, cfg):
+    """Hide the defaults from a given config entry.
+
+    Args:
+      name: Default build name (usually dictionary key).
+      cfg: A config entry.
+
+    Returns:
+      The same config entry, but without any defaults.
+    """
+    my_default = self.GetDefault()
+    my_default['name'] = name
+
+    template = cfg.get('_template')
+    if template:
+      my_default.update(self._templates[template])
+      my_default['_template'] = None
+
+    d = {}
+    for k, v in cfg.iteritems():
+      if my_default.get(k) != v:
+        if k == 'child_configs':
+          d['child_configs'] = [self.HideDefaults(name, child) for child in v]
+        else:
+          d[k] = v
+
+    return d
+
+  def AddTemplate(self, name, *args, **kwargs):
+    """Create a template named |name|.
+
+    Templates are used to define common settings that are shared across types
+    of builders. They help reduce duplication in config_dump.json, because we
+    only define the template and its settings once.
+
+    Args:
+      name: The name of the template.
+      args: See the docstring of BuildConfig.derive.
+      kwargs: See the docstring of BuildConfig.derive.
+    """
+    kwargs['_template'] = name
+
+    if args:
+      cfg = args[0].derive(*args[1:], **kwargs)
+    else:
+      cfg = BuildConfig(*args, **kwargs)
+
+    self._templates[name] = cfg
+
+    return cfg
+
+  class _JSONEncoder(json.JSONEncoder):
+    """Json Encoder that encodes objects as their dictionaries."""
+    # pylint: disable=method-hidden
+    def default(self, obj):
+      return self.encode(obj.__dict__)
+
+  def SaveConfigToString(self):
+    """Save this Config object to a Json format string."""
+    default = self.GetDefault()
+    site_params = self.params
+
+    config_dict = {}
+    for k, v in self.iteritems():
+      config_dict[k] = self.HideDefaults(k, v)
+
+    config_dict['_default'] = default
+    config_dict['_templates'] = self._templates
+    config_dict['_site_params'] = SiteParameters.HideDefaults(site_params)
+
+    return json.dumps(config_dict, cls=self._JSONEncoder,
+                      sort_keys=True, indent=4, separators=(',', ': '))
+
+  def DumpExpandedConfigToString(self):
+    """Dump the SiteConfig to Json with all configs fully expanded.
+
+    This is intended for debugging default/template behavior. The dumped JSON
+    can't be reloaded (at least not reliably).
+    """
+    return json.dumps(self, cls=self._JSONEncoder,
+                      sort_keys=True, indent=4, separators=(',', ': '))
+
+#
+# Methods related to loading/saving Json.
+#
+
+def LoadConfigFromFile(config_file=constants.CHROMEOS_CONFIG_FILE):
+  """Load a Config from a Json encoded file."""
+  json_string = osutils.ReadFile(config_file)
+  return LoadConfigFromString(json_string)
+
+
+def LoadConfigFromString(json_string):
+  """Load a cbuildbot config from its Json encoded string."""
+  config_dict = json.loads(json_string, object_hook=_DecodeDict)
+
+  # Use standard defaults, but allow the config to override.
+  defaults = DefaultSettings()
+  defaults.update(config_dict.pop(DEFAULT_BUILD_CONFIG))
+
+  templates = config_dict.pop('_templates', None)
+
+  site_params = DefaultSiteParameters()
+  site_params.update(config_dict.pop('_site_params', {}))
+
+  defaultBuildConfig = BuildConfig(**defaults)
+
+  builds = {n: _CreateBuildConfig(n, defaultBuildConfig, v, templates)
+            for n, v in config_dict.iteritems()}
+
+  # config is the struct that holds the complete cbuildbot config.
+  result = SiteConfig(defaults=defaults, templates=templates,
+                      site_params=site_params)
+  result.update(builds)
+
+  return result
+
+# TODO(dgarrett): Remove Decode methods when we prove unicode strings work.
+def _DecodeList(data):
+  """Convert a JSON result list from unicode to utf-8."""
+  rv = []
+  for item in data:
+    if isinstance(item, unicode):
+      item = item.encode('utf-8')
+    elif isinstance(item, list):
+      item = _DecodeList(item)
+    elif isinstance(item, dict):
+      item = _DecodeDict(item)
+
+    # Other types (None, int, float, etc) are stored unmodified.
+    rv.append(item)
+  return rv
+
+
+def _DecodeDict(data):
+  """Convert a JSON result dict from unicode to utf-8."""
+  rv = {}
+  for key, value in data.iteritems():
+    if isinstance(key, unicode):
+      key = key.encode('utf-8')
+
+    if isinstance(value, unicode):
+      value = value.encode('utf-8')
+    elif isinstance(value, list):
+      value = _DecodeList(value)
+    elif isinstance(value, dict):
+      value = _DecodeDict(value)
+
+    # Other types (None, int, float, etc) are stored unmodified.
+    rv[key] = value
+  return rv
+
+
+def _CreateHwTestConfig(jsonString):
+  """Create a HWTestConfig object from a JSON string."""
+  # Each HW Test is dumped as a json string embedded in json.
+  hw_test_config = json.loads(jsonString, object_hook=_DecodeDict)
+  return HWTestConfig(**hw_test_config)
+
+
+def _CreateBuildConfig(name, default, build_dict, templates):
+  """Create a BuildConfig object from its parsed JSON dictionary encoding."""
+  # These build config values need special handling.
+  child_configs = build_dict.pop('child_configs', None)
+  template = build_dict.get('_template')
+
+  # Use the name passed in as the default build name.
+  build_dict.setdefault('name', name)
+
+  my_default = default
+  if template:
+    my_default = default.derive(templates[template])
+  result = my_default.derive(**build_dict)
+
+  hwtests = result.pop('hw_tests', None)
+  if hwtests is not None:
+    result['hw_tests'] = [_CreateHwTestConfig(hwtest) for hwtest in hwtests]
+
+  hwtests = result.pop('hw_tests_override', None)
+  if hwtests is not None:
+    result['hw_tests_override'] = [
+        _CreateHwTestConfig(hwtest) for hwtest in hwtests
+    ]
+
+  if child_configs is not None:
+    result['child_configs'] = [
+        _CreateBuildConfig(name, default, child, templates)
+        for child in child_configs
+    ]
+
+  return result
+
+
+def ClearConfigCache():
+  """Clear the currently cached SiteConfig.
+
+  This is intended to be used very early in the startup, after we fetch/update
+  the site config information available to us.
+
+  However, this operation is never 100% safe, since the Chrome OS config, or an
+  outdated config was available to any code that ran before (including on
+  import), and that code might have used or cached related values.
+  """
+  # pylint: disable=global-statement
+  global _CACHED_CONFIG
+  _CACHED_CONFIG = None
+
+
+def GetConfig():
+  """Load the current SiteConfig.
+
+  Returns:
+    SiteConfig instance to use for this build.
+  """
+  # pylint: disable=global-statement
+  global _CACHED_CONFIG
+
+  if _CACHED_CONFIG is None:
+    if os.path.exists(constants.SITE_CONFIG_FILE):
+      # Use a site specific config, if present.
+      filename = constants.SITE_CONFIG_FILE
+    else:
+      # Fall back to default Chrome OS configuration.
+      filename = constants.CHROMEOS_CONFIG_FILE
+
+    _CACHED_CONFIG = LoadConfigFromFile(filename)
+
+  return _CACHED_CONFIG
diff --git a/cbuildbot/config_lib_unittest b/cbuildbot/config_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/config_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/config_lib_unittest.py b/cbuildbot/config_lib_unittest.py
new file mode 100644
index 0000000..488aee1
--- /dev/null
+++ b/cbuildbot/config_lib_unittest.py
@@ -0,0 +1,719 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for config."""
+
+from __future__ import print_function
+
+import copy
+import cPickle
+import mock
+
+from chromite.cbuildbot import chromeos_config
+from chromite.cbuildbot import config_lib
+from chromite.lib import cros_test_lib
+
+# pylint: disable=protected-access
+
+
+
+def MockBuildConfig():
+  """Create a BuildConfig object for convenient testing pleasure."""
+  site_config = MockSiteConfig()
+  return site_config['x86-generic-paladin']
+
+
+def MockSiteConfig():
+  """Create a SiteConfig object for convenient testing pleasure.
+
+  Shared among a number of unittest files, so be careful if changing it.
+  """
+  result = config_lib.SiteConfig()
+
+  # Add a single, simple build config.
+  result.Add(
+      'x86-generic-paladin',
+      active_waterfall='chromiumos',
+      boards=['x86-generic'],
+      build_type='paladin',
+      chrome_sdk=True,
+      chrome_sdk_build_chrome=False,
+      description='Commit Queue',
+      doc='http://mock_url/',
+      image_test=True,
+      images=['base', 'test'],
+      important=True,
+      manifest_version=True,
+      prebuilts='public',
+      trybot_list=True,
+      upload_standalone_images=False,
+      vm_tests=['smoke_suite'],
+  )
+
+  return result
+
+
+def AssertSiteIndependentParameters(site_config):
+  """Helper function to test that SiteConfigs contain site-independent values.
+
+  Args:
+    site_config: A SiteConfig object.
+
+  Returns:
+    A boolean. True if the config contained all site-independent values.
+    False otherwise.
+  """
+  # Enumerate the necessary site independent parameter keys.
+  # All keys must be documented.
+  # TODO (msartori): Fill in this list.
+  site_independent_params = [
+  ]
+
+  site_params = site_config.params
+  return all([x in site_params for x in site_independent_params])
+
+
+class _CustomObject(object):
+  """Simple object. For testing deepcopy."""
+
+  def __init__(self, x):
+    self.x = x
+
+  def __eq__(self, other):
+    return self.x == other.x
+
+
+class _CustomObjectWithSlots(object):
+  """Simple object with slots. For testing deepcopy."""
+
+  __slots__ = ['x']
+
+  def __init__(self, x):
+    self.x = x
+
+  def __eq__(self, other):
+    return self.x == other.x
+
+
+class BuildConfigClassTest(cros_test_lib.TestCase):
+  """BuildConfig tests."""
+
+  def setUp(self):
+    self.fooConfig = config_lib.BuildConfig(name='foo', value=1)
+    self.barConfig = config_lib.BuildConfig(name='bar', value=2)
+    self.deepConfig = config_lib.BuildConfig(
+        name='deep', nested=[1, 2, 3], value=3,
+        child_configs=[self.fooConfig, self.barConfig])
+
+    self.config = {
+        'foo': self.fooConfig,
+        'bar': self.barConfig,
+        'deep': self.deepConfig,
+    }
+
+
+  def testMockSiteConfig(self):
+    """Make sure Mock generator function doesn't crash."""
+    site_config = MockSiteConfig()
+    self.assertIsNotNone(site_config)
+
+    build_config = MockBuildConfig()
+    self.assertIsNotNone(build_config)
+
+  def testValueAccess(self):
+    self.assertEqual(self.fooConfig.name, 'foo')
+    self.assertEqual(self.fooConfig.name, self.fooConfig['name'])
+
+    self.assertRaises(AttributeError, getattr, self.fooConfig, 'foobar')
+
+  def testDeleteKey(self):
+    base_config = config_lib.BuildConfig(foo='bar')
+    inherited_config = base_config.derive(
+        foo=config_lib.BuildConfig.delete_key())
+    self.assertTrue('foo' in base_config)
+    self.assertFalse('foo' in inherited_config)
+
+  def testDeleteKeys(self):
+    base_config = config_lib.BuildConfig(foo='bar', baz='bak')
+    inherited_config_1 = base_config.derive(qzr='flp')
+    inherited_config_2 = inherited_config_1.derive(
+        config_lib.BuildConfig.delete_keys(base_config))
+    self.assertEqual(inherited_config_2, {'qzr': 'flp'})
+
+  def testCallableOverrides(self):
+    append_foo = lambda x: x + 'foo' if x else 'foo'
+    base_config = config_lib.BuildConfig()
+    inherited_config_1 = base_config.derive(foo=append_foo)
+    inherited_config_2 = inherited_config_1.derive(foo=append_foo)
+    self.assertEqual(inherited_config_1, {'foo': 'foo'})
+    self.assertEqual(inherited_config_2, {'foo': 'foofoo'})
+
+  def AssertDeepCopy(self, obj1, obj2, obj3):
+    """Assert that |obj3| is a deep copy of |obj1|.
+
+    Args:
+      obj1: Object that was copied.
+      obj2: A true deep copy of obj1 (produced using copy.deepcopy).
+      obj3: The purported deep copy of obj1.
+    """
+    # Check whether the item was copied by deepcopy. If so, then it
+    # must have been copied by our algorithm as well.
+    if obj1 is not obj2:
+      self.assertIsNot(obj1, obj3)
+
+    # Assert the three items are all equal.
+    self.assertEqual(obj1, obj2)
+    self.assertEqual(obj1, obj3)
+
+    if isinstance(obj1, (tuple, list)):
+      # Copy tuples and lists item by item.
+      for i in range(len(obj1)):
+        self.AssertDeepCopy(obj1[i], obj2[i], obj3[i])
+    elif isinstance(obj1, set):
+      # Compare sorted versions of the set.
+      self.AssertDeepCopy(list(sorted(obj1)), list(sorted(obj2)),
+                          list(sorted(obj3)))
+    elif isinstance(obj1, dict):
+      # Copy dicts item by item.
+      for k in obj1:
+        self.AssertDeepCopy(obj1[k], obj2[k], obj3[k])
+    elif hasattr(obj1, '__dict__'):
+      # Make sure the dicts are copied.
+      self.AssertDeepCopy(obj1.__dict__, obj2.__dict__, obj3.__dict__)
+    elif hasattr(obj1, '__slots__'):
+      # Make sure the slots are copied.
+      for attr in obj1.__slots__:
+        self.AssertDeepCopy(getattr(obj1, attr), getattr(obj2, attr),
+                            getattr(obj3, attr))
+    else:
+      # This should be an object that copy.deepcopy didn't copy (probably an
+      # immutable object.) If not, the test needs to be updated to handle this
+      # kind of object.
+      self.assertIs(obj1, obj2)
+
+  def testDeepCopy(self):
+    """Test that we deep copy correctly."""
+    for cfg in [self.fooConfig, self.barConfig, self.deepConfig]:
+      self.AssertDeepCopy(cfg, copy.deepcopy(cfg), cfg.deepcopy())
+
+  def testAssertDeepCopy(self):
+    """Test that we test deep copy correctly."""
+    test1 = ['foo', 'bar', ['hey']]
+    tests = [test1,
+             set([tuple(x) for x in test1]),
+             dict(zip([tuple(x) for x in test1], test1)),
+             _CustomObject(test1),
+             _CustomObjectWithSlots(test1)]
+
+    for x in tests + [[tests]]:
+      copy_x = copy.deepcopy(x)
+      self.AssertDeepCopy(x, copy_x, copy.deepcopy(x))
+      self.AssertDeepCopy(x, copy_x, cPickle.loads(cPickle.dumps(x, -1)))
+      self.assertRaises(AssertionError, self.AssertDeepCopy, x,
+                        copy_x, x)
+      if not isinstance(x, set):
+        self.assertRaises(AssertionError, self.AssertDeepCopy, x,
+                          copy_x, copy.copy(x))
+
+class SiteParametersClassTest(cros_test_lib.TestCase):
+  """SiteParameters tests."""
+
+  def testAttributeAccess(self):
+    """Test that SiteParameters dot-accessor works correctly."""
+    site_params = config_lib.SiteParameters()
+
+    # Ensure our test key is not in site_params.
+    self.assertTrue(site_params.get('foo') is None)
+
+    # Test that we raise when accessing a non-existent value.
+    # pylint: disable=pointless-statement
+    with self.assertRaises(AttributeError):
+      site_params.foo
+
+    # Test the dot-accessor.
+    site_params.update({'foo': 'bar'})
+    self.assertEquals('bar', site_params.foo)
+
+
+class SiteConfigClassTest(cros_test_lib.TestCase):
+  """Config tests."""
+
+  def testAdd(self):
+    """Test the SiteConfig.Add behavior."""
+
+    minimal_defaults = {
+        'name': None, '_template': None, 'value': 'default',
+    }
+
+    site_config = config_lib.SiteConfig(defaults=minimal_defaults)
+    template = site_config.AddTemplate('template', value='template')
+    mixin = config_lib.BuildConfig(value='mixin')
+
+    site_config.Add('default')
+
+    site_config.Add('default_with_override',
+                    value='override')
+
+    site_config.Add('default_with_mixin',
+                    mixin)
+
+    site_config.Add('mixin_with_override',
+                    mixin,
+                    value='override')
+
+    site_config.Add('default_with_template',
+                    template)
+
+    site_config.Add('template_with_override',
+                    template,
+                    value='override')
+
+
+    site_config.Add('template_with_mixin',
+                    template,
+                    mixin)
+
+    site_config.Add('template_with_mixin_override',
+                    template,
+                    mixin,
+                    value='override')
+
+    expected = {
+        'default': {
+            '_template': None,
+            'name': 'default',
+            'value': 'default',
+        },
+        'default_with_override': {
+            '_template': None,
+            'name': 'default_with_override',
+            'value': 'override',
+        },
+        'default_with_mixin': {
+            '_template': None,
+            'name': 'default_with_mixin',
+            'value': 'mixin',
+        },
+        'mixin_with_override': {
+            '_template': None,
+            'name': 'mixin_with_override',
+            'value': 'override',
+        },
+        'default_with_template': {
+            '_template': 'template',
+            'name': 'default_with_template',
+            'value': 'template',
+        },
+        'template_with_override': {
+            '_template': 'template',
+            'name': 'template_with_override',
+            'value': 'override'
+        },
+        'template_with_mixin': {
+            '_template': 'template',
+            'name': 'template_with_mixin',
+            'value': 'mixin',
+        },
+        'template_with_mixin_override': {
+            '_template': 'template',
+            'name': 'template_with_mixin_override',
+            'value': 'override'
+        },
+    }
+
+    self.maxDiff = None
+    self.assertDictEqual(site_config, expected)
+
+  def testAddErrors(self):
+    """Test the SiteConfig.Add behavior."""
+    site_config = MockSiteConfig()
+
+    site_config.Add('foo')
+
+    # Test that we can't add the same config name twice.
+    with self.assertRaises(AssertionError):
+      site_config.Add('foo')
+
+    # Create a template without using AddTemplate so the site config doesn't
+    # know about it.
+    fake_template = config_lib.BuildConfig(
+        name='fake_template', _template='fake_template')
+
+    with self.assertRaises(AssertionError):
+      site_config.Add('bar', fake_template)
+
+  def testSaveLoadEmpty(self):
+    config = config_lib.SiteConfig(defaults={}, site_params={})
+    config_str = config.SaveConfigToString()
+    loaded = config_lib.LoadConfigFromString(config_str)
+
+    self.assertEqual(config, loaded)
+
+    self.assertEqual(loaded.keys(), [])
+    self.assertEqual(loaded._templates.keys(), [])
+    self.assertEqual(loaded.GetDefault(), config_lib.DefaultSettings())
+    self.assertEqual(loaded.params,
+                     config_lib.SiteParameters(
+                         config_lib.DefaultSiteParameters()))
+
+    self.assertNotEqual(loaded.SaveConfigToString(), '')
+
+    # Make sure we can dump debug content without crashing.
+    self.assertNotEqual(loaded.DumpExpandedConfigToString(), '')
+
+  def testSaveLoadComplex(self):
+
+    # pylint: disable=line-too-long
+    src_str = """{
+    "_default": {
+        "bar": true,
+        "baz": false,
+        "child_configs": [],
+        "foo": false,
+        "hw_tests": [],
+        "nested": { "sub1": 1, "sub2": 2 }
+    },
+    "_site_params": {
+        "site_foo": true,
+        "site_bar": false
+    },
+    "_templates": {
+       "build": {
+            "baz": true
+       }
+    },
+    "diff_build": {
+        "_template": "build",
+        "bar": false,
+        "foo": true,
+        "name": "diff_build"
+    },
+    "match_build": {
+        "name": "match_build"
+    },
+    "parent_build": {
+        "child_configs": [
+            {
+                "name": "empty_build"
+            },
+            {
+                "bar": false,
+                "name": "child_build",
+                "hw_tests": [
+                    "{\\n    \\"async\\": true,\\n    \\"blocking\\": false,\\n    \\"critical\\": false,\\n    \\"file_bugs\\": true,\\n    \\"max_retries\\": null,\\n    \\"minimum_duts\\": 4,\\n    \\"num\\": 2,\\n    \\"offload_failures_only\\": false,\\n    \\"pool\\": \\"bvt\\",\\n    \\"priority\\": \\"PostBuild\\",\\n    \\"retry\\": false,\\n    \\"suite\\": \\"bvt-perbuild\\",\\n    \\"suite_min_duts\\": 1,\\n    \\"timeout\\": 13200,\\n    \\"warn_only\\": false\\n}"
+                ]
+            }
+        ],
+        "name": "parent_build"
+    },
+    "default_name_build": {
+    }
+}"""
+
+    config = config_lib.LoadConfigFromString(src_str)
+
+    expected_defaults = config_lib.DefaultSettings()
+    expected_defaults.update({
+        "bar": True,
+        "baz": False,
+        "child_configs": [],
+        "foo": False,
+        "hw_tests": [],
+        "nested": {"sub1": 1, "sub2": 2},
+    })
+
+    self.assertEqual(config.GetDefault(), expected_defaults)
+
+    # Verify assorted stuff in the loaded config to make sure it matches
+    # expectations.
+    self.assertFalse(config['match_build'].foo)
+    self.assertTrue(config['match_build'].bar)
+    self.assertFalse(config['match_build'].baz)
+    self.assertTrue(config['diff_build'].foo)
+    self.assertFalse(config['diff_build'].bar)
+    self.assertTrue(config['diff_build'].baz)
+    self.assertTrue(config['parent_build'].bar)
+    self.assertTrue(config['parent_build'].child_configs[0].bar)
+    self.assertFalse(config['parent_build'].child_configs[1].bar)
+    self.assertEqual(
+        config['parent_build'].child_configs[1].hw_tests[0],
+        config_lib.HWTestConfig(
+            suite='bvt-perbuild',
+            async=True, file_bugs=True, max_retries=None,
+            minimum_duts=4, num=2, priority='PostBuild',
+            retry=False, suite_min_duts=1))
+    self.assertEqual(config['default_name_build'].name, 'default_name_build')
+
+    self.assertTrue(config.params.site_foo)
+    self.assertFalse(config.params.site_bar)
+
+    # Load and save again, just to make sure there are no changes.
+    loaded = config_lib.LoadConfigFromString(config.SaveConfigToString())
+
+    self.assertEqual(config, loaded)
+
+    # Make sure we can dump debug content without crashing.
+    self.assertNotEqual(config.DumpExpandedConfigToString(), '')
+
+  def testChromeOsLoad(self):
+    """This test compares chromeos_config to config_dump.json."""
+    # If there is a test failure, the diff will be big.
+    self.maxDiff = None
+
+    src = chromeos_config.GetConfig()
+    new = config_lib.LoadConfigFromFile()
+
+    self.assertDictEqual(src.GetDefault(),
+                         new.GetDefault())
+
+    #
+    # BUG ALERT ON TEST FAILURE
+    #
+    # assertDictEqual can correctly compare these structs for equivalence, but
+    # has a bug when displaying differences on failure. The embedded
+    # HWTestConfig values are correctly compared, but ALWAYS display as
+    # different, if something else triggers a failure.
+    #
+
+    # This for loop is to make differences easier to find/read.
+    for name in src.iterkeys():
+      self.assertDictEqual(new[name], src[name])
+
+    # This confirms they are exactly the same.
+    self.assertDictEqual(new, src)
+
+
+class SiteConfigFindTest(cros_test_lib.TestCase):
+  """Tests related to Find helpers on SiteConfig."""
+
+  def testGetBoardsMockConfig(self):
+    site_config = MockSiteConfig()
+    self.assertEqual(
+        site_config.GetBoards(),
+        set(['x86-generic']))
+
+  def testGetBoardsComplexConfig(self):
+    site_config = MockSiteConfig()
+    site_config.AddConfigWithoutTemplate('build_a', boards=['foo_board'])
+    site_config.AddConfigWithoutTemplate('build_b', boards=['bar_board'])
+    site_config.AddConfigWithoutTemplate(
+        'build_c', boards=['foo_board', 'car_board'])
+
+    self.assertEqual(
+        site_config.GetBoards(),
+        set(['x86-generic', 'foo_board', 'bar_board', 'car_board']))
+
+
+class FindConfigsForBoardTest(cros_test_lib.TestCase):
+  """Test locating of official build for a board."""
+
+  def setUp(self):
+    self.config = chromeos_config.GetConfig()
+
+  def _CheckFullConfig(
+      self, board, external_expected=None, internal_expected=None):
+    """Check FindFullConfigsForBoard has expected results.
+
+    Args:
+      board: Argument to pass to FindFullConfigsForBoard.
+      external_expected: Expected config name (singular) to be found.
+      internal_expected: Expected config name (singular) to be found.
+    """
+
+    def check_expected(l, expected):
+      if expected is not None:
+        self.assertTrue(expected in [v['name'] for v in l])
+
+    external, internal = self.config.FindFullConfigsForBoard(board)
+    self.assertFalse(external_expected is None and internal_expected is None)
+    check_expected(external, external_expected)
+    check_expected(internal, internal_expected)
+
+  def _CheckCanonicalConfig(self, board, ending):
+    self.assertEquals(
+        '-'.join((board, ending)),
+        self.config.FindCanonicalConfigForBoard(board)['name'])
+
+  def testExternal(self):
+    """Test finding of a full builder."""
+    self._CheckFullConfig(
+        'amd64-generic', external_expected='amd64-generic-full')
+
+  def testInternal(self):
+    """Test finding of a release builder."""
+    self._CheckFullConfig('lumpy', internal_expected='lumpy-release')
+
+  def testBoth(self):
+    """Both an external and internal config exist for board."""
+    self._CheckFullConfig(
+        'daisy', external_expected='daisy-full',
+        internal_expected='daisy-release')
+
+  def testExternalCanonicalResolution(self):
+    """Test an external canonical config."""
+    self._CheckCanonicalConfig('x86-generic', 'full')
+
+  def testInternalCanonicalResolution(self):
+    """Test prefer internal over external when both exist."""
+    self._CheckCanonicalConfig('daisy', 'release')
+
+  def testAFDOCanonicalResolution(self):
+    """Test prefer non-AFDO over AFDO builder."""
+    self._CheckCanonicalConfig('lumpy', 'release')
+
+  def testOneFullConfigPerBoard(self):
+    """There is at most one 'full' config for a board."""
+    # Verifies that there is one external 'full' and one internal 'release'
+    # build per board.  This is to ensure that we fail any new configs that
+    # wrongly have names like *-bla-release or *-bla-full. This case can also
+    # be caught if the new suffix was added to
+    # config_lib.CONFIG_TYPE_DUMP_ORDER
+    # (see testNonOverlappingConfigTypes), but that's not guaranteed to happen.
+    def AtMostOneConfig(board, label, configs):
+      if len(configs) > 1:
+        self.fail(
+            'Found more than one %s config for %s: %r'
+            % (label, board, [c['name'] for c in configs]))
+
+    boards = set()
+    for build_config in self.config.itervalues():
+      boards.update(build_config['boards'])
+
+    # Sanity check of the boards.
+    self.assertTrue(boards)
+
+    for b in boards:
+      # TODO(akeshet): Figure out why we have both panther_embedded-minimal
+      # release and panther_embedded-release, and eliminate one of them.
+      if b == 'panther_embedded':
+        continue
+      external, internal = self.config.FindFullConfigsForBoard(b)
+      AtMostOneConfig(b, 'external', external)
+      AtMostOneConfig(b, 'internal', internal)
+
+
+class OverrideForTrybotTest(cros_test_lib.TestCase):
+  """Test config override functionality."""
+
+  # TODO(dgarrett): Test other override behaviors.
+
+  def setUp(self):
+    self.base_hwtests = [config_lib.HWTestConfig('base')]
+    self.override_hwtests = [config_lib.HWTestConfig('override')]
+
+    self.all_configs = MockSiteConfig()
+    self.all_configs.Add(
+        'no_tests_without_override',
+        vm_tests=[],
+    )
+    self.all_configs.Add(
+        'no_tests_with_override',
+        vm_tests=[],
+        vm_tests_override=['o_a', 'o_b'],
+        hw_tests_override=self.override_hwtests,
+    )
+    self.all_configs.Add(
+        'tests_without_override',
+        vm_tests=['a', 'b'],
+        hw_tests=self.base_hwtests,
+    )
+    self.all_configs.Add(
+        'tests_with_override',
+        vm_tests=['a', 'b'],
+        vm_tests_override=['o_a', 'o_b'],
+        hw_tests=self.base_hwtests,
+        hw_tests_override=self.override_hwtests,
+    )
+
+  def _createMockOptions(self, **kwargs):
+    mock_options = mock.Mock()
+    for k, v in kwargs.iteritems():
+      mock_options.__setattr__(k, v)
+
+    return mock_options
+
+  def testVmTestOverride(self):
+    """Verify that vm_tests override for trybots pay heed to original config."""
+    mock_options = self._createMockOptions(hwtest=False, remote_trybot=False)
+
+    result = config_lib.OverrideConfigForTrybot(
+        self.all_configs['no_tests_without_override'], mock_options)
+    self.assertEqual(result.vm_tests, [])
+
+    result = config_lib.OverrideConfigForTrybot(
+        self.all_configs['no_tests_with_override'], mock_options)
+    self.assertEqual(result.vm_tests, ['o_a', 'o_b'])
+
+    result = config_lib.OverrideConfigForTrybot(
+        self.all_configs['tests_without_override'], mock_options)
+    self.assertEqual(result.vm_tests, ['a', 'b'])
+
+    result = config_lib.OverrideConfigForTrybot(
+        self.all_configs['tests_with_override'], mock_options)
+    self.assertEqual(result.vm_tests, ['o_a', 'o_b'])
+
+  def testHwTestOverrideDisabled(self):
+    """Verify that hw_tests_override is not used without --hwtest."""
+    mock_options = self._createMockOptions(hwtest=False, remote_trybot=False)
+
+    result = config_lib.OverrideConfigForTrybot(
+        self.all_configs['no_tests_without_override'], mock_options)
+    self.assertEqual(result.hw_tests, [])
+
+    result = config_lib.OverrideConfigForTrybot(
+        self.all_configs['no_tests_with_override'], mock_options)
+    self.assertEqual(result.hw_tests, [])
+
+    result = config_lib.OverrideConfigForTrybot(
+        self.all_configs['tests_without_override'], mock_options)
+    self.assertEqual(result.hw_tests, self.base_hwtests)
+
+    result = config_lib.OverrideConfigForTrybot(
+        self.all_configs['tests_with_override'], mock_options)
+    self.assertEqual(result.hw_tests, self.base_hwtests)
+
+  def testHwTestOverrideEnabled(self):
+    """Verify that hw_tests_override is used when --hwtest is set."""
+    mock_options = self._createMockOptions(hwtest=True, remote_trybot=False)
+
+    result = config_lib.OverrideConfigForTrybot(
+        self.all_configs['no_tests_without_override'], mock_options)
+    self.assertEqual(result.hw_tests, [])
+
+    result = config_lib.OverrideConfigForTrybot(
+        self.all_configs['no_tests_with_override'], mock_options)
+    self.assertEqual(result.hw_tests, self.override_hwtests)
+
+    result = config_lib.OverrideConfigForTrybot(
+        self.all_configs['tests_without_override'], mock_options)
+    self.assertEqual(result.hw_tests, self.base_hwtests)
+
+    result = config_lib.OverrideConfigForTrybot(
+        self.all_configs['tests_with_override'], mock_options)
+    self.assertEqual(result.hw_tests, self.override_hwtests)
+
+
+class GetConfigTests(cros_test_lib.TestCase):
+  """Tests related to SiteConfig.GetConfig()."""
+
+  def testGetConfigCaching(self):
+    """Test that config_lib.GetConfig() caches its results correctly."""
+    config_a = config_lib.GetConfig()
+    config_b = config_lib.GetConfig()
+
+    # Ensure that we get a SiteConfig, and that the result is cached.
+    self.assertIsInstance(config_a, config_lib.SiteConfig)
+    self.assertIs(config_a, config_b)
+
+    # Clear our cache.
+    config_lib.ClearConfigCache()
+    config_c = config_lib.GetConfig()
+    config_d = config_lib.GetConfig()
+
+    # Ensure that this gives us a new instance of the SiteConfig.
+    self.assertIsNot(config_a, config_c)
+
+    # But also that it's cached going forward.
+    self.assertIsInstance(config_c, config_lib.SiteConfig)
+    self.assertIs(config_c, config_d)
diff --git a/cbuildbot/constants.py b/cbuildbot/constants.py
new file mode 100644
index 0000000..168f2e7
--- /dev/null
+++ b/cbuildbot/constants.py
@@ -0,0 +1,846 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module contains constants used by cbuildbot and related code."""
+
+from __future__ import print_function
+
+import os
+
+
+def _FindSourceRoot():
+  """Try and find the root check out of the chromiumos tree"""
+  source_root = path = os.path.realpath(os.path.join(
+      os.path.abspath(__file__), '..', '..', '..'))
+  while True:
+    if os.path.isdir(os.path.join(path, '.repo')):
+      return path
+    elif path == '/':
+      break
+    path = os.path.dirname(path)
+  return source_root
+
+
+SOURCE_ROOT = _FindSourceRoot()
+CHROOT_SOURCE_ROOT = '/mnt/host/source'
+CHROOT_WORKSPACE_ROOT = '/mnt/host/workspace'
+CHROOT_CACHE_ROOT = '/var/cache/chromeos-cache'
+
+CROSUTILS_DIR = os.path.join(SOURCE_ROOT, 'src/scripts')
+CHROMITE_DIR = os.path.realpath(os.path.join(
+    os.path.abspath(__file__), '..', '..'))
+BOOTSTRAP_DIR = os.path.join(CHROMITE_DIR, 'bootstrap')
+DEPOT_TOOLS_DIR = os.path.join(SOURCE_ROOT, 'chromium/tools/depot_tools')
+CHROMITE_BIN_SUBDIR = 'chromite/bin'
+CHROMITE_BIN_DIR = os.path.join(CHROMITE_DIR, 'bin')
+PATH_TO_CBUILDBOT = os.path.join(CHROMITE_BIN_SUBDIR, 'cbuildbot')
+DEFAULT_CHROOT_DIR = 'chroot'
+
+SITE_CONFIG_DIR = os.path.join(CHROMITE_DIR, 'config')
+SITE_CONFIG_FILE = os.path.join(SITE_CONFIG_DIR, 'config_dump.json')
+
+CHROMEOS_CONFIG_FILE = os.path.join(CHROMITE_DIR, 'cbuildbot',
+                                    'config_dump.json')
+
+# The following define the location for storing toolchain packages and
+# SDK overlay tarballs created during SDK builder runs. The paths are relative
+# to the build root's chroot, which guarantees that they are reachable from it
+# and get cleaned up when it is removed.
+SDK_TOOLCHAINS_OUTPUT = 'tmp/toolchain-pkgs'
+SDK_OVERLAYS_OUTPUT = 'tmp/sdk-overlays'
+
+AUTOTEST_BUILD_PATH = 'usr/local/build/autotest'
+CQ_CONFIG_FILENAME = 'COMMIT-QUEUE.ini'
+
+# Path to the lsb-release file on the device.
+LSB_RELEASE_PATH = '/etc/lsb-release'
+
+HOME_DIRECTORY = os.path.expanduser('~')
+
+# If cbuildbot is running on a bot, then the cidb access credentials will be
+# available here. This directory will not exist otherwise.
+CIDB_PROD_BOT_CREDS = os.path.join(HOME_DIRECTORY, '.cidb_creds',
+                                   'prod_cidb_bot')
+CIDB_DEBUG_BOT_CREDS = os.path.join(HOME_DIRECTORY, '.cidb_creds',
+                                    'debug_cidb_bot')
+
+WATERFALL_INTERNAL = 'chromeos'
+WATERFALL_EXTERNAL = 'chromiumos'
+WATERFALL_TRYBOT = 'chromiumos.tryserver'
+WATERFALL_RELEASE = 'chromeos_release'
+WATERFALL_BRANCH = 'chromeos.branch'
+# These waterfalls are not yet using cidb.
+WATERFALL_CHROMIUM = 'chromiumos.chromium'
+WATERFALL_CHROME = 'chromeos.chrome'
+
+CIDB_KNOWN_WATERFALLS = (WATERFALL_INTERNAL,
+                         WATERFALL_EXTERNAL,
+                         WATERFALL_TRYBOT,
+                         WATERFALL_RELEASE,
+                         WATERFALL_BRANCH,
+                         WATERFALL_CHROMIUM,
+                         WATERFALL_CHROME,)
+
+ALL_WATERFALLS = CIDB_KNOWN_WATERFALLS
+
+# URLs to the various waterfalls.
+BUILD_DASHBOARD = 'http://build.chromium.org/p/chromiumos'
+BUILD_INT_DASHBOARD = 'https://uberchromegw.corp.google.com/i/chromeos'
+TRYBOT_DASHBOARD = 'https://uberchromegw.corp.google.com/i/chromiumos.tryserver'
+RELEASE_DASHBOARD = 'https://uberchromegw.corp.google.com/i/chromeos_release'
+BRANCH_DASHBOARD = 'https://uberchromegw.corp.google.com/i/chromeos.branch'
+CHROMIUM_DASHBOARD = ('https://uberchromegw.corp.google.com/'
+                      'i/chromiumos.chromium')
+CHROME_DASHBOARD = 'https://uberchromegw.corp.google.com/i/chromeos.chrome'
+
+# Waterfall to dashboard URL mapping.
+WATERFALL_TO_DASHBOARD = {
+    WATERFALL_INTERNAL: BUILD_INT_DASHBOARD,
+    WATERFALL_EXTERNAL: BUILD_DASHBOARD,
+    WATERFALL_TRYBOT: TRYBOT_DASHBOARD,
+    WATERFALL_RELEASE: RELEASE_DASHBOARD,
+    WATERFALL_BRANCH: BRANCH_DASHBOARD,
+    WATERFALL_CHROMIUM: CHROMIUM_DASHBOARD,
+    WATERFALL_CHROME: CHROME_DASHBOARD,
+}
+
+# Builder status strings
+BUILDER_STATUS_FAILED = 'fail'
+BUILDER_STATUS_PASSED = 'pass'
+BUILDER_STATUS_INFLIGHT = 'inflight'
+BUILDER_STATUS_MISSING = 'missing'
+BUILDER_STATUS_ABORTED = 'aborted'
+# The following statuses are currently only used for build stages.
+BUILDER_STATUS_PLANNED = 'planned'
+BUILDER_STATUS_SKIPPED = 'skipped'
+BUILDER_STATUS_FORGIVEN = 'forgiven'
+BUILDER_COMPLETED_STATUSES = (BUILDER_STATUS_PASSED,
+                              BUILDER_STATUS_FAILED,
+                              BUILDER_STATUS_ABORTED,
+                              BUILDER_STATUS_SKIPPED,
+                              BUILDER_STATUS_FORGIVEN)
+BUILDER_ALL_STATUSES = (BUILDER_STATUS_FAILED,
+                        BUILDER_STATUS_PASSED,
+                        BUILDER_STATUS_INFLIGHT,
+                        BUILDER_STATUS_MISSING,
+                        BUILDER_STATUS_ABORTED,
+                        BUILDER_STATUS_PLANNED,
+                        BUILDER_STATUS_SKIPPED,
+                        BUILDER_STATUS_FORGIVEN)
+
+# CL status strings
+CL_STATUS_FAILED = BUILDER_STATUS_FAILED
+CL_STATUS_INFLIGHT = BUILDER_STATUS_INFLIGHT
+CL_STATUS_PASSED = BUILDER_STATUS_PASSED
+CL_STATUS_LAUNCHING = 'launching'
+CL_STATUS_WAITING = 'waiting'
+CL_STATUS_READY_TO_SUBMIT = 'ready-to-submit'
+CL_STATUS_FULLY_VERIFIED = 'fully-verified'
+
+# Change sources
+CHANGE_SOURCE_INTERNAL = 'internal'
+CHANGE_SOURCE_EXTERNAL = 'external'
+
+# Build failure categories
+FAILURE_CATEGORY_BAD_CL = 'bad_cl'
+FAILURE_CATEGORY_BUG_IN_TOT = 'bug_in_tot'
+FAILURE_CATEGORY_MERGE_CONFLICT = 'merge_conflict'
+FAILURE_CATEGORY_TREE_CLOSED = 'tree_closed'
+FAILURE_CATEGORY_SCHEDULED_ABORT = 'scheduled_abort'
+FAILURE_CATEGORY_CL_NOT_READY = 'cl_not_ready'
+FAILURE_CATEGORY_BAD_CHROME = 'bad_chrome'
+FAILURE_CATEGORY_INFRA_FAILURE = 'infra_failure'
+FAILURE_CATEGORY_TEST_FLAKE = 'test_flake'
+FAILURE_CATEGORY_GERRIT_FAILURE = 'gerrit_failure'
+FAILURE_CATEGORY_GS_FAILURE = 'gs_failure'
+FAILURE_CATEGORY_LAB_FAILURE = 'lab_failure'
+FAILURE_CATEGORY_BAD_BINARY_PACKAGE = 'bad_binary_package'
+FAILURE_CATEGORY_BUILD_FLAKE = 'build_flake'
+FAILURE_CATEGORY_MYSTERY = 'mystery'
+
+FAILURE_CATEGORY_ALL_CATEGORIES = (
+    FAILURE_CATEGORY_BAD_CL,
+    FAILURE_CATEGORY_BUG_IN_TOT,
+    FAILURE_CATEGORY_MERGE_CONFLICT,
+    FAILURE_CATEGORY_TREE_CLOSED,
+    FAILURE_CATEGORY_SCHEDULED_ABORT,
+    FAILURE_CATEGORY_CL_NOT_READY,
+    FAILURE_CATEGORY_BAD_CHROME,
+    FAILURE_CATEGORY_INFRA_FAILURE,
+    FAILURE_CATEGORY_TEST_FLAKE,
+    FAILURE_CATEGORY_GERRIT_FAILURE,
+    FAILURE_CATEGORY_GS_FAILURE,
+    FAILURE_CATEGORY_LAB_FAILURE,
+    FAILURE_CATEGORY_BAD_BINARY_PACKAGE,
+    FAILURE_CATEGORY_BUILD_FLAKE,
+    FAILURE_CATEGORY_MYSTERY,
+)
+
+
+# Exception categories, as recorded in cidb
+EXCEPTION_CATEGORY_UNKNOWN = 'unknown'
+EXCEPTION_CATEGORY_BUILD = 'build'
+EXCEPTION_CATEGORY_TEST = 'test'
+EXCEPTION_CATEGORY_INFRA = 'infra'
+EXCEPTION_CATEGORY_LAB = 'lab'
+
+EXCEPTION_CATEGORY_ALL_CATEGORIES = (
+    EXCEPTION_CATEGORY_UNKNOWN,
+    EXCEPTION_CATEGORY_BUILD,
+    EXCEPTION_CATEGORY_TEST,
+    EXCEPTION_CATEGORY_INFRA,
+    EXCEPTION_CATEGORY_LAB,
+)
+
+# TODO: Eliminate these or merge with manifest_version.py:STATUS_PASSED
+# crbug.com/318930
+FINAL_STATUS_PASSED = 'passed'
+FINAL_STATUS_FAILED = 'failed'
+
+# Re-execution API constants.
+# Used by --resume and --bootstrap to decipher which options they
+# can pass to the target cbuildbot (since it may not have that
+# option).
+# Format is Major:Minor.  Minor is used for tracking new options added
+# that aren't critical to the older version if it's not run.
+# Major is used for tracking heavy API breakage- for example, no longer
+# supporting the --resume option.
+REEXEC_API_MAJOR = 0
+REEXEC_API_MINOR = 3
+REEXEC_API_VERSION = '%i.%i' % (REEXEC_API_MAJOR, REEXEC_API_MINOR)
+
+# Minor version 3 is the first to support --master-build-id
+REEXEC_API_MASTER_BUILD_ID = 3
+
+# We rely on the (waterfall, builder name, build number) to uniquely identify
+# a build. However, future migrations or state wipes of the buildbot master may
+# cause it to reset its build number counter. When that happens, this value
+# should be incremented, ensuring that (waterfall, builder name, build number,
+# buildbot generation) is a unique identifier of builds.
+BUILDBOT_GENERATION = 1
+
+ISOLATESERVER = 'https://isolateserver.appspot.com'
+
+GOOGLE_EMAIL = '@google.com'
+CHROMIUM_EMAIL = '@chromium.org'
+
+CORP_DOMAIN = 'corp.google.com'
+GOLO_DOMAIN = 'golo.chromium.org'
+CHROME_DOMAIN = 'chrome.' + CORP_DOMAIN
+
+GOB_HOST = '%s.googlesource.com'
+
+EXTERNAL_GOB_INSTANCE = 'chromium'
+EXTERNAL_GERRIT_INSTANCE = 'chromium-review'
+EXTERNAL_GOB_HOST = GOB_HOST % EXTERNAL_GOB_INSTANCE
+EXTERNAL_GERRIT_HOST = GOB_HOST % EXTERNAL_GERRIT_INSTANCE
+EXTERNAL_GOB_URL = 'https://%s' % EXTERNAL_GOB_HOST
+EXTERNAL_GERRIT_URL = 'https://%s' % EXTERNAL_GERRIT_HOST
+
+INTERNAL_GOB_INSTANCE = 'chrome-internal'
+INTERNAL_GERRIT_INSTANCE = 'chrome-internal-review'
+INTERNAL_GOB_HOST = GOB_HOST % INTERNAL_GOB_INSTANCE
+INTERNAL_GERRIT_HOST = GOB_HOST % INTERNAL_GERRIT_INSTANCE
+INTERNAL_GOB_URL = 'https://%s' % INTERNAL_GOB_HOST
+INTERNAL_GERRIT_URL = 'https://%s' % INTERNAL_GERRIT_HOST
+
+GOB_COOKIE_PATH = os.path.expanduser('~/.git-credential-cache/cookie')
+GITCOOKIES_PATH = os.path.expanduser('~/.gitcookies')
+
+# Timestamps in the JSON from GoB's web interface are of the form 'Tue
+# Dec 02 17:48:06 2014' and is assumed to be in UTC.
+GOB_COMMIT_TIME_FORMAT = '%a %b %d %H:%M:%S %Y'
+
+CHROMITE_PROJECT = 'chromiumos/chromite'
+CHROMITE_URL = '%s/%s' % (EXTERNAL_GOB_URL, CHROMITE_PROJECT)
+CHROMIUM_SRC_PROJECT = 'chromium/src'
+CHROMIUM_GOB_URL = '%s/%s.git' % (EXTERNAL_GOB_URL, CHROMIUM_SRC_PROJECT)
+CHROME_INTERNAL_PROJECT = 'chrome/src-internal'
+CHROME_INTERNAL_GOB_URL = '%s/%s.git' % (
+    INTERNAL_GOB_URL, CHROME_INTERNAL_PROJECT)
+
+DEFAULT_MANIFEST = 'default.xml'
+OFFICIAL_MANIFEST = 'official.xml'
+LKGM_MANIFEST = 'LKGM/lkgm.xml'
+
+SHARED_CACHE_ENVVAR = 'CROS_CACHEDIR'
+PARALLEL_EMERGE_STATUS_FILE_ENVVAR = 'PARALLEL_EMERGE_STATUS_FILE'
+
+# These projects can be responsible for infra failures.
+INFRA_PROJECTS = (CHROMITE_PROJECT,)
+
+# The manifest contains extra attributes in the 'project' nodes to determine our
+# branching strategy for the project.
+#   create: Create a new branch on the project repo for the new CrOS branch.
+#           This is the default.
+#   pin: On the CrOS branch, pin the project to the current revision.
+#   tot: On the CrOS branch, the project still tracks ToT.
+MANIFEST_ATTR_BRANCHING = 'branch-mode'
+MANIFEST_ATTR_BRANCHING_CREATE = 'create'
+MANIFEST_ATTR_BRANCHING_PIN = 'pin'
+MANIFEST_ATTR_BRANCHING_TOT = 'tot'
+MANIFEST_ATTR_BRANCHING_ALL = (
+    MANIFEST_ATTR_BRANCHING_CREATE,
+    MANIFEST_ATTR_BRANCHING_PIN,
+    MANIFEST_ATTR_BRANCHING_TOT,
+)
+
+STREAK_COUNTERS = 'streak_counters'
+
+PATCH_BRANCH = 'patch_branch'
+STABLE_EBUILD_BRANCH = 'stabilizing_branch'
+MERGE_BRANCH = 'merge_branch'
+
+# These branches are deleted at the beginning of every buildbot run.
+CREATED_BRANCHES = [
+    PATCH_BRANCH,
+    STABLE_EBUILD_BRANCH,
+    MERGE_BRANCH
+]
+
+# Constants for uprevving Chrome
+
+# Portage category and package name for Chrome.
+CHROME_PN = 'chromeos-chrome'
+CHROME_CP = 'chromeos-base/%s' % CHROME_PN
+
+# Other packages to uprev while uprevving Chrome.
+OTHER_CHROME_PACKAGES = ['chromeos-base/chromium-source']
+
+# Chrome use flags
+USE_CHROME_INTERNAL = 'chrome_internal'
+USE_AFDO_USE = 'afdo_use'
+
+
+# Builds and validates _alpha ebuilds.  These builds sync to the latest
+# revision of the Chromium src tree and build with that checkout.
+CHROME_REV_TOT = 'tot'
+
+# Builds and validates chrome at a given revision through cbuildbot
+# --chrome_version
+CHROME_REV_SPEC = 'spec'
+
+# Builds and validates the latest Chromium release as defined by
+# ~/trunk/releases in the Chrome src tree.  These ebuilds are suffixed with rc.
+CHROME_REV_LATEST = 'latest_release'
+
+# Builds and validates the latest Chromium release for a specific Chromium
+# branch that we want to watch.  These ebuilds are suffixed with rc.
+CHROME_REV_STICKY = 'stable_release'
+
+# Builds and validates Chromium for a pre-populated directory.
+# Also uses _alpha, since portage doesn't have anything lower.
+CHROME_REV_LOCAL = 'local'
+VALID_CHROME_REVISIONS = [CHROME_REV_TOT, CHROME_REV_LATEST,
+                          CHROME_REV_STICKY, CHROME_REV_LOCAL, CHROME_REV_SPEC]
+
+
+# Build types supported.
+
+# TODO(sosa): Deprecate PFQ type.
+# Incremental builds that are built using binary packages when available.
+# These builds have less validation than other build types.
+INCREMENTAL_TYPE = 'binary'
+
+# These builds serve as PFQ builders.  This is being deprecated.
+PFQ_TYPE = 'pfq'
+
+# Hybrid Commit and PFQ type.  Ultimate protection.  Commonly referred to
+# as simply "commit queue" now.
+PALADIN_TYPE = 'paladin'
+
+# A builder that kicks off Pre-CQ builders that bless the purest CLs.
+PRE_CQ_LAUNCHER_TYPE = 'priest'
+
+# Chrome PFQ type.  Incremental build type that builds and validates new
+# versions of Chrome.  Only valid if set with CHROME_REV.  See
+# VALID_CHROME_REVISIONS for more information.
+CHROME_PFQ_TYPE = 'chrome'
+
+# Builds from source and non-incremental.  These builds fully wipe their
+# chroot before the start of every build and do not use a BINHOST.
+BUILD_FROM_SOURCE_TYPE = 'full'
+
+# Full but with versioned logic.
+CANARY_TYPE = 'canary'
+
+# Generate payloads for an already built build/version.
+PAYLOADS_TYPE = 'payloads'
+
+BRANCH_UTIL_CONFIG = 'branch-util'
+
+# Special build type for Chroot builders.  These builds focus on building
+# toolchains and validate that they work.
+CHROOT_BUILDER_TYPE = 'chroot'
+CHROOT_BUILDER_BOARD = 'amd64-host'
+
+VALID_BUILD_TYPES = (
+    PALADIN_TYPE,
+    INCREMENTAL_TYPE,
+    BUILD_FROM_SOURCE_TYPE,
+    CANARY_TYPE,
+    CHROOT_BUILDER_TYPE,
+    CHROOT_BUILDER_BOARD,
+    CHROME_PFQ_TYPE,
+    PFQ_TYPE,
+    PRE_CQ_LAUNCHER_TYPE,
+    PAYLOADS_TYPE,
+)
+
+# The default list of pre-cq configs to use.
+PRE_CQ_DEFAULT_CONFIGS = ['rambi-pre-cq', 'mixed-a-pre-cq', 'mixed-b-pre-cq',
+                          'mixed-c-pre-cq']
+
+# The name of the pre-cq launching config.
+PRE_CQ_LAUNCHER_CONFIG = 'pre-cq-launcher'
+
+# The name of the Pre-CQ launcher on the waterfall.
+PRE_CQ_LAUNCHER_NAME = 'Pre-CQ Launcher'
+
+# The COMMIT-QUEUE.ini and commit message option that overrides pre-cq configs
+# to test with.
+PRE_CQ_CONFIGS_OPTION = 'pre-cq-configs'
+PRE_CQ_CONFIGS_OPTION_REGEX = PRE_CQ_CONFIGS_OPTION + ':'
+
+# Define pool of machines for Hardware tests.
+HWTEST_DEFAULT_NUM = 6
+HWTEST_TRYBOT_NUM = 3
+HWTEST_MACH_POOL = 'bvt'
+HWTEST_PALADIN_POOL = 'cq'
+HWTEST_TOT_PALADIN_POOL = 'tot-cq'
+HWTEST_PFQ_POOL = 'pfq'
+HWTEST_SUITES_POOL = 'suites'
+HWTEST_CHROME_PERF_POOL = 'chromeperf'
+HWTEST_TRYBOT_POOL = HWTEST_SUITES_POOL
+HWTEST_WIFICELL_PRE_CQ_POOL = 'wificell-pre-cq'
+
+
+# Master build timeouts in seconds. This is the overall timeout set by the
+# master for the lock-step master-slave builds.
+MASTER_BUILD_TIMEOUT_SECONDS = {
+    PFQ_TYPE: 20 * 60,
+    # Canaries are scheduled to run every 8 hours. Leave some gap.
+    CANARY_TYPE: (7 * 60 + 50) * 60,
+}
+MASTER_BUILD_TIMEOUT_DEFAULT_SECONDS = 4 * 60 * 60
+
+
+# Defines for the various hardware test suites:
+#   AU: Blocking suite run against all canaries; tests basic AU
+#       functionality.
+#   BVT:  Basic blocking suite to be run against any build that
+#       requires a HWTest phase.
+#   COMMIT:  Suite of basic tests required for commits to the source
+#       tree.  Runs as a blocking suite on the CQ and PFQ; runs as
+#       a non-blocking suite on canaries.
+#   CANARY:  Non-blocking suite run only against the canaries.
+#   AFDO:  Non-blocking suite run only AFDO builders.
+#   MOBLAB: Blocking Suite run only on *_moblab builders.
+HWTEST_AU_SUITE = 'au'
+HWTEST_BVT_SUITE = 'bvt-inline'
+HWTEST_COMMIT_SUITE = 'bvt-cq'
+HWTEST_CANARY_SUITE = 'bvt-perbuild'
+HWTEST_AFDO_SUITE = 'AFDO_record'
+HWTEST_MOBLAB_SUITE = 'moblab'
+HWTEST_MOBLAB_QUICK_SUITE = 'moblab_quick'
+HWTEST_SANITY_SUITE = 'sanity'
+HWTEST_PROVISION_SUITE = 'bvt-provision'
+
+
+# Additional timeout to wait for autotest to abort a suite if the test takes
+# too long to run. This is meant to be overly conservative as a timeout may
+# indicate that autotest is at capacity.
+HWTEST_TIMEOUT_EXTENSION = 10 * 60
+
+HWTEST_DEFAULT_PRIORITY = 'DEFAULT'
+HWTEST_CQ_PRIORITY = 'CQ'
+HWTEST_BUILD_PRIORITY = 'Build'
+HWTEST_PFQ_PRIORITY = 'PFQ'
+HWTEST_POST_BUILD_PRIORITY = 'PostBuild'
+
+# Ordered by priority (first item being lowest).
+HWTEST_VALID_PRIORITIES = ['Weekly',
+                           'Daily',
+                           HWTEST_POST_BUILD_PRIORITY,
+                           HWTEST_DEFAULT_PRIORITY,
+                           HWTEST_BUILD_PRIORITY,
+                           HWTEST_PFQ_PRIORITY,
+                           HWTEST_CQ_PRIORITY]
+
+# Creates a mapping of priorities to make easy comparisons.
+HWTEST_PRIORITIES_MAP = dict(
+    (p, i) for i, p in enumerate(HWTEST_VALID_PRIORITIES))
+
+# Defines VM Test types.
+FULL_AU_TEST_TYPE = 'full_suite'
+SIMPLE_AU_TEST_TYPE = 'pfq_suite'
+SMOKE_SUITE_TEST_TYPE = 'smoke_suite'
+TELEMETRY_SUITE_TEST_TYPE = 'telemetry_suite'
+CROS_VM_TEST_TYPE = 'cros_vm_test'
+DEV_MODE_TEST_TYPE = 'dev_mode_test'
+# Special test type for the GCE test lab. It runs all tests in the smoke suite,
+# but runs them on GCE.
+GCE_VM_TEST_TYPE = 'gce_vm_test'
+VALID_VM_TEST_TYPES = [FULL_AU_TEST_TYPE, SIMPLE_AU_TEST_TYPE,
+                       SMOKE_SUITE_TEST_TYPE, TELEMETRY_SUITE_TEST_TYPE,
+                       CROS_VM_TEST_TYPE, DEV_MODE_TEST_TYPE, GCE_VM_TEST_TYPE]
+
+CHROMIUMOS_OVERLAY_DIR = 'src/third_party/chromiumos-overlay'
+VERSION_FILE = os.path.join(CHROMIUMOS_OVERLAY_DIR,
+                            'chromeos/config/chromeos_version.sh')
+SDK_VERSION_FILE = os.path.join(CHROMIUMOS_OVERLAY_DIR,
+                                'chromeos/binhost/host/sdk_version.conf')
+SDK_GS_BUCKET = 'chromiumos-sdk'
+
+PUBLIC = 'public'
+PRIVATE = 'private'
+
+BOTH_OVERLAYS = 'both'
+PUBLIC_OVERLAYS = PUBLIC
+PRIVATE_OVERLAYS = PRIVATE
+VALID_OVERLAYS = [BOTH_OVERLAYS, PUBLIC_OVERLAYS, PRIVATE_OVERLAYS, None]
+
+# Common default logging settings for use with the logging module.
+LOGGER_FMT = '%(asctime)s: %(levelname)s: %(message)s'
+LOGGER_DATE_FMT = '%H:%M:%S'
+
+# Used by remote patch serialization/deserialization.
+INTERNAL_PATCH_TAG = 'i'
+EXTERNAL_PATCH_TAG = 'e'
+PATCH_TAGS = (INTERNAL_PATCH_TAG, EXTERNAL_PATCH_TAG)
+
+# Tree status strings
+TREE_OPEN = 'open'
+TREE_THROTTLED = 'throttled'
+TREE_CLOSED = 'closed'
+TREE_MAINTENANCE = 'maintenance'
+# The statuses are listed in the order of increasing severity.
+VALID_TREE_STATUSES = (TREE_OPEN, TREE_THROTTLED, TREE_CLOSED, TREE_MAINTENANCE)
+
+
+# Common parts of query used for CQ, THROTTLED_CQ, and PRECQ.
+# "NOT is:draft" in this query doesn't work, it finds any non-draft revision.
+# We want to match drafts anyway, so we can comment on them.
+_QUERIES = {
+    # CLs that are open and not vetoed.
+    'open': 'status:open AND -label:CodeReview=-2 AND -label:Verified=-1',
+
+    # CLs that are approved and verified.
+    'approved': 'label:Code-Review=+2 AND label:Verified=+1',
+}
+
+#
+# Please note that requiring the +2 code review (or Trybot-Ready) for all CQ
+# and PreCQ runs is a security requirement. Otherwise arbitrary people can
+# run code on our servers.
+#
+# The Verified and Commit-Queue flags can be set by any registered user (you
+# don't need commit access to set them.)
+#
+
+
+# Default gerrit query used to find changes for CQ.
+# Permits CQ+1 or CQ+2 changes.
+CQ_READY_QUERY = (
+    '%(open)s AND %(approved)s AND label:Commit-Queue>=1' % _QUERIES,
+    lambda change: change.IsMergeable())
+
+# Gerrit query used to find changes for CQ when tree is throttled.
+# Permits only CQ+2 changes.
+THROTTLED_CQ_READY_QUERY = (
+    '%(open)s AND %(approved)s AND label:Commit-Queue>=2' % _QUERIES,
+    lambda change: change.IsMergeable() and change.HasApproval('COMR', '2'))
+
+# The PreCQ does not require the CQ bit to be set if it's a recent CL, or if
+# the Trybot-Ready flag has been set.
+PRECQ_READY_QUERY = (
+    '%(open)s AND (%(approved)s AND label:Commit-Queue>=1 OR '
+    'label:Code-Review=+2 AND -age:2h OR label:Trybot-Ready=+1)' % _QUERIES,
+    lambda change: (not change.IsBeingMerged() and
+                    change.HasApproval('CRVW', '2') or
+                    change.HasApproval('TRY', '1')))
+
+GERRIT_ON_BORG_LABELS = {
+    'Code-Review': 'CRVW',
+    'Commit-Queue': 'COMR',
+    'Verified': 'VRIF',
+    'Trybot-Ready': 'TRY',
+}
+
+# Actions that a CQ run can take on a CL
+CL_ACTION_PICKED_UP = 'picked_up'         # CL picked up in CommitQueueSync
+CL_ACTION_SUBMITTED = 'submitted'         # CL submitted successfully
+CL_ACTION_KICKED_OUT = 'kicked_out'       # CL CQ-Ready value set to zero
+CL_ACTION_SUBMIT_FAILED = 'submit_failed' # CL submitted but submit failed
+CL_ACTION_VERIFIED = 'verified'           # CL was verified by the builder
+CL_ACTION_FORGIVEN = 'forgiven'           # Build failed, but CL not kicked out
+
+# Actions the Pre-CQ Launcher can take on a CL
+# See cbuildbot/stages/sync_stages.py:PreCQLauncherStage for more info
+CL_ACTION_PRE_CQ_INFLIGHT = 'pre_cq_inflight'
+CL_ACTION_PRE_CQ_PASSED = 'pre_cq_passed'
+CL_ACTION_PRE_CQ_FAILED = 'pre_cq_failed'
+CL_ACTION_PRE_CQ_LAUNCHING = 'pre_cq_launching'
+CL_ACTION_PRE_CQ_WAITING = 'pre_cq_waiting'
+CL_ACTION_PRE_CQ_FULLY_VERIFIED = 'pre_cq_fully_verified'
+CL_ACTION_PRE_CQ_READY_TO_SUBMIT = 'pre_cq_ready_to_submit'
+# Recording this action causes the pre-cq status and all per-config statuses to
+# be reset.
+CL_ACTION_PRE_CQ_RESET = 'pre_cq_reset'
+
+# Miscellaneous actions
+
+# Recorded by pre-cq launcher for a change when it is noticed that a previously
+# rejected change is again in the queue.
+# This is a best effort detection for developers re-marking their changes, to
+# help calculate true CQ handling time. It is susceptible to developers
+# un-marking their change after it is requeued or to the CQ picking up a CL before
+# it is seen by the pre-cq-launcher.
+CL_ACTION_REQUEUED = 'requeued'
+
+# Recorded by pre-cq launcher when it begins handling a change that isn't marked
+# as CQ+1. This indicates that all actions between this and the next
+# CL_ACTION_REQUEUED action have occurred on a non-CQ+1 change.
+CL_ACTION_SPECULATIVE = 'speculative'
+
+# Recorded by pre-cq launcher when it has screened a change for necessary
+# tryjobs
+CL_ACTION_SCREENED_FOR_PRE_CQ = 'screened_for_pre_cq'
+# Recorded by pre-cq launcher for each tryjob config necessary to validate
+# a change, with |reason| field specifying the config.
+CL_ACTION_VALIDATION_PENDING_PRE_CQ = 'validation_pending_pre_cq'
+
+# Recorded by CQ slaves builds when a picked-up CL is determined to be
+# irrelevant to that slave build.
+CL_ACTION_IRRELEVANT_TO_SLAVE = 'irrelevant_to_slave'
+
+# Recorded by pre-cq-launcher when it launches a tryjob with a particular
+# config. The |reason| field of the action will be the config.
+CL_ACTION_TRYBOT_LAUNCHING = 'trybot_launching'
+
+
+CL_ACTIONS = (CL_ACTION_PICKED_UP,
+              CL_ACTION_SUBMITTED,
+              CL_ACTION_KICKED_OUT,
+              CL_ACTION_SUBMIT_FAILED,
+              CL_ACTION_VERIFIED,
+              CL_ACTION_PRE_CQ_INFLIGHT,
+              CL_ACTION_PRE_CQ_PASSED,
+              CL_ACTION_PRE_CQ_FAILED,
+              CL_ACTION_PRE_CQ_LAUNCHING,
+              CL_ACTION_PRE_CQ_WAITING,
+              CL_ACTION_PRE_CQ_READY_TO_SUBMIT,
+              CL_ACTION_REQUEUED,
+              CL_ACTION_SCREENED_FOR_PRE_CQ,
+              CL_ACTION_VALIDATION_PENDING_PRE_CQ,
+              CL_ACTION_IRRELEVANT_TO_SLAVE,
+              CL_ACTION_TRYBOT_LAUNCHING,
+              CL_ACTION_SPECULATIVE,
+              CL_ACTION_FORGIVEN,
+              CL_ACTION_PRE_CQ_FULLY_VERIFIED,
+              CL_ACTION_PRE_CQ_RESET)
+
+# Actions taken by a builder when making a decision about a CL.
+CL_DECISION_ACTIONS = (
+    CL_ACTION_SUBMITTED,
+    CL_ACTION_KICKED_OUT,
+    CL_ACTION_SUBMIT_FAILED,
+    CL_ACTION_VERIFIED,
+    CL_ACTION_FORGIVEN
+)
+
+# Per-config status strings for a CL.
+CL_PRECQ_CONFIG_STATUS_PENDING = 'pending'
+CL_PRECQ_CONFIG_STATUS_LAUNCHED = 'launched'
+CL_PRECQ_CONFIG_STATUS_INFLIGHT = CL_STATUS_INFLIGHT
+CL_PRECQ_CONFIG_STATUS_FAILED = BUILDER_STATUS_FAILED
+CL_PRECQ_CONFIG_STATUS_VERIFIED = CL_ACTION_VERIFIED
+CL_PRECQ_CONFIG_STATUSES = (CL_PRECQ_CONFIG_STATUS_PENDING,
+                            CL_PRECQ_CONFIG_STATUS_LAUNCHED,
+                            CL_PRECQ_CONFIG_STATUS_INFLIGHT,
+                            CL_PRECQ_CONFIG_STATUS_FAILED,
+                            CL_PRECQ_CONFIG_STATUS_VERIFIED)
+
+# CL submission, rejection, or forgiven reasons (i.e. strategies).
+STRATEGY_CQ_SUCCESS = 'strategy:cq-success'
+STRATEGY_CQ_PARTIAL = 'strategy:cq-submit-partial-pool'
+STRATEGY_PRECQ_SUBMIT = 'strategy:pre-cq-submit'
+STRATEGY_NONMANIFEST = 'strategy:non-manifest-submit'
+
+# CQ types.
+CQ = 'cq'
+PRE_CQ = 'pre-cq'
+
+# Environment variables that should be exposed to all children processes
+# invoked via cros_build_lib.RunCommand.
+ENV_PASSTHRU = ('CROS_SUDO_KEEP_ALIVE', SHARED_CACHE_ENVVAR,
+                PARALLEL_EMERGE_STATUS_FILE_ENVVAR)
+
+# List of variables to proxy into the chroot from the host, and to
+# have sudo export if existent. Anytime this list is modified, a new
+# chroot_version_hooks.d upgrade script that symlinks to 45_rewrite_sudoers.d
+# should be created.
+CHROOT_ENVIRONMENT_WHITELIST = (
+    'CHROMEOS_OFFICIAL',
+    'CHROMEOS_VERSION_AUSERVER',
+    'CHROMEOS_VERSION_DEVSERVER',
+    'CHROMEOS_VERSION_TRACK',
+    'GCC_GITHASH',
+    'GIT_AUTHOR_EMAIL',
+    'GIT_AUTHOR_NAME',
+    'GIT_COMMITTER_EMAIL',
+    'GIT_COMMITTER_NAME',
+    'GIT_PROXY_COMMAND',
+    'GIT_SSH',
+    'RSYNC_PROXY',
+    'SSH_AGENT_PID',
+    'SSH_AUTH_SOCK',
+    'USE',
+    'all_proxy',
+    'ftp_proxy',
+    'http_proxy',
+    'https_proxy',
+    'no_proxy',
+)
+
+# Paths for Chrome LKGM which are relative to the Chromium base url.
+CHROME_LKGM_FILE = 'CHROMEOS_LKGM'
+PATH_TO_CHROME_LKGM = 'chromeos/%s' % CHROME_LKGM_FILE
+
+# Cache constants.
+COMMON_CACHE = 'common'
+
+# Artifact constants.
+def _SlashToUnderscore(string):
+  return string.replace('/', '_')
+
+# GCE tar ball constants.
+def ImageBinToGceTar(image_bin):
+  assert image_bin.endswith('.bin'), ('Filename %s does not end with ".bin"' %
+                                      image_bin)
+  return '%s_gce.tar.gz' % os.path.splitext(image_bin)[0]
+
+DEFAULT_ARCHIVE_BUCKET = 'gs://chromeos-image-archive'
+RELEASE_BUCKET = 'gs://chromeos-releases'
+TRASH_BUCKET = 'gs://chromeos-throw-away-bucket'
+CHROME_SYSROOT_TAR = 'sysroot_%s.tar.xz' % _SlashToUnderscore(CHROME_CP)
+CHROME_ENV_TAR = 'environment_%s.tar.xz' % _SlashToUnderscore(CHROME_CP)
+CHROME_ENV_FILE = 'environment'
+BASE_IMAGE_NAME = 'chromiumos_base_image'
+BASE_IMAGE_TAR = '%s.tar.xz' % BASE_IMAGE_NAME
+BASE_IMAGE_BIN = '%s.bin' % BASE_IMAGE_NAME
+BASE_IMAGE_GCE_TAR = ImageBinToGceTar(BASE_IMAGE_BIN)
+IMAGE_SCRIPTS_NAME = 'image_scripts'
+IMAGE_SCRIPTS_TAR = '%s.tar.xz' % IMAGE_SCRIPTS_NAME
+VM_IMAGE_NAME = 'chromiumos_qemu_image'
+VM_IMAGE_BIN = '%s.bin' % VM_IMAGE_NAME
+VM_DISK_PREFIX = 'chromiumos_qemu_disk.bin'
+VM_MEM_PREFIX = 'chromiumos_qemu_mem.bin'
+VM_TEST_RESULTS = 'vm_test_results_%(attempt)s'
+
+TEST_IMAGE_NAME = 'chromiumos_test_image'
+TEST_IMAGE_TAR = '%s.tar.xz' % TEST_IMAGE_NAME
+TEST_IMAGE_BIN = '%s.bin' % TEST_IMAGE_NAME
+TEST_IMAGE_GCE_TAR = ImageBinToGceTar(TEST_IMAGE_BIN)
+TEST_KEY_PRIVATE = 'id_rsa'
+TEST_KEY_PUBLIC = 'id_rsa.pub'
+
+DEV_IMAGE_NAME = 'chromiumos_image'
+DEV_IMAGE_BIN = '%s.bin' % DEV_IMAGE_NAME
+
+RECOVERY_IMAGE_NAME = 'recovery_image'
+RECOVERY_IMAGE_BIN = '%s.bin' % RECOVERY_IMAGE_NAME
+RECOVERY_IMAGE_TAR = '%s.tar.xz' % RECOVERY_IMAGE_NAME
+
+# Image type constants.
+IMAGE_TYPE_BASE = 'base'
+IMAGE_TYPE_DEV = 'dev'
+IMAGE_TYPE_TEST = 'test'
+IMAGE_TYPE_RECOVERY = 'recovery'
+IMAGE_TYPE_FACTORY = 'factory'
+IMAGE_TYPE_FIRMWARE = 'firmware'
+IMAGE_TYPE_NV_LP0_FIRMWARE = 'nv_lp0_firmware'
+
+IMAGE_TYPE_TO_NAME = {
+    IMAGE_TYPE_BASE: BASE_IMAGE_BIN,
+    IMAGE_TYPE_DEV: DEV_IMAGE_BIN,
+    IMAGE_TYPE_RECOVERY: RECOVERY_IMAGE_BIN,
+    IMAGE_TYPE_TEST: TEST_IMAGE_BIN,
+}
+IMAGE_NAME_TO_TYPE = dict((v, k) for k, v in IMAGE_TYPE_TO_NAME.iteritems())
+
+METADATA_JSON = 'metadata.json'
+PARTIAL_METADATA_JSON = 'partial-metadata.json'
+DELTA_SYSROOT_TAR = 'delta_sysroot.tar.xz'
+DELTA_SYSROOT_BATCH = 'batch'
+
+# Global configuration constants.
+CHROMITE_CONFIG_DIR = os.path.expanduser('~/.chromite')
+CHROME_SDK_BASHRC = os.path.join(CHROMITE_CONFIG_DIR, 'chrome_sdk.bashrc')
+SYNC_RETRIES = 2
+SLEEP_TIMEOUT = 30
+
+# Lab status url.
+LAB_STATUS_URL = 'http://chromiumos-lab.appspot.com/current?format=json'
+
+GOLO_SMTP_SERVER = 'mail.golo.chromium.org'
+
+# Valid sheriff types.
+TREE_SHERIFF = 'tree'
+CHROME_GARDENER = 'chrome'
+
+# URLs to retrieve sheriff names from the waterfall.
+TREE_SHERIFF_URL = '%s/sheriff.js' % (BUILD_DASHBOARD)
+TREE_SHERIFF2_URL = '%s/sheriff2.js' % (BUILD_DASHBOARD)
+CHROME_GARDENER_URL = '%s/sheriff_cr_cros_gardeners.js' % (BUILD_DASHBOARD)
+
+SHERIFF_TYPE_TO_URL = {
+    TREE_SHERIFF: (TREE_SHERIFF_URL, TREE_SHERIFF2_URL),
+    CHROME_GARDENER: (CHROME_GARDENER_URL,)
+}
+
+
+# Useful config targets.
+CQ_MASTER = 'master-paladin'
+CANARY_MASTER = 'master-release'
+PFQ_MASTER = 'master-chromium-pfq'
+BINHOST_PRE_CQ = 'binhost-pre-cq'
+WIFICELL_PRE_CQ = 'wificell-pre-cq'
+
+
+# Email validation regex. Not quite fully compliant with RFC 2822, but good
+# approximation.
+EMAIL_REGEX = r'[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}'
+
+# Blacklist of files not allowed to be uploaded into the Partner Project Google
+# Storage Buckets:
+# debug.tgz contains debug symbols.
+# manifest.xml exposes all of our repo names.
+# vm_test_results can contain symbolicated crash dumps.
+EXTRA_BUCKETS_FILES_BLACKLIST = [
+    'debug.tgz',
+    'manifest.xml',
+    'vm_test_results_*'
+]
+
+# AFDO common constants.
+# How long does the AFDO_record autotest have to generate the AFDO perf data.
+AFDO_GENERATE_TIMEOUT = 90 * 60
+
+# Stats dashboard elastic search and statsd constants.
+# Host and port information specified in topology.py.
+ELASTIC_SEARCH_INDEX = 'metadata_index'
+ELASTIC_SEARCH_USE_HTTP = False
+
+STATSD_PROD_PREFIX = 'chromite'
+STATSD_DEBUG_PREFIX = 'chromite_debug'
+
+# Publication of Project SDK artifacts.
+BRILLO_RELEASE_MANIFESTS_URL = 'gs://brillo-releases/sdk-releases'
+BRILLO_LATEST_RELEASE_URL = os.path.join(BRILLO_RELEASE_MANIFESTS_URL,
+                                         'LATEST')
+
+# Gmail Credentials.
+GMAIL_TOKEN_CACHE_FILE = os.path.expanduser('~/.gmail_credentials')
+GMAIL_TOKEN_JSON_FILE = '/creds/refresh_tokens/chromeos_gmail_alerts'
diff --git a/cbuildbot/failures_lib.py b/cbuildbot/failures_lib.py
new file mode 100644
index 0000000..dbb30e1
--- /dev/null
+++ b/cbuildbot/failures_lib.py
@@ -0,0 +1,472 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Classes of failure types."""
+
+from __future__ import print_function
+
+import collections
+import sys
+import traceback
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+
+
class StepFailure(Exception):
  """StepFailure exceptions indicate that a cbuildbot step failed.

  Exceptions that derive from StepFailure should meet the following
  criteria:
    1) The failure indicates that a cbuildbot step failed.
    2) The necessary information to debug the problem has already been
       printed in the logs for the stage that failed.
    3) __str__() should be brief enough to include in a Commit Queue
       failure message.
  """

  # The constants.EXCEPTION_CATEGORY_ALL_CATEGORIES values that this exception
  # maps to. Subclasses should redefine this class constant to map to a
  # different category.
  EXCEPTION_CATEGORY = constants.EXCEPTION_CATEGORY_UNKNOWN

  def __init__(self, message=''):
    """Constructor.

    Args:
      message: An error message.
    """
    Exception.__init__(self, message)
    # Store the message explicitly rather than relying on the implicit
    # BaseException.message attribute, which is Python-2-only and
    # deprecated (PEP 352); __str__() below depends on it.
    self.message = message
    self.args = (message,)

  def __str__(self):
    """Stringify the message."""
    return self.message
+
+
# Lightweight record describing one captured exception: its class, its
# string form, and the textual traceback at the point it was caught.
ExceptInfo = collections.namedtuple('ExceptInfo', 'type str traceback')
+
+
def CreateExceptInfo(exception, tb):
  """Creates a list of ExceptInfo objects from |exception| and |tb|.

  If |exception| is a CompoundFailure with a non-empty list of exc_infos,
  simply returns exception.exc_infos; note that the type of |exception|
  itself is not preserved in that case. Otherwise wraps the exception in
  a single-element list.

  Args:
    exception: The exception.
    tb: The textual traceback.

  Returns:
    A list of ExceptInfo objects.
  """
  if isinstance(exception, CompoundFailure) and exception.exc_infos:
    return exception.exc_infos

  return [ExceptInfo(exception.__class__, str(exception), tb)]
+
+
class CompoundFailure(StepFailure):
  """An exception that contains a list of ExceptInfo objects."""

  def __init__(self, message='', exc_infos=None):
    """Initializes a CompoundFailure instance.

    Args:
      message: A string describing the failure. If empty, a message is
        composed from |exc_infos| (including full tracebacks).
      exc_infos: A list of ExceptInfo objects.
    """
    self.exc_infos = exc_infos if exc_infos else []
    if not message:
      # By default, print all stored ExceptInfo objects. This is the
      # preferred behavior because we'd always have the full
      # tracebacks to debug the failure.
      #
      # Build the text in the local |message| and hand it to the parent
      # constructor. Assigning it to self.message directly is broken:
      # Exception.__init__ (via StepFailure) re-sets the message
      # attribute from its (empty) argument, discarding the composed
      # text.
      message = '\n'.join(['{e.type}: {e.str}\n{e.traceback}'.format(e=ex)
                           for ex in self.exc_infos])

    super(CompoundFailure, self).__init__(message=message)

  def ToSummaryString(self):
    """Returns a string with type and string of each ExceptInfo object.

    This does not include the textual tracebacks on purpose, so the
    message is more readable on the waterfall.
    """
    if self.HasEmptyList():
      # Fall back to return self.message if list is empty.
      return self.message
    else:
      return '\n'.join(['%s: %s' % (e.type, e.str) for e in self.exc_infos])

  def HasEmptyList(self):
    """Returns True if self.exc_infos is empty."""
    return not bool(self.exc_infos)

  def HasFailureType(self, cls):
    """Returns True if any of the failures matches |cls|."""
    return any(issubclass(x.type, cls) for x in self.exc_infos)

  def MatchesFailureType(self, cls):
    """Returns True if all failures match |cls| (and the list is non-empty)."""
    return (not self.HasEmptyList() and
            all(issubclass(x.type, cls) for x in self.exc_infos))

  def HasFatalFailure(self, whitelist=None):
    """Determine if there are non-whitelisted failures.

    Args:
      whitelist: A list of whitelisted exception types.

    Returns:
      Returns True if any failure is not in |whitelist|.
    """
    if not whitelist:
      return not self.HasEmptyList()

    for ex in self.exc_infos:
      if all(not issubclass(ex.type, cls) for cls in whitelist):
        return True

    return False
+
+
class SetFailureType(object):
  """Decorator that re-raises selected exceptions as a pre-set type."""

  def __init__(self, category_exception, source_exception=None):
    """Initializes the decorator.

    Args:
      category_exception: The exception type to re-raise as. It must be
        a subclass of CompoundFailure.
      source_exception: The exception types to re-raise. By default, re-raise
        all Exception classes.
    """
    assert issubclass(category_exception, CompoundFailure)
    self.category_exception = category_exception
    self.source_exception = (
        Exception if source_exception is None else source_exception)

  def __call__(self, functor):
    """Returns a wrapped function."""
    def wrapped_functor(*args, **kwargs):
      try:
        return functor(*args, **kwargs)
      except self.source_exception:
        # Capture the original exception and its textual traceback.
        exc_type, exc_value, _ = sys.exc_info()
        formatted_tb = traceback.format_exc()
        if issubclass(exc_type, self.category_exception):
          # Already (a subclass of) the target type, which carries more
          # information; propagate it untouched.
          raise
        raise self.category_exception(
            exc_infos=CreateExceptInfo(exc_value, formatted_tb))

    return wrapped_functor
+
+
class RetriableStepFailure(StepFailure):
  """Raised when a step fails in a way that warrants a retry."""
+
+
class BuildScriptFailure(StepFailure):
  """Raised when a build command fails.

  Provides a short summary of which command failed, for use in failure
  messages from the Commit Queue, so developers aren't spammed with
  giant error output when common commands (e.g. build_packages) fail.
  """

  EXCEPTION_CATEGORY = constants.EXCEPTION_CATEGORY_BUILD

  def __init__(self, exception, shortname):
    """Construct a BuildScriptFailure object.

    Args:
      exception: A RunCommandError object.
      shortname: Short name for the command we're running.
    """
    StepFailure.__init__(self)
    assert isinstance(exception, cros_build_lib.RunCommandError)
    self.exception = exception
    self.shortname = shortname
    self.args = (exception, shortname)

  def __str__(self):
    """Summarize a build command failure briefly."""
    returncode = self.exception.result.returncode
    if not returncode:
      # No exit code to report; fall back to the underlying message.
      return self.exception.msg
    return '%s failed (code=%s)' % (self.shortname, returncode)
+
+
class PackageBuildFailure(BuildScriptFailure):
  """Raised when one or more packages fail to build."""

  def __init__(self, exception, shortname, failed_packages):
    """Construct a PackageBuildFailure object.

    Args:
      exception: The underlying exception.
      shortname: Short name for the command we're running.
      failed_packages: List of packages that failed to build.
    """
    BuildScriptFailure.__init__(self, exception, shortname)
    self.failed_packages = set(failed_packages)
    self.args = (exception, shortname, failed_packages)

  def __str__(self):
    package_list = ' '.join(sorted(self.failed_packages))
    return 'Packages failed in %s: %s' % (self.shortname, package_list)
+
+
class InfrastructureFailure(CompoundFailure):
  """Raised when a stage fails because of infrastructure issues."""

  EXCEPTION_CATEGORY = constants.EXCEPTION_CATEGORY_INFRA
+
+
# Chrome OS Test Lab failures.
class TestLabFailure(InfrastructureFailure):
  """Raised when a stage fails because of hardware lab infrastructure."""

  EXCEPTION_CATEGORY = constants.EXCEPTION_CATEGORY_LAB


class SuiteTimedOut(TestLabFailure):
  """Raised when a test suite times out without any test failures."""


class BoardNotAvailable(TestLabFailure):
  """Raised when the requested board is not available in the lab."""


class SwarmingProxyFailure(TestLabFailure):
  """Raised on errors related to the swarming proxy."""
+
+
# Gerrit-on-Borg failures.
class GoBFailure(InfrastructureFailure):
  """Raised when a stage fails because of Gerrit-on-Borg (GoB) issues."""


class GoBQueryFailure(GoBFailure):
  """Raised when a stage fails because of GoB query errors."""


class GoBSubmitFailure(GoBFailure):
  """Raised when a stage fails because of GoB submission errors."""


class GoBFetchFailure(GoBFailure):
  """Raised when a stage fails because of GoB fetch errors."""
+
+
# Google Storage failures.
class GSFailure(InfrastructureFailure):
  """Raised when a stage fails because of Google Storage (GS) issues."""


class GSUploadFailure(GSFailure):
  """Raised when a stage fails because of GS upload issues."""


class GSDownloadFailure(GSFailure):
  """Raised when a stage fails because of GS download issues."""
+
+
# Builder failures.
class BuilderFailure(InfrastructureFailure):
  """Raised when a stage fails because of builder issues."""


class MasterSlaveVersionMismatchFailure(BuilderFailure):
  """Raised when a slave build's full_version differs from its master's."""


# Crash collection service failures.
class CrashCollectionFailure(InfrastructureFailure):
  """Raised when a stage fails because of crash collection services."""
+
+
class TestFailure(StepFailure):
  """Raised when a test stage (e.g. VMTest) fails."""

  EXCEPTION_CATEGORY = constants.EXCEPTION_CATEGORY_TEST


class TestWarning(StepFailure):
  """Raised when a test stage (e.g. VMTest) exits with a warning code."""
+
+
class BuildFailureMessage(object):
  """Message indicating that changes failed to be validated."""

  def __init__(self, message, tracebacks, internal, reason, builder):
    """Create a BuildFailureMessage object.

    Args:
      message: The message to print.
      tracebacks: Exceptions received by individual builders, if any.
      internal: Whether this failure occurred on an internal builder.
      reason: A string describing the failure.
      builder: The builder the failure occurred on.
    """
    # Convert each of the input arguments into simple Python datastructures
    # (i.e. not generators) that can be easily pickled.
    self.message = str(message)
    self.tracebacks = tuple(tracebacks)
    self.internal = bool(internal)
    self.reason = str(reason)
    self.builder = str(builder)

  def __str__(self):
    return self.message

  def GetFailingStages(self):
    """Get the failing stage prefixes from the stored tracebacks.

    Returns:
      A set of failing stage prefixes if there are tracebacks; None otherwise.
    """
    failing_stages = None
    if self.tracebacks:
      failing_stages = set(x.failed_prefix for x in self.tracebacks)
    return failing_stages

  def MatchesFailureType(self, cls):
    """Check if all of the tracebacks match the specified failure type."""
    for tb in self.tracebacks:
      if not isinstance(tb.exception, cls):
        if (isinstance(tb.exception, CompoundFailure) and
            tb.exception.MatchesFailureType(cls)):
          # If the exception is a CompoundFailure instance and all its
          # stored exceptions match |cls|, it meets the criteria.
          continue
        else:
          return False

    return True

  def HasFailureType(self, cls):
    """Check if any of the failures match the specified failure type."""
    for tb in self.tracebacks:
      if isinstance(tb.exception, cls):
        return True

      if (isinstance(tb.exception, CompoundFailure) and
          tb.exception.HasFailureType(cls)):
        # If the exception is a CompoundFailure instance and any of its
        # stored exceptions match |cls|, it meets the criteria.
        return True

    return False

  def IsPackageBuildFailure(self):
    """Check if all of the failures are package build failures."""
    return self.MatchesFailureType(PackageBuildFailure)

  def FindPackageBuildFailureSuspects(self, changes, sanity):
    """Figure out what changes probably caused our failures.

    We use a fairly simplistic algorithm to calculate breakage: If you changed
    a package, and that package broke, you probably broke the build. If there
    were multiple changes to a broken package, we fail them all.

    Some safeguards are implemented to ensure that bad changes are kicked out:
      1) Changes to overlays (e.g. ebuilds, eclasses, etc.) are always kicked
         out if the build fails.
      2) If a package fails that nobody changed, we kick out all of the
         changes.
      3) If any failures occur that we can't explain, we kick out all of the
         changes.

    It is certainly possible to trick this algorithm: If one developer submits
    a change to libchromeos that breaks the power_manager, and another developer
    submits a change to the power_manager at the same time, only the
    power_manager change will be kicked out. That said, in that situation, the
    libchromeos change will likely be kicked out on the next run, thanks to
    safeguard #2 above.

    Args:
      changes: List of changes to examine.
      sanity: The sanity checker builder passed and the tree was open when
              the build started.

    Returns:
      Set of changes that likely caused the failure.
    """
    # Import portage_util here to avoid circular imports.
    # portage_util -> parallel -> failures_lib
    from chromite.lib import portage_util
    blame_everything = False
    suspects = set()
    for tb in self.tracebacks:
      # Only look at PackageBuildFailure objects.
      failed_packages = []
      if isinstance(tb.exception, PackageBuildFailure):
        failed_packages = tb.exception.failed_packages
      else:
        # A failure we can't attribute to a package (safeguard #3).
        blame_everything = True

      for package in failed_packages:
        failed_projects = portage_util.FindWorkonProjects([package])
        blame_assigned = False
        for change in changes:
          if change.project in failed_projects:
            blame_assigned = True
            suspects.add(change)
        if not blame_assigned:
          # Nobody changed this failing package (safeguard #2).
          blame_everything = True

    # Only do broad-brush blaming if the tree is sane.
    if sanity:
      if blame_everything or not suspects:
        # Use a set (not a list copy) so the return type matches the
        # documented contract on every code path.
        suspects = set(changes)
      else:
        # Never treat changes to overlays as innocent (safeguard #1).
        suspects.update(change for change in changes
                        if '/overlays/' in change.project)

    return suspects
+
+
def ReportStageFailureToCIDB(db, build_stage_id, exception):
  """Reports a stage failure to cidb along with any inner exceptions.

  Args:
    db: A valid cidb handle.
    build_stage_id: The cidb id for the build stage that failed.
    exception: The failure exception to report.
  """
  outer_failure_id = db.InsertFailure(
      build_stage_id, type(exception).__name__, str(exception),
      _GetExceptionCategory(type(exception)))

  # This assumes that CompoundFailure can't be nested.
  if isinstance(exception, CompoundFailure):
    for exc_class, exc_str, _ in exception.exc_infos:
      db.InsertFailure(build_stage_id, exc_class.__name__, exc_str,
                       _GetExceptionCategory(exc_class), outer_failure_id)
+
+
def _GetExceptionCategory(exception_class):
  """Map an exception class to its cidb exception category.

  Do not use try/catch here: a StepFailure subclass without a valid
  EXCEPTION_CATEGORY is a programming error, not a runtime error.
  """
  if not issubclass(exception_class, StepFailure):
    return constants.EXCEPTION_CATEGORY_UNKNOWN
  return exception_class.EXCEPTION_CATEGORY
diff --git a/cbuildbot/failures_lib_unittest b/cbuildbot/failures_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/failures_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/failures_lib_unittest.py b/cbuildbot/failures_lib_unittest.py
new file mode 100644
index 0000000..86ae2e6
--- /dev/null
+++ b/cbuildbot/failures_lib_unittest.py
@@ -0,0 +1,235 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the failures_lib module."""
+
+from __future__ import print_function
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import fake_cidb
+
+
class CompoundFailureTest(cros_test_lib.TestCase):
  """Test the CompoundFailure class."""

  def _CreateExceptInfos(self, cls, message='', traceback='', num=1):
    """A helper function to create a list of ExceptInfo objects.

    Args:
      cls: The exception class to instantiate.
      message: The exception message.
      traceback: The textual traceback to record.
      num: Number of ExceptInfo objects to create.

    Returns:
      A list of ExceptInfo objects.
    """
    exc_infos = []
    for _ in xrange(num):
      exc_infos.extend(failures_lib.CreateExceptInfo(cls(message), traceback))

    return exc_infos

  def testHasEmptyList(self):
    """Tests the HasEmptyList method."""
    self.assertTrue(failures_lib.CompoundFailure().HasEmptyList())
    exc_infos = self._CreateExceptInfos(KeyError)
    self.assertFalse(
        failures_lib.CompoundFailure(exc_infos=exc_infos).HasEmptyList())

  def testHasAndMatchesFailureType(self):
    """Tests the HasFailureType and the MatchesFailureType methods."""
    # Create a CompoundFailure instance with mixed types of exceptions.
    exc_infos = self._CreateExceptInfos(KeyError)
    exc_infos.extend(self._CreateExceptInfos(ValueError))
    exc = failures_lib.CompoundFailure(exc_infos=exc_infos)
    self.assertTrue(exc.HasFailureType(KeyError))
    self.assertTrue(exc.HasFailureType(ValueError))
    self.assertFalse(exc.MatchesFailureType(KeyError))
    self.assertFalse(exc.MatchesFailureType(ValueError))

    # Create a CompoundFailure instance with a single type of exceptions.
    exc_infos = self._CreateExceptInfos(KeyError, num=5)
    exc = failures_lib.CompoundFailure(exc_infos=exc_infos)
    self.assertTrue(exc.HasFailureType(KeyError))
    self.assertFalse(exc.HasFailureType(ValueError))
    self.assertTrue(exc.MatchesFailureType(KeyError))
    self.assertFalse(exc.MatchesFailureType(ValueError))

  def testHasFatalFailure(self):
    """Tests the HasFatalFailure method."""
    exc_infos = self._CreateExceptInfos(KeyError)
    exc_infos.extend(self._CreateExceptInfos(ValueError))
    exc = failures_lib.CompoundFailure(exc_infos=exc_infos)
    self.assertTrue(exc.HasFatalFailure())
    self.assertTrue(exc.HasFatalFailure(whitelist=[KeyError]))
    self.assertFalse(exc.HasFatalFailure(whitelist=[KeyError, ValueError]))

    exc = failures_lib.CompoundFailure()
    self.assertFalse(exc.HasFatalFailure())

  def testMessageContainsAllInfo(self):
    """Tests that by default, all information is included in the message."""
    exc_infos = self._CreateExceptInfos(KeyError, message='bar1',
                                        traceback='foo1')
    exc_infos.extend(self._CreateExceptInfos(ValueError, message='bar2',
                                             traceback='foo2'))
    exc = failures_lib.CompoundFailure(exc_infos=exc_infos)
    self.assertTrue('bar1' in str(exc))
    self.assertTrue('bar2' in str(exc))
    self.assertTrue('KeyError' in str(exc))
    self.assertTrue('ValueError' in str(exc))
    self.assertTrue('foo1' in str(exc))
    self.assertTrue('foo2' in str(exc))

  def testReportStageFailureToCIDB(self):
    """Tests that the reporting function reports all included exceptions."""
    fake_db = fake_cidb.FakeCIDBConnection()
    inner_exception_1 = failures_lib.TestLabFailure()
    inner_exception_2 = TypeError()
    exc_infos = failures_lib.CreateExceptInfo(inner_exception_1, None)
    exc_infos += failures_lib.CreateExceptInfo(inner_exception_2, None)
    outer_exception = failures_lib.GoBFailure(exc_infos=exc_infos)

    mock_build_stage_id = 9345

    failures_lib.ReportStageFailureToCIDB(fake_db,
                                          mock_build_stage_id,
                                          outer_exception)
    self.assertEqual(3, len(fake_db.failureTable))
    self.assertEqual(
        set([mock_build_stage_id]),
        set([x['build_stage_id'] for x in fake_db.failureTable.values()]))
    self.assertEqual(
        set([constants.EXCEPTION_CATEGORY_INFRA,
             constants.EXCEPTION_CATEGORY_UNKNOWN,
             constants.EXCEPTION_CATEGORY_LAB]),
        set([x['exception_category'] for x in fake_db.failureTable.values()]))

    # Find the outer failure id.
    outer_failure_id = None
    for failure_id, failure in fake_db.failureTable.iteritems():
      if failure['outer_failure_id'] is None:
        outer_failure_id = failure_id
        break
    # Fail with a clear assertion (instead of a NameError below) if no
    # outer failure row was recorded.
    self.assertIsNotNone(outer_failure_id)

    # Now verify inner failures reference this failure.
    for failure_id, failure in fake_db.failureTable.iteritems():
      if failure_id != outer_failure_id:
        self.assertEqual(outer_failure_id, failure['outer_failure_id'])
+
+
class SetFailureTypeTest(cros_test_lib.TestCase):
  """Test that the SetFailureType decorator works."""
  ERROR_MESSAGE = 'You failed!'

  class TacoNotTasty(failures_lib.CompoundFailure):
    """Raised when the taco is not tasty."""

  class NoGuacamole(TacoNotTasty):
    """Raised when no guacamole in the taco."""

  class SubparLunch(failures_lib.CompoundFailure):
    """Raised when the lunch is subpar."""

  class FooException(Exception):
    """A foo exception."""

  def _GetFunction(self, set_type, raise_type, *args, **kwargs):
    """Returns a function to test.

    Args:
      set_type: The exception type that the function is decorated with.
      raise_type: The exception type that the function raises.
      *args: args to pass to the instance of |raise_type|.
      **kwargs: kwargs to pass to the instance of |raise_type|.

    Returns:
      The function to test.
    """
    @failures_lib.SetFailureType(set_type)
    def f():
      raise raise_type(*args, **kwargs)

    return f

  def testAssertionFailOnIllegalExceptionType(self):
    """Assertion should fail if the pre-set type is not allowed."""
    self.assertRaises(AssertionError, self._GetFunction, ValueError,
                      self.FooException)

  def testReraiseAsNewException(self):
    """Tests that the pre-set exception type is raised correctly."""
    try:
      self._GetFunction(self.TacoNotTasty, self.FooException,
                        self.ERROR_MESSAGE)()
    except Exception as e:
      self.assertTrue(isinstance(e, self.TacoNotTasty))
      # Use assertEqual, not assertTrue(x, y): the latter treats |y| as a
      # failure message and would not check the message contents at all.
      self.assertEqual(e.message, self.ERROR_MESSAGE)
      self.assertEqual(len(e.exc_infos), 1)
      self.assertEqual(e.exc_infos[0].str, self.ERROR_MESSAGE)
      self.assertEqual(e.exc_infos[0].type, self.FooException)
      self.assertTrue(isinstance(e.exc_infos[0].traceback, str))
    else:
      self.fail('The wrapped function should have raised an exception.')

  def testReraiseACompoundFailure(self):
    """Tests that the list of ExceptInfo objects are copied over."""
    tb1 = 'Dummy traceback1'
    tb2 = 'Dummy traceback2'
    org_infos = failures_lib.CreateExceptInfo(ValueError('No taco.'), tb1) + \
                failures_lib.CreateExceptInfo(OSError('No salsa'), tb2)
    try:
      self._GetFunction(self.SubparLunch, self.TacoNotTasty,
                        exc_infos=org_infos)()
    except Exception as e:
      self.assertTrue(isinstance(e, self.SubparLunch))
      # The original exceptions stored in exc_infos are preserved.
      self.assertEqual(e.exc_infos, org_infos)
      # All essential information should be included in the message of
      # the new exception.
      self.assertTrue(tb1 in str(e))
      self.assertTrue(tb2 in str(e))
      self.assertTrue(str(ValueError) in str(e))
      self.assertTrue(str(OSError) in str(e))
      self.assertTrue(str('No taco') in str(e))
      self.assertTrue(str('No salsa') in str(e))

      # Assert that summary does not contain the textual tracebacks.
      self.assertFalse(tb1 in e.ToSummaryString())
      self.assertFalse(tb2 in e.ToSummaryString())
    else:
      self.fail('The wrapped function should have raised an exception.')

  def testReraiseACompoundFailureWithEmptyList(self):
    """Tests that a CompoundFailure with empty list is handled correctly."""
    try:
      self._GetFunction(self.SubparLunch, self.TacoNotTasty,
                        message='empty list')()
    except Exception as e:
      self.assertTrue(isinstance(e, self.SubparLunch))
      self.assertEqual(e.exc_infos[0].type, self.TacoNotTasty)
    else:
      self.fail('The wrapped function should have raised an exception.')

  def testReraiseOriginalException(self):
    """Tests that the original exception is re-raised."""
    # NoGuacamole is a subclass of TacoNotTasty, so the wrapper has no
    # effect on it.
    f = self._GetFunction(self.TacoNotTasty, self.NoGuacamole)
    self.assertRaises(self.NoGuacamole, f)

  def testPassArgsToWrappedFunctor(self):
    """Tests that we can pass arguments to the functor."""
    @failures_lib.SetFailureType(self.TacoNotTasty)
    def f(arg):
      return arg

    @failures_lib.SetFailureType(self.TacoNotTasty)
    def g(kwarg=''):
      return kwarg

    # Test passing arguments.
    self.assertEqual(f('foo'), 'foo')
    # Test passing keyword arguments.
    self.assertEqual(g(kwarg='bar'), 'bar')
+
+
class ExceptInfoTest(cros_test_lib.TestCase):
  """Tests the ExceptInfo namedtuple class."""

  def testConvertToExceptInfo(self):
    """Tests converting an exception to an ExceptInfo object."""
    dummy_traceback = 'Dummy traceback'
    error_text = 'Taco is not a valid option!'
    infos = failures_lib.CreateExceptInfo(ValueError(error_text),
                                          dummy_traceback)

    self.assertEqual(infos[0].type, ValueError)
    self.assertEqual(infos[0].str, error_text)
    self.assertEqual(infos[0].traceback, dummy_traceback)
diff --git a/cbuildbot/lkgm_manager.py b/cbuildbot/lkgm_manager.py
new file mode 100644
index 0000000..05e4eb3
--- /dev/null
+++ b/cbuildbot/lkgm_manager.py
@@ -0,0 +1,476 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A library to generate and store the manifests for cros builders to use."""
+
+from __future__ import print_function
+
+import codecs
+import os
+import re
+from xml.dom import minidom
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import manifest_version
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+
+
# Site configuration, loaded once at module import time.
site_config = config_lib.GetConfig()


# Paladin constants for manifest names.
PALADIN_COMMIT_ELEMENT = 'pending_commit'

# XML element/attribute names used when annotating manifests with Chrome
# and LKGM version information.
CHROME_ELEMENT = 'chrome'
CHROME_VERSION_ATTR = 'version'
LKGM_ELEMENT = 'lkgm'
LKGM_VERSION_ATTR = 'version'
+
+
class PromoteCandidateException(Exception):
  """Raised when a manifest candidate fails to be promoted."""
+
+
class _LKGMCandidateInfo(manifest_version.VersionInfo):
  """Class to encapsulate the Chrome OS LKGM candidate info.

  You can instantiate this class in two ways:
  1) Using a version file, specifically chromeos_version.sh, which contains
     the version information.
  2) Passing in a string with the 3 version components plus a revision,
     e.g. 41.0.0-rc1.

  Args:
    version_string: Optional 3 component version string to parse.  Contains:
        build_number: release build number.
        branch_build_number: current build number on a branch.
        patch_number: patch number.
        revision_number: version revision.
    chrome_branch: If version_string specified, specify chrome_branch i.e. 13.
    incr_type: How we should increment this version - build|branch|patch.
    version_file: version file location.
  """
  # Matches '<build>.<branch>.<patch>' with an optional '-rc<revision>'
  # suffix.
  LKGM_RE = r'(\d+\.\d+\.\d+)(?:-rc(\d+))?'

  def __init__(self, version_string=None, chrome_branch=None, incr_type=None,
               version_file=None):
    # Candidates default to revision 1; an '-rc<N>' suffix parsed below
    # overrides this.
    self.revision_number = 1
    if version_string:
      match = re.search(self.LKGM_RE, version_string)
      assert match, 'LKGM did not re %s' % self.LKGM_RE
      super(_LKGMCandidateInfo, self).__init__(match.group(1), chrome_branch,
                                               incr_type=incr_type)
      if match.group(2):
        self.revision_number = int(match.group(2))

    else:
      super(_LKGMCandidateInfo, self).__init__(version_file=version_file,
                                               incr_type=incr_type)

  def VersionString(self):
    """Returns the full version string of the LKGM candidate."""
    return '%s.%s.%s-rc%s' % (self.build_number, self.branch_build_number,
                              self.patch_number, self.revision_number)

  def VersionComponents(self):
    """Return an array of ints of the version fields for comparing."""
    return map(int, [self.build_number, self.branch_build_number,
                     self.patch_number, self.revision_number])

  def IncrementVersion(self):
    """Increments the version by incrementing the revision #."""
    self.revision_number += 1
    return self.VersionString()

  def UpdateVersionFile(self, *args, **kwargs):
    """Update the version file on disk.

    For LKGMCandidateInfo there is no version file so this function is a no-op.
    """
+
+
+class LKGMManager(manifest_version.BuildSpecsManager):
+  """A Class to manage lkgm candidates and their states.
+
+  Vars:
+    lkgm_subdir:  Subdirectory within manifest repo to store candidates.
+  """
+  # Sub-directories for LKGM and Chrome LKGM's.
+  LKGM_SUBDIR = 'LKGM-candidates'
+  CHROME_PFQ_SUBDIR = 'chrome-LKGM-candidates'
+  COMMIT_QUEUE_SUBDIR = 'paladin'
+
  def __init__(self, source_repo, manifest_repo, build_names, build_type,
               incr_type, force, branch, manifest=constants.DEFAULT_MANIFEST,
               dry_run=True, master=False,
               lkgm_path_rel=constants.LKGM_MANIFEST):
    """Initialize an LKGM Manager.

    Args:
      source_repo: Repository object for the source code.
      manifest_repo: Manifest repository for manifest versions/buildspecs.
      build_names: Identifiers for the build. Must match config_lib
          entries. If multiple identifiers are provided, the first item in the
          list must be an identifier for the group.
      build_type: Type of build.  Must be a pfq type.
      incr_type: How we should increment this version - build|branch|patch
      force: Create a new manifest even if there are no changes.
      branch: Branch this builder is running on.
      manifest: Manifest to use for checkout. E.g. 'full' or 'buildtools'.
      dry_run: Whether we actually commit changes we make or not.
      master: Whether we are the master builder.
      lkgm_path_rel: Path to the LKGM symlink, relative to manifest dir.
    """
    super(LKGMManager, self).__init__(
        source_repo=source_repo, manifest_repo=manifest_repo,
        manifest=manifest, build_names=build_names, incr_type=incr_type,
        force=force, branch=branch, dry_run=dry_run, master=master)

    # Absolute path to the LKGM symlink. NOTE(review): self.manifest_dir is
    # presumably set by the BuildSpecsManager base class -- confirm.
    self.lkgm_path = os.path.join(self.manifest_dir, lkgm_path_rel)
    self.compare_versions_fn = _LKGMCandidateInfo.VersionCompare
    self.build_type = build_type
    # Chrome PFQ and PFQ's exist at the same time and version separately so they
    # must have separate subdirs in the manifest-versions repository.
    if self.build_type == constants.CHROME_PFQ_TYPE:
      self.rel_working_dir = self.CHROME_PFQ_SUBDIR
    elif config_lib.IsCQType(self.build_type):
      self.rel_working_dir = self.COMMIT_QUEUE_SUBDIR
    else:
      # Any build type that is neither Chrome PFQ nor CQ must be a PFQ type.
      assert config_lib.IsPFQType(self.build_type)
      self.rel_working_dir = self.LKGM_SUBDIR
+
+  def GetCurrentVersionInfo(self):
+    """Returns the lkgm version info from the version file."""
+    version_info = super(LKGMManager, self).GetCurrentVersionInfo()
+    return _LKGMCandidateInfo(version_info.VersionString(),
+                              chrome_branch=version_info.chrome_branch,
+                              incr_type=self.incr_type)
+
+  def _WriteXml(self, dom_instance, file_path):
+    """Wrapper function to write xml encoded in a proper way.
+
+    Args:
+      dom_instance: A DOM document instance contains contents to be written.
+      file_path: Path to the file to write into.
+    """
+    with codecs.open(file_path, 'w+', 'utf-8') as f:
+      dom_instance.writexml(f)
+
+  def _AddLKGMToManifest(self, manifest):
+    """Write the last known good version string to the manifest.
+
+    Args:
+      manifest: Path to the manifest.
+    """
+    # Get the last known good version string.
+    try:
+      lkgm_filename = os.path.basename(os.readlink(self.lkgm_path))
+      lkgm_version, _ = os.path.splitext(lkgm_filename)
+    except OSError:
+      return
+
+    # Write the last known good version string to the manifest.
+    manifest_dom = minidom.parse(manifest)
+    lkgm_element = manifest_dom.createElement(LKGM_ELEMENT)
+    lkgm_element.setAttribute(LKGM_VERSION_ATTR, lkgm_version)
+    manifest_dom.documentElement.appendChild(lkgm_element)
+    self._WriteXml(manifest_dom, manifest)
+
+  def _AddChromeVersionToManifest(self, manifest, chrome_version):
+    """Adds the chrome element with version |chrome_version| to |manifest|.
+
+    The manifest file should contain the Chrome version to build for
+    PFQ slaves.
+
+    Args:
+      manifest: Path to the manifest
+      chrome_version: A string representing the version of Chrome
+        (e.g. 35.0.1863.0).
+    """
+    manifest_dom = minidom.parse(manifest)
+    chrome = manifest_dom.createElement(CHROME_ELEMENT)
+    chrome.setAttribute(CHROME_VERSION_ATTR, chrome_version)
+    manifest_dom.documentElement.appendChild(chrome)
+    self._WriteXml(manifest_dom, manifest)
+
+  def _AddPatchesToManifest(self, manifest, patches):
+    """Adds list of |patches| to given |manifest|.
+
+    The manifest should have sufficient information for the slave
+    builders to fetch the patches from Gerrit and to print the CL link
+    (see cros_patch.GerritFetchOnlyPatch).
+
+    Args:
+      manifest: Path to the manifest.
+      patches: A list of cros_patch.GerritPatch objects.
+    """
+    manifest_dom = minidom.parse(manifest)
+    for patch in patches:
+      pending_commit = manifest_dom.createElement(PALADIN_COMMIT_ELEMENT)
+      attr_dict = patch.GetAttributeDict()
+      for k, v in attr_dict.iteritems():
+        pending_commit.setAttribute(k, v)
+      manifest_dom.documentElement.appendChild(pending_commit)
+
+    self._WriteXml(manifest_dom, manifest)
+
+  def CreateNewCandidate(self, validation_pool=None,
+                         chrome_version=None,
+                         retries=manifest_version.NUM_RETRIES,
+                         build_id=None):
+    """Creates, syncs to, and returns the next candidate manifest.
+
+    Args:
+      validation_pool: Validation pool to apply to the manifest before
+        publishing.
+      chrome_version: The Chrome version to write in the manifest. Defaults
+        to None, in which case no version is written.
+      retries: Number of retries for updating the status. Defaults to
+        manifest_version.NUM_RETRIES.
+      build_id: Optional integer cidb id of the build that is creating
+                this candidate.
+
+    Raises:
+      GenerateBuildSpecException in case of failure to generate a buildspec
+    """
+    self.CheckoutSourceCode()
+
+    # Refresh manifest logic from manifest_versions repository to grab the
+    # LKGM to generate the blamelist.
+    version_info = self.GetCurrentVersionInfo()
+    self.RefreshManifestCheckout()
+    self.InitializeManifestVariables(version_info)
+
+    self.GenerateBlameListSinceLKGM()
+    new_manifest = self.CreateManifest()
+
+    # For Chrome PFQ, add the version of Chrome to use.
+    if chrome_version:
+      self._AddChromeVersionToManifest(new_manifest, chrome_version)
+
+    # For the Commit Queue, apply the validation pool as part of checkout.
+    if validation_pool:
+      # If we have nothing that could apply from the validation pool and
+      # we're not also a pfq type, we got nothing to do.
+      assert self.cros_source.directory == validation_pool.build_root
+      if (not validation_pool.ApplyPoolIntoRepo() and
+          not config_lib.IsPFQType(self.build_type)):
+        return None
+
+      self._AddPatchesToManifest(new_manifest, validation_pool.changes)
+
+      # Add info about the last known good version to the manifest. This will
+      # be used by slaves to calculate what artifacts from old builds are safe
+      # to use.
+      self._AddLKGMToManifest(new_manifest)
+
+    last_error = None
+    for attempt in range(0, retries + 1):
+      try:
+        # Refresh manifest logic from manifest_versions repository.
+        # Note we don't need to do this on our first attempt as we needed to
+        # have done it to get the LKGM.
+        if attempt != 0:
+          self.RefreshManifestCheckout()
+          self.InitializeManifestVariables(version_info)
+
+        # If we don't have any valid changes to test, make sure the checkout
+        # is at least different.
+        if ((not validation_pool or not validation_pool.changes) and
+            not self.force and self.HasCheckoutBeenBuilt()):
+          return None
+
+        # Check whether the latest spec available in manifest-versions is
+        # newer than our current version number. If so, use it as the base
+        # version number. Otherwise, we default to 'rc1'.
+        if self.latest:
+          latest = max(self.latest, version_info.VersionString(),
+                       key=self.compare_versions_fn)
+          version_info = _LKGMCandidateInfo(
+              latest, chrome_branch=version_info.chrome_branch,
+              incr_type=self.incr_type)
+
+        git.CreatePushBranch(manifest_version.PUSH_BRANCH, self.manifest_dir,
+                             sync=False)
+        version = self.GetNextVersion(version_info)
+        self.PublishManifest(new_manifest, version, build_id=build_id)
+        self.current_version = version
+        return self.GetLocalManifest(version)
+      except cros_build_lib.RunCommandError as e:
+        err_msg = 'Failed to generate LKGM Candidate. error: %s' % e
+        logging.error(err_msg)
+        last_error = err_msg
+
+    raise manifest_version.GenerateBuildSpecException(last_error)
+
+  def CreateFromManifest(self, manifest, retries=manifest_version.NUM_RETRIES,
+                         build_id=None):
+    """Sets up an lkgm_manager from the given manifest.
+
+    This method sets up an LKGM manager and publishes a new manifest to the
+    manifest versions repo based on the passed in manifest but filtering
+    internal repositories and changes out of it.
+
+    Args:
+      manifest: A manifest that possibly contains private changes/projects. It
+        is named with the given version we want to create a new manifest from
+        i.e R20-1920.0.1-rc7.xml where R20-1920.0.1-rc7 is the version.
+      retries: Number of retries for updating the status.
+      build_id: Optional integer cidb build id of the build publishing the
+                manifest.
+
+    Raises:
+      GenerateBuildSpecException in case of failure to check-in the new
+        manifest because of a git error or the manifest is already checked-in.
+    """
+    last_error = None
+    new_manifest = manifest_version.FilterManifest(
+        manifest, whitelisted_remotes=site_config.params.EXTERNAL_REMOTES)
+    version_info = self.GetCurrentVersionInfo()
+    for _attempt in range(0, retries + 1):
+      try:
+        self.RefreshManifestCheckout()
+        self.InitializeManifestVariables(version_info)
+
+        git.CreatePushBranch(manifest_version.PUSH_BRANCH, self.manifest_dir,
+                             sync=False)
+        version = os.path.splitext(os.path.basename(manifest))[0]
+        logging.info('Publishing filtered build spec')
+        self.PublishManifest(new_manifest, version, build_id=build_id)
+        self.current_version = version
+        return self.GetLocalManifest(version)
+      except cros_build_lib.RunCommandError as e:
+        err_msg = 'Failed to generate LKGM Candidate. error: %s' % e
+        logging.error(err_msg)
+        last_error = err_msg
+
+    raise manifest_version.GenerateBuildSpecException(last_error)
+
+  def PromoteCandidate(self, retries=manifest_version.NUM_RETRIES):
+    """Promotes the current LKGM candidate to be a real versioned LKGM."""
+    assert self.current_version, 'No current manifest exists.'
+
+    last_error = None
+    path_to_candidate = self.GetLocalManifest(self.current_version)
+    assert os.path.exists(path_to_candidate), 'Candidate not found locally.'
+
+    # This may potentially fail for not being at TOT while pushing.
+    for attempt in range(0, retries + 1):
+      try:
+        if attempt > 0:
+          self.RefreshManifestCheckout()
+        git.CreatePushBranch(manifest_version.PUSH_BRANCH,
+                             self.manifest_dir, sync=False)
+        manifest_version.CreateSymlink(path_to_candidate, self.lkgm_path)
+        git.RunGit(self.manifest_dir, ['add', self.lkgm_path])
+        self.PushSpecChanges(
+            'Automatic: %s promoting %s to LKGM' % (self.build_names[0],
+                                                    self.current_version))
+        return
+      except cros_build_lib.RunCommandError as e:
+        last_error = 'Failed to promote manifest. error: %s' % e
+        logging.info(last_error)
+        logging.info('Retrying to promote manifest:  Retry %d/%d', attempt + 1,
+                     retries)
+
+    raise PromoteCandidateException(last_error)
+
+  def _ShouldGenerateBlameListSinceLKGM(self):
+    """Returns True if we should generate the blamelist."""
+    # We want to generate the blamelist only for valid pfq types and if we are
+    # building on the master branch i.e. revving the build number.
+    return (self.incr_type == 'build' and
+            config_lib.IsPFQType(self.build_type) and
+            self.build_type != constants.CHROME_PFQ_TYPE)
+
+  def GenerateBlameListSinceLKGM(self):
+    """Prints out links to all CL's that have been committed since LKGM.
+
+    Add buildbot trappings to print <a href='url'>text</a> in the waterfall for
+    each CL committed since we last had a passing build.
+    """
+    if not self._ShouldGenerateBlameListSinceLKGM():
+      logging.info('Not generating blamelist for lkgm as it is not appropriate '
+                   'for this build type.')
+      return
+    # Suppress re-printing changes we tried ourselves on paladin
+    # builders since they are redundant.
+    only_print_chumps = self.build_type == constants.PALADIN_TYPE
+    GenerateBlameList(self.cros_source, self.lkgm_path,
+                      only_print_chumps=only_print_chumps)
+
+  def GetLatestPassingSpec(self):
+    """Get the last spec file that passed in the current branch."""
+    raise NotImplementedError()
+
+
+def GenerateBlameList(source_repo, lkgm_path, only_print_chumps=False):
+  """Generate the blamelist since the specified manifest.
+
+  Args:
+    source_repo: Repository object for the source code.
+    lkgm_path: Path to LKGM manifest.
+    only_print_chumps: If True, only print changes that were chumped.
+  """
+  handler = git.Manifest(lkgm_path)
+  # Patterns for the fields extracted from 'git log --pretty=full' output.
+  reviewed_on_re = re.compile(r'\s*Reviewed-on:\s*(\S+)')
+  author_re = re.compile(r'\s*Author:.*<(\S+)@\S+>\s*')
+  committer_re = re.compile(r'\s*Commit:.*<(\S+)@\S+>\s*')
+  for rel_src_path, checkout in handler.checkouts_by_path.iteritems():
+    project = checkout['name']
+
+    # Additional case in case the repo has been removed from the manifest.
+    src_path = source_repo.GetRelativePath(rel_src_path)
+    if not os.path.exists(src_path):
+      logging.info('Detected repo removed from manifest %s' % project)
+      continue
+
+    revision = checkout['revision']
+    cmd = ['log', '--pretty=full', '%s..HEAD' % revision]
+    try:
+      result = git.RunGit(src_path, cmd)
+    except cros_build_lib.RunCommandError as ex:
+      # Git returns 128 when the revision does not exist.
+      if ex.result.returncode != 128:
+        raise
+      logging.warning('Detected branch removed from local checkout.')
+      logging.PrintBuildbotStepWarnings()
+      return
+    current_author = None
+    current_committer = None
+    # Drop undecodable bytes; git output may contain non-ASCII names.
+    for line in unicode(result.output, 'ascii', 'ignore').splitlines():
+      author_match = author_re.match(line)
+      if author_match:
+        current_author = author_match.group(1)
+
+      committer_match = committer_re.match(line)
+      if committer_match:
+        current_committer = committer_match.group(1)
+
+      review_match = reviewed_on_re.match(line)
+      if review_match:
+        review = review_match.group(1)
+        _, _, change_number = review.rpartition('/')
+        if not current_author:
+          logging.notice('Failed to locate author before the line of review: '
+                         '%s. Author name is set to <Unknown>', line)
+          current_author = '<Unknown>'
+        items = [
+            os.path.basename(project),
+            current_author,
+            change_number,
+        ]
+        # TODO(phobbs) verify the domain of the email address as well.
+        # NOTE(review): a committer outside this bot list presumably means the
+        # CL bypassed the CQ, hence the CHUMP tag — confirm the account list.
+        if current_committer not in ('chrome-bot', 'chrome-internal-fetch',
+                                     'chromeos-commit-bot', '3su6n15k.default'):
+          items.insert(0, 'CHUMP')
+        elif only_print_chumps:
+          continue
+        logging.PrintBuildbotLink(' | '.join(items), review)
diff --git a/cbuildbot/lkgm_manager_unittest b/cbuildbot/lkgm_manager_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/lkgm_manager_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/lkgm_manager_unittest.py b/cbuildbot/lkgm_manager_unittest.py
new file mode 100644
index 0000000..eacd679
--- /dev/null
+++ b/cbuildbot/lkgm_manager_unittest.py
@@ -0,0 +1,462 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for lkgm_manager"""
+
+from __future__ import print_function
+
+import contextlib
+import mock
+import os
+import tempfile
+from xml.dom import minidom
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import lkgm_manager
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot import repository
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import patch as cros_patch
+
+site_config = config_lib.GetConfig()
+
+
+# Canned version data used across the tests below; FAKE_VERSION_STRING_NEXT
+# is the expected result of incrementing FAKE_VERSION_STRING once.
+FAKE_VERSION_STRING = '1.2.4-rc3'
+FAKE_VERSION_STRING_NEXT = '1.2.4-rc4'
+CHROME_BRANCH = '13'
+
+# Fake contents of a chromeos version file matching the strings above.
+FAKE_VERSION = """
+CHROMEOS_BUILD=1
+CHROMEOS_BRANCH=2
+CHROMEOS_PATCH=4
+CHROME_BRANCH=13
+"""
+
+
+# pylint: disable=protected-access
+
+
+class LKGMCandidateInfoTest(cros_test_lib.TestCase):
+  """Test methods testing methods in _LKGMCandidateInfo class."""
+
+  def testLoadFromString(self):
+    """Tests whether we can load from a string."""
+    info = lkgm_manager._LKGMCandidateInfo(version_string=FAKE_VERSION_STRING,
+                                           chrome_branch=CHROME_BRANCH)
+    self.assertEqual(info.VersionString(), FAKE_VERSION_STRING)
+
+  def testIncrementVersionPatch(self):
+    """Tests whether we can increment a lkgm info."""
+    info = lkgm_manager._LKGMCandidateInfo(version_string=FAKE_VERSION_STRING,
+                                           chrome_branch=CHROME_BRANCH)
+    info.IncrementVersion()
+    self.assertEqual(info.VersionString(), FAKE_VERSION_STRING_NEXT)
+
+  def testVersionCompare(self):
+    """Tests whether our comparision method works."""
+    info0 = lkgm_manager._LKGMCandidateInfo('5.2.3-rc100')
+    info1 = lkgm_manager._LKGMCandidateInfo('1.2.3-rc1')
+    info2 = lkgm_manager._LKGMCandidateInfo('1.2.3-rc2')
+    info3 = lkgm_manager._LKGMCandidateInfo('1.2.200-rc1')
+    info4 = lkgm_manager._LKGMCandidateInfo('1.4.3-rc1')
+
+    self.assertGreater(info0, info1)
+    self.assertGreater(info0, info2)
+    self.assertGreater(info0, info3)
+    self.assertGreater(info0, info4)
+    self.assertGreater(info2, info1)
+    self.assertGreater(info3, info1)
+    self.assertGreater(info3, info2)
+    self.assertGreater(info4, info1)
+    self.assertGreater(info4, info2)
+    self.assertGreater(info4, info3)
+    self.assertEqual(info0, info0)
+    self.assertEqual(info1, info1)
+    self.assertEqual(info2, info2)
+    self.assertEqual(info3, info3)
+    self.assertEqual(info4, info4)
+    self.assertNotEqual(info0, info1)
+    self.assertNotEqual(info0, info2)
+    self.assertNotEqual(info0, info3)
+    self.assertNotEqual(info0, info4)
+    self.assertNotEqual(info1, info0)
+    self.assertNotEqual(info1, info2)
+    self.assertNotEqual(info1, info3)
+    self.assertNotEqual(info1, info4)
+    self.assertNotEqual(info2, info0)
+    self.assertNotEqual(info2, info1)
+    self.assertNotEqual(info2, info3)
+    self.assertNotEqual(info2, info4)
+    self.assertNotEqual(info3, info0)
+    self.assertNotEqual(info3, info1)
+    self.assertNotEqual(info3, info2)
+    self.assertNotEqual(info3, info4)
+    self.assertNotEqual(info4, info0)
+    self.assertNotEqual(info4, info1)
+    self.assertNotEqual(info4, info1)
+    self.assertNotEqual(info4, info3)
+
+
+@contextlib.contextmanager
+def TemporaryManifest():
+  """Yields a named temp file containing a minimal, empty <manifest/> XML."""
+  with tempfile.NamedTemporaryFile() as f:
+    # Create fake but empty manifest file.
+    new_doc = minidom.getDOMImplementation().createDocument(
+        None, 'manifest', None)
+    print(new_doc.toxml())
+    new_doc.writexml(f)
+    # Flush so readers opening f.name see the written contents.
+    f.flush()
+    yield f
+
+
+class LKGMManagerTest(cros_test_lib.MockTempDirTestCase):
+  """Tests for the BuildSpecs manager."""
+
+  def setUp(self):
+    # Stub out git.CreatePushBranch so no real git repo/remote is needed.
+    self.push_mock = self.PatchObject(git, 'CreatePushBranch')
+
+    self.source_repo = 'ssh://source/repo'
+    self.manifest_repo = 'ssh://manifest/repo'
+    self.version_file = 'version-file.sh'
+    self.branch = 'master'
+    self.build_name = 'x86-generic'
+    self.incr_type = 'branch'
+
+    # Create tmp subdirs based on the one provided TempDirMixin.
+    self.tmpdir = os.path.join(self.tempdir, "base")
+    osutils.SafeMakedirs(self.tmpdir)
+    self.tmpmandir = os.path.join(self.tempdir, "man")
+    osutils.SafeMakedirs(self.tmpmandir)
+
+    repo = repository.RepoRepository(
+        self.source_repo, self.tmpdir, self.branch, depth=1)
+    self.manager = lkgm_manager.LKGMManager(
+        repo, self.manifest_repo, self.build_name, constants.PFQ_TYPE, 'branch',
+        force=False, branch=self.branch, dry_run=True)
+    self.manager.manifest_dir = self.tmpmandir
+    self.manager.lkgm_path = os.path.join(
+        self.tmpmandir, constants.LKGM_MANIFEST)
+
+    self.manager.all_specs_dir = '/LKGM/path'
+    manifest_dir = self.manager.manifest_dir
+    self.manager.specs_for_builder = os.path.join(manifest_dir,
+                                                  self.manager.rel_working_dir,
+                                                  'build-name', '%(builder)s')
+    # Avoid sleeping between retries during tests.
+    self.manager.SLEEP_TIMEOUT = 0
+
+  def _GetPathToManifest(self, info):
+    """Returns the expected manifest path for |info| under all_specs_dir."""
+    return os.path.join(self.manager.all_specs_dir, '%s.xml' %
+                        info.VersionString())
+
+  def testCreateNewCandidate(self):
+    """Tests that we can create a new candidate and uprev an old rc."""
+    # Let's stub out other LKGMManager calls cause they're already
+    # unit tested.
+
+    my_info = lkgm_manager._LKGMCandidateInfo('1.2.3')
+    most_recent_candidate = lkgm_manager._LKGMCandidateInfo('1.2.3-rc12')
+    self.manager.latest = most_recent_candidate.VersionString()
+
+    # With latest at rc12, the new candidate should come out as rc13.
+    new_candidate = lkgm_manager._LKGMCandidateInfo('1.2.3-rc13')
+    new_manifest = 'some_manifest'
+
+    build_id = 59271
+
+    self.PatchObject(lkgm_manager.LKGMManager, 'CheckoutSourceCode')
+    self.PatchObject(lkgm_manager.LKGMManager, 'CreateManifest',
+                     return_value=new_manifest)
+    self.PatchObject(lkgm_manager.LKGMManager, 'HasCheckoutBeenBuilt',
+                     return_value=False)
+
+    # Do manifest refresh work.
+    self.PatchObject(lkgm_manager.LKGMManager, 'RefreshManifestCheckout')
+    self.PatchObject(lkgm_manager.LKGMManager, 'GetCurrentVersionInfo',
+                     return_value=my_info)
+    init_mock = self.PatchObject(lkgm_manager.LKGMManager,
+                                 'InitializeManifestVariables')
+
+    # Publish new candidate.
+    publish_mock = self.PatchObject(lkgm_manager.LKGMManager, 'PublishManifest')
+
+    candidate_path = self.manager.CreateNewCandidate(build_id=build_id)
+    self.assertEqual(candidate_path, self._GetPathToManifest(new_candidate))
+
+    publish_mock.assert_called_once_with(new_manifest,
+                                         new_candidate.VersionString(),
+                                         build_id=build_id)
+    init_mock.assert_called_once_with(my_info)
+    self.push_mock.assert_called_once_with(mock.ANY, mock.ANY, sync=False)
+
+  def testCreateFromManifest(self):
+    """Tests that we can create a new candidate from another manifest."""
+    # Let's stub out other LKGMManager calls cause they're already
+    # unit tested.
+
+    version = '2010.0.0-rc7'
+    my_info = lkgm_manager._LKGMCandidateInfo('2010.0.0')
+    new_candidate = lkgm_manager._LKGMCandidateInfo(version)
+    manifest = ('/tmp/manifest-versions-internal/paladin/buildspecs/'
+                '20/%s.xml' % version)
+    new_manifest = '/path/to/tmp/file.xml'
+
+    build_id = 20162
+
+    filter_mock = self.PatchObject(manifest_version, 'FilterManifest',
+                                   return_value=new_manifest)
+
+    # Do manifest refresh work.
+    self.PatchObject(lkgm_manager.LKGMManager, 'GetCurrentVersionInfo',
+                     return_value=my_info)
+    self.PatchObject(lkgm_manager.LKGMManager, 'RefreshManifestCheckout')
+    init_mock = self.PatchObject(lkgm_manager.LKGMManager,
+                                 'InitializeManifestVariables')
+
+    # Publish new candidate.
+    publish_mock = self.PatchObject(lkgm_manager.LKGMManager, 'PublishManifest')
+
+    candidate_path = self.manager.CreateFromManifest(manifest,
+                                                     build_id=build_id)
+    self.assertEqual(candidate_path, self._GetPathToManifest(new_candidate))
+    self.assertEqual(self.manager.current_version, version)
+
+    filter_mock.assert_called_once_with(
+        manifest, whitelisted_remotes=site_config.params.EXTERNAL_REMOTES)
+    publish_mock.assert_called_once_with(new_manifest, version,
+                                         build_id=build_id)
+    init_mock.assert_called_once_with(my_info)
+    self.push_mock.assert_called_once_with(mock.ANY, mock.ANY, sync=False)
+
+  def testCreateNewCandidateReturnNoneIfNoWorkToDo(self):
+    """Tests that we return nothing if there is nothing to create."""
+    new_manifest = 'some_manifest'
+    my_info = lkgm_manager._LKGMCandidateInfo('1.2.3')
+    self.PatchObject(lkgm_manager.LKGMManager, 'CheckoutSourceCode')
+    self.PatchObject(lkgm_manager.LKGMManager, 'CreateManifest',
+                     return_value=new_manifest)
+    self.PatchObject(lkgm_manager.LKGMManager, 'RefreshManifestCheckout')
+    self.PatchObject(lkgm_manager.LKGMManager, 'GetCurrentVersionInfo',
+                     return_value=my_info)
+    init_mock = self.PatchObject(lkgm_manager.LKGMManager,
+                                 'InitializeManifestVariables')
+    # Pretending the checkout was already built makes CreateNewCandidate
+    # bail out and return None.
+    self.PatchObject(lkgm_manager.LKGMManager, 'HasCheckoutBeenBuilt',
+                     return_value=True)
+
+    candidate = self.manager.CreateNewCandidate()
+    self.assertEqual(candidate, None)
+    init_mock.assert_called_once_with(my_info)
+
+  def _CreateManifest(self):
+    """Returns a created test manifest in tmpdir with its dir_pfx."""
+    self.manager.current_version = '1.2.4-rc21'
+    dir_pfx = CHROME_BRANCH
+    manifest = os.path.join(self.manager.manifest_dir,
+                            self.manager.rel_working_dir, 'buildspecs',
+                            dir_pfx, '1.2.4-rc21.xml')
+    osutils.Touch(manifest)
+    return manifest, dir_pfx
+
+  def testGenerateBlameListSinceLKGM(self):
+    """Tests that we can generate a blamelist from two commit messages.
+
+    This test tests the functionality of generating a blamelist for a git log.
+    Note in this test there are two commit messages, one commited by the
+    Commit Queue and another from Non-Commit Queue.  We test the correct
+    handling in both cases.
+    """
+    fake_git_log = """Author: Sammy Sosa <fake@fake.com>
+    Commit: Chris Sosa <sosa@chromium.org>
+
+    Date:   Mon Aug 8 14:52:06 2011 -0700
+
+    Add in a test for cbuildbot
+
+    TEST=So much testing
+    BUG=chromium-os:99999
+
+    Change-Id: Ib72a742fd2cee3c4a5223b8easwasdgsdgfasdf
+    Reviewed-on: https://chromium-review.googlesource.com/1234
+    Reviewed-by: Fake person <fake@fake.org>
+    Tested-by: Sammy Sosa <fake@fake.com>
+    Author: Sammy Sosa <fake@fake.com>
+    Commit: Gerrit <chrome-bot@chromium.org>
+
+    Date:   Mon Aug 8 14:52:06 2011 -0700
+
+    Add in a test for cbuildbot
+
+    TEST=So much testing
+    BUG=chromium-os:99999
+
+    Change-Id: Ib72a742fd2cee3c4a5223b8easwasdgsdgfasdf
+    Reviewed-on: https://chromium-review.googlesource.com/1235
+    Reviewed-by: Fake person <fake@fake.org>
+    Tested-by: Sammy Sosa <fake@fake.com>
+    """
+    self.manager.incr_type = 'build'
+    self.PatchObject(cros_build_lib, 'RunCommand', side_effect=Exception())
+    exists_mock = self.PatchObject(os.path, 'exists', return_value=True)
+    link_mock = self.PatchObject(logging, 'PrintBuildbotLink')
+
+    project = {
+        'name': 'fake/repo',
+        'path': 'fake/path',
+        'revision': '1234567890',
+    }
+    fake_project_handler = mock.Mock(spec=git.Manifest)
+    fake_project_handler.checkouts_by_path = {project['path']: project}
+    self.PatchObject(git, 'Manifest', return_value=fake_project_handler)
+
+    fake_result = cros_build_lib.CommandResult(output=fake_git_log)
+    self.PatchObject(git, 'RunGit', return_value=fake_result)
+
+    self.manager.GenerateBlameListSinceLKGM()
+
+    exists_mock.assert_called_once_with(
+        os.path.join(self.tmpdir, project['path']))
+    # First CL (non-bot committer) is tagged CHUMP; second (chrome-bot) isn't.
+    link_mock.assert_has_calls([
+        mock.call('CHUMP | repo | fake | 1234',
+                  'https://chromium-review.googlesource.com/1234'),
+        mock.call('repo | fake | 1235',
+                  'https://chromium-review.googlesource.com/1235'),
+    ])
+
+  def testAddChromeVersionToManifest(self):
+    """Tests whether we can write the chrome version to the manifest file."""
+    with TemporaryManifest() as f:
+      chrome_version = '35.0.1863.0'
+      # Write the chrome element to manifest.
+      self.manager._AddChromeVersionToManifest(f.name, chrome_version)
+
+      # Read the manifest file.
+      new_doc = minidom.parse(f.name)
+      elements = new_doc.getElementsByTagName(lkgm_manager.CHROME_ELEMENT)
+      self.assertEqual(len(elements), 1)
+      self.assertEqual(
+          elements[0].getAttribute(lkgm_manager.CHROME_VERSION_ATTR),
+          chrome_version)
+
+  def testAddLKGMToManifest(self, present=True):
+    """Tests whether we can write the LKGM version to the manifest file."""
+    with TemporaryManifest() as f:
+      # Set up LGKM symlink.
+      if present:
+        lkgm_version = '6377.0.0-rc1'
+        os.makedirs(os.path.dirname(self.manager.lkgm_path))
+        os.symlink('../foo/%s.xml' % lkgm_version, self.manager.lkgm_path)
+
+      # Write the chrome element to manifest.
+      self.manager._AddLKGMToManifest(f.name)
+
+      # Read the manifest file.
+      new_doc = minidom.parse(f.name)
+      elements = new_doc.getElementsByTagName(lkgm_manager.LKGM_ELEMENT)
+      if present:
+        self.assertEqual(len(elements), 1)
+        self.assertEqual(
+            elements[0].getAttribute(lkgm_manager.LKGM_VERSION_ATTR),
+            lkgm_version)
+      else:
+        # No symlink: _AddLKGMToManifest should leave the manifest untouched.
+        self.assertEqual(len(elements), 0)
+
+  def testAddLKGMToManifestWithMissingFile(self):
+    """Tests writing the LKGM version when LKGM.xml is missing."""
+    self.testAddLKGMToManifest(present=False)
+
+  def testAddPatchesToManifest(self):
+    """Tests whether we can add a fake patch to an empty manifest file.
+
+    This test creates an empty xml file with just manifest/ tag in it then
+    runs the AddPatchesToManifest with one mocked out GerritPatch and ensures
+    the newly generated manifest has the correct patch information afterwards.
+    """
+    with TemporaryManifest() as f:
+      gerrit_patch = cros_patch.GerritFetchOnlyPatch(
+          'https://host/chromite/tacos',
+          'chromite/tacos',
+          'refs/changes/11/12345/4',
+          'master',
+          'cros-internal',
+          '7181e4b5e182b6f7d68461b04253de095bad74f9',
+          'I47ea30385af60ae4cc2acc5d1a283a46423bc6e1',
+          '12345',
+          '4',
+          'foo@chromium.org',
+          1,
+          1,
+          3)
+
+      self.manager._AddPatchesToManifest(f.name, [gerrit_patch])
+
+      new_doc = minidom.parse(f.name)
+      element = new_doc.getElementsByTagName(
+          lkgm_manager.PALADIN_COMMIT_ELEMENT)[0]
+
+      self.assertEqual(element.getAttribute(
+          cros_patch.ATTR_CHANGE_ID), gerrit_patch.change_id)
+      self.assertEqual(element.getAttribute(
+          cros_patch.ATTR_COMMIT), gerrit_patch.commit)
+      self.assertEqual(element.getAttribute(cros_patch.ATTR_PROJECT),
+                       gerrit_patch.project)
+      self.assertEqual(element.getAttribute(cros_patch.ATTR_REMOTE),
+                       gerrit_patch.remote)
+      self.assertEqual(element.getAttribute(cros_patch.ATTR_BRANCH),
+                       gerrit_patch.tracking_branch)
+      self.assertEqual(element.getAttribute(cros_patch.ATTR_REF),
+                       gerrit_patch.ref)
+      self.assertEqual(
+          element.getAttribute(cros_patch.ATTR_OWNER_EMAIL),
+          gerrit_patch.owner_email)
+      self.assertEqual(
+          element.getAttribute(cros_patch.ATTR_PROJECT_URL),
+          gerrit_patch.project_url)
+      self.assertEqual(
+          element.getAttribute(cros_patch.ATTR_PATCH_NUMBER),
+          gerrit_patch.patch_number)
+      self.assertEqual(
+          element.getAttribute(cros_patch.ATTR_FAIL_COUNT),
+          str(gerrit_patch.fail_count))
+      self.assertEqual(
+          element.getAttribute(cros_patch.ATTR_PASS_COUNT),
+          str(gerrit_patch.pass_count))
+      self.assertEqual(
+          element.getAttribute(cros_patch.ATTR_TOTAL_FAIL_COUNT),
+          str(gerrit_patch.total_fail_count))
+
+  def testAddPatchesToManifestWithUnicode(self):
+    """Tests to add a fake patch with unicode to an empty manifest file.
+
+    Test whether _AddPatchesToManifest can add to a patch with unicode to
+    manifest file without any UnicodeError exception and that the decoded
+    manifest has the original unicode string.
+    """
+    with TemporaryManifest() as f:
+      gerrit_patch = cros_patch.GerritFetchOnlyPatch(
+          'https://host/chromite/tacos',
+          'chromite/tacos',
+          'refs/changes/11/12345/4',
+          'master',
+          'cros-internal',
+          '7181e4b5e182b6f7d68461b04253de095bad74f9',
+          'I47ea30385af60ae4cc2acc5d1a283a46423bc6e1',
+          '12345',
+          '4',
+          u'foo\xe9@chromium.org',
+          1,
+          1,
+          3)
+
+      self.manager._AddPatchesToManifest(f.name, [gerrit_patch])
+
+      new_doc = minidom.parse(f.name)
+      element = new_doc.getElementsByTagName(
+          lkgm_manager.PALADIN_COMMIT_ELEMENT)[0]
+
+      self.assertEqual(
+          element.getAttribute(cros_patch.ATTR_OWNER_EMAIL),
+          gerrit_patch.owner_email)
diff --git a/cbuildbot/manifest_version.py b/cbuildbot/manifest_version.py
new file mode 100644
index 0000000..92e5de3
--- /dev/null
+++ b/cbuildbot/manifest_version.py
@@ -0,0 +1,1079 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A library to generate and store the manifests for cros builders to use."""
+
+from __future__ import print_function
+
+import cPickle
+import fnmatch
+import glob
+import os
+import re
+import shutil
+import tempfile
+from xml.dom import minidom
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import repository
+from chromite.lib import cidb
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import timeout_util
+
+
# Global site configuration, loaded once at import time.
site_config = config_lib.GetConfig()


# Google Storage location under which builders publish BuilderStatus files.
BUILD_STATUS_URL = (
    '%s/builder-status' % site_config.params.MANIFEST_VERSIONS_GS_URL)
# Temporary local branch used when committing to the manifest repositories.
PUSH_BRANCH = 'temp_auto_checkin_branch'
# Default retry count for status fetches and buildspec generation.
NUM_RETRIES = 20

# XML element and attribute names used in repo manifests.
MANIFEST_ELEMENT = 'manifest'
DEFAULT_ELEMENT = 'default'
PROJECT_ELEMENT = 'project'
REMOTE_ELEMENT = 'remote'
PROJECT_NAME_ATTR = 'name'
PROJECT_REMOTE_ATTR = 'remote'
PROJECT_GROUP_ATTR = 'groups'
REMOTE_NAME_ATTR = 'name'

# XML element/attribute names for pending-CL entries in paladin manifests.
PALADIN_COMMIT_ELEMENT = 'pending_commit'
PALADIN_PROJECT_ATTR = 'project'
+
+
class FilterManifestException(Exception):
  """Raised when filtering the internal manifest fails."""
+
+
class VersionUpdateException(Exception):
  """Raised when the version file cannot be updated."""
+
+
class StatusUpdateException(Exception):
  """Raised when updating a builder's status fails."""
+
+
class GenerateBuildSpecException(Exception):
  """Raised when generating a buildspec for the build fails."""
+
+
class BuildSpecsValueError(Exception):
  """Raised when encountering invalid buildspec values."""
+
+
def RefreshManifestCheckout(manifest_dir, manifest_repo):
  """Ensure a pristine manifest-versions checkout exists at |manifest_dir|.

  An existing checkout that already points at |manifest_repo| is
  garbage-collected and reset to its upstream state; anything else is
  wiped and cloned from scratch.

  Args:
    manifest_dir: Directory that should hold the checkout.
    manifest_repo: URL of the manifest-versions repository.
  """
  def _TryReuseExisting():
    """Return True if the on-disk checkout was successfully refreshed."""
    result = git.RunGit(manifest_dir, ['config', 'remote.origin.url'],
                        error_code_ok=True)
    if result.returncode != 0 or result.output.rstrip() != manifest_repo:
      # Directory exists but tracks some other remote; re-clone instead.
      return False
    logging.info('Updating manifest-versions checkout.')
    try:
      git.RunGit(manifest_dir, ['gc', '--auto'])
      git.CleanAndCheckoutUpstream(manifest_dir)
    except cros_build_lib.RunCommandError:
      logging.warning('Could not update manifest-versions checkout.')
      return False
    return True

  if os.path.exists(manifest_dir):
    if _TryReuseExisting():
      return
  else:
    logging.info('No manifest-versions checkout exists at %s', manifest_dir)

  logging.info('Cloning fresh manifest-versions checkout.')
  osutils.RmDir(manifest_dir, ignore_missing=True)
  repository.CloneGitRepo(manifest_dir, manifest_repo)
+
+
def _PushGitChanges(git_repo, message, dry_run=False, push_to=None):
  """Commit all local changes in |git_repo| and push them upstream.

  Args:
    git_repo: Path of the git repository to push from.
    message: Commit message to use.
    dry_run: If True, don't actually push changes to the server.
    push_to: A git.RemoteRef object naming the remote branch to push to.
      Defaults to the tracking branch of the current branch.
  """
  if push_to is None:
    # TODO(akeshet): Clean up git.GetTrackingBranch to always or never return a
    # tuple.
    # pylint: disable=unpacking-non-sequence
    push_to = git.GetTrackingBranch(
        git_repo, for_checkout=False, for_push=True)

  git.RunGit(git_repo, ['add', '-A'])

  try:
    git.RunGit(git_repo, ['commit', '-m', message])
  except cros_build_lib.RunCommandError:
    # During a dry run somebody may already have committed our change,
    # making the commit fail as a no-op; tolerate that for dry runs only.
    if not dry_run:
      raise
    return

  git.GitPush(git_repo, PUSH_BRANCH, push_to, skip=dry_run)
+
+
def CreateSymlink(src_file, dest_file):
  """Create a relative symlink at |dest_file| pointing at |src_file|.

  Any existing file at |dest_file| is removed first and the destination
  directory is created if needed, which makes repeated calls safe when
  the link location is used to record the status of |src_file|.

  Args:
    src_file: Path the symlink should point at.
    dest_file: Path at which the symlink is created.
  """
  link_dir = os.path.dirname(dest_file)
  osutils.SafeUnlink(dest_file)
  osutils.SafeMakedirs(link_dir)

  relative_target = os.path.relpath(src_file, link_dir)
  logging.debug('Linking %s to %s', relative_target, dest_file)
  os.symlink(relative_target, dest_file)
+
+
class VersionInfo(object):
  """Class to encapsulate the Chrome OS version info scheme.

  You can instantiate this class in three ways.
  1) using a version file, specifically chromeos_version.sh,
     which contains the version information.
  2) passing in a string with the 3 version components.
  3) using a source repo and calling from_repo().

  Args:
    version_string: Optional 3 component version string to parse.  Contains:
        build_number: release build number.
        branch_build_number: current build number on a branch.
        patch_number: patch number.
    chrome_branch: If version_string specified, specify chrome_branch i.e. 13.
    incr_type: How we should increment this version -
        chrome_branch|build|branch|patch
    version_file: version file location.
  """
  # Pattern for matching build name format.  Includes chrome branch hack.
  # The separator dots are escaped so that strings such as '1a2b3' are not
  # accepted as version strings.
  VER_PATTERN = r'(\d+)\.(\d+)\.(\d+)(?:-R(\d+))*'
  KEY_VALUE_PATTERN = r'%s=(\d+)\s*$'
  VALID_INCR_TYPES = ('chrome_branch', 'build', 'branch', 'patch')

  def __init__(self, version_string=None, chrome_branch=None,
               incr_type='build', version_file=None):
    if version_file:
      self.version_file = version_file
      logging.debug('Using VERSION_FILE = %s', version_file)
      self._LoadFromFile()
    else:
      # Raises AttributeError (match is None) if version_string does not
      # look like a dotted 3-component version.
      match = re.search(self.VER_PATTERN, version_string)
      self.build_number = match.group(1)
      self.branch_build_number = match.group(2)
      self.patch_number = match.group(3)
      self.chrome_branch = chrome_branch
      self.version_file = None

    self.incr_type = incr_type

  @classmethod
  def from_repo(cls, source_repo, **kwargs):
    """Return a VersionInfo built from the version file in |source_repo|."""
    kwargs['version_file'] = os.path.join(source_repo, constants.VERSION_FILE)
    return cls(**kwargs)

  def _LoadFromFile(self):
    """Read the version file and set the version components"""
    with open(self.version_file, 'r') as version_fh:
      for line in version_fh:
        if not line.strip():
          continue

        match = self.FindValue('CHROME_BRANCH', line)
        if match:
          self.chrome_branch = match
          logging.debug('Set the Chrome branch number to:%s',
                        self.chrome_branch)
          continue

        match = self.FindValue('CHROMEOS_BUILD', line)
        if match:
          self.build_number = match
          logging.debug('Set the build version to:%s', self.build_number)
          continue

        match = self.FindValue('CHROMEOS_BRANCH', line)
        if match:
          self.branch_build_number = match
          logging.debug('Set the branch version to:%s',
                        self.branch_build_number)
          continue

        match = self.FindValue('CHROMEOS_PATCH', line)
        if match:
          self.patch_number = match
          logging.debug('Set the patch version to:%s', self.patch_number)
          continue

    logging.debug(self.VersionString())

  def FindValue(self, key, line):
    """Given the key find the value from the line, if it finds key = value

    Args:
      key: key to look for
      line: string to search

    Returns:
      None: on a non match
      value: for a matching key
    """
    match = re.search(self.KEY_VALUE_PATTERN % (key,), line)
    return match.group(1) if match else None

  def IncrementVersion(self):
    """Increment the in-memory version per self.incr_type.

    Note: this does not touch the version file; see UpdateVersionFile.

    Returns:
      The new version string.

    Raises:
      VersionUpdateException: If self.incr_type is not a valid type.
    """
    if not self.incr_type or self.incr_type not in self.VALID_INCR_TYPES:
      raise VersionUpdateException('Need to specify the part of the version to'
                                   ' increment')

    if self.incr_type == 'chrome_branch':
      self.chrome_branch = str(int(self.chrome_branch) + 1)

    # Increment build_number for 'chrome_branch' incr_type to avoid
    # crbug.com/213075.
    if self.incr_type in ('build', 'chrome_branch'):
      self.build_number = str(int(self.build_number) + 1)
      self.branch_build_number = '0'
      self.patch_number = '0'
    elif self.incr_type == 'branch' and self.patch_number == '0':
      self.branch_build_number = str(int(self.branch_build_number) + 1)
    else:
      self.patch_number = str(int(self.patch_number) + 1)

    return self.VersionString()

  def UpdateVersionFile(self, message, dry_run, push_to=None):
    """Update the version file with our current version.

    Args:
      message: Commit message.
      dry_run: Git dryrun.
      push_to: A git.RemoteRef object.

    Raises:
      VersionUpdateException: If this instance has no associated
        version_file.
    """

    if not self.version_file:
      raise VersionUpdateException('Cannot call UpdateVersionFile without '
                                   'an associated version_file')

    components = (('CHROMEOS_BUILD', self.build_number),
                  ('CHROMEOS_BRANCH', self.branch_build_number),
                  ('CHROMEOS_PATCH', self.patch_number),
                  ('CHROME_BRANCH', self.chrome_branch))

    with tempfile.NamedTemporaryFile(prefix='mvp') as temp_fh:
      # Rewrite the version file line-by-line, substituting the current
      # component values into any matching KEY=value lines.
      with open(self.version_file, 'r') as source_version_fh:
        for line in source_version_fh:
          for key, value in components:
            line = re.sub(self.KEY_VALUE_PATTERN % (key,),
                          '%s=%s\n' % (key, value), line)
          temp_fh.write(line)

      temp_fh.flush()

      repo_dir = os.path.dirname(self.version_file)

      try:
        git.CreateBranch(repo_dir, PUSH_BRANCH)
        shutil.copyfile(temp_fh.name, self.version_file)
        _PushGitChanges(repo_dir, message, dry_run=dry_run, push_to=push_to)
      finally:
        # Update to the remote version that contains our changes. This is needed
        # to ensure that we don't build a release using a local commit.
        git.CleanAndCheckoutUpstream(repo_dir)

  def VersionString(self):
    """returns the version string"""
    return '%s.%s.%s' % (self.build_number, self.branch_build_number,
                         self.patch_number)

  def VersionComponents(self):
    """Return a list of ints of the version fields for comparing."""
    # A list comprehension (identical to map() on Python 2) keeps the
    # result an eagerly-evaluated, comparable list.
    return [int(x) for x in (self.build_number, self.branch_build_number,
                             self.patch_number)]

  @classmethod
  def VersionCompare(cls, version_string):
    """Useful method to return a comparable version of a LKGM string."""
    return cls(version_string).VersionComponents()

  def __cmp__(self, other):
    # Python 2 style comparison: compare component-wise, most significant
    # component first.
    sinfo = self.VersionComponents()
    oinfo = other.VersionComponents()

    for s, o in zip(sinfo, oinfo):
      if s != o:
        return -1 if s < o else 1
    return 0

  # Comparable but deliberately unhashable.
  __hash__ = None

  def BuildPrefix(self):
    """Returns the build prefix to match the buildspecs in manifest-versions"""
    if self.incr_type == 'branch':
      if self.patch_number == '0':
        return '%s.' % self.build_number
      else:
        return '%s.%s.' % (self.build_number, self.branch_build_number)
    # Default to build incr_type.
    return ''

  def __str__(self):
    return '%s(%s)' % (self.__class__, self.VersionString())
+
+
class BuilderStatus(object):
  """Snapshot of a single builder's completion state."""

  def __init__(self, status, message, dashboard_url=None):
    """Constructor for BuilderStatus.

    Args:
      status: Status string (should be one of STATUS_FAILED, STATUS_PASSED,
              STATUS_INFLIGHT, or STATUS_MISSING).
      message: A failures_lib.BuildFailureMessage object with details
               of builder failure. Or, None.
      dashboard_url: Optional url linking to builder dashboard for this build.
    """
    self.status = status
    self.message = message
    self.dashboard_url = dashboard_url

  # Predicates for interrogating self.status.

  def Failed(self):
    """Returns True if the Builder failed."""
    return self.status == constants.BUILDER_STATUS_FAILED

  def Passed(self):
    """Returns True if the Builder passed."""
    return self.status == constants.BUILDER_STATUS_PASSED

  def Inflight(self):
    """Returns True if the Builder is still inflight."""
    return self.status == constants.BUILDER_STATUS_INFLIGHT

  def Missing(self):
    """Returns True if the Builder is missing any status."""
    return self.status == constants.BUILDER_STATUS_MISSING

  def Completed(self):
    """Returns True if the Builder has completed."""
    return self.status in constants.BUILDER_COMPLETED_STATUSES

  @classmethod
  def GetCompletedStatus(cls, success):
    """Return the appropriate status constant for a completed build.

    Args:
      success: Whether the build was successful or not.
    """
    return (constants.BUILDER_STATUS_PASSED if success
            else constants.BUILDER_STATUS_FAILED)

  def AsFlatDict(self):
    """Returns a flat json-able representation of this builder status.

    Returns:
      A dictionary of the form {'status' : status, 'message' : message,
      'dashboard_url' : dashboard_url} where all values are guaranteed
      to be strings. If dashboard_url is None, the key will be excluded.
    """
    reason = None if self.message is None else self.message.reason
    flat = {
        'status': str(self.status),
        'message': str(self.message),
        'reason': str(reason),
    }
    if self.dashboard_url is not None:
      flat['dashboard_url'] = str(self.dashboard_url)
    return flat

  def AsPickledDict(self):
    """Returns a pickled dictionary representation of this builder status."""
    status_dict = {
        'status': self.status,
        'message': self.message,
        'dashboard_url': self.dashboard_url,
    }
    return cPickle.dumps(status_dict)
+
+
+class BuildSpecsManager(object):
+  """A Class to manage buildspecs and their states."""
+
+  SLEEP_TIMEOUT = 1 * 60
+
+  def __init__(self, source_repo, manifest_repo, build_names, incr_type, force,
+               branch, manifest=constants.DEFAULT_MANIFEST, dry_run=True,
+               master=False):
+    """Initializes a build specs manager.
+
+    Args:
+      source_repo: Repository object for the source code.
+      manifest_repo: Manifest repository for manifest versions / buildspecs.
+      build_names: Identifiers for the build. Must match SiteConfig
+          entries. If multiple identifiers are provided, the first item in the
+          list must be an identifier for the group.
+      incr_type: How we should increment this version - build|branch|patch
+      force: Create a new manifest even if there are no changes.
+      branch: Branch this builder is running on.
+      manifest: Manifest to use for checkout. E.g. 'full' or 'buildtools'.
+      dry_run: Whether we actually commit changes we make or not.
+      master: Whether we are the master builder.
+    """
+    self.cros_source = source_repo
+    buildroot = source_repo.directory
+    if manifest_repo.startswith(site_config.params.INTERNAL_GOB_URL):
+      self.manifest_dir = os.path.join(buildroot, 'manifest-versions-internal')
+    else:
+      self.manifest_dir = os.path.join(buildroot, 'manifest-versions')
+
+    self.manifest_repo = manifest_repo
+    self.build_names = build_names
+    self.incr_type = incr_type
+    self.force = force
+    self.branch = branch
+    self.manifest = manifest
+    self.dry_run = dry_run
+    self.master = master
+
+    # Directories and specifications are set once we load the specs.
+    self.buildspecs_dir = None
+    self.all_specs_dir = None
+    self.pass_dirs = None
+    self.fail_dirs = None
+
+    # Specs.
+    self.latest = None
+    self._latest_status = None
+    self.latest_unprocessed = None
+    self.compare_versions_fn = VersionInfo.VersionCompare
+
+    self.current_version = None
+    self.rel_working_dir = ''
+
+  def _LatestSpecFromList(self, specs):
+    """Find the latest spec in a list of specs.
+
+    Args:
+      specs: List of specs.
+
+    Returns:
+      The latest spec if specs is non-empty.
+      None otherwise.
+    """
+    if specs:
+      return max(specs, key=self.compare_versions_fn)
+
+  def _LatestSpecFromDir(self, version_info, directory):
+    """Returns the latest buildspec that match '*.xml' in a directory.
+
+    Args:
+      version_info: A VersionInfo object which will provide a build prefix
+                    to match for.
+      directory: Directory of the buildspecs.
+    """
+    if os.path.exists(directory):
+      match_string = version_info.BuildPrefix() + '*.xml'
+      specs = fnmatch.filter(os.listdir(directory), match_string)
+      return self._LatestSpecFromList([os.path.splitext(m)[0] for m in specs])
+
  def RefreshManifestCheckout(self):
    """Checks out manifest versions into the manifest directory."""
    # Delegates to the module-level helper using this manager's paths.
    RefreshManifestCheckout(self.manifest_dir, self.manifest_repo)
+
  def InitializeManifestVariables(self, version_info=None, version=None):
    """Initializes manifest-related instance variables.

    Args:
      version_info: Info class for version information of cros. If None,
                    version must be specified instead.
      version: Requested version. If None, build the latest version.

    Returns:
      Whether the requested version was found.
    """
    assert version_info or version, 'version or version_info must be specified'
    working_dir = os.path.join(self.manifest_dir, self.rel_working_dir)
    specs_for_builder = os.path.join(working_dir, 'build-name', '%(builder)s')
    self.buildspecs_dir = os.path.join(working_dir, 'buildspecs')

    # If version is specified, find out what Chrome branch it is on.
    if version is not None:
      # Buildspecs live at buildspecs/<chrome_branch>/<version>.xml; the
      # glob recovers the chrome branch directory for this version.
      dirs = glob.glob(os.path.join(self.buildspecs_dir, '*', version + '.xml'))
      if len(dirs) == 0:
        return False
      assert len(dirs) <= 1, 'More than one spec found for %s' % version
      dir_pfx = os.path.basename(os.path.dirname(dirs[0]))
      version_info = VersionInfo(chrome_branch=dir_pfx, version_string=version)
    else:
      dir_pfx = version_info.chrome_branch

    self.all_specs_dir = os.path.join(self.buildspecs_dir, dir_pfx)
    self.pass_dirs, self.fail_dirs = [], []
    for build_name in self.build_names:
      # Per-builder pass/fail directories, grouped by chrome branch.
      specs_for_build = specs_for_builder % {'builder': build_name}
      self.pass_dirs.append(
          os.path.join(specs_for_build, constants.BUILDER_STATUS_PASSED,
                       dir_pfx))
      self.fail_dirs.append(
          os.path.join(specs_for_build, constants.BUILDER_STATUS_FAILED,
                       dir_pfx))

    # Calculate the status of the latest build, and whether the build was
    # processed.
    if version is None:
      self.latest = self._LatestSpecFromDir(version_info, self.all_specs_dir)
      if self.latest is not None:
        self._latest_status = self.GetBuildStatus(self.build_names[0],
                                                  self.latest)
        if self._latest_status.Missing():
          # A spec exists but no status was uploaded: not yet built.
          self.latest_unprocessed = self.latest

    return True
+
+  def GetBuildSpecFilePath(self, milestone, platform):
+    """Get the file path given milestone and platform versions.
+
+    Args:
+      milestone: a string representing milestone, e.g. '44'
+      platform: a string representing platform version, e.g. '7072.0.0-rc4'
+
+    Returns:
+      A string, representing the path to its spec file.
+    """
+    return os.path.join(self.buildspecs_dir, milestone, platform + '.xml')
+
  def GetCurrentVersionInfo(self):
    """Returns the current version info from the version file.

    Returns:
      A VersionInfo parsed from the checkout's version file, using this
      manager's incr_type.
    """
    version_file_path = self.cros_source.GetRelativePath(constants.VERSION_FILE)
    return VersionInfo(version_file=version_file_path, incr_type=self.incr_type)
+
+  def HasCheckoutBeenBuilt(self):
+    """Checks to see if we've previously built this checkout."""
+    if self._latest_status and self._latest_status.Passed():
+      latest_spec_file = '%s.xml' % os.path.join(
+          self.all_specs_dir, self.latest)
+      # We've built this checkout before if the manifest isn't different than
+      # the last one we've built.
+      return not self.cros_source.IsManifestDifferent(latest_spec_file)
+    else:
+      # We've never built this manifest before so this checkout is always new.
+      return False
+
+  def CreateManifest(self):
+    """Returns the path to a new manifest based on the current checkout."""
+    new_manifest = tempfile.mkstemp('manifest_versions.manifest')[1]
+    osutils.WriteFile(new_manifest,
+                      self.cros_source.ExportManifest(mark_revision=True))
+    return new_manifest
+
+  def GetNextVersion(self, version_info):
+    """Returns the next version string that should be built."""
+    version = version_info.VersionString()
+    if self.latest == version:
+      message = ('Automatic: %s - Updating to a new version number from %s' %
+                 (self.build_names[0], version))
+      version = version_info.IncrementVersion()
+      version_info.UpdateVersionFile(message, dry_run=self.dry_run)
+      assert version != self.latest
+      logging.info('Incremented version number to  %s', version)
+
+    return version
+
+  def PublishManifest(self, manifest, version, build_id=None):
+    """Publishes the manifest as the manifest for the version to others.
+
+    Args:
+      manifest: Path to manifest file to publish.
+      version: Manifest version string, e.g. 6102.0.0-rc4
+      build_id: Optional integer giving build_id of the build that is
+                publishing this manifest. If specified and non-negative,
+                build_id will be included in the commit message.
+    """
+    # Note: This commit message is used by master.cfg for figuring out when to
+    #       trigger slave builders.
+    commit_message = 'Automatic: Start %s %s %s' % (self.build_names[0],
+                                                    self.branch, version)
+    if build_id is not None and build_id >= 0:
+      commit_message += '\nCrOS-Build-Id: %s' % build_id
+
+    logging.info('Publishing build spec for: %s', version)
+    logging.info('Publishing with commit message: %s', commit_message)
+    logging.debug('Manifest contents below.\n%s', osutils.ReadFile(manifest))
+
+    # Copy the manifest into the manifest repository.
+    spec_file = '%s.xml' % os.path.join(self.all_specs_dir, version)
+    osutils.SafeMakedirs(os.path.dirname(spec_file))
+
+    shutil.copyfile(manifest, spec_file)
+
+    # Actually push the manifest.
+    self.PushSpecChanges(commit_message)
+
+  def DidLastBuildFail(self):
+    """Returns True if the last build failed."""
+    return self._latest_status and self._latest_status.Failed()
+
+  @staticmethod
+  def GetBuildStatus(builder, version, retries=NUM_RETRIES):
+    """Returns a BuilderStatus instance for the given the builder.
+
+    Args:
+      builder: Builder to look at.
+      version: Version string.
+      retries: Number of retries for getting the status.
+
+    Returns:
+      A BuilderStatus instance containing the builder status and any optional
+      message associated with the status passed by the builder.  If no status
+      is found for this builder then the returned BuilderStatus object will
+      have status STATUS_MISSING.
+    """
+    url = BuildSpecsManager._GetStatusUrl(builder, version)
+    ctx = gs.GSContext(retries=retries)
+    try:
+      output = ctx.Cat(url)
+    except gs.GSNoSuchKey:
+      return BuilderStatus(constants.BUILDER_STATUS_MISSING, None)
+
+    return BuildSpecsManager._UnpickleBuildStatus(output)
+
+  @staticmethod
+  def GetSlaveStatusesFromCIDB(master_build_id):
+    """Get statuses of slaves associated with |master_build_id|.
+
+    Args:
+      master_build_id: Master build id to check.
+
+    Returns:
+      A dictionary mapping the slave name to a status in
+      BuildStatus.ALL_STATUSES.
+    """
+    status_dict = dict()
+    db = cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()
+    assert db, 'No database connection to use.'
+    status_list = db.GetSlaveStatuses(master_build_id)
+    for d in status_list:
+      status_dict[d['build_config']] = d['status']
+    return status_dict
+
  def GetBuildersStatus(self, master_build_id, builders_array, timeout=3 * 60):
    """Get the statuses of the slave builders of the master.

    This function checks the status of slaves in |builders_array|. It
    queries CIDB for all builds associated with the |master_build_id|,
    then filters out builds that are not in |builders_array| (e.g.,
    slaves that are not important).

    Args:
      master_build_id: Master build id to check.
      builders_array: A list of the names of build configs to check.
      timeout: Number of seconds to wait for the results.

    Returns:
      A build_config name-> status dictionary of build statuses.
    """
    builders_completed = set()

    def _GetStatusesFromDB():
      """Helper function that iterates through current statuses.

      Returns a truthy message once every builder has completed, else
      None so that WaitForSuccess keeps polling.
      """
      status_dict = self.GetSlaveStatusesFromCIDB(master_build_id)
      for builder in set(builders_array) - set(status_dict.keys()):
        logging.warning('No status found for build config %s.', builder)

      # Builders that newly completed since the previous poll.
      latest_completed = set(
          [b for b, s in status_dict.iteritems() if s in
           constants.BUILDER_COMPLETED_STATUSES and b in builders_array])
      for builder in sorted(latest_completed - builders_completed):
        logging.info('Build config %s completed with status "%s".',
                     builder, status_dict[builder])
      builders_completed.update(latest_completed)

      if len(builders_completed) < len(builders_array):
        logging.info('Still waiting for the following builds to complete: %r',
                     sorted(set(builders_array).difference(builders_completed)))
        return None
      else:
        return 'Builds completed.'

    def _PrintRemainingTime(remaining):
      """Side-effect callback that logs how long remains before timeout."""
      logging.info('%s until timeout...', remaining)

    # Check for build completion until all builders report in.
    try:
      builds_succeeded = timeout_util.WaitForSuccess(
          lambda x: x is None,
          _GetStatusesFromDB,
          timeout,
          period=self.SLEEP_TIMEOUT,
          side_effect_func=_PrintRemainingTime)
    except timeout_util.TimeoutError:
      # Fall through and report whatever statuses exist so far.
      builds_succeeded = None

    # Actually fetch the BuildStatus pickles from Google Storage.
    builder_statuses = {}
    for builder in builders_array:
      logging.debug("Checking for builder %s's status", builder)
      builder_status = self.GetBuildStatus(builder, self.current_version)
      builder_statuses[builder] = builder_status

    if not builds_succeeded:
      logging.error('Not all builds finished before timeout (%d minutes)'
                    ' reached.', int((timeout / 60) + 0.5))

    return builder_statuses
+
+  @staticmethod
+  def _UnpickleBuildStatus(pickle_string):
+    """Returns a BuilderStatus instance from a pickled string."""
+    try:
+      status_dict = cPickle.loads(pickle_string)
+    except (cPickle.UnpicklingError, AttributeError, EOFError,
+            ImportError, IndexError, TypeError) as e:
+      # The above exceptions are listed as possible unpickling exceptions
+      # by http://docs.python.org/2/library/pickle
+      # In addition to the exceptions listed in the doc, we've also observed
+      # TypeError in the wild.
+      logging.warning('Failed with %r to unpickle status file.', e)
+      return BuilderStatus(constants.BUILDER_STATUS_FAILED, message=None)
+
+    return BuilderStatus(**status_dict)
+
  def GetLatestPassingSpec(self):
    """Get the last spec file that passed in the current branch.

    Returns:
      Name of the latest spec in the first builder's pass directory, or
      None if nothing has passed yet.
    """
    version_info = self.GetCurrentVersionInfo()
    return self._LatestSpecFromDir(version_info, self.pass_dirs[0])
+
+  def GetLocalManifest(self, version=None):
+    """Return path to local copy of manifest given by version.
+
+    Returns:
+      Path of |version|.  By default if version is not set, returns the path
+      of the current version.
+    """
+    if not self.all_specs_dir:
+      raise BuildSpecsValueError('GetLocalManifest failed, BuildSpecsManager '
+                                 'instance not yet initialized by call to '
+                                 'InitializeManifestVariables.')
+    if version:
+      return os.path.join(self.all_specs_dir, version + '.xml')
+    elif self.current_version:
+      return os.path.join(self.all_specs_dir, self.current_version + '.xml')
+
+    return None
+
+  def BootstrapFromVersion(self, version):
+    """Initialize a manifest from a release version returning the path to it."""
+    # Only refresh the manifest checkout if needed.
+    if not self.InitializeManifestVariables(version=version):
+      self.RefreshManifestCheckout()
+      if not self.InitializeManifestVariables(version=version):
+        raise BuildSpecsValueError('Failure in BootstrapFromVersion. '
+                                   'InitializeManifestVariables failed after '
+                                   'RefreshManifestCheckout for version '
+                                   '%s.' % version)
+
+    # Return the current manifest.
+    self.current_version = version
+    return self.GetLocalManifest(self.current_version)
+
  def CheckoutSourceCode(self):
    """Syncs the cros source to the latest git hashes for the branch."""
    # |self.manifest| selects which manifest (e.g. 'full') to sync against.
    self.cros_source.Sync(self.manifest)
+
  def GetNextBuildSpec(self, retries=NUM_RETRIES, build_id=None):
    """Returns a path to the next manifest to build.

    Args:
      retries: Number of retries for updating the status.
      build_id: Optional integer cidb id of this build, which will be used to
                annotate the manifest-version commit if one is created.

    Returns:
      Path to the local manifest to build, or None if this checkout has
      already been built and |self.force| is not set.

    Raises:
      GenerateBuildSpecException in case of failure to generate a buildspec
    """
    last_error = None
    for index in range(0, retries + 1):
      try:
        self.CheckoutSourceCode()

        version_info = self.GetCurrentVersionInfo()
        self.RefreshManifestCheckout()
        self.InitializeManifestVariables(version_info)

        if not self.force and self.HasCheckoutBeenBuilt():
          return None

        # If we're the master, always create a new build spec. Otherwise,
        # only create a new build spec if we've already built the existing
        # spec.
        if self.master or not self.latest_unprocessed:
          git.CreatePushBranch(PUSH_BRANCH, self.manifest_dir, sync=False)
          version = self.GetNextVersion(version_info)
          new_manifest = self.CreateManifest()
          self.PublishManifest(new_manifest, version, build_id=build_id)
        else:
          version = self.latest_unprocessed

        self.current_version = version
        return self.GetLocalManifest(version)
      except cros_build_lib.RunCommandError as e:
        # Likely a transient git failure; log and retry from a fresh sync.
        last_error = 'Failed to generate buildspec. error: %s' % e
        logging.error(last_error)
        logging.error('Retrying to generate buildspec:  Retry %d/%d', index + 1,
                      retries)

    # Cleanse any failed local changes and throw an exception.
    self.RefreshManifestCheckout()
    raise GenerateBuildSpecException(last_error)
+
+  @staticmethod
+  def _GetStatusUrl(builder, version):
+    """Get the status URL in Google Storage for a given builder / version.
+
+    Args:
+      builder: Builder (build config) name.
+      version: Version string for the build.
+
+    Returns:
+      Google Storage URL under BUILD_STATUS_URL for this version/builder.
+    """
+    return os.path.join(BUILD_STATUS_URL, version, builder)
+
+  def _UploadStatus(self, version, status, message=None, fail_if_exists=False,
+                    dashboard_url=None):
+    """Upload build status to Google Storage.
+
+    Args:
+      version: Version number to use. Must be a string.
+      status: Status string.
+      message: A failures_lib.BuildFailureMessage object with details
+               of builder failure, or None (default).
+      fail_if_exists: If set, fail if the status already exists.
+      dashboard_url: Optional url linking to builder dashboard for this build.
+
+    Raises:
+      GenerateBuildSpecException: If fail_if_exists is set and a status file
+        already exists for one of the builders.
+    """
+    data = BuilderStatus(status, message, dashboard_url).AsPickledDict()
+
+    gs_version = None
+    # This HTTP header tells Google Storage to return the PreconditionFailed
+    # error message if the file already exists. Unfortunately, with new versions
+    # of gsutil, PreconditionFailed is sometimes returned erroneously, so we've
+    # replaced this check with an Exists check below instead.
+    # TODO(davidjames): Revert CL:223267 when Google Storage is fixed.
+    #if fail_if_exists:
+    #  gs_version = 0
+
+    logging.info('Recording status %s for %s', status, self.build_names)
+    for build_name in self.build_names:
+      url = BuildSpecsManager._GetStatusUrl(build_name, version)
+
+      ctx = gs.GSContext(dry_run=self.dry_run)
+      # Check if the file already exists.
+      if fail_if_exists and not self.dry_run and ctx.Exists(url):
+        raise GenerateBuildSpecException('Builder already inflight')
+      # Do the actual upload. '-' makes GSContext.Copy read the payload from
+      # |input| (stdin) rather than from a local file.
+      ctx.Copy('-', url, input=data, version=gs_version)
+
+  def UploadStatus(self, success, message=None, dashboard_url=None):
+    """Uploads the status of the build for the current build spec.
+
+    Relies on self.current_version having been set (e.g. by GetNextBuildSpec).
+
+    Args:
+      success: True for success, False for failure
+      message: A failures_lib.BuildFailureMessage object with details
+               of builder failure, or None (default).
+      dashboard_url: Optional url linking to builder dashboard for this build.
+    """
+    status = BuilderStatus.GetCompletedStatus(success)
+    self._UploadStatus(self.current_version, status, message=message,
+                       dashboard_url=dashboard_url)
+
+  def SetInFlight(self, version, dashboard_url=None):
+    """Marks the buildspec as inflight in Google Storage.
+
+    Args:
+      version: Version string of the build to mark inflight.
+      dashboard_url: Optional url linking to builder dashboard for this build.
+
+    Raises:
+      GenerateBuildSpecException: If the builder is already marked inflight,
+        or if Google Storage returns any other error.
+    """
+    try:
+      self._UploadStatus(version, constants.BUILDER_STATUS_INFLIGHT,
+                         fail_if_exists=True,
+                         dashboard_url=dashboard_url)
+    except gs.GSContextPreconditionFailed:
+      raise GenerateBuildSpecException('Builder already inflight')
+    except gs.GSContextException as e:
+      raise GenerateBuildSpecException(e)
+
+  def _SetPassSymlinks(self, success_map):
+    """Marks the buildspec as passed by creating a symlink in passed dir.
+
+    Args:
+      success_map: Map of config names to whether they succeeded.
+    """
+    src_file = '%s.xml' % os.path.join(self.all_specs_dir, self.current_version)
+    # pass_dirs/fail_dirs are parallel lists indexed the same as build_names.
+    for i, build_name in enumerate(self.build_names):
+      if success_map[build_name]:
+        sym_dir = self.pass_dirs[i]
+      else:
+        sym_dir = self.fail_dirs[i]
+      dest_file = '%s.xml' % os.path.join(sym_dir, self.current_version)
+      status = BuilderStatus.GetCompletedStatus(success_map[build_name])
+      logging.debug('Build %s: %s -> %s', status, src_file, dest_file)
+      CreateSymlink(src_file, dest_file)
+
+  def PushSpecChanges(self, commit_message):
+    """Pushes any changes you have in the manifest directory.
+
+    Args:
+      commit_message: Message to use for the commit that is pushed.
+    """
+    _PushGitChanges(self.manifest_dir, commit_message, dry_run=self.dry_run)
+
+  def UpdateStatus(self, success_map, message=None, retries=NUM_RETRIES,
+                   dashboard_url=None):
+    """Updates the status of the build for the current build spec.
+
+    Args:
+      success_map: Map of config names to whether they succeeded.
+      message: Message accompanied with change in status.
+      retries: Number of retries for updating the status
+      dashboard_url: Optional url linking to builder dashboard for this build.
+
+    Raises:
+      StatusUpdateException: If the status could not be pushed after
+        |retries| + 1 attempts.
+    """
+    last_error = None
+    if message:
+      logging.info('Updating status with message %s', message)
+    for index in range(0, retries + 1):
+      try:
+        self.RefreshManifestCheckout()
+        git.CreatePushBranch(PUSH_BRANCH, self.manifest_dir, sync=False)
+        # The overall build is successful only if every config passed.
+        success = all(success_map.values())
+        commit_message = ('Automatic checkin: status=%s build_version %s for '
+                          '%s' % (BuilderStatus.GetCompletedStatus(success),
+                                  self.current_version,
+                                  self.build_names[0]))
+
+        self._SetPassSymlinks(success_map)
+
+        self.PushSpecChanges(commit_message)
+      except cros_build_lib.RunCommandError as e:
+        last_error = ('Failed to update the status for %s with the '
+                      'following error %s' % (self.build_names[0],
+                                              e.message))
+        logging.error(last_error)
+        logging.error('Retrying to generate buildspec:  Retry %d/%d', index + 1,
+                      retries)
+      else:
+        # Upload status to Google Storage as well.
+        self.UploadStatus(success, message=message, dashboard_url=dashboard_url)
+        return
+
+    # Cleanse any failed local changes and throw an exception.
+    self.RefreshManifestCheckout()
+    raise StatusUpdateException(last_error)
+
+
+def _GetDefaultRemote(manifest_dom):
+  """Returns the default remote in a manifest (if any).
+
+  Args:
+    manifest_dom: DOM Document object representing the manifest.
+
+  Returns:
+    Default remote if one exists, None otherwise.
+
+  Raises:
+    FilterManifestException: If more than one <default> element is found.
+  """
+  default_nodes = manifest_dom.getElementsByTagName(DEFAULT_ELEMENT)
+  if default_nodes:
+    if len(default_nodes) > 1:
+      raise FilterManifestException(
+          'More than one <default> element found in manifest')
+    return default_nodes[0].getAttribute(PROJECT_REMOTE_ATTR)
+  return None
+
+
+def _GetGroups(project_element):
+  """Returns the list of groups a project belongs to.
+
+  Args:
+    project_element: DOM element representing a project.
+
+  Returns:
+    List of names of the groups the project belongs to; empty list if the
+    project has no group attribute.
+  """
+  group = project_element.getAttribute(PROJECT_GROUP_ATTR)
+  if not group:
+    return []
+
+  return [s.strip() for s in group.split(',')]
+
+
+def FilterManifest(manifest, whitelisted_remotes=None, whitelisted_groups=None):
+  """Returns a path to a new manifest with whitelists enforced.
+
+  Args:
+    manifest: Path to an existing manifest that should be filtered.
+    whitelisted_remotes: Tuple of remotes to allow in the generated manifest.
+      Only projects with those remotes will be included in the external
+      manifest. (None means all remotes are acceptable)
+    whitelisted_groups: Tuple of groups to allow in the generated manifest.
+      (None means all groups are acceptable)
+
+  Returns:
+    Path to a new manifest that is a filtered copy of the original.
+
+  Raises:
+    FilterManifestException: If a project has no remote attribute and the
+      manifest defines no default remote.
+  """
+  # mkstemp leaves the file descriptor open; the filtered XML is written
+  # through it at the end of this function.
+  temp_fd, new_path = tempfile.mkstemp('external_manifest')
+  manifest_dom = minidom.parse(manifest)
+  manifest_node = manifest_dom.getElementsByTagName(MANIFEST_ELEMENT)[0]
+  remotes = manifest_dom.getElementsByTagName(REMOTE_ELEMENT)
+  projects = manifest_dom.getElementsByTagName(PROJECT_ELEMENT)
+  pending_commits = manifest_dom.getElementsByTagName(PALADIN_COMMIT_ELEMENT)
+
+  default_remote = _GetDefaultRemote(manifest_dom)
+
+  # Remove remotes that don't match our whitelist.
+  for remote_element in remotes:
+    name = remote_element.getAttribute(REMOTE_NAME_ATTR)
+    if (name is not None and
+        whitelisted_remotes and
+        name not in whitelisted_remotes):
+      manifest_node.removeChild(remote_element)
+
+  # Remove projects that fail either whitelist, remembering their names so
+  # the pending commits that reference them can be dropped as well.
+  filtered_projects = set()
+  for project_element in projects:
+    project_remote = project_element.getAttribute(PROJECT_REMOTE_ATTR)
+    project = project_element.getAttribute(PROJECT_NAME_ATTR)
+    if not project_remote:
+      if not default_remote:
+        # This should not happen for a valid manifest. Either each
+        # project must have a remote specified or there should
+        # be manifest default we could use.
+        raise FilterManifestException(
+            'Project %s has unspecified remote with no default' % project)
+      project_remote = default_remote
+
+    groups = _GetGroups(project_element)
+
+    filter_remote = (whitelisted_remotes and
+                     project_remote not in whitelisted_remotes)
+
+    filter_group = (whitelisted_groups and
+                    not any([g in groups for g in whitelisted_groups]))
+
+    if filter_remote or filter_group:
+      filtered_projects.add(project)
+      manifest_node.removeChild(project_element)
+
+  # Drop pending commit elements that reference projects removed above.
+  for commit_element in pending_commits:
+    if commit_element.getAttribute(
+        PALADIN_PROJECT_ATTR) in filtered_projects:
+      manifest_node.removeChild(commit_element)
+
+  with os.fdopen(temp_fd, 'w') as manifest_file:
+    # Filter out empty lines.
+    filtered_manifest_noempty = filter(
+        str.strip, manifest_dom.toxml('utf-8').splitlines())
+    manifest_file.write(os.linesep.join(filtered_manifest_noempty))
+
+  return new_path
diff --git a/cbuildbot/manifest_version_unittest b/cbuildbot/manifest_version_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/manifest_version_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/manifest_version_unittest.py b/cbuildbot/manifest_version_unittest.py
new file mode 100644
index 0000000..233d890
--- /dev/null
+++ b/cbuildbot/manifest_version_unittest.py
@@ -0,0 +1,407 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for manifest_version. Needs to be run inside of chroot."""
+
+from __future__ import print_function
+
+import mox
+import os
+import tempfile
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot import repository
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import git
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+
+
+FAKE_VERSION = """
+CHROMEOS_BUILD=%(build_number)s
+CHROMEOS_BRANCH=%(branch_build_number)s
+CHROMEOS_PATCH=%(patch_number)s
+CHROME_BRANCH=%(chrome_branch)s
+"""
+
+FAKE_WHITELISTED_REMOTES = ('cros', 'chromium')
+FAKE_NON_WHITELISTED_REMOTE = 'hottubtimemachine'
+
+FAKE_VERSION_STRING = '1.2.3'
+FAKE_VERSION_STRING_NEXT = '1.2.4'
+CHROME_BRANCH = '13'
+
+# Use the chromite repo to actually test git changes.
+GIT_TEST_PATH = 'chromite'
+
+MOCK_BUILD_ID = 162345
+
+
+# pylint: disable=protected-access
+
+
+class HelperMethodsTest(cros_test_lib.TempDirTestCase):
+  """Tests for module-level helper functions of manifest_version."""
+
+  def testCreateSymlink(self):
+    """Tests that we can create symlinks and remove a previous one."""
+    # Create a real source file and link to it from a separate directory.
+    srcfile = os.path.join(self.tempdir, 'src')
+    osutils.Touch(srcfile)
+    other_dir = os.path.join(self.tempdir, 'other_dir')
+    os.makedirs(other_dir)
+    destfile = os.path.join(other_dir, 'dest')
+
+    manifest_version.CreateSymlink(srcfile, destfile)
+    self.assertTrue(os.path.lexists(destfile),
+                    'Unable to create symlink to %s' % destfile)
+
+
+class VersionInfoTest(cros_test_lib.MoxTempDirTestCase):
+  """Test methods testing methods in VersionInfo class."""
+
+  @classmethod
+  def WriteFakeVersionFile(cls, version_file, version=None, chrome_branch=None):
+    """Helper method to write a version file from specified version number."""
+    if version is None:
+      version = FAKE_VERSION_STRING
+    if chrome_branch is None:
+      chrome_branch = CHROME_BRANCH
+
+    osutils.SafeMakedirs(os.path.split(version_file)[0])
+    info = manifest_version.VersionInfo(version, chrome_branch)
+    osutils.WriteFile(version_file, FAKE_VERSION % info.__dict__)
+
+  @classmethod
+  def CreateFakeVersionFile(cls, tmpdir, version=None, chrome_branch=None):
+    """Helper method to create a version file from specified version number.
+
+    Returns:
+      Path to the newly created version file.
+    """
+    version_file = tempfile.mktemp(dir=tmpdir)
+    cls.WriteFakeVersionFile(version_file, version=version,
+                             chrome_branch=chrome_branch)
+    return version_file
+
+  def testLoadFromFile(self):
+    """Tests whether we can load from a version file."""
+    version_file = self.CreateFakeVersionFile(self.tempdir)
+    info = manifest_version.VersionInfo(version_file=version_file)
+    self.assertEqual(info.VersionString(), FAKE_VERSION_STRING)
+
+  def testLoadFromRepo(self):
+    """Tests whether we can load from a source repo."""
+    version_file = os.path.join(self.tempdir, constants.VERSION_FILE)
+    self.WriteFakeVersionFile(version_file)
+    info = manifest_version.VersionInfo.from_repo(self.tempdir)
+    self.assertEqual(info.VersionString(), FAKE_VERSION_STRING)
+
+  def testLoadFromString(self):
+    """Tests whether we can load from a string."""
+    info = manifest_version.VersionInfo(FAKE_VERSION_STRING, CHROME_BRANCH)
+    self.assertEqual(info.VersionString(), FAKE_VERSION_STRING)
+
+  def CommonTestIncrementVersion(self, incr_type, version, chrome_branch=None):
+    """Common test increment.  Returns path to new incremented file.
+
+    Args:
+      incr_type: Which component to increment ('branch', 'build', ...).
+      version: Starting version string written into the fake version file.
+      chrome_branch: Optional chrome branch number for the fake file.
+
+    Returns:
+      Path to the version file after the increment.
+    """
+    message = 'Incrementing cuz I sed so'
+    self.mox.StubOutWithMock(git, 'CreateBranch')
+    self.mox.StubOutWithMock(manifest_version, '_PushGitChanges')
+    self.mox.StubOutWithMock(git, 'CleanAndCheckoutUpstream')
+
+    git.CreateBranch(self.tempdir, manifest_version.PUSH_BRANCH)
+
+    version_file = self.CreateFakeVersionFile(
+        self.tempdir, version=version, chrome_branch=chrome_branch)
+
+    manifest_version._PushGitChanges(self.tempdir, message, dry_run=False,
+                                     push_to=None)
+
+    git.CleanAndCheckoutUpstream(self.tempdir)
+    self.mox.ReplayAll()
+    info = manifest_version.VersionInfo(version_file=version_file,
+                                        incr_type=incr_type)
+    info.IncrementVersion()
+    info.UpdateVersionFile(message, dry_run=False)
+    self.mox.VerifyAll()
+    return version_file
+
+  def testIncrementVersionPatch(self):
+    """Tests whether we can increment a version file by patch number."""
+    version_file = self.CommonTestIncrementVersion('branch', '1.2.3')
+    new_info = manifest_version.VersionInfo(version_file=version_file,
+                                            incr_type='branch')
+    self.assertEqual(new_info.VersionString(), '1.2.4')
+
+  def testIncrementVersionBranch(self):
+    """Tests whether we can increment a version file by branch number."""
+    version_file = self.CommonTestIncrementVersion('branch', '1.2.0')
+    new_info = manifest_version.VersionInfo(version_file=version_file,
+                                            incr_type='branch')
+    self.assertEqual(new_info.VersionString(), '1.3.0')
+
+  def testIncrementVersionBuild(self):
+    """Tests whether we can increment a version file by build number."""
+    version_file = self.CommonTestIncrementVersion('build', '1.0.0')
+    new_info = manifest_version.VersionInfo(version_file=version_file,
+                                            incr_type='build')
+    self.assertEqual(new_info.VersionString(), '2.0.0')
+
+  def testIncrementVersionChrome(self):
+    """Tests whether we can increment the chrome version."""
+    version_file = self.CommonTestIncrementVersion(
+        'chrome_branch', version='1.0.0', chrome_branch='29')
+    new_info = manifest_version.VersionInfo(version_file=version_file)
+    self.assertEqual(new_info.VersionString(), '2.0.0')
+    self.assertEqual(new_info.chrome_branch, '30')
+
+
+class BuildSpecsManagerTest(cros_test_lib.MoxTempDirTestCase,
+                            cros_test_lib.MockTestCase):
+  """Tests for the BuildSpecs manager."""
+
+  def setUp(self):
+    os.makedirs(os.path.join(self.tempdir, '.repo'))
+    self.source_repo = 'ssh://source/repo'
+    self.manifest_repo = 'ssh://manifest/repo'
+    self.version_file = 'version-file.sh'
+    self.branch = 'master'
+    self.build_names = ['x86-generic-paladin']
+    self.incr_type = 'branch'
+
+    repo = repository.RepoRepository(
+        self.source_repo, self.tempdir, self.branch)
+    self.manager = manifest_version.BuildSpecsManager(
+        repo, self.manifest_repo, self.build_names, self.incr_type, False,
+        branch=self.branch, dry_run=True)
+
+    # Change default to something we clean up.
+    self.tmpmandir = os.path.join(self.tempdir, 'man')
+    osutils.SafeMakedirs(self.tmpmandir)
+    self.manager.manifest_dir = self.tmpmandir
+    # Shorten the sleep between attempts.
+    self.manager.SLEEP_TIMEOUT = 1
+
+  def testPublishManifestCommitMessageWithBuildId(self):
+    """Tests that PublishManifest writes a build id."""
+    expected_message = ('Automatic: Start x86-generic-paladin master 1\n'
+                        'CrOS-Build-Id: %s' % MOCK_BUILD_ID)
+    self.mox.StubOutWithMock(self.manager, 'PushSpecChanges')
+
+    info = manifest_version.VersionInfo(
+        FAKE_VERSION_STRING, CHROME_BRANCH, incr_type='branch')
+
+    # Create a fake manifest file.
+    m = os.path.join(self.tmpmandir, '1.xml')
+    osutils.Touch(m)
+    self.manager.InitializeManifestVariables(info)
+
+    self.manager.PushSpecChanges(expected_message)
+
+    self.mox.ReplayAll()
+    self.manager.PublishManifest(m, '1', build_id=MOCK_BUILD_ID)
+    self.mox.VerifyAll()
+
+  def testPublishManifestCommitMessageWithNegativeBuildId(self):
+    """Tests that PublishManifest doesn't write a negative build_id."""
+    expected_message = 'Automatic: Start x86-generic-paladin master 1'
+    self.mox.StubOutWithMock(self.manager, 'PushSpecChanges')
+
+    info = manifest_version.VersionInfo(
+        FAKE_VERSION_STRING, CHROME_BRANCH, incr_type='branch')
+
+    # Create a fake manifest file.
+    m = os.path.join(self.tmpmandir, '1.xml')
+    osutils.Touch(m)
+    self.manager.InitializeManifestVariables(info)
+
+    self.manager.PushSpecChanges(expected_message)
+
+    self.mox.ReplayAll()
+    self.manager.PublishManifest(m, '1', build_id=-1)
+    self.mox.VerifyAll()
+
+  def testPublishManifestCommitMessageWithNoneBuildId(self):
+    """Tests that PublishManifest doesn't write a non-existent build_id."""
+    expected_message = 'Automatic: Start x86-generic-paladin master 1'
+    self.mox.StubOutWithMock(self.manager, 'PushSpecChanges')
+
+    info = manifest_version.VersionInfo(
+        FAKE_VERSION_STRING, CHROME_BRANCH, incr_type='branch')
+
+    # Create a fake manifest file.
+    m = os.path.join(self.tmpmandir, '1.xml')
+    osutils.Touch(m)
+    self.manager.InitializeManifestVariables(info)
+
+    self.manager.PushSpecChanges(expected_message)
+
+    self.mox.ReplayAll()
+    self.manager.PublishManifest(m, '1')
+    self.mox.VerifyAll()
+
+  def testLoadSpecs(self):
+    """Tests whether we can load specs correctly."""
+    info = manifest_version.VersionInfo(
+        FAKE_VERSION_STRING, CHROME_BRANCH, incr_type='branch')
+    mpath = os.path.join(self.manager.manifest_dir, 'buildspecs', CHROME_BRANCH)
+    m1, m2, m3, m4 = [os.path.join(mpath, '1.2.%d.xml' % x)
+                      for x in [2, 3, 4, 5]]
+    for_build = os.path.join(self.manager.manifest_dir, 'build-name',
+                             self.build_names[0])
+
+    # Create fake buildspecs.
+    osutils.SafeMakedirs(os.path.join(mpath))
+    for m in [m1, m2, m3, m4]:
+      osutils.Touch(m)
+
+    # Fake BuilderStatus with status MISSING.
+    missing = manifest_version.BuilderStatus(constants.BUILDER_STATUS_MISSING,
+                                             None)
+
+    # Fail 1, pass 2, leave 3,4 unprocessed.
+    manifest_version.CreateSymlink(m1, os.path.join(
+        for_build, 'fail', CHROME_BRANCH, os.path.basename(m1)))
+    manifest_version.CreateSymlink(m1, os.path.join(
+        for_build, 'pass', CHROME_BRANCH, os.path.basename(m2)))
+    self.mox.StubOutWithMock(self.manager, 'GetBuildStatus')
+    self.manager.GetBuildStatus(self.build_names[0], '1.2.5').AndReturn(missing)
+    self.mox.ReplayAll()
+    self.manager.InitializeManifestVariables(info)
+    self.mox.VerifyAll()
+    self.assertEqual(self.manager.latest_unprocessed, '1.2.5')
+
+  def testLatestSpecFromDir(self):
+    """Tests whether we can get sorted specs correctly from a directory."""
+    self.mox.StubOutWithMock(repository, 'CloneGitRepo')
+    info = manifest_version.VersionInfo(
+        '99.1.2', CHROME_BRANCH, incr_type='branch')
+
+    specs_dir = os.path.join(self.manager.manifest_dir, 'buildspecs',
+                             CHROME_BRANCH)
+    m1, m2, m3, m4 = [os.path.join(specs_dir, x)
+                      for x in ['100.0.0.xml', '99.3.3.xml', '99.1.10.xml',
+                                '99.1.5.xml']]
+
+    # Create fake buildspecs.
+    osutils.SafeMakedirs(specs_dir)
+    for m in [m1, m2, m3, m4]:
+      osutils.Touch(m)
+
+    self.mox.ReplayAll()
+    spec = self.manager._LatestSpecFromDir(info, specs_dir)
+    self.mox.VerifyAll()
+    # Should be the latest on the 99.1 branch.
+    self.assertEqual(spec, '99.1.10')
+
+  def testGetNextVersionNoIncrement(self):
+    """Tests whether we can get the next version to be built correctly.
+
+    Tests without pre-existing version in manifest dir.
+    """
+    info = manifest_version.VersionInfo(
+        FAKE_VERSION_STRING, CHROME_BRANCH, incr_type='branch')
+
+    self.manager.latest = None
+    self.mox.ReplayAll()
+    version = self.manager.GetNextVersion(info)
+    self.mox.VerifyAll()
+    self.assertEqual(FAKE_VERSION_STRING, version)
+
+  def testGetNextVersionIncrement(self):
+    """Tests that we create a new version if a previous one exists."""
+    self.mox.StubOutWithMock(manifest_version.VersionInfo, 'UpdateVersionFile')
+    version_file = VersionInfoTest.CreateFakeVersionFile(self.tempdir)
+    info = manifest_version.VersionInfo(version_file=version_file,
+                                        incr_type='branch')
+    info.UpdateVersionFile(
+        'Automatic: %s - Updating to a new version number from %s' % (
+            self.build_names[0], FAKE_VERSION_STRING), dry_run=True)
+
+    self.manager.latest = FAKE_VERSION_STRING
+    self.mox.ReplayAll()
+    version = self.manager.GetNextVersion(info)
+    self.mox.VerifyAll()
+    self.assertEqual(FAKE_VERSION_STRING_NEXT, version)
+
+  def testGetNextBuildSpec(self):
+    """End-to-end test of updating the manifest."""
+    my_info = manifest_version.VersionInfo('1.2.3', chrome_branch='4')
+    self.PatchObject(manifest_version.BuildSpecsManager,
+                     'GetCurrentVersionInfo', return_value=my_info)
+    self.PatchObject(repository.RepoRepository, 'Sync')
+    self.PatchObject(repository.RepoRepository, 'ExportManifest',
+                     return_value='<manifest />')
+    rc = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
+    rc.SetDefaultCmdResult()
+
+    self.mox.ReplayAll()
+    self.manager.GetNextBuildSpec(retries=0)
+    self.manager.UpdateStatus({self.build_names[0]: True})
+    self.mox.VerifyAll()
+
+  def testUnpickleBuildStatus(self):
+    """Tests that _UnpickleBuildStatus returns the correct values."""
+    failed_msg = failures_lib.BuildFailureMessage(
+        'you failed', ['traceback'], True, 'taco', 'bot')
+    failed_input_status = manifest_version.BuilderStatus(
+        constants.BUILDER_STATUS_FAILED, failed_msg)
+    passed_input_status = manifest_version.BuilderStatus(
+        constants.BUILDER_STATUS_PASSED, None)
+
+    failed_output_status = self.manager._UnpickleBuildStatus(
+        failed_input_status.AsPickledDict())
+    passed_output_status = self.manager._UnpickleBuildStatus(
+        passed_input_status.AsPickledDict())
+    empty_string_status = self.manager._UnpickleBuildStatus('')
+
+    self.assertEqual(failed_input_status.AsFlatDict(),
+                     failed_output_status.AsFlatDict())
+    self.assertEqual(passed_input_status.AsFlatDict(),
+                     passed_output_status.AsFlatDict())
+    self.assertTrue(empty_string_status.Failed())
+
+  def _GetBuildersStatus(self, builders, status_runs):
+    """Test a call to BuildSpecsManager.GetBuildersStatus.
+
+    Args:
+      builders: List of builders to get status for.
+      status_runs: List of dictionaries of expected build and status.
+    """
+    self.mox.StubOutWithMock(manifest_version.BuildSpecsManager,
+                             'GetSlaveStatusesFromCIDB')
+    self.mox.StubOutWithMock(manifest_version.BuildSpecsManager,
+                             'GetBuildStatus')
+    for status_dict in status_runs:
+      manifest_version.BuildSpecsManager.GetSlaveStatusesFromCIDB(
+          mox.IgnoreArg()).AndReturn(status_dict)
+
+    final_status_dict = status_runs[-1]
+    for builder in builders:
+      status = manifest_version.BuilderStatus(
+          final_status_dict.get(builder), None)
+      manifest_version.BuildSpecsManager.GetBuildStatus(
+          builder, mox.IgnoreArg()).AndReturn(status)
+
+    self.mox.ReplayAll()
+    # NOTE(review): mox.IgnoreArg (the class itself, not an instance) is
+    # passed as the first argument here; presumably GetBuildersStatus does
+    # not use it in this path — confirm against the implementation.
+    statuses = self.manager.GetBuildersStatus(mox.IgnoreArg, builders)
+    self.mox.VerifyAll()
+    return statuses
+
+  def testGetBuildersStatusBothFinished(self):
+    """Tests GetBuildersStatus where both builds have finished."""
+    status_runs = [{'build1': constants.BUILDER_STATUS_FAILED,
+                    'build2': constants.BUILDER_STATUS_PASSED}]
+    statuses = self._GetBuildersStatus(['build1', 'build2'], status_runs)
+    self.assertTrue(statuses['build1'].Failed())
+    self.assertTrue(statuses['build2'].Passed())
+
+  def testGetBuildersStatusLoop(self):
+    """Tests GetBuildersStatus where builds are inflight."""
+    status_runs = [{'build1': constants.BUILDER_STATUS_INFLIGHT,
+                    'build2': constants.BUILDER_STATUS_MISSING},
+                   {'build1': constants.BUILDER_STATUS_FAILED,
+                    'build2': constants.BUILDER_STATUS_INFLIGHT},
+                   {'build1': constants.BUILDER_STATUS_FAILED,
+                    'build2': constants.BUILDER_STATUS_PASSED}]
+    statuses = self._GetBuildersStatus(['build1', 'build2'], status_runs)
+    self.assertTrue(statuses['build1'].Failed())
+    self.assertTrue(statuses['build2'].Passed())
diff --git a/cbuildbot/metadata_lib.py b/cbuildbot/metadata_lib.py
new file mode 100644
index 0000000..74c24db
--- /dev/null
+++ b/cbuildbot/metadata_lib.py
@@ -0,0 +1,766 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing class for recording metadata about a run."""
+
+from __future__ import print_function
+
+import collections
+import datetime
+import json
+import math
+import multiprocessing
+import os
+import re
+
+from chromite.cbuildbot import results_lib
+from chromite.cbuildbot import constants
+from chromite.lib import clactions
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import parallel
+
+
+# Number of parallel processes used when uploading/downloading GS files.
+MAX_PARALLEL = 40
+
+ARCHIVE_ROOT = 'gs://chromeos-image-archive/%(target)s'
+# NOTE: gsutil 3.42 has a bug where '/' is ignored in this context unless it
+#       is listed twice. So we list it twice here for now.
+METADATA_URL_GLOB = os.path.join(ARCHIVE_ROOT,
+                                 'R%(milestone)s**//metadata.json')
+LATEST_URL = os.path.join(ARCHIVE_ROOT, 'LATEST-master')
+
+
+GerritPatchTuple = clactions.GerritPatchTuple
+GerritChangeTuple = clactions.GerritChangeTuple
+CLActionTuple = collections.namedtuple('CLActionTuple',
+                                       ['change', 'action', 'timestamp',
+                                        'reason'])
+CLActionWithBuildTuple = collections.namedtuple(
+    'CLActionWithBuildTuple',
+    ['change', 'action', 'timestamp', 'reason', 'bot_type', 'build'])
+
+
+class _DummyLock(object):
+  """A Dummy clone of RLock that does nothing."""
+  def acquire(self, blocking=1):
+    # NOTE(review): real RLock.acquire returns True on success; this returns
+    # None. Callers in this module appear to use the lock only via 'with',
+    # so the return value is never inspected — confirm before reusing.
+    pass
+
+  def release(self):
+    pass
+
+  def __exit__(self, exc_type, exc_value, traceback):
+    # Returning None (falsy) means exceptions propagate, as with a real lock.
+    pass
+
+  def __enter__(self):
+    pass
+
+class CBuildbotMetadata(object):
+  """Class for recording metadata about a run."""
+
+  def __init__(self, metadata_dict=None, multiprocess_manager=None):
+    """Constructor for CBuildbotMetadata.
+
+    Args:
+      metadata_dict: Optional dictionary containing initial metadata,
+                     as returned by loading metadata from json.
+      multiprocess_manager: Optional multiprocess.Manager instance. If
+                            supplied, the metadata instance will use
+                            multiprocess containers so that its state
+                            is correctly synced across processes.
+    """
+    super(CBuildbotMetadata, self).__init__()
+    if multiprocess_manager:
+      # Manager-backed proxies keep these containers consistent across
+      # processes that share this metadata instance.
+      self._metadata_dict = multiprocess_manager.dict()
+      self._cl_action_list = multiprocess_manager.list()
+      self._per_board_dict = multiprocess_manager.dict()
+      self._subdict_update_lock = multiprocess_manager.RLock()
+    else:
+      self._metadata_dict = {}
+      self._cl_action_list = []
+      self._per_board_dict = {}
+      # If we are not using a manager, then metadata is not expected to be
+      # multiprocess safe. Use a dummy RLock.
+      self._subdict_update_lock = _DummyLock()
+
+    if metadata_dict:
+      self.UpdateWithDict(metadata_dict)
+
+  @staticmethod
+  def FromJSONString(json_string):
+    """Construct a CBuildbotMetadata from a json representation.
+
+    Args:
+      json_string: A string json representation of a CBuildbotMetadata
+                   dictionary.
+
+    Returns:
+      A CBuildbotMetadata instance.
+    """
+    return CBuildbotMetadata(json.loads(json_string))
+
+  def UpdateWithDict(self, metadata_dict):
+    """Update metadata dictionary with values supplied in |metadata_dict|
+
+    This method is effectively the inverse of GetDict. Existing key-values
+    in metadata will be overwritten by those supplied in |metadata_dict|,
+    with the exceptions of:
+     - the cl_actions list, which will be extended with the contents (if any)
+       of the supplied dict's cl_actions list.
+     - the per-board metadata dict, which will be recursively extended with the
+       contents of the supplied dict's board-metadata
+
+    Args:
+      metadata_dict: A dictionary of key-value pairs to be added to this
+                     metadata instance. Keys should be strings, values
+                     should be json-able.
+
+    Returns:
+      self
+    """
+    # This is effectively the inverse of the dictionary construction in GetDict,
+    # to reconstruct the correct internal representation of a metadata
+    # object.
+    metadata_dict = metadata_dict.copy()
+    cl_action_list = metadata_dict.pop('cl_actions', None)
+    per_board_dict = metadata_dict.pop('board-metadata', None)
+    self._metadata_dict.update(metadata_dict)
+    if cl_action_list:
+      self._cl_action_list.extend(cl_action_list)
+    if per_board_dict:
+      for k, v in per_board_dict.items():
+        self.UpdateBoardDictWithDict(k, v)
+
+    return self
+
+  def UpdateBoardDictWithDict(self, board, board_dict):
+    """Update the per-board dict for |board| with |board_dict|.
+
+    Note: both |board| and and all the keys of |board_dict| musts be strings
+          that do not contain the character ':'
+
+    Returns:
+      self
+    """
+    # Wrap the per-board key-value pairs as key-value pairs in _per_board_dict.
+    # Note -- due to http://bugs.python.org/issue6766 it is not possible to
+    # store a multiprocess dict proxy inside another multiprocess dict proxy.
+    # That is why we are using this flattened representation of board dicts.
+    assert not ':' in board
+    # Even if board_dict is {}, ensure that an entry with this board
+    # gets written.
+    self._per_board_dict[board + ':'] = None
+    for k, v in board_dict.items():
+      assert not ':' in k
+      self._per_board_dict['%s:%s' % (board, k)] = v
+
+    return self
+
+  def UpdateKeyDictWithDict(self, key, key_metadata_dict):
+    """Update metadata for the given key with values supplied in |metadata_dict|
+
+    This method merges the dictionary for the given key with the given key
+    metadata dictionary (allowing them to be effectively updated from any
+    stage).
+
+    This method is multiprocess safe.
+
+    Args:
+      key: The key name (e.g. 'version' or 'status')
+      key_metadata_dict: A dictionary of key-value pairs to be added this
+                     metadata key. Keys should be strings, values
+                     should be json-able.
+
+    Returns:
+      self
+    """
+    with self._subdict_update_lock:
+      # If the key already exists, then use its dictionary
+      target_dict = self._metadata_dict.setdefault(key, {})
+      target_dict.update(key_metadata_dict)
+      self._metadata_dict[key] = target_dict
+
+    return self
+
+  def GetDict(self):
+    """Returns a dictionary representation of metadata."""
+    # CL actions are be stored in self._cl_action_list instead of
+    # in self._metadata_dict['cl_actions'], because _cl_action_list
+    # is potentially a multiprocess.lis. So, _cl_action_list needs to
+    # be copied into a normal list.
+    temp = self._metadata_dict.copy()
+    temp['cl_actions'] = list(self._cl_action_list)
+
+    # Similarly, the per-board dicts are stored in a flattened form in
+    # _per_board_dict. Un-flatten into nested dict.
+    per_board_dict = {}
+    for k, v in self._per_board_dict.items():
+      board, key = k.split(':')
+      board_dict = per_board_dict.setdefault(board, {})
+      if key:
+        board_dict[key] = v
+
+    temp['board-metadata'] = per_board_dict
+    return temp
+
+  # TODO(akeshet): crbug.com/406522 special case cl_actions and board-metadata
+  # so that GetValue can work with them as well.
+  def GetValue(self, key):
+    """Get an item from the metadata dictionary.
+
+    This method is in most cases an inexpensive equivalent to:
+    GetDict()[key]
+
+    However, it cannot be used for items like 'cl_actions' or 'board-metadata'
+    which are not stored directly in the metadata dictionary.
+    """
+    return self._metadata_dict[key]
+
+  def GetJSON(self):
+    """Return a JSON string representation of metadata."""
+    return json.dumps(self.GetDict())
+
+  def RecordCLAction(self, change, action, timestamp=None, reason=''):
+    """Record an action that was taken on a CL, to the metadata.
+
+    Args:
+      change: A GerritPatch object for the change acted on.
+      action: The action taken, should be one of constants.CL_ACTIONS
+      timestamp: An integer timestamp such as int(time.time()) at which
+                 the action was taken. Default: Now.
+      reason: Description of the reason the action was taken. Default: ''
+
+    Returns:
+      self
+    """
+    cl_action = clactions.CLAction.FromGerritPatchAndAction(change, action,
+                                                            reason, timestamp)
+    self._cl_action_list.append(cl_action.AsMetadataEntry())
+    return self
+
  @staticmethod
  def GetReportMetadataDict(builder_run, get_statuses_from_slaves,
                            config=None, stage=None, final_status=None,
                            completion_instance=None, child_configs_list=None):
    """Return a metadata dictionary summarizing a build.

    This method replaces code that used to exist in the ArchivingStageMixin
    class from cbuildbot_stage. It contains all the Report-stage-time
    metadata construction logic. The logic here is intended to be gradually
    refactored out so that the metadata is constructed gradually by the
    stages that are responsible for pieces of data, as they run.

    Args:
      builder_run: BuilderRun instance for this run.
      get_statuses_from_slaves: If True, status information of slave
                                builders will be recorded.
      config: The build config for this run.  Defaults to builder_run.config.
      stage: The stage name that this metadata file is being uploaded for.
      final_status: Whether the build passed or failed. If None, the build
                    will be treated as still running.
      completion_instance: The stage instance that was used to wait for slave
                           completion. Used to add slave build information to
                           master builder's metadata. If None, no such status
                           information will be included. If not None, this
                           should be a derivative of
                           MasterSlaveSyncCompletionStage.
      child_configs_list: The list of child config metadata.  If specified it
                          should be added to the metadata.

    Returns:
       A metadata dictionary suitable to be json-serialized.
    """
    config = config or builder_run.config
    # NOTE(review): |config| is not referenced below this point -- confirm
    # whether it is still needed.
    # Format the run's start time and the current time, and compute the
    # elapsed duration between them for the report.
    start_time = results_lib.Results.start_time
    current_time = datetime.datetime.now()
    start_time_stamp = cros_build_lib.UserDateTimeFormat(timeval=start_time)
    current_time_stamp = cros_build_lib.UserDateTimeFormat(timeval=current_time)
    duration = '%s' % (current_time - start_time,)

    metadata = {
        'status': {
            'current-time': current_time_stamp,
            # A falsy |final_status| means the build is still in progress.
            'status': final_status if final_status else 'running',
            'summary': stage or '',
        },
        'time': {
            'start': start_time_stamp,
            # 'finish' stays empty until the build has a final status.
            'finish': current_time_stamp if final_status else '',
            'duration': duration,
        }
    }

    # Summarize each recorded stage result (name, pass/fail, duration, log).
    metadata['results'] = []
    for entry in results_lib.Results.Get():
      timestr = datetime.timedelta(seconds=math.ceil(entry.time))
      if entry.result in results_lib.Results.NON_FAILURE_TYPES:
        status = constants.FINAL_STATUS_PASSED
      else:
        status = constants.FINAL_STATUS_FAILED
      metadata['results'].append({
          'name': entry.name,
          'status': status,
          # The result might be a custom exception.
          'summary': str(entry.result),
          'duration': '%s' % timestr,
          'board': entry.board,
          'description': entry.description,
          'log': builder_run.ConstructDashboardURL(stage=entry.name),
      })

    if child_configs_list:
      metadata['child-configs'] = child_configs_list

    # If we were a CQ master, then include a summary of the status of slave cq
    # builders in metadata
    if get_statuses_from_slaves:
      statuses = completion_instance.GetSlaveStatuses()
      if not statuses:
        logging.warning('completion_instance did not have any statuses '
                        'to report. Will not add slave status to metadata.')

      # NOTE(review): an empty |statuses| dict iterates harmlessly below, but
      # a None return from GetSlaveStatuses would raise here -- confirm that
      # GetSlaveStatuses always returns a dict.
      metadata['slave_targets'] = {}
      for builder, status in statuses.iteritems():
        metadata['slave_targets'][builder] = status.AsFlatDict()

    return metadata
+
+
# Graphite graphs use seconds since the Unix epoch as their time value.
EPOCH_START = datetime.datetime(1970, 1, 1)

# strftime formats we like for human-readable output.
NICE_DATE_FORMAT = '%Y/%m/%d'    # e.g. 2014/02/14
NICE_TIME_FORMAT = '%H:%M:%S'    # e.g. 17:00:49
NICE_DATETIME_FORMAT = NICE_DATE_FORMAT + ' ' + NICE_TIME_FORMAT
+
+
# TODO(akeshet): Merge this class into CBuildbotMetadata.
class BuildData(object):
  """Class for examining metadata from a prior run.

  The raw metadata dict can be accessed at self.metadata_dict or via []
  and get() on a BuildData object.  Some values from metadata_dict are
  also surfaced through the following list of supported properties:

  build_id
  build_number
  stages
  slaves
  chromeos_version
  chrome_version
  bot_id
  status
  start_datetime
  finish_datetime
  start_date_str
  start_time_str
  start_datetime_str
  finish_date_str
  finish_time_str
  finish_datetime_str
  runtime_seconds
  runtime_minutes
  epoch_time_seconds
  count_changes
  run_date
  failure_message
  """

  __slots__ = (
      'gathered_dict',  # Dict with gathered data (sheets version).
      'gathered_url',   # URL to metadata.json.gathered location in GS.
      'metadata_dict',  # Dict representing metadata data from JSON.
      'metadata_url',   # URL to metadata.json location in GS.
  )

  # Regexp for parsing datetimes as stored in metadata.json.  Example text:
  # Fri, 14 Feb 2014 17:00:49 -0800 (PST)
  DATETIME_RE = re.compile(r'^(.+)\s-\d\d\d\d\s\(P\wT\)$')

  SHEETS_VER_KEY = 'sheets_version'

  @staticmethod
  def ReadMetadataURLs(urls, gs_ctx=None, exclude_running=True,
                       get_sheets_version=False):
    """Read a list of metadata.json URLs and return BuildData objects.

    Args:
      urls: List of metadata.json GS URLs.
      gs_ctx: A GSContext object to use.  If not provided gs.GSContext will
        be called to get a GSContext to use.
      exclude_running: If True the metadata for builds that are still running
        will be skipped.
      get_sheets_version: Whether to try to figure out the last sheets version
        that was gathered. This requires an extra gsutil request and is only
        needed if you are writing the metadata to the Google Sheets
        spreadsheet.

    Returns:
      List of BuildData objects.
    """
    gs_ctx = gs_ctx or gs.GSContext()
    logging.info('Reading %d metadata URLs using %d processes now.', len(urls),
                 MAX_PARALLEL)

    def _ReadMetadataURL(url):
      # Read the metadata.json URL and parse json into a dict.
      metadata_dict = json.loads(gs_ctx.Cat(url, print_cmd=False))

      # Read the file next to url which indicates whether the metadata has
      # been gathered before, and with what stats version.
      if get_sheets_version:
        gathered_dict = {}
        gathered_url = url + '.gathered'
        if gs_ctx.Exists(gathered_url, print_cmd=False):
          gathered_dict = json.loads(gs_ctx.Cat(gathered_url,
                                                print_cmd=False))

        sheets_version = gathered_dict.get(BuildData.SHEETS_VER_KEY)
      else:
        sheets_version = None

      bd = BuildData(url, metadata_dict, sheets_version=sheets_version)

      if bd.build_number is None:
        logging.warning('Metadata at %s was missing build number.', url)
        # Fall back to inferring the build number from the "-b<number>/"
        # component of the URL itself.
        m = re.match(r'.*-b([0-9]*)/.*', url)
        if m:
          inferred_number = int(m.groups()[0])
          logging.warning('Inferred build number %d from metadata url.',
                          inferred_number)
          bd.metadata_dict['build-number'] = inferred_number
      if sheets_version is not None:
        logging.debug('Read %s:\n  build_number=%d, sheets v%d', url,
                      bd.build_number, sheets_version)
      else:
        logging.debug('Read %s:\n  build_number=%d, ungathered', url,
                      bd.build_number)

      build_data_per_url[url] = bd

    # Collect results in a manager dict so they survive the worker
    # processes.  Use try/finally rather than a with-statement because
    # multiprocessing managers are not context managers on Python 2.
    manager = multiprocessing.Manager()
    try:
      build_data_per_url = manager.dict()
      parallel.RunTasksInProcessPool(_ReadMetadataURL, [[url] for url in urls],
                                     processes=MAX_PARALLEL)
      builds = [build_data_per_url[url] for url in urls]
    finally:
      manager.shutdown()

    if exclude_running:
      builds = [b for b in builds if b.status != 'running']
    return builds

  @staticmethod
  def MarkBuildsGathered(builds, sheets_version, gs_ctx=None):
    """Mark specified |builds| as processed for the given stats versions.

    Args:
      builds: List of BuildData objects.
      sheets_version: The Google Sheets version these builds are now processed
        for.
      gs_ctx: A GSContext object to use, if set.
    """
    gs_ctx = gs_ctx or gs.GSContext()

    # Filter for builds that were not already on these versions.
    builds = [b for b in builds if b.sheets_version != sheets_version]
    if builds:
      log_ver_str = 'Sheets v%d' % sheets_version
      logging.info('Marking %d builds gathered (for %s) using %d processes'
                   ' now.', len(builds), log_ver_str, MAX_PARALLEL)

      def _MarkGathered(build):
        # Write the updated .gathered file next to the build's metadata.
        build.MarkGathered(sheets_version)
        json_text = json.dumps(build.gathered_dict.copy())
        gs_ctx.Copy('-', build.gathered_url, input=json_text, print_cmd=False)
        logging.debug('Marked build_number %d processed for %s.',
                      build.build_number, log_ver_str)

      inputs = [[build] for build in builds]
      parallel.RunTasksInProcessPool(_MarkGathered, inputs,
                                     processes=MAX_PARALLEL)

  def __init__(self, metadata_url, metadata_dict, sheets_version=None):
    """Initialize a BuildData.

    Args:
      metadata_url: GS URL the metadata.json contents came from.
      metadata_dict: Parsed metadata.json contents, as a dict.
      sheets_version: Last sheets version this build was gathered for, or
        None if unknown/ungathered.
    """
    self.metadata_url = metadata_url
    self.metadata_dict = metadata_dict

    # If a stats version is not specified default to -1 so that the initial
    # version (version 0) will be considered "newer".
    self.gathered_url = metadata_url + '.gathered'
    self.gathered_dict = {
        self.SHEETS_VER_KEY: -1 if sheets_version is None else sheets_version,
    }

  def MarkGathered(self, sheets_version):
    """Mark this build as processed for the given stats versions."""
    self.gathered_dict[self.SHEETS_VER_KEY] = sheets_version

  def __getitem__(self, key):
    """Relay dict-like access to self.metadata_dict."""
    return self.metadata_dict[key]

  def get(self, key, default=None):
    """Relay dict-like access to self.metadata_dict."""
    return self.metadata_dict.get(key, default)

  @property
  def sheets_version(self):
    """Last sheets version this build was gathered for (-1 if never)."""
    return self.gathered_dict[self.SHEETS_VER_KEY]

  @property
  def build_number(self):
    """The build number as an int, or None if not recorded."""
    try:
      return int(self['build-number'])
    except KeyError:
      return None

  @property
  def stages(self):
    """The list of per-stage result dicts."""
    return self['results']

  @property
  def slaves(self):
    """Dict of slave statuses; older metadata has no slave_targets entry."""
    return self.get('slave_targets', {})

  @property
  def chromeos_version(self):
    """Full ChromeOS version string, or None if not recorded."""
    try:
      return self['version']['full']
    except KeyError:
      return None

  @property
  def chrome_version(self):
    """Chrome version string, or None if not recorded."""
    try:
      return self['version']['chrome']
    except KeyError:
      return None

  @property
  def bot_id(self):
    """The bot config name this build ran as."""
    return self['bot-config']

  @property
  def status(self):
    """Overall build status string (e.g. 'passed', 'running'), or None."""
    return self.get('status', {}).get('status', None)

  @classmethod
  def _ToDatetime(cls, time_str):
    """Parse a metadata.json datetime string into a naive datetime.

    Raises:
      ValueError: If |time_str| does not match the expected format.
    """
    match = cls.DATETIME_RE.search(time_str)
    if match:
      return datetime.datetime.strptime(match.group(1), '%a, %d %b %Y %H:%M:%S')
    else:
      raise ValueError('Unexpected metadata datetime format: %s' % time_str)

  @property
  def start_datetime(self):
    return self._ToDatetime(self['time']['start'])

  @property
  def finish_datetime(self):
    return self._ToDatetime(self['time']['finish'])

  @property
  def start_date_str(self):
    return self.start_datetime.strftime(NICE_DATE_FORMAT)

  @property
  def start_time_str(self):
    return self.start_datetime.strftime(NICE_TIME_FORMAT)

  @property
  def start_datetime_str(self):
    return self.start_datetime.strftime(NICE_DATETIME_FORMAT)

  @property
  def finish_date_str(self):
    return self.finish_datetime.strftime(NICE_DATE_FORMAT)

  @property
  def finish_time_str(self):
    return self.finish_datetime.strftime(NICE_TIME_FORMAT)

  @property
  def finish_datetime_str(self):
    return self.finish_datetime.strftime(NICE_DATETIME_FORMAT)

  @property
  def failure_message(self):
    """A one-line summary of master stage and slave builder failures."""
    message_list = []
    # First collect failures in the master stages.
    failed_stages = [s for s in self.stages if s['status'] == 'failed']
    for stage in failed_stages:
      if stage['summary']:
        message_list.append('master: %s' % stage['summary'])

    mapping = {}
    # Dedup the messages from the slaves.
    for slave in self.GetFailedSlaves():
      message = self.slaves[slave]['reason']
      mapping[message] = mapping.get(message, []) + [slave]

    # items() rather than iteritems() so this works on Python 2 and 3.
    for message, slaves in mapping.items():
      if len(slaves) >= 6:
        # Do not print all the names when there are more than 6 (an
        # arbitrary number) builders.
        message_list.append('%d builders: %s' % (len(slaves), message))
      else:
        message_list.append('%s: %s' % (','.join(slaves), message))

    return ' | '.join(message_list)

  def GetFailedStages(self, with_urls=False):
    """Get names of all failed stages, optionally with URLs for each.

    Args:
      with_urls: If True then also return URLs.  See Returns.

    Returns:
      If with_urls is False, return list of stage names.  Otherwise, return list
        of tuples (stage name, stage URL).
    """
    def _Failed(stage):
      # This can be more discerning in the future, such as for optional stages.
      return stage['status'] == 'failed'

    if with_urls:
      # The "log" url includes "/logs/stdio" on the end.  Strip that off.
      return [(s['name'], os.path.dirname(os.path.dirname(s['log'])))
              for s in self.stages if _Failed(s)]
    else:
      return [s['name'] for s in self.stages if _Failed(s)]

  def GetFailedSlaves(self, with_urls=False):
    """Get names of all failed slave builders, optionally with URLs.

    Args:
      with_urls: If True then also return dashboard URLs.  See Returns.

    Returns:
      If with_urls is False, return list of slave names.  Otherwise, return
        list of tuples (slave name, dashboard URL).
    """
    def _Failed(slave):
      return slave['status'] == 'fail'

    # Older metadata has no slave_targets entry; self.slaves is {} then.
    slaves = self.slaves
    # items() rather than iteritems() so this works on Python 2 and 3.
    if with_urls:
      return [(name, slave['dashboard_url'])
              for name, slave in slaves.items() if _Failed(slave)]
    else:
      return [name for name, slave in slaves.items() if _Failed(slave)]

  @property
  def runtime_seconds(self):
    # NOTE(review): timedelta.seconds ignores the days component, so runs
    # longer than 24 hours wrap around.  Kept as-is for backward
    # compatibility -- confirm before switching to total_seconds().
    return (self.finish_datetime - self.start_datetime).seconds

  @property
  def runtime_minutes(self):
    # Floor division keeps the Python 2 integer-division semantics on
    # Python 3 as well.
    return self.runtime_seconds // 60

  @property
  def epoch_time_seconds(self):
    # End time seconds since 1/1/1970, for some reason.
    return int((self.finish_datetime - EPOCH_START).total_seconds())

  @property
  def patches(self):
    """The changes in this run, as a list of GerritPatchTuple."""
    return [GerritPatchTuple(gerrit_number=int(change['gerrit_number']),
                             patch_number=int(change['patch_number']),
                             internal=change['internal'])
            for change in self.metadata_dict.get('changes', [])]

  @property
  def count_changes(self):
    """Number of changes in this run (0 when 'changes' is missing/empty)."""
    if not self.metadata_dict.get('changes', None):
      return 0

    return len(self.metadata_dict['changes'])

  @property
  def build_id(self):
    return self.metadata_dict['build_id']

  @property
  def run_date(self):
    return self.finish_datetime.strftime('%d.%m.%Y')

  def Passed(self):
    """Return True if this represents a successful run."""
    return 'passed' == self.metadata_dict['status']['status'].strip()
+
+
class MetadataException(Exception):
  """Root of the exception hierarchy for this module."""
+
+
class GetMilestoneError(MetadataException):
  """Raised when the latest milestone cannot be determined."""
+
+
def GetLatestMilestone():
  """Get the latest milestone from CQ Master LATEST-master file.

  Returns:
    The latest milestone, as an integer (e.g. 35).

  Raises:
    GetMilestoneError: If the LATEST file is missing or its contents are
      not in the expected "R<milestone>-..." format.
  """
  # Use CQ Master target to get latest milestone.
  latest_url = LATEST_URL % {'target': constants.CQ_MASTER}
  gs_ctx = gs.GSContext()

  logging.info('Getting latest milestone from %s', latest_url)
  try:
    content = gs_ctx.Cat(latest_url).strip()
  except gs.GSNoSuchKey:
    raise GetMilestoneError('LATEST file missing: %s' % latest_url)

  # Expected syntax is like the following: "R35-1234.5.6-rc7".
  # Validate explicitly rather than with assert, which is stripped when
  # Python runs with -O.
  if not content.startswith('R'):
    raise GetMilestoneError('Unexpected LATEST contents: %s' % content)
  try:
    milestone = int(content.split('-')[0][1:])
  except ValueError:
    raise GetMilestoneError('Unexpected LATEST contents: %s' % content)

  logging.info('Latest milestone determined to be: %s', milestone)
  return milestone
+
+
def GetMetadataURLsSince(target, start_date, end_date):
  """Get metadata.json URLs for |target| from |start_date| until |end_date|.

  The modified time of the GS files is used to compare with start_date, so
  the completion date of the builder run is what is important here.

  Args:
    target: Builder target name.
    start_date: datetime.date object of starting date.
    end_date: datetime.date object of ending date.

  Returns:
    Metadata urls for runs found.
  """
  ret = []
  milestone = GetLatestMilestone()
  gs_ctx = gs.GSContext()
  while True:
    base_url = METADATA_URL_GLOB % {'target': target, 'milestone': milestone}
    logging.info('Getting %s builds for R%d from "%s"', target, milestone,
                 base_url)

    try:
      # Get GS URLs.  We want the datetimes to quickly know when we are done
      # collecting URLs.
      urls = gs_ctx.List(base_url, details=True)
    except gs.GSNoSuchKey:
      # We ran out of metadata to collect.  Stop searching back in time.
      logging.info('No %s builds found for R%d.  I will not continue search'
                   ' to older milestones.', target, milestone)
      break

    if not urls:
      # Guard against an empty listing, which would otherwise crash the
      # oldest-URL check below with an IndexError.
      break

    # Sort by timestamp, newest first.
    urls = sorted(urls, key=lambda x: x.creation_time, reverse=True)

    # Add relevant URLs to our list.
    ret.extend([x.url for x in urls
                if start_date <= x.creation_time.date() <= end_date])

    # See if we have gone far enough back by checking datetime of oldest URL
    # in the current batch.
    if urls[-1].creation_time.date() < start_date:
      break
    else:
      milestone -= 1
      logging.info('Continuing on to R%d.', milestone)

  return ret
diff --git a/cbuildbot/metadata_lib_unittest b/cbuildbot/metadata_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/metadata_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/metadata_lib_unittest.py b/cbuildbot/metadata_lib_unittest.py
new file mode 100644
index 0000000..f4e3110
--- /dev/null
+++ b/cbuildbot/metadata_lib_unittest.py
@@ -0,0 +1,148 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the metadata_lib module."""
+
+from __future__ import print_function
+
+import multiprocessing
+
+from chromite.cbuildbot import metadata_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import parallel
+
+
class MetadataTest(cros_test_lib.TestCase):
  """Tests the correctness of various metadata methods."""

  def testGetDict(self):
    """A dict passed to the constructor should round-trip through GetDict."""
    starting_dict = {
        'key1': 1,
        'key2': '2',
        'cl_actions': [('a', 1), ('b', 2)],
        'board-metadata': {
            'board-1': {'info': 432},
        },
    }
    metadata = metadata_lib.CBuildbotMetadata(starting_dict)
    ending_dict = metadata.GetDict()
    self.assertEqual(starting_dict, ending_dict)

  def testUpdateKeyDictWithDict(self):
    """UpdateKeyDictWithDict should store all pairs under the given key."""
    expected_dict = {str(x): x for x in range(20)}
    m = multiprocessing.Manager()
    metadata = metadata_lib.CBuildbotMetadata(multiprocess_manager=m)

    metadata.UpdateKeyDictWithDict('my_dict', expected_dict)

    self.assertEqual(expected_dict, metadata.GetDict()['my_dict'])


  def testUpdateKeyDictWithDictMultiprocess(self):
    """Concurrent single-pair updates should accumulate without loss."""
    expected_dict = {str(x): x for x in range(20)}
    m = multiprocessing.Manager()
    metadata = metadata_lib.CBuildbotMetadata(multiprocess_manager=m)

    # Each key-value pair is written from a separate background task.
    with parallel.BackgroundTaskRunner(metadata.UpdateKeyDictWithDict) as q:
      for k, v in expected_dict.iteritems():
        q.put(['my_dict', {k: v}])

    self.assertEqual(expected_dict, metadata.GetDict()['my_dict'])


  def testUpdateBoardMetadataWithEmptyDict(self):
    """An empty board dict should still create the board's entry."""
    metadata = metadata_lib.CBuildbotMetadata()
    metadata.UpdateBoardDictWithDict('someboard', {})
    self.assertEqual(metadata.GetDict()['board-metadata']['someboard'], {})


  def testUpdateBoardMetadataWithMultiprocessDict(self):
    """Board metadata should accept a multiprocess dict proxy as input."""
    starting_dict = {
        'key1': 1,
        'key2': '2',
        'cl_actions': [('a', 1), ('b', 2)],
        'board-metadata': {
            'board-1': {'info': 432},
        },
    }

    m = multiprocessing.Manager()
    metadata = metadata_lib.CBuildbotMetadata(metadata_dict=starting_dict,
                                              multiprocess_manager=m)

    # pylint: disable=no-member
    update_dict = m.dict()
    update_dict['my_key'] = 'some value'
    metadata.UpdateBoardDictWithDict('board-1', update_dict)

    self.assertEqual(metadata.GetDict()['board-metadata']['board-1']['my_key'],
                     'some value')

  def testMultiprocessSafety(self):
    """Metadata mutators should behave correctly across process boundaries."""
    m = multiprocessing.Manager()
    metadata = metadata_lib.CBuildbotMetadata(multiprocess_manager=m)
    key_dict = {'key1': 1, 'key2': 2}
    starting_dict = {
        'key1': 1,
        'key2': '2',
        'key3': key_dict,
        'cl_actions': [('a', 1), ('b', 2)],
        'board-metadata': {
            'board-1': {'info': 432},
        },
    }

    # Test that UpdateWithDict is process-safe
    parallel.RunParallelSteps([lambda: metadata.UpdateWithDict(starting_dict)])
    ending_dict = metadata.GetDict()
    self.assertEqual(starting_dict, ending_dict)

    # Test that UpdateKeyDictWithDict is process-safe
    parallel.RunParallelSteps([lambda: metadata.UpdateKeyDictWithDict(
        'key3', key_dict)])
    ending_dict = metadata.GetDict()
    self.assertEqual(starting_dict, ending_dict)

    # Test that RecordCLAction is process-safe
    fake_change = metadata_lib.GerritPatchTuple(12345, 1, False)
    # NOTE(review): ('asdf,') is just the string 'asdf,' -- the parentheses do
    # not make a tuple.  Confirm a bare string action is intended here.
    fake_action = ('asdf,')
    parallel.RunParallelSteps([lambda: metadata.RecordCLAction(fake_change,
                                                               fake_action)])
    ending_dict = metadata.GetDict()
    # Assert that an action was recorded.
    self.assertEqual(len(starting_dict['cl_actions']) + 1,
                     len(ending_dict['cl_actions']))

  def testPerBoardDict(self):
    """Per-board updates from many processes should merge into one dict."""
    starting_per_board_dict = {
        'board-1': {'kubrick': 2001,
                    'bergman': 'persona',
                    'hitchcock': 'vertigo'},
        'board-2': {'kubrick': ['barry lyndon', 'dr. strangelove'],
                    'bergman': 'the seventh seal'}
    }

    starting_dict = {'board-metadata': starting_per_board_dict}

    m = multiprocessing.Manager()
    metadata = metadata_lib.CBuildbotMetadata(metadata_dict=starting_dict,
                                              multiprocess_manager=m)

    extra_per_board_dict = {
        'board-1': {'kurosawa': 'rashomon',
                    'coen brothers': 'fargo'},
        'board-3': {'hitchcock': 'north by northwest',
                    'coen brothers': 'the big lebowski'}
    }

    expected_dict = starting_per_board_dict

    # Write each per board key-value pair to metadata in a separate process.
    with parallel.BackgroundTaskRunner(metadata.UpdateBoardDictWithDict) as q:
      for board, board_dict in extra_per_board_dict.iteritems():
        expected_dict.setdefault(board, {}).update(board_dict)
        for k, v in board_dict.iteritems():
          q.put([board, {k: v}])

    self.assertEqual(expected_dict, metadata.GetDict()['board-metadata'])
diff --git a/cbuildbot/prebuilts.py b/cbuildbot/prebuilts.py
new file mode 100644
index 0000000..6660d6d
--- /dev/null
+++ b/cbuildbot/prebuilts.py
@@ -0,0 +1,217 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""cbuildbot logic for uploading prebuilts and managing binhosts."""
+
+from __future__ import print_function
+
+import glob
+import os
+
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.lib import cros_logging as logging
+from chromite.lib import portage_util
+
+_PREFLIGHT_BINHOST = 'PREFLIGHT_BINHOST'
+_CHROME_BINHOST = 'CHROME_BINHOST'
+_FULL_BINHOST = 'FULL_BINHOST'
+_BINHOST_PACKAGE_FILE = ('/usr/share/dev-install/portage/make.profile/'
+                         'package.installable')
+_PRIVATE_BINHOST_CONF_DIR = ('src/private-overlays/chromeos-partner-overlay/'
+                             'chromeos/binhost')
+_PUBLIC_BINHOST_CONF_DIR = 'src/third_party/chromiumos-overlay/chromeos/binhost'
+
+
+def _AddPackagesForPrebuilt(filename):
+  """Add list of packages for upload.
+
+  Process a file that lists all the packages that can be uploaded to the
+  package prebuilt bucket and generates the command line args for
+  upload_prebuilts.
+
+  Args:
+    filename: file with the package full name (category/name-version), one
+              package per line.
+
+  Returns:
+    A list of parameters for upload_prebuilts. For example:
+    ['--packages=net-misc/dhcp', '--packages=app-admin/eselect-python']
+    Returns None if the package file could not be read (IOError); callers
+    treat None as "skip prebuilt upload".
+  """
+  try:
+    cmd = []
+    with open(filename) as f:
+      # Get only the package name and category as that is what upload_prebuilts
+      # matches on.
+      for line in f:
+        # Strip trailing comments and whitespace before parsing the atom.
+        atom = line.split('#', 1)[0].strip()
+        try:
+          cpv = portage_util.SplitCPV(atom)
+        except ValueError:
+          logging.warning('Could not split atom %r (line: %r)', atom, line)
+          continue
+        if cpv:
+          cmd.extend(['--packages=%s/%s' % (cpv.category, cpv.package)])
+    return cmd
+  except IOError as e:
+    # NOTE(review): these warnings use eager % formatting; the lazy
+    # logging.warning('...%s', filename) form is preferred elsewhere in
+    # this function.
+    logging.warning('Problem with package file %s' % filename)
+    logging.warning('Skipping uploading of prebuilts.')
+    logging.warning('ERROR(%d): %s' % (e.errno, e.strerror))
+    return None
+
+
+def UploadPrebuilts(category, chrome_rev, private_bucket, buildroot,
+                    version=None, **kwargs):
+  """Upload Prebuilts for non-dev-installer use cases.
+
+  Args:
+    category: Build type. Can be [binary|full|chrome|chroot|paladin].
+    chrome_rev: Chrome_rev of type constants.VALID_CHROME_REVISIONS.
+    private_bucket: True if we are uploading to a private bucket.
+    buildroot: The root directory where the build occurs.
+    version: Specific version to set.  NOTE(review): required (non-None) when
+      category is CHROOT_BUILDER_TYPE, since it is split to build the
+      <year>/<month> upload subdir below.
+    board: Board type that was built on this machine.
+    extra_args: Extra args to pass to prebuilts script.
+
+  Returns:
+    The result of _UploadPrebuilts (i.e. commands.RunBuildScript).
+  """
+  extra_args = ['--prepend-version', category]
+  extra_args.extend(['--upload', 'gs://chromeos-prebuilt'])
+  if private_bucket:
+    extra_args.extend(['--private', '--binhost-conf-dir',
+                       _PRIVATE_BINHOST_CONF_DIR])
+  else:
+    extra_args.extend(['--binhost-conf-dir', _PUBLIC_BINHOST_CONF_DIR])
+
+  if version is not None:
+    extra_args.extend(['--set-version', version])
+
+  if category == constants.CHROOT_BUILDER_TYPE:
+    extra_args.extend(['--sync-host',
+                       '--upload-board-tarball'])
+    tarball_location = os.path.join(buildroot, 'built-sdk.tar.xz')
+    extra_args.extend(['--prepackaged-tarball', tarball_location])
+
+    # Remaining artifacts get uploaded into <year>/<month>/ subdirs so we don't
+    # start dumping even more stuff into the top level. Also, the following
+    # code handles any tarball suffix (.tar.*). For each of the artifact types
+    # below, we also generate a single upload path template to be filled by the
+    # uploading script. This has placeholders for the version (substituted
+    # first) and another qualifier (either board or target, substituted second
+    # and therefore uses a quoted %% modifier).
+    # TODO(garnold) Using a mix of quoted/unquoted template variables is
+    # confusing and error-prone, we should get rid of it.
+    # TODO(garnold) Be specific about matching file suffixes, like making sure
+    # there's nothing past the compression suffix (for example, .tar.xz.log).
+    subdir_prefix = os.path.join(*version.split('.')[0:2])
+
+    # Find toolchain overlay tarballs of the form
+    # built-sdk-overlay-toolchains-<toolchains_spec>.tar.* and create an upload
+    # specification for each of them. The upload path template has the form
+    # cros-sdk-overlay-toolchains-<toolchain_spec>-<version>.tar.*.
+    toolchains_overlay_prefix = 'built-sdk-overlay-toolchains-'
+    for tarball in glob.glob(os.path.join(
+        buildroot, constants.DEFAULT_CHROOT_DIR,
+        constants.SDK_OVERLAYS_OUTPUT, toolchains_overlay_prefix + '*.tar.*')):
+      tarball_name, tarball_suffix = os.path.basename(tarball).split('.', 1)
+
+      # Only add the upload path arg when processing the first tarball.
+      if '--toolchains-overlay-upload-path' not in extra_args:
+        subdir = os.path.join(
+            subdir_prefix,
+            'cros-sdk-overlay-toolchains-%%(toolchains)s-%(version)s.' +
+            tarball_suffix)
+        extra_args.extend(['--toolchains-overlay-upload-path', subdir])
+
+      # The toolchains spec is whatever follows the fixed filename prefix.
+      toolchains = tarball_name[len(toolchains_overlay_prefix):]
+      extra_args.extend(['--toolchains-overlay-tarball',
+                         '%s:%s' % (toolchains, tarball)])
+
+    # Find toolchain package tarballs of the form <target>.tar.* and create an
+    # upload specification for each of them. The upload path template has the
+    # form <target>-<version>.tar.*.
+    for tarball in glob.glob(os.path.join(
+        buildroot, constants.DEFAULT_CHROOT_DIR,
+        constants.SDK_TOOLCHAINS_OUTPUT, '*.tar.*')):
+      tarball_target, tarball_suffix = os.path.basename(tarball).split('.', 1)
+
+      # Only add the path arg when processing the first tarball.  We do
+      # this to get access to the tarball suffix dynamically (so it can
+      # change and this code will still work).
+      if '--toolchain-upload-path' not in extra_args:
+        subdir = os.path.join(subdir_prefix,
+                              '%%(target)s-%(version)s.' + tarball_suffix)
+        extra_args.extend(['--toolchain-upload-path', subdir])
+
+      extra_args.extend(['--toolchain-tarball',
+                         '%s:%s' % (tarball_target, tarball)])
+
+  # Pick the binhost key matching the builder category.
+  if category == constants.CHROME_PFQ_TYPE:
+    assert chrome_rev
+    key = '%s_%s' % (chrome_rev, _CHROME_BINHOST)
+    extra_args.extend(['--key', key.upper()])
+  elif config_lib.IsPFQType(category):
+    extra_args.extend(['--key', _PREFLIGHT_BINHOST])
+  else:
+    assert category in (constants.BUILD_FROM_SOURCE_TYPE,
+                        constants.CHROOT_BUILDER_TYPE)
+    extra_args.extend(['--key', _FULL_BINHOST])
+
+  if category == constants.CHROME_PFQ_TYPE:
+    extra_args += ['--packages=%s' % x
+                   for x in ([constants.CHROME_PN] +
+                             constants.OTHER_CHROME_PACKAGES)]
+
+  # Merge our args into any caller-provided extra_args, then delegate.
+  kwargs.setdefault('extra_args', []).extend(extra_args)
+  return _UploadPrebuilts(buildroot=buildroot, **kwargs)
+
+
+# Raised by UploadDevInstallerPrebuilts when the dev-install package list
+# cannot be read.
+class PackageFileMissing(Exception):
+  """Raised when the dev installer package file is missing."""
+
+
+def UploadDevInstallerPrebuilts(binhost_bucket, binhost_key, binhost_base_url,
+                                buildroot, board, **kwargs):
+  """Upload Prebuilts for dev-installer use case.
+
+  Args:
+    binhost_bucket: bucket for uploading prebuilt packages. If it equals None
+                    then the default bucket is used.
+    binhost_key: key parameter to pass onto upload_prebuilts. If it equals
+                 None, then chrome_rev is used to select a default key.
+    binhost_base_url: base url for upload_prebuilts. If None the parameter
+                      --binhost-base-url is absent.
+    buildroot: The root directory where the build occurs.
+    board: Board type that was built on this machine.
+    extra_args: Extra args to pass to prebuilts script.
+
+  Returns:
+    The result of _UploadPrebuilts (i.e. commands.RunBuildScript).
+
+  Raises:
+    PackageFileMissing: if the dev-install package file yields no packages.
+  """
+  extra_args = ['--prepend-version', constants.CANARY_TYPE]
+  extra_args.extend(['--binhost-base-url', binhost_base_url])
+  extra_args.extend(['--upload', binhost_bucket])
+  extra_args.extend(['--key', binhost_key])
+
+  # The package list lives inside the board sysroot within the chroot.
+  filename = os.path.join(buildroot, 'chroot', 'build', board,
+                          _BINHOST_PACKAGE_FILE.lstrip('/'))
+  cmd_packages = _AddPackagesForPrebuilt(filename)
+  if cmd_packages:
+    extra_args.extend(cmd_packages)
+  else:
+    # None (unreadable file) or an empty package list both mean we cannot
+    # safely upload dev-installer prebuilts.
+    raise PackageFileMissing()
+
+  kwargs.setdefault('extra_args', []).extend(extra_args)
+  return _UploadPrebuilts(buildroot=buildroot, board=board, **kwargs)
+
+
+def _UploadPrebuilts(buildroot, board, extra_args):
+  """Upload prebuilts.
+
+  Args:
+    buildroot: The root directory where the build occurs.
+    board: Board type that was built on this machine.  May be falsy, in which
+      case no --board flag is passed.
+    extra_args: Extra args to pass to prebuilts script.
+
+  Returns:
+    The result of commands.RunBuildScript for the upload_prebuilts invocation.
+  """
+  cmd = ['upload_prebuilts', '--build-path', buildroot]
+  if board:
+    cmd.extend(['--board', board])
+  cmd.extend(extra_args)
+  # chromite_cmd=True resolves upload_prebuilts from the chromite bin dir.
+  commands.RunBuildScript(buildroot, cmd, chromite_cmd=True)
diff --git a/cbuildbot/prebuilts_unittest b/cbuildbot/prebuilts_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/prebuilts_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/prebuilts_unittest.py b/cbuildbot/prebuilts_unittest.py
new file mode 100644
index 0000000..f49865b
--- /dev/null
+++ b/cbuildbot/prebuilts_unittest.py
@@ -0,0 +1,129 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for prebuilts."""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import prebuilts
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import osutils
+
+
+# pylint: disable=W0212
+class PrebuiltTest(cros_build_lib_unittest.RunCommandTempDirTestCase):
+  """Test general cbuildbot command methods."""
+
+  def setUp(self):
+    """Set up a fake buildroot (with a .repo marker) inside the tempdir."""
+    self._board = 'test-board'
+    self._buildroot = self.tempdir
+    self._overlays = ['%s/src/third_party/chromiumos-overlay' % self._buildroot]
+    self._chroot = os.path.join(self._buildroot, 'chroot')
+    os.makedirs(os.path.join(self._buildroot, '.repo'))
+
+  def testUploadPrebuilts(self, builder_type=constants.PFQ_TYPE, private=False,
+                          chrome_rev=None, version=None):
+    """Test UploadPrebuilts with a public location.
+
+    Also reused (with different kwargs) by the other test methods below.
+    """
+    prebuilts.UploadPrebuilts(builder_type, chrome_rev, private,
+                              buildroot=self._buildroot, board=self._board,
+                              version=version)
+    self.assertCommandContains([builder_type, 'gs://chromeos-prebuilt'])
+
+  def testUploadPrivatePrebuilts(self):
+    """Test UploadPrebuilts with a private location."""
+    self.testUploadPrebuilts(private=True)
+
+  def testChromePrebuilts(self):
+    """Test UploadPrebuilts for Chrome prebuilts."""
+    self.testUploadPrebuilts(builder_type=constants.CHROME_PFQ_TYPE,
+                             chrome_rev='tot')
+
+  def testSdkPrebuilts(self):
+    """Test UploadPrebuilts for SDK builds."""
+    # A magical date for a magical time.
+    version = '1994.04.02.000000'
+
+    # Fake out toolchains overlay tarballs.
+    tarball_dir = os.path.join(self._buildroot, constants.DEFAULT_CHROOT_DIR,
+                               constants.SDK_OVERLAYS_OUTPUT)
+    osutils.SafeMakedirs(tarball_dir)
+
+    toolchain_overlay_tarball_args = []
+    # Sample toolchain combos, corresponding to x86-alex and daisy.
+    toolchain_combos = (
+        ('i686-pc-linux-gnu',),
+        ('armv7a-cros-linux-gnueabi', 'arm-none-eabi'),
+    )
+    for toolchains in ['-'.join(sorted(combo)) for combo in toolchain_combos]:
+      tarball = 'built-sdk-overlay-toolchains-%s.tar.xz' % toolchains
+      tarball_path = os.path.join(tarball_dir, tarball)
+      # An empty file is enough; only glob matching is exercised.
+      osutils.Touch(tarball_path)
+      tarball_arg = '%s:%s' % (toolchains, tarball_path)
+      toolchain_overlay_tarball_args.append(['--toolchains-overlay-tarball',
+                                             tarball_arg])
+
+    # Fake out toolchain tarballs.
+    tarball_dir = os.path.join(self._buildroot, constants.DEFAULT_CHROOT_DIR,
+                               constants.SDK_TOOLCHAINS_OUTPUT)
+    osutils.SafeMakedirs(tarball_dir)
+
+    toolchain_tarball_args = []
+    for tarball_base in ('i686', 'arm-none-eabi'):
+      tarball = '%s.tar.xz' % tarball_base
+      tarball_path = os.path.join(tarball_dir, tarball)
+      osutils.Touch(tarball_path)
+      tarball_arg = '%s:%s' % (tarball_base, tarball_path)
+      toolchain_tarball_args.append(['--toolchain-tarball', tarball_arg])
+
+    self.testUploadPrebuilts(builder_type=constants.CHROOT_BUILDER_TYPE,
+                             version=version)
+    self.assertCommandContains([
+        '--toolchains-overlay-upload-path',
+        '1994/04/cros-sdk-overlay-toolchains-%%(toolchains)s-'
+        '%(version)s.tar.xz'])
+    self.assertCommandContains(['--toolchain-upload-path',
+                                '1994/04/%%(target)s-%(version)s.tar.xz'])
+    for args in toolchain_overlay_tarball_args + toolchain_tarball_args:
+      self.assertCommandContains(args)
+    self.assertCommandContains(['--set-version', version])
+    self.assertCommandContains(['--prepackaged-tarball',
+                                os.path.join(self._buildroot,
+                                             'built-sdk.tar.xz')])
+
+  def testDevInstallerPrebuilts(self, packages=('package1', 'package2')):
+    """Test UploadDevInstallerPrebuilts."""
+    args = ['gs://dontcare', 'some_path_to_key', 'https://my_test/location']
+    # Patch out the package-file reader so no real file is needed.
+    with mock.patch.object(prebuilts, '_AddPackagesForPrebuilt',
+                           return_value=packages):
+      prebuilts.UploadDevInstallerPrebuilts(*args, buildroot=self._buildroot,
+                                            board=self._board)
+    self.assertCommandContains([constants.CANARY_TYPE] + args[2:] + args[0:2])
+
+  def testAddPackagesForPrebuilt(self):
+    """Test AddPackagesForPrebuilt."""
+    # '/' is a directory, so open() fails with IOError -> None is returned.
+    self.assertEqual(prebuilts._AddPackagesForPrebuilt('/'), None)
+
+    data = """# comment!
+cat/pkg-0
+ca-t2/pkg2-123
+ca-t3/pk-g4-4.0.1-r333
+"""
+    pkgs = [
+        'cat/pkg',
+        'ca-t2/pkg2',
+        'ca-t3/pk-g4',
+    ]
+    cmds = ['--packages=' + x for x in pkgs]
+    f = os.path.join(self.tempdir, 'package.provided')
+    osutils.WriteFile(f, data)
+    self.assertEqual(prebuilts._AddPackagesForPrebuilt(f), cmds)
+
+  def testMissingDevInstallerFile(self):
+    """Test that we raise an exception when the installer file is missing."""
+    # An empty package tuple triggers the PackageFileMissing path.
+    self.assertRaises(prebuilts.PackageFileMissing,
+                      self.testDevInstallerPrebuilts, packages=())
diff --git a/cbuildbot/remote_try.py b/cbuildbot/remote_try.py
new file mode 100644
index 0000000..d94e2cf
--- /dev/null
+++ b/cbuildbot/remote_try.py
@@ -0,0 +1,257 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Code related to Remote tryjobs."""
+
+from __future__ import print_function
+
+import constants
+import getpass
+import json
+import os
+import time
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import repository
+from chromite.cbuildbot import manifest_version
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import cache
+from chromite.lib import git
+
+
+site_config = config_lib.GetConfig()
+
+
+class ChromiteUpgradeNeeded(Exception):
+  """Exception thrown when it's detected that we need to upgrade chromite."""
+
+  def __init__(self, version=None):
+    """Initialize the exception.
+
+    Args:
+      version: The tryjob format version the server requires, if known.
+    """
+    Exception.__init__(self)
+    self.version = version
+    # Set args explicitly so the exception pickles/reprs with the version.
+    self.args = (version,)
+
+  def __str__(self):
+    """Return a user-facing message, including the required version if set."""
+    version_str = ''
+    if self.version:
+      version_str = "  Need format version %r support." % (self.version,)
+    return (
+        "Your version of cbuildbot is too old; please resync it, "
+        "and then retry your submission.%s" % (version_str,))
+
+
+# Raised by RemoteTryJob._VerifyForBuildbot when the job would exceed
+# buildbot's field-size limits.
+class ValidationError(Exception):
+  """Thrown when tryjob validation fails."""
+
+
+class RemoteTryJob(object):
+  """Remote Tryjob that is submitted through a Git repo."""
+  EXTERNAL_URL = os.path.join(site_config.params.EXTERNAL_GOB_URL,
+                              'chromiumos/tryjobs')
+  INTERNAL_URL = os.path.join(site_config.params.INTERNAL_GOB_URL,
+                              'chromeos/tryjobs')
+
+  # In version 3, remote patches have an extra field.
+  # In version 4, cherry-picking is the norm, thus multiple patches are
+  # generated.
+  TRYJOB_FORMAT_VERSION = 4
+  TRYJOB_FORMAT_FILE = '.tryjob_minimal_format_version'
+
+  # Constants for controlling the length of JSON fields sent to buildbot.
+  # - The trybot description is shown when the run starts, and helps users
+  #   distinguish between their various runs. If no trybot description is
+  #   specified, the list of patches is used as the description. The buildbot
+  #   database limits this field to MAX_DESCRIPTION_LENGTH characters.
+  # - When checking the trybot description length, we also add some PADDING
+  #   to give buildbot room to add extra formatting around the fields used in
+  #   the description.
+  # - We limit the number of patches listed in the description to
+  #   MAX_PATCHES_IN_DESCRIPTION. This is for readability only.
+  # - Every individual field that is stored in a buildset is limited to
+  #   MAX_PROPERTY_LENGTH. We use this to ensure that our serialized list of
+  #   arguments fits within that limit.
+  MAX_DESCRIPTION_LENGTH = 256
+  MAX_PATCHES_IN_DESCRIPTION = 10
+  MAX_PROPERTY_LENGTH = 1023
+  PADDING = 50
+
+  def __init__(self, options, bots, local_patches):
+    """Construct the object.
+
+    Args:
+      options: The parsed options passed into cbuildbot.
+      bots: A list of configs to run tryjobs for.
+      local_patches: A list of LocalPatch objects.
+    """
+    self.options = options
+    self.user = getpass.getuser()
+    self.repo_cache = cache.DiskCache(self.options.cache_dir)
+    cwd = os.path.dirname(os.path.realpath(__file__))
+    # Derive the submitter's email from this chromite checkout's git config.
+    self.user_email = git.GetProjectUserEmail(cwd)
+    logging.info('Using email:%s', self.user_email)
+    # Name of the job that appears on the waterfall.
+    patch_list = options.gerrit_patches + options.local_patches
+    self.name = options.remote_description
+    if self.name is None:
+      # No explicit description: synthesize one from branch + patch list,
+      # truncated to MAX_PATCHES_IN_DESCRIPTION entries for readability.
+      self.name = ''
+      if options.branch != 'master':
+        self.name = '[%s] ' % options.branch
+
+      self.name += ','.join(patch_list[:self.MAX_PATCHES_IN_DESCRIPTION])
+      if len(patch_list) > self.MAX_PATCHES_IN_DESCRIPTION:
+        remaining_patches = len(patch_list) - self.MAX_PATCHES_IN_DESCRIPTION
+        self.name += '... (%d more CLs)' % (remaining_patches,)
+
+    self.bots = bots[:]
+    self.slaves_request = options.slaves
+    self.description = ('name: %s\n patches: %s\nbots: %s' %
+                        (self.name, patch_list, self.bots))
+    # NOTE(review): this aliases options.pass_through_args; the appends
+    # below mutate the caller's list in place — confirm that is intended.
+    self.extra_args = options.pass_through_args
+    if '--buildbot' not in self.extra_args:
+      self.extra_args.append('--remote-trybot')
+
+    self.extra_args.append('--remote-version=%s'
+                           % (self.TRYJOB_FORMAT_VERSION,))
+    self.local_patches = local_patches
+    # Default to the external tryjob repo; switch to internal below if the
+    # source root is an internal repo checkout.
+    self.repo_url = self.EXTERNAL_URL
+    self.cache_key = ('trybot',)
+    self.manifest = None
+    if repository.IsARepoRoot(options.sourceroot):
+      self.manifest = git.ManifestCheckout.Cached(options.sourceroot)
+      if repository.IsInternalRepoCheckout(options.sourceroot):
+        self.repo_url = self.INTERNAL_URL
+        self.cache_key = ('trybot-internal',)
+
+  @property
+  def values(self):
+    """Return the job description dict that gets serialized to JSON."""
+    return {
+        'bot' : self.bots,
+        'email' : [self.user_email],
+        'extra_args' : self.extra_args,
+        'name' : self.name,
+        'slaves_request' : self.slaves_request,
+        'user' : self.user,
+        'version' : self.TRYJOB_FORMAT_VERSION,
+        }
+
+  def _VerifyForBuildbot(self):
+    """Early validation, to ensure the job can be processed by buildbot.
+
+    Truncates an over-long description; raises ValidationError if the
+    serialized extra_args would not fit in a buildbot property.
+    """
+
+    # Buildbot stores the trybot description in a property with a 256
+    # character limit. Validate that our description is well under the limit.
+    if (len(self.user) + len(self.name) + self.PADDING >
+        self.MAX_DESCRIPTION_LENGTH):
+      logging.warning('remote tryjob description is too long, truncating it')
+      self.name = self.name[:self.MAX_DESCRIPTION_LENGTH - self.PADDING] + '...'
+
+    # Buildbot will set extra_args as a buildset 'property'.  It will store
+    # the property in its database in JSON form.  The limit of the database
+    # field is 1023 characters.
+    if len(json.dumps(self.extra_args)) > self.MAX_PROPERTY_LENGTH:
+      raise ValidationError(
+          'The number of extra arguments passed to cbuildbot has exceeded the '
+          'limit.  If you have a lot of local patches, upload them and use the '
+          '-g flag instead.')
+
+  def _Submit(self, workdir, testjob, dryrun):
+    """Internal submission function.  See Submit() for arg description.
+
+    Raises:
+      ChromiteUpgradeNeeded: if the repo's format-version file is newer than
+        TRYJOB_FORMAT_VERSION (or unparseable).
+      ValidationError: via _VerifyForBuildbot, on over-long extra_args.
+    """
+    # TODO(rcui): convert to shallow clone when that's available.
+    current_time = str(int(time.time()))
+
+    # Push each local patch to a timestamped ref under refs/tryjobs so the
+    # remote builder can fetch it.
+    ref_base = os.path.join('refs/tryjobs', self.user_email, current_time)
+    for patch in self.local_patches:
+      # Isolate the name; if it's a tag or a remote, let through.
+      # Else if it's a branch, get the full branch name minus refs/heads.
+      local_branch = git.StripRefsHeads(patch.ref, False)
+      ref_final = os.path.join(ref_base, local_branch, patch.sha1)
+
+      checkout = patch.GetCheckout(self.manifest)
+      checkout.AssertPushable()
+      print('Uploading patch %s' % patch)
+      patch.Upload(checkout['push_url'], ref_final, dryrun=dryrun)
+
+      # TODO(rcui): Pass in the remote instead of tag. http://crosbug.com/33937.
+      tag = constants.EXTERNAL_PATCH_TAG
+      if checkout['remote'] == site_config.params.INTERNAL_REMOTE:
+        tag = constants.INTERNAL_PATCH_TAG
+
+      self.extra_args.append('--remote-patches=%s:%s:%s:%s:%s'
+                             % (patch.project, local_branch, ref_final,
+                                patch.tracking_branch, tag))
+
+    self._VerifyForBuildbot()
+    repository.UpdateGitRepo(workdir, self.repo_url)
+    # Check the format-version marker in the tryjob repo; a newer version
+    # there means this chromite is too old to submit jobs.
+    version_path = os.path.join(workdir,
+                                self.TRYJOB_FORMAT_FILE)
+    with open(version_path, 'r') as f:
+      try:
+        val = int(f.read().strip())
+      except ValueError:
+        raise ChromiteUpgradeNeeded()
+      if val > self.TRYJOB_FORMAT_VERSION:
+        raise ChromiteUpgradeNeeded(val)
+    push_branch = manifest_version.PUSH_BRANCH
+
+    remote_branch = None
+    if testjob:
+      # Test jobs go to the 'test' branch, which production masters ignore.
+      remote_branch = git.RemoteRef('origin', 'refs/remotes/origin/test')
+    git.CreatePushBranch(push_branch, workdir, sync=False,
+                         remote_push_branch=remote_branch)
+
+    # One JSON job-description file per submission, named <user>.<timestamp>
+    # inside a per-user directory.
+    file_name = '%s.%s' % (self.user,
+                           current_time)
+    user_dir = os.path.join(workdir, self.user)
+    if not os.path.isdir(user_dir):
+      os.mkdir(user_dir)
+
+    fullpath = os.path.join(user_dir, file_name)
+    with open(fullpath, 'w+') as job_desc_file:
+      json.dump(self.values, job_desc_file)
+
+    git.RunGit(workdir, ['add', fullpath])
+    extra_env = {
+        # The committer field makes sure the creds match what the remote
+        # gerrit instance expects while the author field allows lookup
+        # on the console to work.  http://crosbug.com/27939
+        'GIT_COMMITTER_EMAIL' : self.user_email,
+        'GIT_AUTHOR_EMAIL'    : self.user_email,
+    }
+    git.RunGit(workdir, ['commit', '-m', self.description],
+               extra_env=extra_env)
+
+    try:
+      git.PushWithRetry(push_branch, workdir, retries=3, dryrun=dryrun)
+    except cros_build_lib.RunCommandError:
+      logging.error(
+          'Failed to submit tryjob.  This could be due to too many '
+          'submission requests by users.  Please try again.')
+      raise
+
+  def Submit(self, workdir=None, testjob=False, dryrun=False):
+    """Submit the tryjob through Git.
+
+    Args:
+      workdir: The directory to clone tryjob repo into.  If you pass this
+               in, you are responsible for deleting the directory.  Used for
+               testing.
+      testjob: Submit job to the test branch of the tryjob repo.  The tryjob
+               will be ignored by production master.
+      dryrun: Setting to true will run everything except the final submit step.
+    """
+    if workdir is None:
+      # Use (and lock) the shared cached checkout of the tryjob repo.
+      with self.repo_cache.Lookup(self.cache_key) as ref:
+        self._Submit(ref.path, testjob, dryrun)
+    else:
+      self._Submit(workdir, testjob, dryrun)
+
+  def GetTrybotWaterfallLink(self):
+    """Get link to the waterfall for the user."""
+    # The builders on the trybot waterfall are named after the templates.
+    builders = set(site_config[bot]['_template'] for bot in self.bots)
+
+    # Note that this will only show the jobs submitted by the user in the last
+    # 24 hours.
+    return '%s/waterfall?committer=%s&%s' % (
+        constants.TRYBOT_DASHBOARD, self.user_email,
+        '&'.join('builder=%s' % b for b in sorted(builders)))
diff --git a/cbuildbot/remote_try_unittest b/cbuildbot/remote_try_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/remote_try_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/remote_try_unittest.py b/cbuildbot/remote_try_unittest.py
new file mode 100644
index 0000000..49b315a
--- /dev/null
+++ b/cbuildbot/remote_try_unittest.py
@@ -0,0 +1,191 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for remote_try.py."""
+
+from __future__ import print_function
+
+import json
+import os
+import shutil
+import time
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import config_lib_unittest
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+from chromite.cbuildbot import remote_try
+from chromite.cbuildbot import repository
+from chromite.scripts import cbuildbot
+
+
+site_config = config_lib.GetConfig()
+
+
+class RemoteTryJobMock(remote_try.RemoteTryJob):
+  """Helper for Mocking out a RemoteTryJob."""
+  # Intentionally empty: tests override the EXTERNAL_URL/INTERNAL_URL class
+  # attributes on this subclass to point at local mirrors without touching
+  # the real RemoteTryJob class.
+
+
+@cros_test_lib.NetworkTest()
+class RemoteTryTests(cros_test_lib.MockTempDirTestCase):
+  """Test cases related to remote try jobs."""
+
+  PATCHES = ('5555', '6666')
+  BOTS = ('x86-generic-paladin', 'arm-generic-paladin')
+
+  def setUp(self):
+    """Build cbuildbot options for a remote tryjob against the tempdir."""
+    # pylint: disable=protected-access
+    self.site_config = config_lib_unittest.MockSiteConfig()
+    self.parser = cbuildbot._CreateParser()
+    args = ['-r', '/tmp/test_build1', '-g', '5555', '-g',
+            '6666', '--remote']
+    args.extend(self.BOTS)
+    self.options, args = cbuildbot._ParseCommandLine(self.parser, args)
+    self.options.cache_dir = self.tempdir
+    self.checkout_dir = os.path.join(self.tempdir, 'test_checkout')
+    self.int_mirror, self.ext_mirror = None, None
+
+  def _RunGitSingleOutput(self, cwd, cmd):
+    """Run a git command and assert it prints exactly one token."""
+    result = git.RunGit(cwd, cmd)
+    out_lines = result.output.split()
+    self.assertEqual(len(out_lines), 1)
+    return out_lines[0]
+
+  def _GetNewestFile(self, dirname, basehash):
+    """Return the single file added by the single commit since basehash."""
+    newhash = git.GetGitRepoRevision(dirname)
+    self.assertNotEqual(basehash, newhash)
+    cmd = ['log', '--format=%H', '%s..' % basehash]
+    # Make sure we have a single commit.
+    self._RunGitSingleOutput(dirname, cmd)
+    cmd = ['diff', '--name-only', 'HEAD^']
+    # Make sure only one file per commit.
+    return self._RunGitSingleOutput(dirname, cmd)
+
+  def _SubmitJob(self, checkout_dir, job, version=None):
+    """Returns the path to the tryjob description."""
+    self.assertTrue(isinstance(job, RemoteTryJobMock))
+    basehash = git.GetGitRepoRevision(job.repo_url)
+    if version is not None:
+      self._SetMirrorVersion(version)
+    job.Submit(workdir=checkout_dir, dryrun=True)
+    # Get the file that was just created.
+    created_file = self._GetNewestFile(checkout_dir, basehash)
+    return os.path.join(checkout_dir, created_file)
+
+  def _SetupMirrors(self):
+    """Clone local bare mirrors of the tryjob repos and point the mock at them."""
+    mirror = os.path.join(self.tempdir, 'tryjobs_mirror')
+    os.mkdir(mirror)
+    url = '%s/%s' % (site_config.params.EXTERNAL_GOB_URL, 'chromiumos/tryjobs')
+    repository.CloneGitRepo(mirror, url,
+                            bare=True)
+    self.ext_mirror = mirror
+    mirror = os.path.join(self.tempdir, 'tryjobs_int_mirror')
+    os.mkdir(mirror)
+    # The "internal" mirror is just a clone of the external one for testing.
+    repository.CloneGitRepo(mirror, self.ext_mirror, reference=self.ext_mirror,
+                            bare=True)
+
+    self.int_mirror = mirror
+    RemoteTryJobMock.EXTERNAL_URL = self.ext_mirror
+    RemoteTryJobMock.INTERNAL_URL = self.int_mirror
+    self._SetMirrorVersion(remote_try.RemoteTryJob.TRYJOB_FORMAT_VERSION, True)
+
+  def _SetMirrorVersion(self, version, only_if_missing=False):
+    """Commit the given tryjob format version into both bare mirrors."""
+    for path in (self.ext_mirror, self.int_mirror):
+      vpath = os.path.join(path, remote_try.RemoteTryJob.TRYJOB_FORMAT_FILE)
+      if os.path.exists(vpath) and only_if_missing:
+        continue
+      # Get ourselves a working dir.
+      tmp_repo = os.path.join(self.tempdir, 'tmp-repo')
+      git.RunGit(self.tempdir, ['clone', path, tmp_repo])
+      vpath = os.path.join(tmp_repo, remote_try.RemoteTryJob.TRYJOB_FORMAT_FILE)
+      with open(vpath, 'w') as f:
+        f.write(str(version))
+      git.RunGit(tmp_repo, ['add', vpath])
+      git.RunGit(tmp_repo, ['commit', '-m', 'setting version to %s' % version])
+      git.RunGit(tmp_repo, ['push', path, 'master:master'])
+      shutil.rmtree(tmp_repo)
+
+  def _CreateJob(self, mirror=True):
+    """Create a tryjob object; with mirror=True, use local mirror repos."""
+    job_class = remote_try.RemoteTryJob
+    if mirror:
+      job_class = RemoteTryJobMock
+      self._SetupMirrors()
+
+    job = job_class(self.options, self.BOTS, [])
+    return job
+
+  def testJobTimestamp(self):
+    """Verify jobs have unique names."""
+    def submit_helper(dirname):
+      work_dir = os.path.join(self.tempdir, dirname)
+      return os.path.basename(self._SubmitJob(work_dir, job))
+
+    self.PatchObject(repository, 'IsARepoRoot', return_value=False)
+    job = self._CreateJob()
+
+    file1 = submit_helper('test1')
+    # Tryjob file names are based on timestamp, so delay one second to avoid two
+    # jobfiles having the same name.
+    time.sleep(1)
+    file2 = submit_helper('test2')
+    self.assertNotEqual(file1, file2)
+
+  def testSimpleTryJob(self, version=None):
+    """Test that a tryjob spec file is created and pushed properly."""
+    self.PatchObject(repository, 'IsARepoRoot', return_value=True)
+    self.PatchObject(repository, 'IsInternalRepoCheckout', return_value=False)
+
+    # Pin the git identity while the job derives the submitter email, then
+    # restore the environment regardless of outcome.
+    try:
+      os.environ["GIT_AUTHOR_EMAIL"] = "Elmer Fudd <efudd@google.com>"
+      os.environ["GIT_COMMITTER_EMAIL"] = "Elmer Fudd <efudd@google.com>"
+      job = self._CreateJob()
+    finally:
+      os.environ.pop("GIT_AUTHOR_EMAIL", None)
+      os.environ.pop("GIT_COMMITTER_EMAIL", None)
+    created_file = self._SubmitJob(self.checkout_dir, job, version=version)
+    with open(created_file, 'rb') as job_desc_file:
+      values = json.load(job_desc_file)
+
+    self.assertTrue('efudd@google.com' in values['email'][0])
+
+    for patch in self.PATCHES:
+      self.assertTrue(patch in values['extra_args'],
+                      msg=("expected patch %s in args %s" %
+                           (patch, values['extra_args'])))
+
+    self.assertTrue(set(self.BOTS).issubset(values['bot']))
+
+    remote_url = cros_build_lib.RunCommand(
+        ['git', 'config', 'remote.origin.url'], redirect_stdout=True,
+        cwd=self.checkout_dir).output.strip()
+    self.assertEqual(remote_url, self.ext_mirror)
+
+  def testClientVersionAwareness(self):
+    """Verify submission fails when the server requires a newer format."""
+    self.assertRaises(
+        remote_try.ChromiteUpgradeNeeded,
+        self.testSimpleTryJob,
+        version=remote_try.RemoteTryJob.TRYJOB_FORMAT_VERSION + 1)
+
+  def testInternalTryJob(self):
+    """Verify internal tryjobs are pushed properly."""
+    self.PatchObject(repository, 'IsARepoRoot', return_value=True)
+    self.PatchObject(repository, 'IsInternalRepoCheckout', return_value=True)
+
+    job = self._CreateJob()
+    self._SubmitJob(self.checkout_dir, job)
+
+    remote_url = cros_build_lib.RunCommand(
+        ['git', 'config', 'remote.origin.url'], redirect_stdout=True,
+        cwd=self.checkout_dir).output.strip()
+    self.assertEqual(remote_url, self.int_mirror)
+
+  def testBareTryJob(self):
+    """Verify submitting a tryjob from just a chromite checkout works."""
+    self.PatchObject(repository, 'IsARepoRoot', return_value=False)
+    self.PatchObject(repository, 'IsInternalRepoCheckout',
+                     side_effect=Exception('should not be called'))
+
+    job = self._CreateJob(mirror=False)
+    self.assertEqual(job.repo_url, remote_try.RemoteTryJob.EXTERNAL_URL)
diff --git a/cbuildbot/repository.py b/cbuildbot/repository.py
new file mode 100644
index 0000000..3408e83
--- /dev/null
+++ b/cbuildbot/repository.py
@@ -0,0 +1,529 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Repository module to handle different types of repositories."""
+
+from __future__ import print_function
+
+import constants
+import os
+import re
+import shutil
+
+from chromite.cbuildbot import config_lib
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import path_util
+from chromite.lib import retry_util
+from chromite.lib import rewrite_git_alternates
+
+
+# Module-level site configuration, loaded once at import time.
+site_config = config_lib.GetConfig()
+
+
+# File that marks a buildroot as being used by a trybot
+_TRYBOT_MARKER = '.trybot'
+
+
+class SrcCheckOutException(Exception):
+  """Raised when syncing/checking out the sources fails."""
+
+
+def IsARepoRoot(directory):
+  """Returns True if directory is the root of a repo checkout.
+
+  Args:
+    directory: Path to test for a '.repo' subdirectory.
+  """
+  return os.path.exists(os.path.join(directory, '.repo'))
+
+
+def IsInternalRepoCheckout(root):
+  """Returns whether root houses an internal 'repo' checkout.
+
+  Compares the basename (extension stripped) of the manifest remote URL
+  against that of the site's internal manifest URL, so both
+  'manifest-internal' and 'manifest-internal.git' style URLs match.
+
+  Args:
+    root: Path to the root of a repo checkout.
+  """
+  manifest_dir = os.path.join(root, '.repo', 'manifests')
+  manifest_url = git.RunGit(
+      manifest_dir, ['config', 'remote.origin.url']).output.strip()
+  return (os.path.splitext(os.path.basename(manifest_url))[0] ==
+          os.path.splitext(os.path.basename(
+              site_config.params.MANIFEST_INT_URL))[0])
+
+
+def CloneGitRepo(working_dir, repo_url, reference=None, bare=False,
+                 mirror=False, depth=None, branch=None, single_branch=False):
+  """Clone given git repo
+
+  Args:
+    working_dir: location where it should be cloned to
+    repo_url: git repo to clone
+    reference: If given, pathway to a git repository to access git objects
+      from.  Note that the reference must exist as long as the newly created
+      repo is to be usable.
+    bare: Clone a bare checkout.
+    mirror: Clone a mirror checkout.
+    depth: If given, do a shallow clone limiting the objects pulled to just
+      that # of revs of history.  This option is mutually exclusive to
+      reference.
+    branch: If given, clone the given branch from the parent repository.
+    single_branch: Clone only the requested branch.
+
+  Raises:
+    ValueError: If both reference and depth are supplied.
+  """
+  osutils.SafeMakedirs(working_dir)
+  cmd = ['clone', repo_url, working_dir]
+  if reference:
+    if depth:
+      raise ValueError("reference and depth are mutually exclusive "
+                       "options; please pick one or the other.")
+    cmd += ['--reference', reference]
+  if bare:
+    cmd += ['--bare']
+  if mirror:
+    cmd += ['--mirror']
+  if depth:
+    cmd += ['--depth', str(int(depth))]
+  if branch:
+    cmd += ['--branch', branch]
+  if single_branch:
+    cmd += ['--single-branch']
+  git.RunGit(working_dir, cmd)
+
+
+def UpdateGitRepo(working_dir, repo_url, **kwargs):
+  """Update the given git repo, blowing away any local changes.
+
+  If the repo does not exist, clone it from scratch.
+
+  Args:
+    working_dir: location where it should be cloned to
+    repo_url: git repo to clone
+    **kwargs: See CloneGitRepo.
+  """
+  assert not kwargs.get('bare'), 'Bare checkouts are not supported'
+  if git.IsGitRepo(working_dir):
+    try:
+      git.CleanAndCheckoutUpstream(working_dir)
+    except cros_build_lib.RunCommandError:
+      # The existing checkout is too broken to update; wipe it and start
+      # over with a fresh clone.
+      logging.warning('Could not update %s', working_dir, exc_info=True)
+      shutil.rmtree(working_dir)
+      CloneGitRepo(working_dir, repo_url, **kwargs)
+  else:
+    CloneGitRepo(working_dir, repo_url, **kwargs)
+
+
+def GetTrybotMarkerPath(buildroot):
+  """Get path to trybot marker file given the buildroot."""
+  return os.path.join(buildroot, _TRYBOT_MARKER)
+
+
+def CreateTrybotMarker(buildroot):
+  """Create the file that identifies a buildroot as being used by a trybot."""
+  osutils.WriteFile(GetTrybotMarkerPath(buildroot), '')
+
+
+def ClearBuildRoot(buildroot, preserve_paths=()):
+  """Remove and recreate the buildroot while preserving the trybot marker.
+
+  Args:
+    buildroot: Buildroot directory to wipe.
+    preserve_paths: Iterable of top-level entry names to keep.
+  """
+  # Remember whether this was a trybot buildroot so the marker survives.
+  trybot_root = os.path.exists(GetTrybotMarkerPath(buildroot))
+  if os.path.exists(buildroot):
+    # Delete only the immediate children of buildroot, skipping anything
+    # listed in preserve_paths.
+    cmd = ['find', buildroot, '-mindepth', '1', '-maxdepth', '1']
+
+    # Build a '! -name a -a ! -name b ...' filter for the preserved entries.
+    ignores = []
+    for path in preserve_paths:
+      if ignores:
+        ignores.append('-a')
+      ignores += ['!', '-name', path]
+    cmd.extend(ignores)
+
+    cmd += ['-exec', 'rm', '-rf', '{}', '+']
+    cros_build_lib.SudoRunCommand(cmd)
+  else:
+    os.makedirs(buildroot)
+  if trybot_root:
+    CreateTrybotMarker(buildroot)
+
+
+def PrepManifestForRepo(git_repo, manifest):
+  """Use this to store a local manifest in a git repo suitable for repo.
+
+  The repo tool can only fetch manifests from git repositories. So, to use
+  a local manifest file as the basis for a checkout, it must be checked into
+  a local git repository.
+
+  Common Usage:
+    manifest = CreateOrFetchWondrousManifest()
+    with osutils.TempDir() as manifest_git_dir:
+      PrepManifestForRepo(manifest_git_dir, manifest)
+      repo = RepoRepository(manifest_git_dir, repo_dir)
+      repo.Sync()
+
+  Args:
+    git_repo: Path at which to create the git repository (directory created, if
+              needed). If a tempdir, then cleanup is owned by the caller.
+    manifest: Path to existing manifest file to copy into the new git
+              repository.
+  """
+  if not git.IsGitRepo(git_repo):
+    git.Init(git_repo)
+
+  # The manifest is always stored under repo's default name so that
+  # 'repo init' picks it up without a --manifest-name argument.
+  new_manifest = os.path.join(git_repo, constants.DEFAULT_MANIFEST)
+
+  shutil.copyfile(manifest, new_manifest)
+  git.AddPath(new_manifest)
+  message = 'Local repository holding: %s' % manifest
+
+  # Commit new manifest. allow_empty in case it's the same as last manifest.
+  git.Commit(git_repo, message, allow_empty=True)
+
+
<![CDATA[
+class RepoRepository(object):
+  """A Class that encapsulates a repo repository.
+
+  Wraps the 'repo' tool: init, sync, manifest export/compare, and LRU-based
+  cleanup of stale project checkouts.
+  """
+  # If a repo hasn't been used in the last 5 runs, wipe it.
+  LRU_THRESHOLD = 5
+
+  def __init__(self, manifest_repo_url, directory, branch=None,
+               referenced_repo=None, manifest=constants.DEFAULT_MANIFEST,
+               depth=None, repo_url=site_config.params.REPO_URL,
+               repo_branch=None, groups=None, repo_cmd='repo'):
+    """Initialize.
+
+    Args:
+      manifest_repo_url: URL to fetch repo manifest from.
+      directory: local path where to checkout the repository.
+      branch: Branch to check out the manifest at.
+      referenced_repo: Repository to reference for git objects, if possible.
+      manifest: Which manifest.xml within the branch to use.  Effectively
+        default.xml if not given.
+      depth: Mutually exclusive option to referenced_repo; this limits the
+        checkout to a max commit history of the given integer.
+      repo_url: URL to fetch repo tool from.
+      repo_branch: Branch to check out the repo tool at.
+      groups: Only sync projects that match this filter.
+      repo_cmd: Name of repo_cmd to use.
+
+    Raises:
+      ValueError: If both referenced_repo and depth are given, or if
+        directory exists but is not the root of a repo checkout.
+    """
+    self.manifest_repo_url = manifest_repo_url
+    self.repo_url = repo_url
+    self.repo_branch = repo_branch
+    self.directory = directory
+    self.branch = branch
+    self.groups = groups
+    self.repo_cmd = repo_cmd
+
+    # It's perfectly acceptable to pass in a reference pathway that isn't
+    # usable.  Detect it, and suppress the setting so that any depth
+    # settings aren't disabled due to it.
+    if referenced_repo is not None:
+      if depth is not None:
+        raise ValueError("referenced_repo and depth are mutually exclusive "
+                         "options; please pick one or the other.")
+      if not IsARepoRoot(referenced_repo):
+        referenced_repo = None
+    self._referenced_repo = referenced_repo
+    self._manifest = manifest
+
+    # If the repo exists already, force a selfupdate as the first step.
+    self._repo_update_needed = IsARepoRoot(self.directory)
+    if not self._repo_update_needed and git.FindRepoDir(self.directory):
+      raise ValueError('Given directory %s is not the root of a repository.'
+                       % self.directory)
+
+    self._depth = int(depth) if depth is not None else None
+
+  def _SwitchToLocalManifest(self, local_manifest):
+    """Reinitializes the repository if the manifest has changed."""
+    logging.debug('Moving to manifest defined by %s', local_manifest)
+    # TODO: use upstream repo's manifest logic when we bump repo version.
+    manifest_path = self.GetRelativePath('.repo/manifest.xml')
+    os.unlink(manifest_path)
+    shutil.copyfile(local_manifest, manifest_path)
+
+  def Initialize(self, local_manifest=None, extra_args=()):
+    """Initializes a repository.  Optionally forces a local manifest.
+
+    Args:
+      local_manifest: The absolute path to a custom manifest to use.  This will
+                      replace .repo/manifest.xml.
+      extra_args: Extra args to pass to 'repo init'
+    """
+
+    # Do a sanity check on the repo; if it exists and we can't pull a
+    # manifest from it, we know it's fairly screwed up and needs a fresh
+    # rebuild.
+    if os.path.exists(os.path.join(self.directory, '.repo', 'manifest.xml')):
+      cmd = [self.repo_cmd, 'manifest']
+      try:
+        cros_build_lib.RunCommand(cmd, cwd=self.directory, capture_output=True)
+      except cros_build_lib.RunCommandError:
+        logging.warning("Wiping %r due to `repo manifest` failure",
+                        self.directory)
+        paths = [os.path.join(self.directory, '.repo', x) for x in
+                 ('manifest.xml', 'manifests.git', 'manifests', 'repo')]
+        cros_build_lib.SudoRunCommand(['rm', '-rf'] + paths)
+        self._repo_update_needed = False
+
+    # Wipe local_manifest.xml if it exists- it can interfere w/ things in
+    # bad ways (duplicate projects, etc); we control this repository, thus
+    # we can destroy it.
+    osutils.SafeUnlink(os.path.join(self.directory, 'local_manifest.xml'))
+
+    # Force a repo self update first; during reinit, repo doesn't do the
+    # update itself, but we could be doing the init on a repo version less
+    # then v1.9.4, which didn't have proper support for doing reinit that
+    # involved changing the manifest branch in use; thus selfupdate.
+    # Additionally, if the self update fails for *any* reason, wipe the repo
+    # innards and force repo init to redownload it; same end result, just
+    # less efficient.
+    # Additionally, note that this method may be called multiple times;
+    # thus code appropriately.
+    if self._repo_update_needed:
+      cmd = [self.repo_cmd, 'selfupdate']
+      try:
+        cros_build_lib.RunCommand(cmd, cwd=self.directory)
+      except cros_build_lib.RunCommandError:
+        osutils.RmDir(os.path.join(self.directory, '.repo', 'repo'),
+                      ignore_missing=True)
+      self._repo_update_needed = False
+
+    # Use our own repo, in case android.kernel.org (the default location) is
+    # down.
+    init_cmd = [self.repo_cmd, 'init',
+                '--repo-url', self.repo_url,
+                '--manifest-url', self.manifest_repo_url]
+    if self._referenced_repo:
+      init_cmd.extend(['--reference', self._referenced_repo])
+    if self._manifest:
+      init_cmd.extend(['--manifest-name', self._manifest])
+    if self._depth is not None:
+      init_cmd.extend(['--depth', str(self._depth)])
+    init_cmd.extend(extra_args)
+    # Handle branch / manifest options.
+    if self.branch:
+      init_cmd.extend(['--manifest-branch', self.branch])
+    if self.repo_branch:
+      init_cmd.extend(['--repo-branch', self.repo_branch])
+    if self.groups:
+      init_cmd.extend(['--groups', self.groups])
+
+    # The piped input answers any interactive prompts `repo init` raises
+    # (e.g. color configuration) so the command can run unattended.
+    cros_build_lib.RunCommand(init_cmd, cwd=self.directory, input='\n\ny\n')
+    if local_manifest and local_manifest != self._manifest:
+      self._SwitchToLocalManifest(local_manifest)
+
+  @property
+  def _ManifestConfig(self):
+    """Path to the manifests.git config file within this checkout."""
+    return os.path.join(self.directory, '.repo', 'manifests.git', 'config')
+
+  def _EnsureMirroring(self, post_sync=False):
+    """Ensure git is usable from w/in the chroot if --references is enabled
+
+    repo init --references hardcodes the abspath to parent; this pathway
+    however isn't usable from the chroot (it doesn't exist).  As such the
+    pathway is rewritten to use relative pathways pointing at the root of
+    the repo, which via I84988630 enter_chroot sets up a helper bind mount
+    allowing git/repo to access the actual referenced repo.
+
+    This has to be invoked prior to a repo sync of the target trybot to
+    fix any pathways that may have been broken by the parent repo moving
+    on disk, and needs to be invoked after the sync has completed to rewrite
+    any new project's abspath to relative.
+
+    Args:
+      post_sync: If True, additionally rewrite alternates to the in-chroot
+        pathway of the referenced repo.
+    """
+    if not self._referenced_repo:
+      return
+
+    proj_root = os.path.join(self.directory, '.repo', 'project-objects')
+    if not os.path.exists(proj_root):
+      # Not yet synced, nothing to be done.
+      return
+
+    rewrite_git_alternates.RebuildRepoCheckout(self.directory,
+                                               self._referenced_repo)
+
+    if post_sync:
+      chroot_path = os.path.join(self._referenced_repo, '.repo', 'chroot',
+                                 'external')
+      chroot_path = path_util.ToChrootPath(chroot_path)
+      rewrite_git_alternates.RebuildRepoCheckout(
+          self.directory, self._referenced_repo, chroot_path)
+
+    # Finally, force the git config marker that enter_chroot looks for
+    # to know when to do bind mounting trickery; this normally will exist,
+    # but if we're converting a pre-existing repo checkout, it's possible
+    # that it was invoked w/out the reference arg.  Note this must be
+    # an absolute path to the source repo- enter_chroot uses that to know
+    # what to bind mount into the chroot.
+    cmd = ['config', '--file', self._ManifestConfig, 'repo.reference',
+           self._referenced_repo]
+    git.RunGit('.', cmd)
+
+  def Sync(self, local_manifest=None, jobs=None, all_branches=True,
+           network_only=False):
+    """Sync/update the source.  Changes manifest if specified.
+
+    Args:
+      local_manifest: If true, checks out source to manifest.  DEFAULT_MANIFEST
+        may be used to set it back to the default manifest.
+      jobs: May be set to override the default sync parallelism defined by
+        the manifest.
+      all_branches: If False, a repo sync -c is performed; this saves on
+        sync'ing via grabbing only what is needed for the manifest specified
+        branch. Defaults to True. TODO(davidjames): Set the default back to
+        False once we've fixed http://crbug.com/368722 .
+      network_only: If true, perform only the network half of the sync; skip
+        the checkout.  Primarily of use to validate a manifest (although
+        if the manifest has bad copyfile statements, via skipping checkout
+        the broken copyfile tag won't be spotted), or of use when the
+        invoking code is fine w/ operating on bare repos, ie .repo/projects/*.
+
+    Raises:
+      SrcCheckOutException: If any underlying command fails.
+    """
+    try:
+      # Always re-initialize to the current branch.
+      self.Initialize(local_manifest)
+      # Fix existing broken mirroring configurations.
+      self._EnsureMirroring()
+
+      cmd = [self.repo_cmd, '--time', 'sync']
+      if jobs:
+        cmd += ['--jobs', str(jobs)]
+      if not all_branches or self._depth is not None:
+        # Note that this option can break kernel checkouts. crbug.com/464536
+        cmd.append('-c')
+      # Do the network half of the sync; retry as necessary to get the content.
+      retry_util.RunCommandWithRetries(constants.SYNC_RETRIES, cmd + ['-n'],
+                                       cwd=self.directory)
+
+      if network_only:
+        return
+
+      # Do the local sync; note that there is a couple of corner cases where
+      # the new manifest cannot transition from the old checkout cleanly-
+      # primarily involving git submodules.  Thus we intercept, and do
+      # a forced wipe, then a retry.
+      try:
+        cros_build_lib.RunCommand(cmd + ['-l'], cwd=self.directory)
+      except cros_build_lib.RunCommandError:
+        manifest = git.ManifestCheckout.Cached(self.directory)
+        targets = set(project['path'].split('/', 1)[0]
+                      for project in manifest.ListCheckouts())
+        if not targets:
+          # No directories to wipe, thus nothing we can fix.
+          raise
+
+        cros_build_lib.SudoRunCommand(['rm', '-rf'] + sorted(targets),
+                                      cwd=self.directory)
+
+        # Retry the sync now; if it fails, let the exception propagate.
+        cros_build_lib.RunCommand(cmd + ['-l'], cwd=self.directory)
+
+      # We do a second run to fix any new repositories created by repo to
+      # use relative object pathways.  Note that cros_sdk also triggers the
+      # same cleanup- we however kick it erring on the side of caution.
+      self._EnsureMirroring(True)
+      self._DoCleanup()
+
+    except cros_build_lib.RunCommandError as e:
+      err_msg = e.Stringify(error=False, output=False)
+      logging.error(err_msg)
+      raise SrcCheckOutException(err_msg)
+
+  def _DoCleanup(self):
+    """Wipe unused repositories."""
+
+    # Find all projects, even if they're not in the manifest.  Note the find
+    # trickery this is done to keep it as fast as possible.
+    repo_path = os.path.join(self.directory, '.repo', 'projects')
+    current = set(cros_build_lib.RunCommand(
+        ['find', repo_path, '-type', 'd', '-name', '*.git', '-printf', '%P\n',
+         '-a', '!', '-wholename', '*.git/*', '-prune'],
+        print_cmd=False, capture_output=True).output.splitlines())
+    # Map of project -> number of consecutive runs since it was last used.
+    data = {}.fromkeys(current, 0)
+
+    path = os.path.join(self.directory, '.repo', 'project.lru')
+    if os.path.exists(path):
+      # Restore prior LRU counters, dropping entries for removed projects.
+      existing = [x.strip().split(None, 1)
+                  for x in osutils.ReadFile(path).splitlines()]
+      data.update((k, int(v)) for k, v in existing if k in current)
+
+    # Increment it all...
+    # NOTE(review): iteritems()/map-for-side-effects below are Python 2
+    # idioms; this module predates a py3 migration.
+    data.update((k, v + 1) for k, v in data.iteritems())
+    # Zero out what is now used.
+    checkouts = git.ManifestCheckout.Cached(self.directory).ListCheckouts()
+    data.update(('%s.git' % x['path'], 0) for x in checkouts)
+
+    # Finally... wipe anything that's greater than our threshold.
+    wipes = [k for k, v in data.iteritems() if v > self.LRU_THRESHOLD]
+    if wipes:
+      cros_build_lib.SudoRunCommand(
+          ['rm', '-rf'] + [os.path.join(repo_path, proj) for proj in wipes])
+      map(data.pop, wipes)
+
+    # Persist the updated LRU counters for the next run.
+    osutils.WriteFile(path, "\n".join('%s %i' % x for x in data.iteritems()))
+
+  def GetRelativePath(self, path):
+    """Returns full path including source directory of path in repo."""
+    return os.path.join(self.directory, path)
+
+  def ExportManifest(self, mark_revision=False, revisions=True):
+    """Export the revision locked manifest
+
+    Args:
+      mark_revision: If True, then the sha1 of manifest.git is recorded
+        into the resultant manifest tag as a version attribute.
+        Specifically, if manifests.git is at 1234, <manifest> becomes
+        <manifest revision="1234">.
+      revisions: If True, then rewrite all branches/tags into a specific
+        sha1 revision.  If False, don't.
+
+    Returns:
+      The manifest as a string.
+    """
+    cmd = [self.repo_cmd, 'manifest', '-o', '-']
+    if revisions:
+      cmd += ['-r']
+    output = cros_build_lib.RunCommand(
+        cmd, cwd=self.directory, print_cmd=False, capture_output=True,
+        extra_env={'PAGER':'cat'}).output
+
+    if not mark_revision:
+      return output
+    modified = git.RunGit(os.path.join(self.directory, '.repo/manifests'),
+                          ['rev-list', '-n1', 'HEAD'])
+    assert modified.output
+    return output.replace("<manifest>", '<manifest revision="%s">' %
+                          modified.output.strip())
+
+  def IsManifestDifferent(self, other_manifest):
+    """Checks whether this manifest is different than another.
+
+    Certain blacklisted repos may be ignored as part of the diff.
+
+    Args:
+      other_manifest: Second manifest file to compare against.
+
+    Returns:
+      True: If the manifests are different
+      False: If the manifests are same
+    """
+    logging.debug('Calling IsManifestDifferent against %s', other_manifest)
+
+    black_list = ['="chromium/']
+    blacklist_pattern = re.compile(r'|'.join(black_list))
+    manifest_revision_pattern = re.compile(r'<manifest revision="[a-f0-9]+">',
+                                           re.I)
+
+    current = self.ExportManifest()
+    # NOTE(review): zip() stops at the shorter sequence, so trailing extra
+    # lines present in only one manifest are not compared here.
+    with open(other_manifest, 'r') as manifest2_fh:
+      for (line1, line2) in zip(current.splitlines(), manifest2_fh):
+        line1 = line1.strip()
+        line2 = line2.strip()
+        if blacklist_pattern.search(line1):
+          logging.debug('%s ignored %s', line1, line2)
+          continue
+
+        if line1 != line2:
+          logging.debug('Current and other manifest differ.')
+          logging.debug('current: "%s"', line1)
+          logging.debug('other  : "%s"', line2)
+
+          # Ignore revision differences on the manifest line. The revision of
+          # the manifest.git repo is uninteresting when determining if the
+          # current manifest describes the same sources as the other manifest.
+          if manifest_revision_pattern.search(line2):
+            logging.debug('Ignoring difference in manifest revision.')
+            continue
+
+          return True
+
+      return False
]]>
diff --git a/cbuildbot/repository_unittest b/cbuildbot/repository_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/repository_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/repository_unittest.py b/cbuildbot/repository_unittest.py
new file mode 100644
index 0000000..d5a5640
--- /dev/null
+++ b/cbuildbot/repository_unittest.py
@@ -0,0 +1,126 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for repository.py."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import repository
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+from chromite.lib import osutils
+
+
+site_config = config_lib.GetConfig()
+
+
+class RepositoryTests(cros_build_lib_unittest.RunCommandTestCase):
+  """Test cases related to repository checkout methods."""
+
+  def testExternalRepoCheckout(self):
+    """Test we detect external checkouts properly."""
+    # Manifest remote URLs whose basename does not match the internal
+    # manifest name (note the case-sensitive 'Manifest-internal').
+    tests = [
+        'https://chromium.googlesource.com/chromiumos/manifest.git',
+        'test@abcdef.bla.com:39291/bla/manifest.git',
+        'test@abcdef.bla.com:39291/bla/manifest',
+        'test@abcdef.bla.com:39291/bla/Manifest-internal',
+    ]
+
+    for test in tests:
+      # Make the mocked 'git config remote.origin.url' return this URL.
+      self.rc.SetDefaultCmdResult(output=test)
+      self.assertFalse(repository.IsInternalRepoCheckout('.'))
+
+  def testInternalRepoCheckout(self):
+    """Test we detect internal checkouts properly."""
+    tests = [
+        'https://chrome-internal.googlesource.com/chromeos/manifest-internal',
+        'test@abcdef.bla.com:39291/bla/manifest-internal.git',
+    ]
+
+    for test in tests:
+      self.rc.SetDefaultCmdResult(output=test)
+      self.assertTrue(repository.IsInternalRepoCheckout('.'))
+
+
+class RepoInitTests(cros_test_lib.TempDirTestCase):
+  """Test cases related to repository initialization."""
+
+  def _Initialize(self, branch='master'):
+    """Init a RepoRepository in the test tempdir at the given branch."""
+    repo = repository.RepoRepository(site_config.params.MANIFEST_URL,
+                                     self.tempdir, branch=branch)
+    repo.Initialize()
+
+  @cros_test_lib.NetworkTest()
+  def testReInitialization(self):
+    """Test ability to switch between branches."""
+    self._Initialize('release-R19-2046.B')
+    self._Initialize('master')
+
+    # Test that a failed re-init due to bad branch doesn't leave repo in bad
+    # state.
+    self.assertRaises(Exception, self._Initialize, 'monkey')
+    self._Initialize('release-R20-2268.B')
+
+
+class RepoInitChromeBotTests(RepoInitTests):
+  """Test that Re-init works with the chrome-bot account.
+
+  In testing, repo init behavior on the buildbots is different from a
+  local run, because there is some logic in 'repo' that filters changes based on
+  GIT_COMMITTER_IDENT.  So for sanity's sake, try to emulate running on the
+  buildbots.
+  """
+  def setUp(self):
+    # NOTE(review): os.putenv updates the process environment for child
+    # processes (the repo/git invocations) but not os.environ — verify this
+    # is the intended scope.
+    os.putenv('GIT_COMMITTER_EMAIL', 'chrome-bot@chromium.org')
+    os.putenv('GIT_AUTHOR_EMAIL', 'chrome-bot@chromium.org')
+
+
+class PrepManifestForRepoTests(cros_test_lib.TempDirTestCase):
+  """Tests for our ability to init from a local repository."""
+
+  def testCreateManifestRepo(self):
+    """Test we can create a local git repository with a local manifest."""
+    CONTENTS = 'manifest contents'
+
+    src_manifest = os.path.join(self.tempdir, 'src_manifest')
+    git_repo = os.path.join(self.tempdir, 'git_repo')
+    dst_manifest = os.path.join(git_repo, 'default.xml')
+
+    osutils.WriteFile(src_manifest, CONTENTS)
+    repository.PrepManifestForRepo(git_repo, src_manifest)
+
+    # The manifest must be copied under repo's default name.
+    self.assertEqual(CONTENTS, osutils.ReadFile(dst_manifest))
+
+    # This should fail if we don't have a valid Git repo. Not a perfect test.
+    git.GetGitRepoRevision(git_repo)
+
+  def testUpdatingManifestRepo(self):
+    """Test we can update manifest in a local git repository."""
+    CONTENTS = 'manifest contents'
+    CONTENTS2 = 'manifest contents - PART 2'
+
+    src_manifest = os.path.join(self.tempdir, 'src_manifest')
+    git_repo = os.path.join(self.tempdir, 'git_repo')
+    dst_manifest = os.path.join(git_repo, 'default.xml')
+
+    # Do/verify initial repo setup.
+    osutils.WriteFile(src_manifest, CONTENTS)
+    repository.PrepManifestForRepo(git_repo, src_manifest)
+
+    self.assertEqual(CONTENTS, osutils.ReadFile(dst_manifest))
+
+    # Update it.
+    osutils.WriteFile(src_manifest, CONTENTS2)
+    repository.PrepManifestForRepo(git_repo, src_manifest)
+
+    self.assertEqual(CONTENTS2, osutils.ReadFile(dst_manifest))
+
+    # Update it again with same manifest.  Exercises the allow_empty commit
+    # path in PrepManifestForRepo.
+    repository.PrepManifestForRepo(git_repo, src_manifest)
+
+    self.assertEqual(CONTENTS2, osutils.ReadFile(dst_manifest))
diff --git a/cbuildbot/results_lib.py b/cbuildbot/results_lib.py
new file mode 100644
index 0000000..d0446dc
--- /dev/null
+++ b/cbuildbot/results_lib.py
@@ -0,0 +1,269 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Classes for collecting results of our BuildStages as they run."""
+
+from __future__ import print_function
+
+import collections
+import datetime
+import math
+import os
+
+from chromite.cbuildbot import failures_lib
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+
+def _GetCheckpointFile(buildroot):
+  """Return the path of the completed-stages checkpoint in buildroot."""
+  return os.path.join(buildroot, '.completed_stages')
+
+
+def WriteCheckpoint(buildroot):
+  """Drops a completed stages file with current state."""
+  completed_stages_file = _GetCheckpointFile(buildroot)
+  with open(completed_stages_file, 'w+') as save_file:
+    Results.SaveCompletedStages(save_file)
+
+
+def LoadCheckpoint(buildroot):
+  """Restore completed stage info from checkpoint file.
+
+  A missing checkpoint file is not an error; a warning is logged and the
+  Results state is left untouched.
+  """
+  completed_stages_file = _GetCheckpointFile(buildroot)
+  if not os.path.exists(completed_stages_file):
+    logging.warning('Checkpoint file not found in buildroot %s' % buildroot)
+    return
+
+  with open(completed_stages_file, 'r') as load_file:
+    Results.RestoreCompletedStages(load_file)
+
+
+class RecordedTraceback(object):
+  """This class represents a traceback recorded in the list of results."""
+
+  def __init__(self, failed_stage, failed_prefix, exception, traceback):
+    """Construct a RecordedTraceback object.
+
+    Args:
+      failed_stage: The stage that failed during the build. E.g., HWTest [bvt]
+      failed_prefix: The prefix of the stage that failed. E.g., HWTest
+      exception: The raw exception object.
+      traceback: The full stack trace for the failure, as a string.
+    """
+    self.failed_stage = failed_stage
+    self.failed_prefix = failed_prefix
+    self.exception = exception
+    self.traceback = traceback
+
+
+# Record type for a single stage result; field semantics are described in
+# _Results.Record.
+_result_fields = ['name', 'result', 'description', 'prefix', 'board', 'time']
+Result = collections.namedtuple('Result', _result_fields)
+
+
+class _Results(object):
+  """Static class that collects the results of our BuildStages as they run."""
+
+  SUCCESS = "Stage was successful"
+  FORGIVEN = "Stage failed but was optional"
+  SKIPPED = "Stage was skipped"
+  NON_FAILURE_TYPES = (SUCCESS, FORGIVEN, SKIPPED)
+
+  SPLIT_TOKEN = r'\_O_/'
+
+  def __init__(self):
+    # List of results for all stages that's built up as we run. Members are of
+    #  the form ('name', SUCCESS | FORGIVEN | Exception, None | description)
+    self._results_log = []
+
+    # Stages run in a previous run and restored. Stored as a dictionary of
+    # names to previous records.
+    self._previous = {}
+
+    self.start_time = datetime.datetime.now()
+
+  def Clear(self):
+    """Clear existing stage results."""
+    self.__init__()
+
+  def PreviouslyCompletedRecord(self, name):
+    """Check to see if this stage was previously completed.
+
+    Returns:
+      A boolean showing the stage was successful in the previous run.
+    """
+    return self._previous.get(name)
+
+  def BuildSucceededSoFar(self):
+    """Return true if all stages so far have passing states.
+
+    This method returns true if all was successful or forgiven.
+    """
+    return all(entry.result in self.NON_FAILURE_TYPES
+               for entry in self._results_log)
+
+  def StageHasResults(self, name):
+    """Return true if stage has posted results."""
+    return name in [entry.name for entry in self._results_log]
+
+  def Record(self, name, result, description=None, prefix=None, board='',
+             time=0):
+    """Store off an additional stage result.
+
+    Args:
+      name: The name of the stage (e.g. HWTest [bvt])
+      result:
+        Result should be one of:
+          Results.SUCCESS if the stage was successful.
+          Results.SKIPPED if the stage was skipped.
+          Results.FORGIVEN if the stage had warnings.
+          Otherwise, it should be the exception stage errored with.
+      description:
+        The textual backtrace of the exception, or None
+      prefix: The prefix of the stage (e.g. HWTest). Defaults to
+        the value of name.
+      board: The board associated with the stage, if any. Defaults to ''.
+      time: How long the result took to complete.
+    """
+    if prefix is None:
+      prefix = name
+    result = Result(name, result, description, prefix, board, time)
+    self._results_log.append(result)
+
+  def Get(self):
+    """Fetch stage results.
+
+    Returns:
+      A list with one entry per stage run with a result.
+    """
+    return self._results_log
+
+  def GetPrevious(self):
+    """Fetch stage results.
+
+    Returns:
+      A list of stages names that were completed in a previous run.
+    """
+    return self._previous
+
+  def SaveCompletedStages(self, out):
+    """Save the successfully completed stages to the provided file |out|."""
+    for entry in self._results_log:
+      if entry.result != self.SUCCESS:
+        break
+      out.write(self.SPLIT_TOKEN.join(map(str, entry)) + '\n')
+
+  def RestoreCompletedStages(self, out):
+    """Load the successfully completed stages from the provided file |out|."""
+    # Read the file, and strip off the newlines.
+    for line in out:
+      record = line.strip().split(self.SPLIT_TOKEN)
+      # Each line must have exactly one field per Result namedtuple slot;
+      # anything else means the on-disk format changed or was corrupted.
+      if len(record) != len(_result_fields):
+        logging.warning('State file does not match expected format, ignoring.')
+        # Wipe any partial state.
+        self._previous = {}
+        break
+
+      # Key by stage name (first field) for quick "was this done?" lookups.
+      self._previous[record[0]] = Result(*record)
+
+  def GetTracebacks(self):
+    """Get a list of the exceptions that failed the build.
+
+    Returns:
+       A list of RecordedTraceback objects.
+    """
+    tracebacks = []
+    for entry in self._results_log:
+      # If entry.result is not in NON_FAILURE_TYPES, then the stage failed, and
+      # entry.result is the exception object and entry.description is a string
+      # containing the full traceback.
+      if entry.result not in self.NON_FAILURE_TYPES:
+        traceback = RecordedTraceback(entry.name, entry.prefix, entry.result,
+                                      entry.description)
+        tracebacks.append(traceback)
+    return tracebacks
+
+  def Report(self, out, archive_urls=None, current_version=None):
+    """Generate a user friendly text display of the results data.
+
+    Args:
+      out: Output stream to write to (e.g. sys.stdout).
+      archive_urls: Dict where values are archive URLs and keys are names
+        to associate with those URLs (typically board name).  If None then
+        omit the name when logging the URL.
+      current_version: Chrome OS version associated with this report.
+    """
+    results = self._results_log
+
+    # Visual framing for the report: a full-width rule and a 2-char edge.
+    line = '*' * 60 + '\n'
+    edge = '*' * 2
+
+    if current_version:
+      out.write(line)
+      out.write(edge +
+                ' RELEASE VERSION: ' +
+                current_version +
+                '\n')
+
+    out.write(line)
+    out.write(edge + ' Stage Results\n')
+    warnings = False
+
+    for entry in results:
+      name, result, run_time = (entry.name, entry.result, entry.time)
+      # Round up to whole seconds so sub-second stages don't show as 0:00:00.
+      timestr = datetime.timedelta(seconds=math.ceil(run_time))
+
+      # Don't print data on skipped stages.
+      if result == self.SKIPPED:
+        continue
+
+      out.write(line)
+      details = ''
+      if result == self.SUCCESS:
+        status = 'PASS'
+      elif result == self.FORGIVEN:
+        status = 'FAILED BUT FORGIVEN'
+        warnings = True
+      else:
+        status = 'FAIL'
+        if isinstance(result, cros_build_lib.RunCommandError):
+          # If there was a RunCommand error, give just the command that
+          # failed, not its full argument list, since those are usually
+          # too long.
+          details = ' in %s' % result.result.cmd[0]
+        elif isinstance(result, failures_lib.BuildScriptFailure):
+          # BuildScriptFailure errors publish a 'short' name of the
+          # command that failed.
+          details = ' in %s' % result.shortname
+        else:
+          # There was a normal error. Give the type of exception.
+          details = ' with %s' % type(result).__name__
+
+      out.write('%s %s %s (%s)%s\n' % (edge, status, name, timestr, details))
+
+    out.write(line)
+
+    if archive_urls:
+      out.write('%s BUILD ARTIFACTS FOR THIS BUILD CAN BE FOUND AT:\n' % edge)
+      # NOTE(review): iteritems() is Python 2 only; this module appears to
+      # target py2 — confirm before any py3 migration.
+      for name, url in sorted(archive_urls.iteritems()):
+        named_url = url
+        link_name = 'Artifacts'
+        if name:
+          named_url = '%s: %s' % (name, url)
+          link_name = 'Artifacts[%s]' % name
+
+        # Output the bot-id/version used in the archive url.
+        link_name = '%s: %s' % (link_name, '/'.join(url.split('/')[-3:-1]))
+        out.write('%s  %s' % (edge, named_url))
+        logging.PrintBuildbotLink(link_name, url, handle=out)
+      out.write(line)
+
+    # Dump the full traceback for every failed stage at the end of the report.
+    for x in self.GetTracebacks():
+      if x.failed_stage and x.traceback:
+        out.write('\nFailed in stage %s:\n\n' % x.failed_stage)
+        out.write(x.traceback)
+        out.write('\n')
+
+    if warnings:
+      logging.PrintBuildbotStepWarnings(out)
+
+
+# Module-level singleton; all stages share this one results accumulator.
+Results = _Results()
diff --git a/cbuildbot/run_tests b/cbuildbot/run_tests
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/run_tests
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/run_tests.py b/cbuildbot/run_tests.py
new file mode 100644
index 0000000..2c17905
--- /dev/null
+++ b/cbuildbot/run_tests.py
@@ -0,0 +1,462 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Chromite main test runner.
+
+Run the specified tests.  If none are specified, we'll scan the
+tree looking for tests to run and then only run the semi-fast ones.
+
+You can add a .testignore file to a dir to disable scanning it.
+"""
+
+from __future__ import print_function
+
+import errno
+import multiprocessing
+import os
+import signal
+import stat
+import sys
+import tempfile
+
+from chromite.cbuildbot import constants
+from chromite.lib import cgroups
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import namespaces
+from chromite.lib import osutils
+from chromite.lib import proctitle
+from chromite.lib import timeout_util
+
+
+# How long (in minutes) to let a test run before we kill it.
+TEST_TIMEOUT = 20
+# How long (in minutes) before we send SIGKILL after the timeout above.
+TEST_SIG_TIMEOUT = 5
+
+# How long (in seconds) to let tests clean up after CTRL+C is sent.
+SIGINT_TIMEOUT = 5
+# How long (in seconds) to let all children clean up after CTRL+C is sent.
+CTRL_C_TIMEOUT = SIGINT_TIMEOUT + 5
+
+
+# Test has to run inside the chroot.
+INSIDE = 'inside'
+
+# Test has to run outside the chroot.
+OUTSIDE = 'outside'
+
+# Don't run this test (please add a comment as to why).
+SKIP = 'skip'
+
+
+# List all exceptions, with a token describing what's odd here.
+# BuildTestSets() consults this map to decide how to launch each test.
+SPECIAL_TESTS = {
+    # Tests that need to run inside the chroot.
+    'cbuildbot/stages/test_stages_unittest': INSIDE,
+    'cli/cros/cros_build_unittest': INSIDE,
+    'cli/cros/cros_chroot_unittest': INSIDE,
+    'cli/cros/cros_debug_unittest': INSIDE,
+    'cli/cros/lint_unittest': INSIDE,
+    'cli/deploy_unittest': INSIDE,
+    'lib/alerts_unittest': INSIDE,
+    'lib/chroot_util_unittest': INSIDE,
+    'lib/filetype_unittest': INSIDE,
+    'lib/upgrade_table_unittest': INSIDE,
+    'mobmonitor/checkfile/manager_unittest': INSIDE,
+    'mobmonitor/scripts/mobmonitor_unittest': INSIDE,
+    'scripts/cros_install_debug_syms_unittest': INSIDE,
+    'scripts/cros_list_modified_packages_unittest': INSIDE,
+    'scripts/cros_mark_as_stable_unittest': INSIDE,
+    'scripts/cros_mark_chrome_as_stable_unittest': INSIDE,
+    'scripts/cros_mark_mojo_as_stable_unittest': INSIDE,
+    'scripts/sync_package_status_unittest': INSIDE,
+    'scripts/cros_portage_upgrade_unittest': INSIDE,
+    'scripts/dep_tracker_unittest': INSIDE,
+    'scripts/test_image_unittest': INSIDE,
+    'scripts/upload_package_status_unittest': INSIDE,
+
+    # Tests that need to run outside the chroot.
+    'lib/cgroups_unittest': OUTSIDE,
+
+    # Tests that take >2 minutes to run.  All the slow tests are
+    # disabled atm though ...
+    #'scripts/cros_portage_upgrade_unittest': SKIP,
+}
+
+# Extra tests to skip when --quick is used; main() merges this into
+# SPECIAL_TESTS in that case.
+SLOW_TESTS = {
+    # Tests that require network can be really slow.
+    'buildbot/manifest_version_unittest': SKIP,
+    'buildbot/repository_unittest': SKIP,
+    'buildbot/remote_try_unittest': SKIP,
+    'lib/cros_build_lib_unittest': SKIP,
+    'lib/gerrit_unittest': SKIP,
+    'lib/patch_unittest': SKIP,
+
+    # cgroups_unittest runs cros_sdk a lot, so is slow.
+    'lib/cgroups_unittest': SKIP,
+}
+
+
+def RunTest(test, cmd, tmpfile, finished, total):
+  """Run |test| with the |cmd| line and save output to |tmpfile|.
+
+  Args:
+    test: The human readable name for this test.
+    cmd: The full command line to run the test.
+    tmpfile: File to write test output to.
+    finished: Counter to update when this test finishes running.
+    total: Total number of tests to run.
+
+  Returns:
+    The exit code of the test.
+  """
+  logging.info('Starting %s', test)
+
+  # Callback invoked by TimedCommand when the command completes; bumps the
+  # shared progress counter and logs pass/fail with elapsed time.
+  def _Finished(_log_level, _log_msg, result, delta):
+    # The counter is shared across forked test runners, so guard the
+    # increment with its lock.
+    with finished.get_lock():
+      finished.value += 1
+      if result.returncode:
+        func = logging.error
+        msg = 'Failed'
+      else:
+        func = logging.info
+        msg = 'Finished'
+      func('%s [%i/%i] %s (%s)', msg, finished.value, total, test, delta)
+
+  ret = cros_build_lib.TimedCommand(
+      cros_build_lib.RunCommand, cmd, capture_output=True, error_code_ok=True,
+      combine_stdout_stderr=True, debug_level=logging.DEBUG,
+      int_timeout=SIGINT_TIMEOUT, timed_log_callback=_Finished)
+  # Only persist output for failures; RunTests() later treats any content in
+  # tmpfile as "this test failed".
+  if ret.returncode:
+    tmpfile.write(ret.output)
+    if not ret.output:
+      tmpfile.write('<no output>\n')
+  tmpfile.close()
+
+  return ret.returncode
+
+
+def BuildTestSets(tests, chroot_available, network):
+  """Build the tests to execute.
+
+  Take care of special test handling like whether it needs to be inside or
+  outside of the sdk, whether the test should be skipped, etc...
+
+  Args:
+    tests: List of tests to execute.
+    chroot_available: Whether we can execute tests inside the sdk.
+    network: Whether to execute network tests.
+
+  Returns:
+    List of tests to execute and their full command line.
+    Each element is a (test name, cmd list, tempfile) tuple; the tempfile
+    collects the test's output for later reporting.
+  """
+  testsets = []
+  for test in tests:
+    cmd = [test]
+
+    # See if this test requires special consideration.
+    status = SPECIAL_TESTS.get(test)
+    if status is SKIP:
+      logging.info('Skipping %s', test)
+      continue
+    elif status is INSIDE:
+      if not cros_build_lib.IsInsideChroot():
+        if not chroot_available:
+          logging.info('Skipping %s: chroot not available', test)
+          continue
+        # Re-enter via cros_sdk; the path is relative to the chroot's cwd.
+        cmd = ['cros_sdk', '--', os.path.join('..', '..', 'chromite', test)]
+    elif status is OUTSIDE:
+      if cros_build_lib.IsInsideChroot():
+        logging.info('Skipping %s: must be outside the chroot', test)
+        continue
+    else:
+      # Not a special test: only run it if it's an executable regular file.
+      mode = os.stat(test).st_mode
+      if stat.S_ISREG(mode):
+        if not mode & 0o111:
+          logging.debug('Skipping %s: not executable', test)
+          continue
+      else:
+        logging.debug('Skipping %s: not a regular file', test)
+        continue
+
+    # Build up the final test command.
+    cmd.append('--verbose')
+    if network:
+      cmd.append('--network')
+    # Wrap in timeout(1): SIGTERM after TEST_TIMEOUT minutes, then SIGKILL
+    # TEST_SIG_TIMEOUT minutes later if it still hasn't exited.
+    cmd = ['timeout', '--preserve-status', '-k', '%sm' % TEST_SIG_TIMEOUT,
+           '%sm' % TEST_TIMEOUT] + cmd
+
+    testsets.append((test, cmd, tempfile.TemporaryFile()))
+
+  return testsets
+
+
+def RunTests(tests, jobs=1, chroot_available=True, network=False, dryrun=False,
+             failfast=False):
+  """Execute |paths| with |jobs| in parallel (including |network| tests).
+
+  Args:
+    tests: The tests to run.
+    jobs: How many tests to run in parallel.
+    chroot_available: Whether we can run tests inside the sdk.
+    network: Whether to run network based tests.
+    dryrun: Do everything but execute the test.
+    failfast: Stop on first failure
+
+  Returns:
+    True if all tests pass, else False.
+  """
+  # Shared counter so each forked child can report overall progress.
+  finished = multiprocessing.Value('i')
+  testsets = []
+  pids = []
+  failed = aborted = False
+
+  def WaitOne():
+    # Reap one child; returns its (nonzero on failure) wait status.
+    (pid, status) = os.wait()
+    pids.remove(pid)
+    return status
+
+  # Launch all the tests!
+  try:
+    # Build up the testsets.
+    testsets = BuildTestSets(tests, chroot_available, network)
+
+    # Fork each test and add it to the list.
+    for test, cmd, tmpfile in testsets:
+      if failed and failfast:
+        logging.error('failure detected; stopping new tests')
+        break
+
+      # Throttle: don't fork a new child until we're under the job limit.
+      if len(pids) >= jobs:
+        if WaitOne():
+          failed = True
+      pid = os.fork()
+      if pid == 0:
+        # Child process: run exactly one test, then hard-exit.
+        proctitle.settitle(test)
+        ret = 1
+        try:
+          if dryrun:
+            logging.info('Would have run: %s', cros_build_lib.CmdToStr(cmd))
+            ret = 0
+          else:
+            ret = RunTest(test, cmd, tmpfile, finished, len(testsets))
+        except KeyboardInterrupt:
+          pass
+        except BaseException:
+          logging.error('%s failed', test, exc_info=True)
+        # We cannot run clean up hooks in the child because it'll break down
+        # things like tempdir context managers.
+        os._exit(ret)  # pylint: disable=protected-access
+      pids.append(pid)
+
+    # Wait for all of them to get cleaned up.
+    while pids:
+      if WaitOne():
+        failed = True
+
+  except KeyboardInterrupt:
+    # If the user wants to stop, reap all the pending children.
+    logging.warning('CTRL+C received; cleaning up tests')
+    aborted = True
+    CleanupChildren(pids)
+
+  # Walk through the results.
+  # By RunTest()'s contract, a non-empty tmpfile means the test failed.
+  failed_tests = []
+  for test, cmd, tmpfile in testsets:
+    tmpfile.seek(0)
+    output = tmpfile.read()
+    if output:
+      failed_tests.append(test)
+      print()
+      logging.error('### LOG: %s', test)
+      print(output.rstrip())
+      print()
+
+  if failed_tests:
+    logging.error('The following %i tests failed:\n  %s', len(failed_tests),
+                  '\n  '.join(sorted(failed_tests)))
+    return False
+  elif aborted or failed:
+    return False
+
+  return True
+
+
+def CleanupChildren(pids):
+  """Clean up all the children in |pids|."""
+  # Note: SIGINT was already sent due to the CTRL+C via the kernel itself.
+  # So this func is just waiting for them to clean up.
+  # Ignore further SIGINTs while we reap; the original handler is restored
+  # in the finally block below.
+  handler = signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+  def _CheckWaitpid(ret):
+    # Success predicate for WaitForSuccess: done when no children remain.
+    (pid, _status) = ret
+    if pid:
+      try:
+        pids.remove(pid)
+      except ValueError:
+        # We might have reaped a grandchild -- be robust.
+        pass
+    return len(pids)
+
+  def _Waitpid():
+    try:
+      # Non-blocking reap; WaitForSuccess handles the polling/timeout.
+      return os.waitpid(-1, os.WNOHANG)
+    except OSError as e:
+      if e.errno == errno.ECHILD:
+        # All our children went away!
+        pids[:] = []
+        return (0, 0)
+      else:
+        raise
+
+  def _RemainingTime(remaining):
+    # Progress line, rewritten in place via '\r'.
+    print('\rwaiting %s for %i tests to exit ... ' % (remaining, len(pids)),
+          file=sys.stderr, end='')
+
+  try:
+    timeout_util.WaitForSuccess(_CheckWaitpid, _Waitpid,
+                                timeout=CTRL_C_TIMEOUT, period=0.1,
+                                side_effect_func=_RemainingTime)
+    print('All tests cleaned up!')
+    return
+  except timeout_util.TimeoutError:
+    # Let's kill them hard now.
+    print('Hard killing %i tests' % len(pids))
+    for pid in pids:
+      try:
+        os.kill(pid, signal.SIGKILL)
+      except OSError as e:
+        # ESRCH == process already exited between the timeout and the kill.
+        if e.errno != errno.ESRCH:
+          raise
+  finally:
+    signal.signal(signal.SIGINT, handler)
+
+
+def FindTests(search_paths=('.',)):
+  """Find all the tests available in |search_paths|.
+
+  Yields paths (relative to the search path) of files ending in '_unittest'.
+  A '.testignore' file in a directory prunes that whole subtree.
+  """
+  for search_path in search_paths:
+    for root, dirs, files in os.walk(search_path):
+      if os.path.exists(os.path.join(root, '.testignore')):
+        # Delete the dir list in place.
+        dirs[:] = []
+        continue
+
+      # Skip hidden dirs (e.g. .git); in-place edit so os.walk honors it.
+      dirs[:] = [x for x in dirs if x[0] != '.']
+
+      for path in files:
+        test = os.path.join(os.path.relpath(root, search_path), path)
+        if test.endswith('_unittest'):
+          yield test
+
+
+def ChrootAvailable():
+  """See if `cros_sdk` will work at all.
+
+  If we try to run unittests in the buildtools group, we won't be able to
+  create one.
+
+  Returns:
+    True if the repo checkout contains chromiumos-overlay (a full CrOS
+    checkout), which is what cros_sdk needs.
+  """
+  ret = cros_build_lib.RunCommand(
+      ['repo', 'list'], capture_output=True, error_code_ok=True,
+      combine_stdout_stderr=True, debug_level=logging.DEBUG)
+  return 'chromiumos-overlay' in ret.output
+
+
+def _ReExecuteIfNeeded(argv, network):
+  """Re-execute as root so we can unshare resources."""
+  if os.geteuid() != 0:
+    # Re-exec ourselves under sudo, preserving HOME/PATH so tools resolve
+    # the same way. os.execvp does not return.
+    cmd = ['sudo', '-E', 'HOME=%s' % os.environ['HOME'],
+           'PATH=%s' % os.environ['PATH'], '--'] + argv
+    os.execvp(cmd[0], cmd)
+  else:
+    cgroups.Cgroup.InitSystem()
+    # Unshare the network namespace unless network tests were requested.
+    namespaces.SimpleUnshare(net=not network, pid=True)
+    # We got our namespaces, so switch back to the user to run the tests.
+    # Order matters: groups, then gid, then uid (dropping uid first would
+    # lose the privilege needed for the other two).
+    gid = int(os.environ.pop('SUDO_GID'))
+    uid = int(os.environ.pop('SUDO_UID'))
+    user = os.environ.pop('SUDO_USER')
+    os.initgroups(user, gid)
+    os.setresgid(gid, gid, gid)
+    os.setresuid(uid, uid, uid)
+    os.environ['USER'] = user
+
+
+def GetParser():
+  """Build the command line argument parser for this script."""
+  parser = commandline.ArgumentParser(description=__doc__)
+  parser.add_argument('-f', '--failfast', default=False, action='store_true',
+                      help='Stop on first failure')
+  parser.add_argument('-q', '--quick', default=False, action='store_true',
+                      help='Only run the really quick tests')
+  parser.add_argument('-n', '--dry-run', default=False, action='store_true',
+                      dest='dryrun',
+                      help='Do everything but actually run the test')
+  parser.add_argument('-l', '--list', default=False, action='store_true',
+                      help='List all the available tests')
+  parser.add_argument('-j', '--jobs', type=int,
+                      help='Number of tests to run in parallel at a time')
+  parser.add_argument('--network', default=False, action='store_true',
+                      help='Run tests that depend on good network connectivity')
+  parser.add_argument('tests', nargs='*', default=None, help='Tests to run')
+  return parser
+
+
+def main(argv):
+  """Entry point: parse args, set up namespaces, and run the test suite."""
+  parser = GetParser()
+  opts = parser.parse_args(argv)
+  opts.Freeze()
+
+  # Process list output quickly as it takes no privileges.
+  if opts.list:
+    print('\n'.join(sorted(opts.tests or FindTests((constants.CHROMITE_DIR,)))))
+    return
+
+  # Many of our tests require a valid chroot to run. Make sure it's created
+  # before we block network access.
+  chroot = os.path.join(constants.SOURCE_ROOT, constants.DEFAULT_CHROOT_DIR)
+  if (not os.path.exists(chroot) and
+      ChrootAvailable() and
+      not cros_build_lib.IsInsideChroot()):
+    cros_build_lib.RunCommand(['cros_sdk', '--create'])
+
+  # This is a cheesy hack to make sure gsutil is populated in the cache before
+  # we run tests. This is a partial workaround for crbug.com/468838.
+  gs.GSContext.GetDefaultGSUtilBin()
+
+  # Now let's run some tests.
+  # May re-exec under sudo and unshare namespaces; see _ReExecuteIfNeeded.
+  _ReExecuteIfNeeded([sys.argv[0]] + argv, opts.network)
+  # A lot of pieces here expect to be run in the root of the chromite tree.
+  # Make them happy.
+  os.chdir(constants.CHROMITE_DIR)
+  tests = opts.tests or FindTests()
+
+  if opts.quick:
+    # Fold the slow-test skip list into the special-handling map.
+    SPECIAL_TESTS.update(SLOW_TESTS)
+
+  jobs = opts.jobs or multiprocessing.cpu_count()
+
+  with cros_build_lib.ContextManagerStack() as stack:
+    # If we're running outside the chroot, try to contain ourselves.
+    if cgroups.Cgroup.IsSupported() and not cros_build_lib.IsInsideChroot():
+      stack.Add(cgroups.SimpleContainChildren, 'run_tests')
+
+    # Throw all the tests into a custom tempdir so that if we do CTRL+C, we can
+    # quickly clean up all the files they might have left behind.
+    stack.Add(osutils.TempDir, prefix='chromite.run_tests.', set_global=True,
+              sudo_rm=True)
+
+    def _Finished(_log_level, _log_msg, result, delta):
+      # Only log a summary when the run succeeded; failures already logged.
+      if result:
+        logging.info('All tests succeeded! (%s total)', delta)
+
+    ret = cros_build_lib.TimedCommand(
+        RunTests, tests, jobs=jobs, chroot_available=ChrootAvailable(),
+        network=opts.network, dryrun=opts.dryrun, failfast=opts.failfast,
+        timed_log_callback=_Finished)
+    if not ret:
+      return 1
+
+  if not opts.network:
+    logging.warning('Network tests skipped; use --network to run them')
diff --git a/cbuildbot/run_tests_unittest b/cbuildbot/run_tests_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/run_tests_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/run_tests_unittest.py b/cbuildbot/run_tests_unittest.py
new file mode 100644
index 0000000..4432999
--- /dev/null
+++ b/cbuildbot/run_tests_unittest.py
@@ -0,0 +1,121 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the run_tests module."""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from chromite.cbuildbot import run_tests
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+
+
+class RunTestsTest(cros_test_lib.MockTestCase):
+  """Tests for the RunTests() func"""
+
+  def testDryrun(self):
+    """Verify dryrun doesn't do anything crazy."""
+    # If RunTests tried to actually execute a test, the patched RunTest
+    # would raise and fail this test.
+    self.PatchObject(run_tests, 'RunTest', side_effect=Exception('do not run'))
+    ret = run_tests.RunTests(['/bin/false'], dryrun=True)
+    self.assertTrue(ret)
+
+
+class FindTestsTest(cros_test_lib.TempDirTestCase):
+  """Tests for the FindTests() func"""
+
+  def testNames(self):
+    """We only look for *_unittest."""
+    # 'foo_unittests' (plural) intentionally does NOT match.
+    for f in ('foo', 'foo_unittests', 'bar_unittest', 'cow_unittest'):
+      osutils.Touch(os.path.join(self.tempdir, f))
+    found = run_tests.FindTests(search_paths=(self.tempdir,))
+    self.assertEqual(sorted(found), ['./bar_unittest', './cow_unittest'])
+
+  def testSubdirs(self):
+    """We should recurse into subdirs."""
+    for f in ('bar_unittest', 'somw/dir/a/cow_unittest'):
+      osutils.Touch(os.path.join(self.tempdir, f), makedirs=True)
+    found = run_tests.FindTests(search_paths=(self.tempdir,))
+    self.assertEqual(sorted(found),
+                     ['./bar_unittest', 'somw/dir/a/cow_unittest'])
+
+  def testIgnores(self):
+    """Verify we skip ignored dirs."""
+    for f in ('foo', 'bar_unittest'):
+      osutils.Touch(os.path.join(self.tempdir, f))
+    # Make sure it works first.
+    found = run_tests.FindTests(search_paths=(self.tempdir,))
+    self.assertEqual(sorted(found), ['./bar_unittest'])
+    # Mark the dir ignored.
+    osutils.Touch(os.path.join(self.tempdir, '.testignore'))
+    # Make sure we ignore it.
+    found = run_tests.FindTests(search_paths=(self.tempdir,))
+    self.assertEqual(list(found), [])
+
+
+class MainTest(cros_test_lib.MockOutputTestCase):
+  """Tests for the main() func"""
+
+  def setUp(self):
+    # Stub out privilege escalation and chroot detection so main() can run
+    # inside a normal unittest environment.
+    self.PatchObject(run_tests, '_ReExecuteIfNeeded')
+    self.PatchObject(run_tests, 'ChrootAvailable', return_value=True)
+
+  def testList(self):
+    """Verify --list works"""
+    self.PatchObject(run_tests, 'RunTests', side_effect=Exception('do not run'))
+    with self.OutputCapturer() as output:
+      run_tests.main(['--list'])
+      # Verify some reasonable number of lines showed up.
+      self.assertGreater(len(output.GetStdoutLines()), 90)
+
+  def testMisc(self):
+    """Verify basic flags get passed down correctly"""
+    m = self.PatchObject(run_tests, 'RunTests', return_value=True)
+    run_tests.main(['--network'])
+    m.assert_called_with(mock.ANY, jobs=mock.ANY, chroot_available=mock.ANY,
+                         network=True, dryrun=False, failfast=False)
+    run_tests.main(['--dry-run'])
+    m.assert_called_with(mock.ANY, jobs=mock.ANY, chroot_available=mock.ANY,
+                         network=False, dryrun=True, failfast=False)
+    run_tests.main(['--jobs', '1000'])
+    m.assert_called_with(mock.ANY, jobs=1000, chroot_available=mock.ANY,
+                         network=False, dryrun=False, failfast=False)
+    run_tests.main(['--failfast'])
+    m.assert_called_with(mock.ANY, jobs=mock.ANY, chroot_available=mock.ANY,
+                         network=False, dryrun=False, failfast=True)
+
+  def testUnknownArg(self):
+    """Verify we kick out unknown args"""
+    self.PatchObject(run_tests, 'RunTests', side_effect=Exception('do not run'))
+    bad_arg = '--foasdf'
+    exit_code = None
+    # Only run the main code w/the capturer enabled so we don't swallow
+    # general test output.
+    with self.OutputCapturer():
+      try:
+        run_tests.main([bad_arg])
+      except SystemExit as e:
+        exit_code = e.code
+    self.assertNotEqual(exit_code, 0,
+                        msg='run_tests wrongly accepted %s' % bad_arg)
+
+  def testQuick(self):
+    """Verify --quick filters out slow tests"""
+    self.PatchObject(run_tests, 'RunTests', return_value=True)
+    # Pick a test that is in SLOW_TESTS but not in SPECIAL_TESTS.
+    slow_test = 'lib/patch_unittest'
+    self.assertIn(slow_test, run_tests.SLOW_TESTS)
+    self.assertNotIn(slow_test, run_tests.SPECIAL_TESTS)
+    run_tests.main(['--quick'])
+    self.assertIn(slow_test, run_tests.SPECIAL_TESTS)
+
+  def testSpecificTests(self):
+    """Verify user specified tests are run."""
+    m = self.PatchObject(run_tests, 'RunTests', return_value=True)
+    tests = ['./some/foo_unittest', './bar_unittest']
+    run_tests.main(tests)
+    m.assert_called_with(tests, jobs=mock.ANY, chroot_available=mock.ANY,
+                         network=mock.ANY, dryrun=mock.ANY, failfast=mock.ANY)
diff --git a/cbuildbot/stages/__init__.py b/cbuildbot/stages/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cbuildbot/stages/__init__.py
diff --git a/cbuildbot/stages/afdo_stages.py b/cbuildbot/stages/afdo_stages.py
new file mode 100644
index 0000000..a953a95
--- /dev/null
+++ b/cbuildbot/stages/afdo_stages.py
@@ -0,0 +1,102 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the AFDO stages."""
+
+from __future__ import print_function
+
+from chromite.cbuildbot import afdo
+from chromite.cbuildbot import constants
+from chromite.lib import alerts
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import portage_util
+from chromite.cbuildbot.stages import generic_stages
+
+
+class AFDODataGenerateStage(generic_stages.BoardSpecificBuilderStage,
+                            generic_stages.ForgivingBuilderStage):
+  """Stage that generates AFDO profile data from a perf profile.
+
+  Failures here are forgiven (via ForgivingBuilderStage) but an alert email
+  is sent so they get triaged.
+  """
+
+  def _GetCurrentArch(self):
+    """Get architecture for the current board being built."""
+    return self._GetPortageEnvVar('ARCH', self._current_board)
+
+  def PerformStage(self):
+    """Collect a 'perf' profile and convert it into the AFDO format."""
+    super(AFDODataGenerateStage, self).PerformStage()
+
+    board = self._current_board
+    if not afdo.CanGenerateAFDOData(board):
+      logging.warning('Board %s cannot generate its own AFDO profile.', board)
+      return
+
+    arch = self._GetCurrentArch()
+    buildroot = self._build_root
+    gs_context = gs.GSContext()
+    cpv = portage_util.BestVisible(constants.CHROME_CP,
+                                   buildroot=buildroot)
+    afdo_file = None
+
+    # Generation of AFDO could fail for different reasons.
+    # We will ignore the failures and let the master PFQ builder try
+    # to find an older AFDO profile.
+    try:
+      if afdo.WaitForAFDOPerfData(cpv, arch, buildroot, gs_context):
+        afdo_file = afdo.GenerateAFDOData(cpv, arch, board,
+                                          buildroot, gs_context)
+        assert afdo_file
+        logging.info('Generated %s AFDO profile %s', arch, afdo_file)
+      else:
+        raise afdo.MissingAFDOData('Could not find current "perf" profile. '
+                                   'Master PFQ builder will try to use stale '
+                                   'AFDO profile.')
+    # Will let system-exiting exceptions through.
+    except Exception:
+      logging.PrintBuildbotStepWarnings()
+      logging.warning('AFDO profile generation failed with exception ',
+                      exc_info=True)
+
+      # Email the AFDO sheriffs so the failure gets triaged before it is
+      # promoted to a fatal error.
+      alert_msg = ('Please triage. This will become a fatal error.\n\n'
+                   'arch=%s buildroot=%s\n\nURL=%s' %
+                   (arch, buildroot, self._run.ConstructDashboardURL()))
+      subject_msg = ('Failure in generation of AFDO Data for builder %s' %
+                     self._run.config.name)
+      alerts.SendEmailLog(subject_msg,
+                          afdo.AFDO_ALERT_RECIPIENTS,
+                          server=alerts.SmtpServer(constants.GOLO_SMTP_SERVER),
+                          message=alert_msg)
+      # Re-raise whatever exception we got here. This stage will only
+      # generate a warning but we want to make sure the warning is
+      # generated.
+      raise
+
+
+class AFDOUpdateEbuildStage(generic_stages.BuilderStage):
+  """Updates the Chrome ebuild with the names of the AFDO profiles."""
+
+  def PerformStage(self):
+    """Find the latest AFDO profile per arch and write them into the ebuild.
+
+    Raises:
+      afdo.MissingAFDOData: if no suitable profile exists for some arch.
+    """
+    buildroot = self._build_root
+    gs_context = gs.GSContext()
+    cpv = portage_util.BestVisible(constants.CHROME_CP,
+                                   buildroot=buildroot)
+    version_number = cpv.version
+
+    # We need the name of one board that has been setup in this
+    # builder to find the Chrome ebuild. The chrome ebuild should be
+    # the same for all the boards, so just use the first one.
+    # If we don't have any boards, leave the called function to guess.
+    board = self._boards[0] if self._boards else None
+    arch_profiles = {}
+    for arch in afdo.AFDO_ARCH_GENERATORS:
+      afdo_file = afdo.GetLatestAFDOFile(cpv, arch, buildroot, gs_context)
+      if not afdo_file:
+        raise afdo.MissingAFDOData('Could not find appropriate AFDO profile')
+      # A profile named after the current Chrome version is "current";
+      # otherwise we're falling back to a stale one from a previous build.
+      state = 'current' if version_number in afdo_file else 'previous'
+      logging.info('Found %s %s AFDO profile %s', state, arch, afdo_file)
+      arch_profiles[arch] = afdo_file
+
+    # Now update the Chrome ebuild file with the AFDO profiles we found
+    # for each architecture.
+    afdo.UpdateChromeEbuildAFDOFile(board, arch_profiles)
diff --git a/cbuildbot/stages/artifact_stages.py b/cbuildbot/stages/artifact_stages.py
new file mode 100644
index 0000000..0c5803f
--- /dev/null
+++ b/cbuildbot/stages/artifact_stages.py
@@ -0,0 +1,840 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing stages that generate and/or archive artifacts."""
+
+from __future__ import print_function
+
+import glob
+import itertools
+import json
+import multiprocessing
+import os
+import shutil
+
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import prebuilts
+from chromite.cbuildbot.stages import generic_stages
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import path_util
+from chromite.lib import portage_util
+
+
+_FULL_BINHOST = 'FULL_BINHOST'
+_PORTAGE_BINHOST = 'PORTAGE_BINHOST'
+
+
class NothingToArchiveException(Exception):
  """Raised when ArchiveStage can find nothing to archive."""

  # Message used when the caller does not supply one.
  _DEFAULT_MESSAGE = 'No images found to archive.'

  def __init__(self, message=_DEFAULT_MESSAGE):
    super(NothingToArchiveException, self).__init__(message)
+
+
class ArchiveStage(generic_stages.BoardSpecificBuilderStage,
                   generic_stages.ArchivingStageMixin):
  """Archives build and test artifacts for developer consumption.

  Attributes:
    release_tag: The release tag. E.g. 2981.0.0
    version: The full version string, including the milestone.
        E.g. R26-2981.0.0-b123
  """

  option_name = 'archive'
  config_name = 'archive'

  # This stage is intended to run in the background, in parallel with tests.
  def __init__(self, builder_run, board, chrome_version=None, **kwargs):
    super(ArchiveStage, self).__init__(builder_run, board, **kwargs)
    self.chrome_version = chrome_version

    # TODO(mtennant): Places that use this release_tag attribute should
    # move to use self._run.attrs.release_tag directly.
    self.release_tag = getattr(self._run.attrs, 'release_tag', None)

    # Multiprocessing queues so values survive the process boundaries created
    # by parallel.RunParallelSteps in PerformStage:
    #   _recovery_image_status_queue: True/False recovery-image status, read
    #       (and re-queued) by WaitForRecoveryImage.
    #   _release_upload_queue: release artifacts drained by the uploader in
    #       ArchiveReleaseArtifacts.
    #   _upload_queue: generic artifacts drained in BuildAndArchiveArtifacts.
    self._recovery_image_status_queue = multiprocessing.Queue()
    self._release_upload_queue = multiprocessing.Queue()
    self._upload_queue = multiprocessing.Queue()
    # Artifact descriptor dicts; populated by LoadArtifactsList().
    self.artifacts = []

  def WaitForRecoveryImage(self):
    """Wait until artifacts needed by SignerTest stage are created.

    Returns:
      True if artifacts created successfully.
      False otherwise.
    """
    logging.info('Waiting for recovery image...')
    status = self._recovery_image_status_queue.get()
    # Put the status back so other SignerTestStage instances don't starve.
    self._recovery_image_status_queue.put(status)
    return status

  def ArchiveStrippedPackages(self):
    """Generate and archive stripped versions of packages requested."""
    tarball = commands.BuildStrippedPackagesTarball(
        self._build_root,
        self._current_board,
        self._run.config.upload_stripped_packages,
        self.archive_path)
    # BuildStrippedPackagesTarball returns None when nothing was requested.
    if tarball is not None:
      self._upload_queue.put([tarball])

  def BuildAndArchiveDeltaSysroot(self):
    """Generate and upload delta sysroot for initial build_packages."""
    extra_env = {}
    if self._run.config.useflags:
      extra_env['USE'] = ' '.join(self._run.config.useflags)
    in_chroot_path = path_util.ToChrootPath(self.archive_path)
    cmd = ['generate_delta_sysroot', '--out-dir', in_chroot_path,
           '--board', self._current_board]
    # TODO(mtennant): Make this condition into one run param.
    if not self._run.config.build_tests or not self._run.options.tests:
      cmd.append('--skip-tests')
    cros_build_lib.RunCommand(cmd, cwd=self._build_root, enter_chroot=True,
                              extra_env=extra_env)
    self._upload_queue.put([constants.DELTA_SYSROOT_TAR])

  def LoadArtifactsList(self, board, image_dir):
    """Load the list of artifacts to upload for this board.

    It attempts to load a JSON file, scripts/artifacts.json, from the
    overlay directories for this board. This file specifies the artifacts
    to generate, if it can't be found, it will use a default set that
    uploads every .bin file as a .tar.xz file except for
    chromiumos_qemu_image.bin.

    See BuildStandaloneArchive in cbuildbot_commands.py for format docs.

    Args:
      board: The board to load the artifacts list for.
      image_dir: Directory searched for *.bin files when building the
          default artifacts list and for resolving input globs.
    """
    custom_artifacts_file = portage_util.ReadOverlayFile(
        'scripts/artifacts.json', board=board)
    artifacts = None

    if custom_artifacts_file is not None:
      json_file = json.loads(custom_artifacts_file)
      artifacts = json_file.get('artifacts')

    if artifacts is None:
      artifacts = []
      for image_file in glob.glob(os.path.join(image_dir, '*.bin')):
        basename = os.path.basename(image_file)
        if basename != constants.VM_IMAGE_BIN:
          info = {'input': [basename], 'archive': 'tar', 'compress': 'xz'}
          artifacts.append(info)

    for artifact in artifacts:
      # Resolve the (possible) globs in the input list, and store
      # the actual set of files to use in 'paths'
      paths = []
      for s in artifact['input']:
        glob_paths = glob.glob(os.path.join(image_dir, s))
        if not glob_paths:
          logging.warning('No artifacts generated for input: %s', s)
        else:
          for path in glob_paths:
            paths.append(os.path.relpath(path, image_dir))
      artifact['paths'] = paths
    self.artifacts = artifacts

  def IsArchivedFile(self, filename):
    """Return True if filename is the name of a file being archived."""
    for artifact in self.artifacts:
      for path in itertools.chain(artifact['paths'], artifact['input']):
        if os.path.basename(path) == filename:
          return True
    return False

  def PerformStage(self):
    """Build and archive all configured artifacts for this board.

    Raises:
      NothingToArchiveException: If no archive path is set for this run.
    """
    buildroot = self._build_root
    config = self._run.config
    board = self._current_board
    debug = self._run.debug
    upload_url = self.upload_url
    archive_path = self.archive_path
    image_dir = self.GetImageDirSymlink()

    extra_env = {}
    if config['useflags']:
      extra_env['USE'] = ' '.join(config['useflags'])

    if not archive_path:
      raise NothingToArchiveException()

    # The following functions are run in parallel (except where indicated
    # otherwise)
    # \- BuildAndArchiveArtifacts
    #    \- ArchiveReleaseArtifacts
    #       \- ArchiveFirmwareImages
    #       \- BuildAndArchiveAllImages
    #          (builds recovery image first, then launches functions below)
    #          \- BuildAndArchiveFactoryImages
    #          \- ArchiveStandaloneArtifacts
    #             \- ArchiveStandaloneArtifact
    #          \- ArchiveZipFiles
    #          \- ArchiveHWQual
    #          \- ArchiveGceTarballs
    #       \- PushImage (blocks on BuildAndArchiveAllImages)
    #    \- ArchiveManifest
    #    \- ArchiveStrippedPackages
    #    \- ArchiveImageScripts

    def ArchiveManifest():
      """Create manifest.xml snapshot of the built code."""
      output_manifest = os.path.join(archive_path, 'manifest.xml')
      cmd = ['repo', 'manifest', '-r', '-o', output_manifest]
      cros_build_lib.RunCommand(cmd, cwd=buildroot, capture_output=True)
      self._upload_queue.put(['manifest.xml'])

    def BuildAndArchiveFactoryImages():
      """Build and archive the factory zip file.

      The factory zip file consists of the factory toolkit and the factory
      install image. Both are built here.
      """
      # Build factory install image and create a symlink to it.
      factory_install_symlink = None
      if 'factory_install' in config['images']:
        alias = commands.BuildFactoryInstallImage(buildroot, board, extra_env)
        factory_install_symlink = self.GetImageDirSymlink(alias)
        if config['factory_install_netboot']:
          commands.MakeNetboot(buildroot, board, factory_install_symlink)

      # Build the factory toolkit.
      chroot_dir = os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR)
      chroot_tmp_dir = os.path.join(chroot_dir, 'tmp')
      with osutils.TempDir(base_dir=chroot_tmp_dir, sudo_rm=True) as tempdir:
        # Build the factory toolkit.
        if config['factory_toolkit']:
          toolkit_dir = os.path.join(tempdir, 'factory_toolkit')
          os.makedirs(toolkit_dir)
          commands.MakeFactoryToolkit(
              buildroot, board, toolkit_dir, self._run.attrs.release_tag)

        # Build and upload factory zip if needed.
        # NOTE(review): if factory_install is configured but factory_toolkit
        # is not, 'toolkit_dir' below is referenced without ever being
        # assigned (NameError) -- verify whether that config combination can
        # occur, and initialize toolkit_dir = None above if so.
        if factory_install_symlink or config['factory_toolkit']:
          filename = commands.BuildFactoryZip(
              buildroot, board, archive_path, factory_install_symlink,
              toolkit_dir, self._run.attrs.release_tag)
          self._release_upload_queue.put([filename])

    def ArchiveStandaloneArtifact(artifact_info):
      """Build and upload a single archive."""
      if artifact_info['paths']:
        for path in commands.BuildStandaloneArchive(archive_path, image_dir,
                                                    artifact_info):
          self._release_upload_queue.put([path])

    def ArchiveStandaloneArtifacts():
      """Build and upload standalone archives for each image."""
      if config['upload_standalone_images']:
        parallel.RunTasksInProcessPool(ArchiveStandaloneArtifact,
                                       [[x] for x in self.artifacts])

    def ArchiveGceTarballs():
      """Creates .tar.gz files that can be converted to GCE images.

      These files will be uploaded to GCS buckets, where they can be
      used as input to the "gcloud compute images create" command.
      This will convert them into images that can be used to create
      GCE VM instances.
      """
      image_bins = []
      if 'base' in config['images']:
        image_bins.append(constants.IMAGE_TYPE_TO_NAME['base'])
      if 'test' in config['images']:
        image_bins.append(constants.IMAGE_TYPE_TO_NAME['test'])

      for image_bin in image_bins:
        if not os.path.exists(os.path.join(image_dir, image_bin)):
          logging.warning('Missing image file skipped: %s', image_bin)
          continue
        output_file = commands.BuildGceTarball(
            archive_path, image_dir, image_bin)
        self._release_upload_queue.put([output_file])

      # Signal stages waiting on the GCE tarballs, even if none were built.
      self.board_runattrs.SetParallel('gce_tarball_generated', True)

    def ArchiveZipFiles():
      """Build and archive zip files.

      This includes:
        - image.zip (all images in one big zip file)
        - the au-generator.zip used for update payload generation.
      """
      # Zip up everything in the image directory.
      image_zip = commands.BuildImageZip(archive_path, image_dir)
      self._release_upload_queue.put([image_zip])

      # Archive au-generator.zip.
      filename = 'au-generator.zip'
      shutil.copy(os.path.join(image_dir, filename), archive_path)
      self._release_upload_queue.put([filename])

    def ArchiveHWQual():
      """Build and archive the HWQual images."""
      # TODO(petermayo): This logic needs to be exported from the BuildTargets
      # stage rather than copied/re-evaluated here.
      # TODO(mtennant): Make this autotest_built concept into a run param.
      autotest_built = (config['build_tests'] and self._run.options.tests and
                        config['upload_hw_test_artifacts'])

      if config['hwqual'] and autotest_built:
        # Build the full autotest tarball for hwqual image. We don't upload it,
        # as it's fairly large and only needed by the hwqual tarball.
        logging.info('Archiving full autotest tarball locally ...')
        tarball = commands.BuildFullAutotestTarball(self._build_root,
                                                    self._current_board,
                                                    image_dir)
        commands.ArchiveFile(tarball, archive_path)

        # Build hwqual image and upload to Google Storage.
        hwqual_name = 'chromeos-hwqual-%s-%s' % (board, self.version)
        filename = commands.ArchiveHWQual(buildroot, hwqual_name, archive_path,
                                          image_dir)
        self._release_upload_queue.put([filename])

    def ArchiveFirmwareImages():
      """Archive firmware images built from source if available."""
      archive = commands.BuildFirmwareArchive(buildroot, board, archive_path)
      if archive:
        self._release_upload_queue.put([archive])

    def BuildAndArchiveAllImages():
      # Generate the recovery image. To conserve loop devices, we try to only
      # run one instance of build_image at a time. TODO(davidjames): Move the
      # image generation out of the archive stage.
      self.LoadArtifactsList(self._current_board, image_dir)

      # For recovery image to be generated correctly, BuildRecoveryImage must
      # run before BuildAndArchiveFactoryImages.
      if 'recovery' in config.images:
        assert self.IsArchivedFile(constants.BASE_IMAGE_BIN)
        commands.BuildRecoveryImage(buildroot, board, image_dir, extra_env)
        self._recovery_image_status_queue.put(True)
        # Re-generate the artifacts list so we include the newly created
        # recovery image.
        self.LoadArtifactsList(self._current_board, image_dir)
      else:
        # Unblock WaitForRecoveryImage callers with a failure status.
        self._recovery_image_status_queue.put(False)

      if config['images']:
        steps = [
            BuildAndArchiveFactoryImages,
            ArchiveHWQual,
            ArchiveStandaloneArtifacts,
            ArchiveZipFiles,
        ]
        if config['upload_gce_images']:
          steps.append(ArchiveGceTarballs)
        parallel.RunParallelSteps(steps)

    def ArchiveImageScripts():
      """Archive tarball of generated image manipulation scripts."""
      target = os.path.join(archive_path, constants.IMAGE_SCRIPTS_TAR)
      files = glob.glob(os.path.join(image_dir, '*.sh'))
      files = [os.path.basename(f) for f in files]
      cros_build_lib.CreateTarball(target, image_dir, inputs=files)
      self._upload_queue.put([constants.IMAGE_SCRIPTS_TAR])

    def PushImage():
      # This helper script is only available on internal manifests currently.
      if not config['internal']:
        return

      self.GetParallel('debug_tarball_generated', pretty_name='debug tarball')

      # Needed for stateful.tgz
      self.GetParallel('payloads_generated', pretty_name='payloads')

      # Now that all data has been generated, we can upload the final result to
      # the image server.
      # TODO: When we support branches fully, the friendly name of the branch
      # needs to be used with PushImages
      sign_types = []
      if config['name'].endswith('-%s' % config_lib.CONFIG_TYPE_FIRMWARE):
        sign_types += ['firmware']
      if config['name'].endswith('-%s' % config_lib.CONFIG_TYPE_FACTORY):
        sign_types += ['factory']
      urls = commands.PushImages(
          board=board,
          archive_url=upload_url,
          dryrun=debug or not config['push_image'],
          profile=self._run.options.profile or config['profile'],
          sign_types=sign_types)
      self.board_runattrs.SetParallel('instruction_urls_per_channel', urls)

    def ArchiveReleaseArtifacts():
      with self.ArtifactUploader(self._release_upload_queue, archive=False):
        steps = [BuildAndArchiveAllImages, ArchiveFirmwareImages]
        parallel.RunParallelSteps(steps)
      # PushImage runs after the uploader context closes so that all release
      # artifacts are uploaded before images are pushed.
      PushImage()

    def BuildAndArchiveArtifacts():
      # Run archiving steps in parallel.
      steps = [ArchiveReleaseArtifacts, ArchiveManifest,
               self.ArchiveStrippedPackages]
      if config['images']:
        steps.append(ArchiveImageScripts)
      if config['create_delta_sysroot']:
        steps.append(self.BuildAndArchiveDeltaSysroot)

      with self.ArtifactUploader(self._upload_queue, archive=False):
        parallel.RunParallelSteps(steps)

    # afdo_generate_min builds skip archiving entirely.
    if not self._run.config.afdo_generate_min:
      BuildAndArchiveArtifacts()

  def _HandleStageException(self, exc_info):
    # Tell the HWTestStage not to wait for artifacts to be uploaded
    # in case ArchiveStage throws an exception.
    self._recovery_image_status_queue.put(False)
    self.board_runattrs.SetParallel('instruction_urls_per_channel', None)
    return super(ArchiveStage, self)._HandleStageException(exc_info)
+
+
class CPEExportStage(generic_stages.BoardSpecificBuilderStage,
                     generic_stages.ArchivingStageMixin):
  """Handles generation & upload of package CPE information."""

  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
  def PerformStage(self):
    """Generate CPE export data and upload the results for archiving.

    (The previous docstring, 'Generate debug symbols and upload debug.tgz.',
    was a copy/paste from DebugSymbolsStage.)
    """
    buildroot = self._build_root
    board = self._current_board
    useflags = self._run.config.useflags

    logging.info('Generating CPE export.')
    result = commands.GenerateCPEExport(buildroot, board, useflags)

    logging.info('Writing CPE export to files for archive.')
    # stderr carries warnings; stdout carries the JSON results.
    warnings_filename = os.path.join(self.archive_path,
                                     'cpe-warnings-chromeos-%s.txt' % board)
    results_filename = os.path.join(self.archive_path,
                                    'cpe-chromeos-%s.json' % board)

    osutils.WriteFile(warnings_filename, result.error)
    osutils.WriteFile(results_filename, result.output)

    logging.info('Uploading CPE files.')
    self.UploadArtifact(os.path.basename(warnings_filename), archive=False)
    self.UploadArtifact(os.path.basename(results_filename), archive=False)
+
+
class DebugSymbolsStage(generic_stages.BoardSpecificBuilderStage,
                        generic_stages.ArchivingStageMixin):
  """Handles generation & upload of debug symbols."""

  config_name = 'debug_symbols'

  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
  def PerformStage(self):
    """Generate debug symbols and upload debug.tgz."""
    buildroot = self._build_root
    board = self._current_board

    commands.GenerateBreakpadSymbols(buildroot, board, self._run.debug)
    # Announce symbol availability to stages waiting on this run attribute.
    self.board_runattrs.SetParallel('breakpad_symbols_generated', True)

    # Upload the debug tarball, and the symbols themselves when configured,
    # in parallel.
    steps = [self.UploadDebugTarball]
    failed_list = os.path.join(self.archive_path, 'failed_upload_symbols.list')
    if self._run.config.upload_symbols:
      steps.append(lambda: self.UploadSymbols(buildroot, board, failed_list))

    parallel.RunParallelSteps(steps)

  def UploadDebugTarball(self):
    """Generate and upload the debug tarball."""
    filename = commands.GenerateDebugTarball(
        self._build_root, self._current_board, self.archive_path,
        self._run.config.archive_build_debug)
    self.UploadArtifact(filename, archive=False)
    logging.info('Announcing availability of debug tarball now.')
    self.board_runattrs.SetParallel('debug_tarball_generated', True)

  def UploadSymbols(self, buildroot, board, failed_list):
    """Upload generated debug symbols.

    Args:
      buildroot: Root of the build tree.
      board: Board whose symbols are uploaded.
      failed_list: Path to a file recording symbols that failed to upload;
          it is itself uploaded afterwards if it exists.
    """
    if self._run.options.remote_trybot or self._run.debug:
      # For debug builds, limit ourselves to just uploading 1 symbol.
      # This way trybots and such still exercise this code.
      cnt = 1
      official = False
    else:
      cnt = None
      official = self._run.config.chromeos_official

    commands.UploadSymbols(buildroot, board, official, cnt, failed_list)

    if os.path.exists(failed_list):
      self.UploadArtifact(os.path.basename(failed_list), archive=False)

  def _SymbolsNotGenerated(self):
    """Tell other stages that our symbols were not generated."""
    self.board_runattrs.SetParallelDefault('breakpad_symbols_generated', False)
    self.board_runattrs.SetParallelDefault('debug_tarball_generated', False)

  def HandleSkip(self):
    """Tell other stages to not wait on us if we are skipped."""
    self._SymbolsNotGenerated()
    return super(DebugSymbolsStage, self).HandleSkip()

  def _HandleStageException(self, exc_info):
    """Tell other stages to not wait on us if we die for some reason."""
    self._SymbolsNotGenerated()
    return super(DebugSymbolsStage, self)._HandleStageException(exc_info)
+
+
class MasterUploadPrebuiltsStage(generic_stages.BuilderStage):
  """Syncs prebuilt binhost files across slaves."""
  # TODO(mtennant): This class represents logic spun out from
  # UploadPrebuiltsStage that is specific to a master builder. This is
  # currently used by the Commit Queue and the Master PFQ builder, but
  # could be used by other master builders that upload prebuilts,
  # e.g., x86-alex-pre-flight-branch. When completed the
  # UploadPrebuiltsStage code can be thinned significantly.
  option_name = 'prebuilts'
  config_name = 'prebuilts'

  def _GenerateCommonArgs(self):
    """Return the upload_prebuilts arguments shared by every upload."""
    common = []
    if self._run.options.debug:
      common += ['--debug', '--dry-run']

    profile = self._run.options.profile or self._run.config['profile']
    if profile:
      common += ['--profile', profile]

    # Pin the version for manifest_version builds so all slaves agree on it.
    if self._run.config.manifest_version:
      common += ['--set-version', self._run.GetVersion()]

    return common

  @staticmethod
  def _AddOptionsForSlave(slave_config):
    """Build upload_prebuilts options for one slave builder.

    Args:
      slave_config: The build config of a slave builder.

    Returns:
      A list of options allowing a master to submit prebuilt conf
      modifications on behalf of that slave.
    """
    opts = []
    if not slave_config['prebuilts']:
      return opts
    for slave_board in slave_config['boards']:
      opts += ['--slave-board', slave_board]
      if slave_config['profile']:
        opts += ['--slave-profile', slave_config['profile']]
    return opts

  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
  def PerformStage(self):
    """Syncs prebuilt binhosts for slave builders."""
    # Args shared by every upload_prebuilts invocation below.
    common_args = self._GenerateCommonArgs()
    # Extra args and builder-name lists, split by visibility.
    public_args = []
    private_args = []
    public_builders = []
    private_builders = []

    # Distributed builders that use manifest-versions to sync with one another
    # share prebuilt logic by passing around versions.
    assert config_lib.IsPFQType(self._prebuilt_type)

    # Public pfqs should upload host preflight prebuilts.
    public_args.append('--sync-host')

    # Update all the binhost conf files.
    common_args.append('--sync-binhost-conf')
    for slave in self._GetSlaveConfigs():
      visibility = slave['prebuilts']
      if visibility == constants.PUBLIC:
        public_builders.append(slave['name'])
        public_args += self._AddOptionsForSlave(slave)
      elif visibility == constants.PRIVATE:
        private_builders.append(slave['name'])
        private_args += self._AddOptionsForSlave(slave)

    # Upload public prebuilts first, then private ones, each only when at
    # least one slave of that visibility exists.
    for private_bucket, builders, extra_args in (
        (False, public_builders, public_args),
        (True, private_builders, private_args)):
      if builders:
        prebuilts.UploadPrebuilts(
            category=self._prebuilt_type, chrome_rev=self._chrome_rev,
            private_bucket=private_bucket, buildroot=self._build_root,
            board=None, extra_args=common_args + extra_args)

    # If we're the Chrome PFQ master, update our binhost JSON file.
    if self._run.config.build_type == constants.CHROME_PFQ_TYPE:
      commands.UpdateBinhostJson(self._build_root)
+
+
class UploadPrebuiltsStage(generic_stages.BoardSpecificBuilderStage):
  """Uploads binaries generated by this build for developer use."""

  option_name = 'prebuilts'
  config_name = 'prebuilts'

  def __init__(self, builder_run, board, version=None, **kwargs):
    # Explicit version override; when None and this is not a
    # manifest_version build, no --set-version argument is passed.
    self.prebuilts_version = version
    super(UploadPrebuiltsStage, self).__init__(builder_run, board, **kwargs)

  def GenerateCommonArgs(self):
    """Generate common prebuilt arguments."""
    generated_args = []
    if self._run.options.debug:
      generated_args.extend(['--debug', '--dry-run'])

    profile = self._run.options.profile or self._run.config.profile
    if profile:
      generated_args.extend(['--profile', profile])

    # Generate the version if we are a manifest_version build.
    if self._run.config.manifest_version:
      version = self._run.GetVersion()
    else:
      version = self.prebuilts_version
    if version is not None:
      generated_args.extend(['--set-version', version])

    if self._run.config.git_sync:
      # Git sync should never be set for pfq type builds.
      assert not config_lib.IsPFQType(self._prebuilt_type)
      generated_args.extend(['--git-sync'])

    return generated_args

  @classmethod
  def _AddOptionsForSlave(cls, slave_config, board):
    """Private helper method to add upload_prebuilts args for a slave builder.

    Args:
      slave_config: The build config of a slave builder.
      board: The name of the "master" board on the master builder.

    Returns:
      An array of options to add to upload_prebuilts array that allow a master
      to submit prebuilt conf modifications on behalf of a slave.
    """
    args = []
    if slave_config['prebuilts']:
      for slave_board in slave_config['boards']:
        if slave_config['master'] and slave_board == board:
          # Ignore self.
          continue

        args.extend(['--slave-board', slave_board])
        slave_profile = slave_config['profile']
        if slave_profile:
          args.extend(['--slave-profile', slave_profile])

    return args

  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
  def PerformStage(self):
    """Uploads prebuilts for master and slave builders."""
    prebuilt_type = self._prebuilt_type
    board = self._current_board
    binhosts = []

    # Whether we publish public or private prebuilts.
    public = self._run.config.prebuilts == constants.PUBLIC
    # Common args we generate for all types of builds.
    generated_args = self.GenerateCommonArgs()
    # Args we specifically add for public/private build types.
    public_args, private_args = [], []
    # Public / private builders.
    public_builders, private_builders = [], []

    # Distributed builders that use manifest-versions to sync with one another
    # share prebuilt logic by passing around versions.
    if config_lib.IsPFQType(prebuilt_type):
      # Public pfqs should upload host preflight prebuilts.
      if prebuilt_type != constants.CHROME_PFQ_TYPE:
        public_args.append('--sync-host')

      # Deduplicate against previous binhosts.
      binhosts.extend(self._GetPortageEnvVar(_PORTAGE_BINHOST, board).split())
      binhosts.extend(self._GetPortageEnvVar(_PORTAGE_BINHOST, None).split())
      for binhost in filter(None, binhosts):
        generated_args.extend(['--previous-binhost-url', binhost])

      if self._run.config.master and board == self._boards[-1]:
        # The master builder updates all the binhost conf files, and needs to do
        # so only once so as to ensure it doesn't try to update the same file
        # more than once. As multiple boards can be built on the same builder,
        # we arbitrarily decided to update the binhost conf files when we run
        # upload_prebuilts for the last board. The other boards are treated as
        # slave boards.
        generated_args.append('--sync-binhost-conf')
        for c in self._GetSlaveConfigs():
          if c['prebuilts'] == constants.PUBLIC:
            public_builders.append(c['name'])
            public_args.extend(self._AddOptionsForSlave(c, board))
          elif c['prebuilts'] == constants.PRIVATE:
            private_builders.append(c['name'])
            private_args.extend(self._AddOptionsForSlave(c, board))

    # Keyword args shared by both UploadPrebuilts calls below.
    common_kwargs = {
        'buildroot': self._build_root,
        'category': prebuilt_type,
        'chrome_rev': self._chrome_rev,
        'version': self.prebuilts_version,
    }

    # Upload the public prebuilts, if any.
    if public_builders or public:
      public_board = board if public else None
      prebuilts.UploadPrebuilts(
          private_bucket=False, board=public_board,
          extra_args=generated_args + public_args,
          **common_kwargs)

    # Upload the private prebuilts, if any.
    if private_builders or not public:
      private_board = board if not public else None
      prebuilts.UploadPrebuilts(
          private_bucket=True, board=private_board,
          extra_args=generated_args + private_args,
          **common_kwargs)
+
+
class DevInstallerPrebuiltsStage(UploadPrebuiltsStage):
  """Stage that uploads DevInstaller prebuilts."""

  config_name = 'dev_installer_prebuilts'

  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
  def PerformStage(self):
    """Upload dev-installer prebuilts to the configured binhost bucket."""
    # Fix: the original read 'generated_args = generated_args = ...', a
    # harmless but accidental duplicated assignment.
    generated_args = self.GenerateCommonArgs()
    prebuilts.UploadDevInstallerPrebuilts(
        binhost_bucket=self._run.config.binhost_bucket,
        binhost_key=self._run.config.binhost_key,
        binhost_base_url=self._run.config.binhost_base_url,
        buildroot=self._build_root,
        board=self._current_board,
        extra_args=generated_args)
+
+
class UploadTestArtifactsStage(generic_stages.BoardSpecificBuilderStage,
                               generic_stages.ArchivingStageMixin):
  """Upload needed hardware test artifacts."""

  def BuildAutotestTarballs(self):
    """Build the autotest tarballs."""
    with osutils.TempDir(prefix='cbuildbot-autotest') as tempdir:
      with self.ArtifactUploader(strict=True) as queue:
        # Parent of the board's autotest build directory inside the chroot.
        cwd = os.path.abspath(
            os.path.join(self._build_root, 'chroot', 'build',
                         self._current_board, constants.AUTOTEST_BUILD_PATH,
                         '..'))

        control_files_tarball = commands.BuildAutotestControlFilesTarball(
            self._build_root, cwd, tempdir)
        queue.put([control_files_tarball])

        packages_tarball = commands.BuildAutotestPackagesTarball(
            self._build_root, cwd, tempdir)
        queue.put([packages_tarball])

        # Tar up the test suites.
        test_suites_tarball = commands.BuildAutotestTestSuitesTarball(
            self._build_root, cwd, tempdir)
        queue.put([test_suites_tarball])

        # Build the server side package.
        server_tarball = commands.BuildAutotestServerPackageTarball(
            self._build_root, cwd, tempdir)
        queue.put([server_tarball])

  def _GeneratePayloads(self, image_name, **kwargs):
    """Generate and upload payloads for |image_name|.

    Args:
      image_name: The image to use.
      **kwargs: Keyword arguments to pass to commands.GeneratePayloads.
    """
    with osutils.TempDir(prefix='cbuildbot-payloads') as tempdir:
      with self.ArtifactUploader() as queue:
        image_path = os.path.join(self.GetImageDirSymlink(), image_name)
        commands.GeneratePayloads(self._build_root, image_path, tempdir,
                                  **kwargs)
        for payload in os.listdir(tempdir):
          queue.put([os.path.join(tempdir, payload)])

  def BuildUpdatePayloads(self):
    """Archives update payloads when they are ready."""
    try:
      # If we are not configured to generate payloads, don't.
      if not (self._run.config.upload_hw_test_artifacts and
              self._run.config.images):
        return

      # If there are no images to generate payloads from, don't.
      got_images = self.GetParallel('images_generated', pretty_name='images')
      if not got_images:
        return

      # With no explicit payload_image config, prefer 'test', then 'dev',
      # falling back to 'base'.
      payload_type = self._run.config.payload_image
      if payload_type is None:
        payload_type = 'base'
        for t in ['test', 'dev']:
          if t in self._run.config.images:
            payload_type = t
            break
      image_name = constants.IMAGE_TYPE_TO_NAME[payload_type]
      logging.info('Generating payloads to upload for %s', image_name)
      # Full/stateful payloads are announced before delta payloads so
      # downstream stages that only need the former can proceed sooner.
      self._GeneratePayloads(image_name, full=True, stateful=True)
      self.board_runattrs.SetParallel('payloads_generated', True)
      self._GeneratePayloads(image_name, delta=True)
      self.board_runattrs.SetParallel('delta_payloads_generated', True)

    finally:
      # Make sure these flags are set to some value, no matter now we exit.
      self.board_runattrs.SetParallelDefault('payloads_generated', False)
      self.board_runattrs.SetParallelDefault('delta_payloads_generated', False)

  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
  def PerformStage(self):
    """Upload any needed HWTest artifacts."""
    steps = [self.BuildUpdatePayloads]
    if (self._run.ShouldBuildAutotest() and
        self._run.config.upload_hw_test_artifacts):
      steps.append(self.BuildAutotestTarballs)

    parallel.RunParallelSteps(steps)
+
+
+# TODO(mtennant): This class continues to exist only for subclasses that still
+# need self.archive_stage.  Hopefully, we can get rid of that need, eventually.
# TODO(mtennant): This class continues to exist only for subclasses that still
# need self.archive_stage.  Hopefully, we can get rid of that need, eventually.
class ArchivingStage(generic_stages.BoardSpecificBuilderStage,
                     generic_stages.ArchivingStageMixin):
  """Helper for stages that archive files.

  See ArchivingStageMixin for functionality.

  Attributes:
    archive_stage: The ArchiveStage instance for this board.
  """

  def __init__(self, builder_run, board, archive_stage, **kwargs):
    super(ArchivingStage, self).__init__(builder_run, board, **kwargs)
    # Kept only so legacy subclasses can reach the board's ArchiveStage.
    self.archive_stage = archive_stage
diff --git a/cbuildbot/stages/artifact_stages_unittest b/cbuildbot/stages/artifact_stages_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/stages/artifact_stages_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/stages/artifact_stages_unittest.py b/cbuildbot/stages/artifact_stages_unittest.py
new file mode 100644
index 0000000..efaf090
--- /dev/null
+++ b/cbuildbot/stages/artifact_stages_unittest.py
@@ -0,0 +1,585 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the artifact stages."""
+
+from __future__ import print_function
+
+import argparse
+import mock
+import os
+import sys
+
+from chromite.cbuildbot import cbuildbot_unittest
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import prebuilts
+from chromite.cbuildbot.stages import artifact_stages
+from chromite.cbuildbot.stages import build_stages_unittest
+from chromite.cbuildbot.stages import generic_stages_unittest
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import parallel_unittest
+from chromite.lib import partial_mock
+from chromite.lib import path_util
+
+from chromite.cbuildbot.stages.generic_stages_unittest import patch
+from chromite.cbuildbot.stages.generic_stages_unittest import patches
+
+
+DEFAULT_CHROME_BRANCH = '27'
+
+
+# pylint: disable=too-many-ancestors
+
+
+class ArchiveStageTest(generic_stages_unittest.AbstractStageTestCase,
+                       cbuildbot_unittest.SimpleBuilderTestCase):
+  """Exercise ArchiveStage functionality."""
+
+  # pylint: disable=protected-access
+
+  RELEASE_TAG = ''
+  VERSION = '3333.1.0'
+
+  def _PatchDependencies(self):
+    """Patch dependencies of ArchiveStage.PerformStage()."""
+    to_patch = [
+        (parallel, 'RunParallelSteps'), (commands, 'PushImages'),
+        (commands, 'UploadArchivedFile')]
+    self.AutoPatch(to_patch)
+
+  def setUp(self):
+    self._PatchDependencies()
+
+    self._Prepare()
+
+  def _Prepare(self, bot_id=None, **kwargs):
+    extra_config = {'upload_symbols': True, 'push_image': True}
+    super(ArchiveStageTest, self)._Prepare(bot_id, extra_config=extra_config,
+                                           **kwargs)
+
+  def ConstructStage(self):
+    self._run.GetArchive().SetupArchivePath()
+    return artifact_stages.ArchiveStage(self._run, self._current_board)
+
+  def testArchive(self):
+    """Simple did-it-run test."""
+    # TODO(davidjames): Test the individual archive steps as well.
+    self.RunStage()
+
+  # TODO(build): This test is not actually testing anything real.  It confirms
+  # that PushImages is not called, but the mock for RunParallelSteps already
+  # prevents PushImages from being called, regardless of whether this is a
+  # trybot flow.
+  def testNoPushImagesForRemoteTrybot(self):
+    """Test that remote trybot overrides work to disable push images."""
+    self._Prepare('x86-mario-release',
+                  cmd_args=['--remote-trybot', '-r', self.build_root,
+                            '--buildnumber=1234'])
+    self.RunStage()
+    # pylint: disable=no-member
+    self.assertEquals(commands.PushImages.call_count, 0)
+
+  def ConstructStageForArchiveStep(self):
+    """Stage construction for archive steps."""
+    stage = self.ConstructStage()
+    self.PatchObject(stage._upload_queue, 'put', autospec=True)
+    self.PatchObject(path_util, 'ToChrootPath', return_value='',
+                     autospec=True)
+    return stage
+
+  def testBuildAndArchiveDeltaSysroot(self):
+    """Test tarball is added to upload queue."""
+    stage = self.ConstructStageForArchiveStep()
+    with cros_build_lib_unittest.RunCommandMock() as rc:
+      rc.SetDefaultCmdResult()
+      stage.BuildAndArchiveDeltaSysroot()
+    stage._upload_queue.put.assert_called_with([constants.DELTA_SYSROOT_TAR])
+
+  def testBuildAndArchiveDeltaSysrootFailure(self):
+    """Test tarball not added to upload queue on command exception."""
+    stage = self.ConstructStageForArchiveStep()
+    with cros_build_lib_unittest.RunCommandMock() as rc:
+      rc.AddCmdResult(partial_mock.In('generate_delta_sysroot'), returncode=1,
+                      error='generate_delta_sysroot: error')
+      self.assertRaises2(cros_build_lib.RunCommandError,
+                         stage.BuildAndArchiveDeltaSysroot)
+    self.assertFalse(stage._upload_queue.put.called)
+
+
+class UploadPrebuiltsStageTest(
+    generic_stages_unittest.RunCommandAbstractStageTestCase,
+    cbuildbot_unittest.SimpleBuilderTestCase):
+  """Tests for the UploadPrebuilts stage."""
+
+  cmd = 'upload_prebuilts'
+  RELEASE_TAG = ''
+
+  def _Prepare(self, bot_id=None, **kwargs):
+    super(UploadPrebuiltsStageTest, self)._Prepare(bot_id, **kwargs)
+    self.cmd = os.path.join(self.build_root, constants.CHROMITE_BIN_SUBDIR,
+                            'upload_prebuilts')
+    self._run.options.prebuilts = True
+
+  def ConstructStage(self):
+    return artifact_stages.UploadPrebuiltsStage(self._run,
+                                                self._run.config.boards[-1])
+
+  def _VerifyBoardMap(self, bot_id, count, board_map, public_args=None,
+                      private_args=None):
+    """Verify that the prebuilts are uploaded for the specified bot.
+
+    Args:
+      bot_id: Bot to upload prebuilts for.
+      count: Number of assert checks that should be performed.
+      board_map: Map from slave boards to whether the bot is public.
+      public_args: List of extra arguments for public boards.
+      private_args: List of extra arguments for private boards.
+    """
+    self._Prepare(bot_id)
+    self.RunStage()
+    public_prefix = [self.cmd] + (public_args or [])
+    private_prefix = [self.cmd] + (private_args or [])
+    for board, public in board_map.iteritems():
+      if public or public_args:
+        public_cmd = public_prefix + ['--slave-board', board]
+        self.assertCommandContains(public_cmd, expected=public)
+        count -= 1
+      private_cmd = private_prefix + ['--slave-board', board, '--private']
+      self.assertCommandContains(private_cmd, expected=not public)
+      count -= 1
+    if board_map:
+      self.assertCommandContains([self.cmd, '--set-version',
+                                  self._run.GetVersion()], )
+      count -= 1
+    self.assertEqual(
+        count, 0,
+        'Number of asserts performed does not match (%d remaining)' % count)
+
+  def testFullPrebuiltsUpload(self):
+    """Test uploading of full builder prebuilts."""
+    self._VerifyBoardMap('x86-generic-full', 0, {})
+    self.assertCommandContains([self.cmd, '--git-sync'])
+
+  def testIncorrectCount(self):
+    """Test that _VerifyBoardMap asserts when the count is wrong."""
+    self.assertRaises(AssertionError, self._VerifyBoardMap, 'x86-generic-full',
+                      1, {})
+
+
+class MasterUploadPrebuiltsStageTest(
+    generic_stages_unittest.RunCommandAbstractStageTestCase,
+    cbuildbot_unittest.SimpleBuilderTestCase):
+  """Tests for the MasterUploadPrebuilts stage."""
+
+  cmd = 'upload_prebuilts'
+  RELEASE_TAG = '1234.5.6'
+  VERSION = 'R%s-%s' % (DEFAULT_CHROME_BRANCH, RELEASE_TAG)
+
+  def _Prepare(self, bot_id=None, **kwargs):
+    super(MasterUploadPrebuiltsStageTest, self)._Prepare(bot_id, **kwargs)
+    self.cmd = os.path.join(self.build_root, constants.CHROMITE_BIN_SUBDIR,
+                            'upload_prebuilts')
+    self._run.options.prebuilts = True
+
+  def ConstructStage(self):
+    return artifact_stages.MasterUploadPrebuiltsStage(self._run)
+
+  def _RunStage(self, bot_id):
+    """Run the stage under test with the given |bot_id| config.
+
+    Args:
+      bot_id: Builder config target name.
+    """
+    self._Prepare(bot_id)
+    self.RunStage()
+
+  def _VerifyResults(self, public_slave_boards=(), private_slave_boards=()):
+    """Verify that the expected prebuilt commands were run.
+
+    Do various assertions on the two RunCommands that were run by stage.
+    There should be one private (--private) and one public (default) run.
+
+    Args:
+      public_slave_boards: List of public slave boards.
+      private_slave_boards: List of private slave boards.
+    """
+    # TODO(mtennant): Add functionality in partial_mock to support more flexible
+    # asserting.  For example here, asserting that '--sync-host' appears in
+    # the command that did not include '--public'.
+
+    # Some args are expected for any public run.
+    if public_slave_boards:
+      # It would be nice to confirm that --private is not in command, but note
+      # that --sync-host should not appear in the --private command.
+      cmd = [self.cmd, '--sync-binhost-conf', '--sync-host']
+      self.assertCommandContains(cmd, expected=True)
+
+    # Some args are expected for any private run.
+    if private_slave_boards:
+      cmd = [self.cmd, '--sync-binhost-conf', '--private']
+      self.assertCommandContains(cmd, expected=True)
+
+    # Assert public slave boards are mentioned in public run.
+    for board in public_slave_boards:
+      # This check does not actually confirm that this board was in the public
+      # run rather than the private run, unfortunately.
+      cmd = [self.cmd, '--slave-board', board]
+      self.assertCommandContains(cmd, expected=True)
+
+    # Assert private slave boards are mentioned in private run.
+    for board in private_slave_boards:
+      cmd = [self.cmd, '--slave-board', board, '--private']
+      self.assertCommandContains(cmd, expected=True)
+
+    # We expect --set-version so long as build config has manifest_version=True.
+    self.assertCommandContains([self.cmd, '--set-version', self.VERSION],
+                               expected=self._run.config.manifest_version)
+
+  def testMasterPaladinUpload(self):
+    self._RunStage('master-paladin')
+
+    # Provide a sample of private/public slave boards that are expected.
+    public_slave_boards = ('amd64-generic', 'x86-generic')
+    private_slave_boards = ('x86-mario', 'x86-alex', 'lumpy', 'daisy_spring')
+
+    self._VerifyResults(public_slave_boards=public_slave_boards,
+                        private_slave_boards=private_slave_boards)
+
+  def testMasterChromiumPFQUpload(self):
+    self._RunStage('master-chromium-pfq')
+
+    # Provide a sample of private/public slave boards that are expected.
+    public_slave_boards = ('amd64-generic', 'x86-generic', 'daisy')
+    private_slave_boards = ('x86-alex', 'lumpy', 'daisy_skate', 'falco')
+
+    self._VerifyResults(public_slave_boards=public_slave_boards,
+                        private_slave_boards=private_slave_boards)
+
+
+class UploadDevInstallerPrebuiltsStageTest(
+    generic_stages_unittest.AbstractStageTestCase,
+    cbuildbot_unittest.SimpleBuilderTestCase):
+  """Tests for the UploadDevInstallerPrebuilts stage."""
+
+  RELEASE_TAG = 'RT'
+
+  def setUp(self):
+    self.upload_mock = self.PatchObject(
+        prebuilts, 'UploadDevInstallerPrebuilts')
+
+    self._Prepare()
+
+  def _Prepare(self, bot_id=None, **kwargs):
+    super(UploadDevInstallerPrebuiltsStageTest, self)._Prepare(bot_id, **kwargs)
+
+    self._run.options.chrome_rev = None
+    self._run.options.prebuilts = True
+    self._run.config['dev_installer_prebuilts'] = True
+    self._run.config['binhost_bucket'] = 'gs://testbucket'
+    self._run.config['binhost_key'] = 'dontcare'
+    self._run.config['binhost_base_url'] = 'https://dontcare/here'
+
+  def ConstructStage(self):
+    return artifact_stages.DevInstallerPrebuiltsStage(self._run,
+                                                      self._current_board)
+
+  def testDevInstallerUpload(self):
+    """Basic sanity test for uploads of dev installer prebuilts."""
+    self.RunStage()
+
+    self.upload_mock.assert_called_with(
+        binhost_bucket=self._run.config.binhost_bucket,
+        binhost_key=self._run.config.binhost_key,
+        binhost_base_url=self._run.config.binhost_base_url,
+        buildroot=self.build_root,
+        board=self._current_board,
+        extra_args=mock.ANY)
+
+
+class CPEExportStageTest(generic_stages_unittest.AbstractStageTestCase,
+                         cbuildbot_unittest.SimpleBuilderTestCase):
+  """Test CPEExportStage"""
+
+  def setUp(self):
+    self.StartPatcher(generic_stages_unittest.ArchivingStageMixinMock())
+    self.StartPatcher(parallel_unittest.ParallelMock())
+
+    self.rc_mock = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
+    self.rc_mock.SetDefaultCmdResult(output='')
+
+    self.stage = None
+
+  def ConstructStage(self):
+    """Create a CPEExportStage instance for testing"""
+    self._run.GetArchive().SetupArchivePath()
+    return artifact_stages.CPEExportStage(self._run, self._current_board)
+
+  def assertBoardAttrEqual(self, attr, expected_value):
+    """Assert the value of a board run |attr| against |expected_value|."""
+    value = self.stage.board_runattrs.GetParallel(attr)
+    self.assertEqual(expected_value, value)
+
+  def _TestPerformStage(self):
+    """Run PerformStage for the stage."""
+    self._Prepare()
+    self._run.attrs.release_tag = self.VERSION
+
+    self.stage = self.ConstructStage()
+    self.stage.PerformStage()
+
+  def testCPEExport(self):
+    """Test that CPEExport stage runs without syntax errors."""
+    self._TestPerformStage()
+
+
+class DebugSymbolsStageTest(generic_stages_unittest.AbstractStageTestCase,
+                            cbuildbot_unittest.SimpleBuilderTestCase):
+  """Test DebugSymbolsStage"""
+
+  # pylint: disable=protected-access
+
+  def setUp(self):
+    self.StartPatcher(generic_stages_unittest.ArchivingStageMixinMock())
+    self.StartPatcher(parallel_unittest.ParallelMock())
+
+    self.gen_mock = self.PatchObject(commands, 'GenerateBreakpadSymbols')
+    self.upload_mock = self.PatchObject(commands, 'UploadSymbols')
+    self.tar_mock = self.PatchObject(commands, 'GenerateDebugTarball')
+
+    self.rc_mock = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
+    self.rc_mock.SetDefaultCmdResult(output='')
+
+    self.stage = None
+
+  def ConstructStage(self):
+    """Create a DebugSymbolsStage instance for testing"""
+    self._run.GetArchive().SetupArchivePath()
+    return artifact_stages.DebugSymbolsStage(self._run, self._current_board)
+
+  def assertBoardAttrEqual(self, attr, expected_value):
+    """Assert the value of a board run |attr| against |expected_value|."""
+    value = self.stage.board_runattrs.GetParallel(attr)
+    self.assertEqual(expected_value, value)
+
+  def _TestPerformStage(self, extra_config=None):
+    """Run PerformStage for the stage with the given extra config."""
+    if not extra_config:
+      extra_config = {
+          'archive_build_debug': True,
+          'vm_tests': True,
+          'upload_symbols': True,
+      }
+
+    self._Prepare(extra_config=extra_config)
+    self._run.attrs.release_tag = self.VERSION
+
+    self.tar_mock.side_effect = '/my/tar/ball'
+    self.stage = self.ConstructStage()
+    try:
+      self.stage.PerformStage()
+    except Exception:
+      self.stage._HandleStageException(sys.exc_info())
+      raise
+
+  def testPerformStageWithSymbols(self):
+    """Smoke test for PerformStage when debugging is enabled"""
+    self._TestPerformStage()
+
+    self.assertEqual(self.gen_mock.call_count, 1)
+    self.assertEqual(self.upload_mock.call_count, 1)
+    self.assertEqual(self.tar_mock.call_count, 1)
+
+    self.assertBoardAttrEqual('breakpad_symbols_generated', True)
+    self.assertBoardAttrEqual('debug_tarball_generated', True)
+
+  def testPerformStageNoSymbols(self):
+    """Smoke test for PerformStage when debugging is disabled"""
+    extra_config = {
+        'archive_build_debug': False,
+        'vm_tests': False,
+        'upload_symbols': False,
+    }
+    self._TestPerformStage(extra_config)
+
+    self.assertEqual(self.gen_mock.call_count, 1)
+    self.assertEqual(self.upload_mock.call_count, 0)
+    self.assertEqual(self.tar_mock.call_count, 1)
+
+    self.assertBoardAttrEqual('breakpad_symbols_generated', True)
+    self.assertBoardAttrEqual('debug_tarball_generated', True)
+
+  def testGenerateCrashStillNotifies(self):
+    """Crashes in symbol generation should still notify external events."""
+    self.skipTest('Test skipped due to crbug.com/363339')
+    class TestError(Exception):
+      """Unique test exception"""
+
+    self.gen_mock.side_effect = TestError('mew')
+    self.assertRaises(TestError, self._TestPerformStage)
+
+    self.assertEqual(self.gen_mock.call_count, 1)
+    self.assertEqual(self.upload_mock.call_count, 0)
+    self.assertEqual(self.tar_mock.call_count, 0)
+
+    self.assertBoardAttrEqual('breakpad_symbols_generated', False)
+    self.assertBoardAttrEqual('debug_tarball_generated', False)
+
+  def testUploadCrashStillNotifies(self):
+    """Crashes in symbol upload should still notify external events."""
+    class TestError(failures_lib.CrashCollectionFailure):
+      """Unique test exception"""
+
+    self.upload_mock.side_effect = TestError('mew')
+    self.assertRaises(TestError, self._TestPerformStage)
+
+    self.assertEqual(self.gen_mock.call_count, 1)
+    self.assertEqual(self.upload_mock.call_count, 1)
+    self.assertEqual(self.tar_mock.call_count, 1)
+
+    self.assertBoardAttrEqual('breakpad_symbols_generated', True)
+    self.assertBoardAttrEqual('debug_tarball_generated', True)
+
+
+class UploadTestArtifactsStageMock(
+    generic_stages_unittest.ArchivingStageMixinMock):
+  """Partial mock for UploadTestArtifactsStage."""
+
+  TARGET = 'chromite.cbuildbot.stages.artifact_stages.UploadTestArtifactsStage'
+  ATTRS = (generic_stages_unittest.ArchivingStageMixinMock.ATTRS +
+           ('BuildAutotestTarballs',))
+
+  def BuildAutotestTarballs(self, *args, **kwargs):
+    with patches(
+        patch(commands, 'BuildTarball'),
+        patch(commands, 'FindFilesWithPattern', return_value=['foo.txt'])):
+      self.backup['BuildAutotestTarballs'](*args, **kwargs)
+
+
+class UploadTestArtifactsStageTest(build_stages_unittest.AllConfigsTestCase,
+                                   cbuildbot_unittest.SimpleBuilderTestCase):
+  """Tests UploadTestArtifactsStage."""
+
+  def setUp(self):
+    self._release_tag = None
+
+    osutils.SafeMakedirs(os.path.join(self.build_root, 'chroot', 'tmp'))
+    self.StartPatcher(UploadTestArtifactsStageMock())
+
+  def ConstructStage(self):
+    return artifact_stages.UploadTestArtifactsStage(self._run,
+                                                    self._current_board)
+
+  def RunTestsWithBotId(self, bot_id, options_tests=True):
+    """Test with the config for the specified bot_id."""
+    self._Prepare(bot_id)
+    self._run.options.tests = options_tests
+    self._run.attrs.release_tag = '0.0.1'
+
+    # Simulate images being ready.
+    board_runattrs = self._run.GetBoardRunAttrs(self._current_board)
+    board_runattrs.SetParallel('images_generated', True)
+
+    chroot_base = os.path.join(self.build_root, 'chroot')
+
+    def _ExtractOutputParam(cmd):
+      """Extract the --output option from a list of arguments."""
+      argparser = argparse.ArgumentParser()
+      argparser.add_argument('--output', action='store')
+      options, _ = argparser.parse_known_args(cmd)
+      return options.output
+
+    def _SimUpdatePayload(cmd, *_args, **kwargs):
+      """Simulate cros_generate_update_payload by creating its output file."""
+      self.assertTrue(kwargs.get('enter_chroot'))
+
+      output = _ExtractOutputParam(cmd)
+      self.assertTrue(output)
+      self.assertTrue(os.path.dirname(output))
+
+      # Join these paths manually since output is absolute and os.path.join
+      # will throw away chroot_base.
+      output = os.sep.join([chroot_base, output])
+
+      if not os.path.isdir(os.path.dirname(output)):
+        os.makedirs(os.path.dirname(output))
+      self.assertFalse(os.path.exists(output))
+
+      osutils.Touch(output)
+
+    def _SimUpdateStatefulPayload(cmd, *_args, **kwargs):
+      """Simulate cros_generate_stateful_update_payload like above."""
+      self.assertTrue(kwargs.get('enter_chroot'))
+
+      output = _ExtractOutputParam(cmd)
+      self.assertTrue(output)
+
+      # Join these paths manually since output is absolute and os.path.join
+      # will throw away chroot_base.
+      output = os.sep.join([chroot_base, output])
+
+      if not os.path.isdir(output):
+        os.makedirs(output)
+
+      output = os.path.join(output, commands.STATEFUL_FILE)
+
+      self.assertFalse(os.path.exists(output))
+
+      osutils.Touch(output)
+
+    def _HookRunCommand(rc):
+      rc.AddCmdResult(
+          partial_mock.ListRegex('cros_generate_update_payload'),
+          side_effect=_SimUpdatePayload)
+      rc.AddCmdResult(
+          partial_mock.ListRegex('cros_generate_stateful_update_payload'),
+          side_effect=_SimUpdateStatefulPayload)
+
+    with parallel_unittest.ParallelMock():
+      with self.RunStageWithConfig(mock_configurator=_HookRunCommand) as rc:
+        if (self._run.config.upload_hw_test_artifacts and
+            self._run.config.images):
+          self.assertNotEqual(rc.call_count, 0)
+        else:
+          self.assertEqual(rc.call_count, 0)
+
+  def testAllConfigs(self):
+    """Test all major configurations"""
+    self.RunAllConfigs(self.RunTestsWithBotId, skip_missing=True)
+
+
+# TODO: Delete ArchivingMock once ArchivingStage is deprecated.
+class ArchivingMock(partial_mock.PartialMock):
+  """Partial mock for ArchivingStage."""
+
+  TARGET = 'chromite.cbuildbot.stages.artifact_stages.ArchivingStage'
+  ATTRS = ('UploadArtifact',)
+
+  def UploadArtifact(self, *args, **kwargs):
+    with patch(commands, 'ArchiveFile', return_value='foo.txt'):
+      with patch(commands, 'UploadArchivedFile'):
+        self.backup['UploadArtifact'](*args, **kwargs)
+
+
+# TODO: Delete ArchivingStageTest once ArchivingStage is deprecated.
+class ArchivingStageTest(generic_stages_unittest.AbstractStageTestCase,
+                         cbuildbot_unittest.SimpleBuilderTestCase):
+  """Exercise ArchivingStage functionality."""
+  RELEASE_TAG = ''
+
+  def setUp(self):
+    self.StartPatcher(ArchivingMock())
+
+    self._Prepare()
+
+  def ConstructStage(self):
+    self._run.GetArchive().SetupArchivePath()
+    archive_stage = artifact_stages.ArchiveStage(
+        self._run, self._current_board)
+    return artifact_stages.ArchivingStage(
+        self._run, self._current_board, archive_stage)
diff --git a/cbuildbot/stages/branch_stages.py b/cbuildbot/stages/branch_stages.py
new file mode 100644
index 0000000..b2e52d2
--- /dev/null
+++ b/cbuildbot/stages/branch_stages.py
@@ -0,0 +1,498 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the branch stages."""
+
+from __future__ import print_function
+
+import os
+import re
+from xml.etree import ElementTree
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot.stages import generic_stages
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import parallel
+
+
+site_config = config_lib.GetConfig()
+
+
+class BranchError(Exception):
+  """Raised by branch creation code on error."""
+
+
+class BranchUtilStage(generic_stages.BuilderStage):
+  """Creates, deletes and renames branches, depending on cbuildbot options.
+
+  The two main types of branches are release branches and non-release
+  branches.  Release branches have the form 'release-*' - e.g.,
+  'release-R29-4319.B'.
+
+  On a very basic level, a branch is created by parsing the manifest of a
+  specific version of Chrome OS (e.g., 4319.0.0), and creating the branch
+  remotely for each checkout in the manifest at the specified hash.
+
+  Once a branch is created however, the branch component of the version on the
+  newly created branch needs to be incremented.  Additionally, in some cases
+  the Chrome major version (i.e, R29) and/or the Chrome OS version (i.e.,
+  4319.0.0) of the source branch must be incremented
+  (see _IncrementVersionOnDiskForSourceBranch docstring).  Finally, the external
+  and internal manifests of the new branch need to be fixed up (see
+  FixUpManifests docstring).
+  """
+
+  COMMIT_MESSAGE = 'Bump %(target)s after branching %(branch)s'
+
+  def __init__(self, builder_run, **kwargs):
+    super(BranchUtilStage, self).__init__(builder_run, **kwargs)
+    self.skip_remote_push = (self._run.options.skip_remote_push or
+                             self._run.options.debug_forced)
+    self.branch_name = self._run.options.branch_name
+    self.rename_to = self._run.options.rename_to
+
+  def _RunPush(self, checkout, src_ref, dest_ref, force=False):
+    """Perform a git push for a checkout.
+
+    Args:
+      checkout: A dictionary of checkout manifest attributes.
+      src_ref: The source local ref to push to the remote.
+      dest_ref: The local remote ref that correspond to destination ref name.
+      force: Whether to override non-fastforward checks.
+    """
+    # Convert local tracking ref to refs/heads/* on a remote:
+    # refs/remotes/<remote name>/<branch> to refs/heads/<branch>.
+    # If dest_ref is already refs/heads/<branch> it's a noop.
+    dest_ref = git.NormalizeRef(git.StripRefs(dest_ref))
+    push_to = git.RemoteRef(checkout['push_remote'], dest_ref)
+    git.GitPush(checkout['local_path'], src_ref, push_to, force=force,
+                skip=self.skip_remote_push)
+
+  def _FetchAndCheckoutTo(self, checkout_dir, remote_ref):
+    """Fetch a remote ref and check out to it.
+
+    Args:
+      checkout_dir: Path to git repo to operate on.
+      remote_ref: A git.RemoteRef object.
+    """
+    git.RunGit(checkout_dir, ['fetch', remote_ref.remote, remote_ref.ref],
+               print_cmd=True)
+    git.RunGit(checkout_dir, ['checkout', 'FETCH_HEAD'], print_cmd=True)
+
+  def _GetBranchSuffix(self, manifest, checkout):
+    """Return the branch suffix for the given checkout.
+
+    If a given project is checked out to multiple locations, it is necessary
+    to append a branch suffix. To be safe, we append branch suffixes for all
+    repositories that use a non-standard branch name (e.g., if our default
+    revision is "master", then any repository which does not use "master"
+    has a non-standard branch name.)
+
+    Args:
+      manifest: The associated ManifestCheckout.
+      checkout: The associated ProjectCheckout.
+    """
+    # Get the default and tracking branch.
+    suffix = ''
+    if len(manifest.FindCheckouts(checkout['name'])) > 1:
+      default_branch = git.StripRefs(manifest.default['revision'])
+      tracking_branch = git.StripRefs(checkout['tracking_branch'])
+      suffix = '-%s' % (tracking_branch,)
+      if default_branch != 'master':
+        suffix = re.sub('^-%s-' % re.escape(default_branch), '-', suffix)
+    return suffix
+
+  def _GetSHA1(self, checkout, branch):
+    """Get the SHA1 for the specified |branch| in the specified |checkout|.
+
+    Args:
+      checkout: The ProjectCheckout to look in.
+      branch: Remote branch to look for.
+
+    Returns:
+      If the branch exists, returns the SHA1 of the branch. Otherwise, returns
+      the empty string.  If branch is None, return None.
+    """
+    if branch:
+      cmd = ['show-ref', branch]
+      result = git.RunGit(checkout['local_path'], cmd, error_code_ok=True)
+      if result.returncode == 0:
+        # Output looks like:
+        # a00733b...30ee40e0c2c1 refs/remotes/cros/test-4980.B
+        return result.output.strip().split()[0]
+
+      return ''
+
+  def _CopyBranch(self, src_checkout, src_branch, dst_branch, force=False):
+    """Copy the given |src_branch| to |dst_branch|.
+
+    Args:
+      src_checkout: The ProjectCheckout to work in.
+      src_branch: The remote branch ref to copy from.
+      dst_branch: The remote branch ref to copy to.
+      force: If True then execute the copy even if dst_branch exists.
+    """
+    logging.info('Creating new branch "%s" for %s.', dst_branch,
+                 src_checkout['name'])
+    self._RunPush(src_checkout, src_ref=src_branch, dest_ref=dst_branch,
+                  force=force)
+
+  def _DeleteBranch(self, src_checkout, branch):
+    """Delete the given |branch| in the given |src_checkout|.
+
+    Args:
+      src_checkout: The ProjectCheckout to work in.
+      branch: The branch ref to delete.  Must be a remote branch.
+    """
+    logging.info('Deleting branch "%s" for %s.', branch, src_checkout['name'])
+    self._RunPush(src_checkout, src_ref='', dest_ref=branch)
+
+  def _ProcessCheckout(self, src_manifest, src_checkout):
+    """Performs per-checkout push operations.
+
+    Args:
+      src_manifest: The ManifestCheckout object for the current manifest.
+      src_checkout: The ProjectCheckout object to process.
+    """
+    if not src_checkout.IsBranchableProject():
+      # We don't have the ability to push branches to this repository. Just
+      # use TOT instead.
+      return
+
+    checkout_name = src_checkout['name']
+    remote = src_checkout['push_remote']
+    src_ref = src_checkout['revision']
+    suffix = self._GetBranchSuffix(src_manifest, src_checkout)
+
+    # The source/destination branches depend on options.
+    if self.rename_to:
+      # Rename flow.  Both src and dst branches exist.
+      src_branch = '%s%s' % (self.branch_name, suffix)
+      dst_branch = '%s%s' % (self.rename_to, suffix)
+    elif self._run.options.delete_branch:
+      # Delete flow.  Only dst branch exists.
+      src_branch = None
+      dst_branch = '%s%s' % (self.branch_name, suffix)
+    else:
+      # Create flow (default).  Only dst branch exists.  Source
+      # for the branch will just be src_ref.
+      src_branch = None
+      dst_branch = '%s%s' % (self.branch_name, suffix)
+
+    # Normalize branch refs to remote.  We only process remote branches.
+    src_branch = git.NormalizeRemoteRef(remote, src_branch)
+    dst_branch = git.NormalizeRemoteRef(remote, dst_branch)
+
+    # Determine whether src/dst branches exist now, by getting their sha1s.
+    if src_branch:
+      src_sha1 = self._GetSHA1(src_checkout, src_branch)
+    elif git.IsSHA1(src_ref):
+      src_sha1 = src_ref
+    dst_sha1 = self._GetSHA1(src_checkout, dst_branch)
+
+    # Complain if the branch already exists, unless that is expected.
+    force = self._run.options.force_create or self._run.options.delete_branch
+    if dst_sha1 and not force:
+      # We are either creating a branch or renaming a branch, and the
+      # destination branch unexpectedly exists.  Accept this only if the
+      # destination branch is already at the revision we want.
+      if src_sha1 != dst_sha1:
+        raise BranchError('Checkout %s already contains branch %s.  Run with '
+                          '--force-create to overwrite.'
+                          % (checkout_name, dst_branch))
+
+      logging.info('Checkout %s already contains branch %s and it already'
+                   ' points to revision %s', checkout_name, dst_branch,
+                   dst_sha1)
+
+    elif self._run.options.delete_branch:
+      # Delete the dst_branch, if it exists.
+      if dst_sha1:
+        self._DeleteBranch(src_checkout, dst_branch)
+      else:
+        raise BranchError('Checkout %s does not contain branch %s to delete.'
+                          % (checkout_name, dst_branch))
+
+    elif self.rename_to:
+      # Copy src_branch to dst_branch, if it exists, then delete src_branch.
+      if src_sha1:
+        self._CopyBranch(src_checkout, src_branch, dst_branch)
+        self._DeleteBranch(src_checkout, src_branch)
+      else:
+        raise BranchError('Checkout %s does not contain branch %s to rename.'
+                          % (checkout_name, src_branch))
+
+    else:
+      # Copy src_ref to dst_branch.
+      self._CopyBranch(src_checkout, src_ref, dst_branch,
+                       force=self._run.options.force_create)
+
+  def _UpdateManifest(self, manifest_path):
+    """Rewrite |manifest_path| to point at the right branch.
+
+    Args:
+      manifest_path: The path to the manifest file.
+    """
+    src_manifest = git.ManifestCheckout.Cached(self._build_root,
+                                               manifest_path=manifest_path)
+    doc = ElementTree.parse(manifest_path)
+    root = doc.getroot()
+
+    # Use the local branch ref.
+    new_branch_name = self.rename_to if self.rename_to else self.branch_name
+    new_branch_name = git.NormalizeRef(new_branch_name)
+
+    logging.info('Updating manifest for %s', new_branch_name)
+
+    default_nodes = root.findall('default')
+    for node in default_nodes:
+      node.attrib['revision'] = new_branch_name
+
+    for node in root.findall('project'):
+      path = node.attrib['path']
+      checkout = src_manifest.FindCheckoutFromPath(path)
+
+      if checkout.IsBranchableProject():
+        # Point at the new branch.
+        node.attrib.pop('revision', None)
+        node.attrib.pop('upstream', None)
+        suffix = self._GetBranchSuffix(src_manifest, checkout)
+        if suffix:
+          node.attrib['revision'] = '%s%s' % (new_branch_name, suffix)
+          logging.info('Pointing project %s at: %s', node.attrib['name'],
+                       node.attrib['revision'])
+        elif not default_nodes:
+          # If there isn't a default node we have to add the revision directly.
+          node.attrib['revision'] = new_branch_name
+      else:
+        if checkout.IsPinnableProject():
+          git_repo = checkout.GetPath(absolute=True)
+          repo_head = git.GetGitRepoRevision(git_repo)
+          node.attrib['revision'] = repo_head
+          logging.info('Pinning project %s at: %s', node.attrib['name'],
+                       node.attrib['revision'])
+        else:
+          logging.info('Updating project %s', node.attrib['name'])
+          # We can't branch this repository. Leave it alone.
+          node.attrib['revision'] = checkout['revision']
+          logging.info('Project %s UNPINNED using: %s', node.attrib['name'],
+                       node.attrib['revision'])
+
+        # Can not use the default version of get() here since
+        # 'upstream' can be a valid key with a None value.
+        upstream = checkout.get('upstream')
+        if upstream is not None:
+          node.attrib['upstream'] = upstream
+
+    doc.write(manifest_path)
+    return [node.attrib['name'] for node in root.findall('include')]
+
+  def _FixUpManifests(self, repo_manifest):
+    """Points the checkouts at the new branch in the manifests.
+
+    Within the branch, make sure all manifests with projects that are
+    "branchable" are checked out to "refs/heads/<new_branch>".  Do this
+    by updating all manifests in the known manifest projects.
+    """
+    assert not self._run.options.delete_branch, 'Cannot fix a deleted branch.'
+
+    # Use local branch ref.
+    branch_ref = git.NormalizeRef(self.branch_name)
+
+    logging.debug('Fixing manifest projects for new branch.')
+    for project in site_config.params.MANIFEST_PROJECTS:
+      manifest_checkout = repo_manifest.FindCheckout(project)
+      manifest_dir = manifest_checkout['local_path']
+      push_remote = manifest_checkout['push_remote']
+
+      # Checkout revision can be either a sha1 or a branch ref.
+      src_ref = manifest_checkout['revision']
+      if not git.IsSHA1(src_ref):
+        src_ref = git.NormalizeRemoteRef(push_remote, src_ref)
+
+      git.CreateBranch(
+          manifest_dir, manifest_version.PUSH_BRANCH, src_ref)
+
+      # We want to process default.xml and official.xml + their imports.
+      pending_manifests = [constants.DEFAULT_MANIFEST,
+                           constants.OFFICIAL_MANIFEST]
+      processed_manifests = []
+
+      while pending_manifests:
+        # Canonicalize the manifest name (resolve dir and symlinks).
+        manifest_path = os.path.join(manifest_dir, pending_manifests.pop())
+        manifest_path = os.path.realpath(manifest_path)
+
+        # Don't process a manifest more than once.
+        if manifest_path in processed_manifests:
+          continue
+
+        processed_manifests.append(manifest_path)
+
+        if not os.path.exists(manifest_path):
+          logging.info('Manifest not found: %s', manifest_path)
+          continue
+
+        logging.debug('Fixing manifest at %s.', manifest_path)
+        included_manifests = self._UpdateManifest(manifest_path)
+        pending_manifests += included_manifests
+
+      git.RunGit(manifest_dir, ['add', '-A'], print_cmd=True)
+      message = 'Fix up manifest after branching %s.' % branch_ref
+      git.RunGit(manifest_dir, ['commit', '-m', message], print_cmd=True)
+      push_to = git.RemoteRef(push_remote, branch_ref)
+      git.GitPush(manifest_dir, manifest_version.PUSH_BRANCH, push_to,
+                  skip=self.skip_remote_push)
+
+  def _IncrementVersionOnDisk(self, incr_type, push_to, message):
+    """Bumps the version found in chromeos_version.sh on a branch.
+
+    Args:
+      incr_type: See docstring for manifest_version.VersionInfo.
+      push_to: A git.RemoteRef object.
+      message: The message to give the git commit that bumps the version.
+    """
+    version_info = manifest_version.VersionInfo.from_repo(
+        self._build_root, incr_type=incr_type)
+    version_info.IncrementVersion()
+    version_info.UpdateVersionFile(message,
+                                   dry_run=self.skip_remote_push,
+                                   push_to=push_to)
+
+  @staticmethod
+  def DetermineBranchIncrParams(version_info):
+    """Determines the version component to bump for the new branch."""
+    # We increment the left-most component that is zero.
+    if version_info.branch_build_number != '0':
+      if version_info.patch_number != '0':
+        raise BranchError('Version %s cannot be branched.' %
+                          version_info.VersionString())
+      return 'patch', 'patch number'
+    else:
+      return 'branch', 'branch number'
+
+  @staticmethod
+  def DetermineSourceIncrParams(source_name, dest_name):
+    """Determines the version component to bump for the original branch."""
+    if dest_name.startswith('refs/heads/release-'):
+      return 'chrome_branch', 'Chrome version'
+    elif source_name == 'refs/heads/master':
+      return 'build', 'build number'
+    else:
+      return 'branch', 'branch build number'
+
+  def _IncrementVersionOnDiskForNewBranch(self, push_remote):
+    """Bumps the version found in chromeos_version.sh on the new branch.
+
+    When a new branch is created, the branch component of the new branch's
+    version needs to bumped.
+
+    For example, say 'stabilize-link' is created from the 4230.0.0 manifest.
+    The new branch's version needs to be bumped to 4230.1.0.
+
+    Args:
+      push_remote: a git remote name where the new branch lives.
+    """
+    # This needs to happen before the source branch version bumping above
+    # because we rely on the fact that since our current overlay checkout
+    # is what we just pushed to the new branch, we don't need to do another
+    # sync.  This also makes it easier to implement skip_remote_push
+    # functionality (the new branch doesn't actually get created in
+    # skip_remote_push mode).
+
+    # Use local branch ref.
+    branch_ref = git.NormalizeRef(self.branch_name)
+    push_to = git.RemoteRef(push_remote, branch_ref)
+    version_info = manifest_version.VersionInfo(
+        version_string=self._run.options.force_version)
+    incr_type, incr_target = self.DetermineBranchIncrParams(version_info)
+    message = self.COMMIT_MESSAGE % {
+        'target': incr_target,
+        'branch': branch_ref,
+    }
+    self._IncrementVersionOnDisk(incr_type, push_to, message)
+
+  def _IncrementVersionOnDiskForSourceBranch(self, overlay_dir, push_remote,
+                                             source_branch):
+    """Bumps the version found in chromeos_version.sh on the source branch.
+
+    The source branch refers to the branch that the manifest used for creating
+    the new branch came from.  For release branches, we generally branch from a
+    'master' branch manifest.
+
+    To work around crbug.com/213075, for both non-release and release branches,
+    we need to bump the Chrome OS version on the source branch if the manifest
+    used for branch creation is the latest generated manifest for the source
+    branch.
+
+    When we are creating a release branch, the Chrome major version of the
+    'master' (source) branch needs to be bumped.  For example, if we branch
+    'release-R29-4230.B' from the 4230.0.0 manifest (which is from the 'master'
+    branch), the 'master' branch's Chrome major version in chromeos_version.sh
+    (which is 29) needs to be bumped to 30.
+
+    Args:
+      overlay_dir: Absolute path to the chromiumos overlay repo.
+      push_remote: The remote to push to.
+      source_branch: The branch that the manifest we are using comes from.
+    """
+    push_to = git.RemoteRef(push_remote, source_branch)
+    self._FetchAndCheckoutTo(overlay_dir, push_to)
+
+    # Use local branch ref.
+    branch_ref = git.NormalizeRef(self.branch_name)
+    tot_version_info = manifest_version.VersionInfo.from_repo(self._build_root)
+    if (branch_ref.startswith('refs/heads/release-') or
+        tot_version_info.VersionString() == self._run.options.force_version):
+      incr_type, incr_target = self.DetermineSourceIncrParams(
+          source_branch, branch_ref)
+      message = self.COMMIT_MESSAGE % {
+          'target': incr_target,
+          'branch': branch_ref,
+      }
+      try:
+        self._IncrementVersionOnDisk(incr_type, push_to, message)
+      except cros_build_lib.RunCommandError:
+        # There's a chance we are racing against the buildbots for this
+        # increment.  We shouldn't quit the script because of this.  Instead, we
+        # print a warning.
+        self._FetchAndCheckoutTo(overlay_dir, push_to)
+        new_version = manifest_version.VersionInfo.from_repo(self._build_root)
+        if new_version.VersionString() != tot_version_info.VersionString():
+          logging.warning('Version number for branch %s was bumped by another '
+                          'bot.', push_to.ref)
+        else:
+          raise
+
+  def PerformStage(self):
+    """Run the branch operation."""
+    # Setup and initialize the repo.
+    super(BranchUtilStage, self).PerformStage()
+
+    repo_manifest = git.ManifestCheckout.Cached(self._build_root)
+    checkouts = repo_manifest.ListCheckouts()
+
+    logging.debug('Processing %d checkouts from manifest in parallel.',
+                  len(checkouts))
+    args = [[repo_manifest, x] for x in checkouts]
+    parallel.RunTasksInProcessPool(self._ProcessCheckout, args, processes=16)
+
+    if not self._run.options.delete_branch:
+      self._FixUpManifests(repo_manifest)
+
+    # Increment versions for a new branch.
+    if not (self._run.options.delete_branch or self.rename_to):
+      overlay_name = 'chromiumos/overlays/chromiumos-overlay'
+      overlay_checkout = repo_manifest.FindCheckout(overlay_name)
+      overlay_dir = overlay_checkout['local_path']
+      push_remote = overlay_checkout['push_remote']
+      self._IncrementVersionOnDiskForNewBranch(push_remote)
+
+      source_branch = repo_manifest.default['revision']
+      self._IncrementVersionOnDiskForSourceBranch(overlay_dir, push_remote,
+                                                  source_branch)
diff --git a/cbuildbot/stages/branch_stages_unittest b/cbuildbot/stages/branch_stages_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/stages/branch_stages_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/stages/branch_stages_unittest.py b/cbuildbot/stages/branch_stages_unittest.py
new file mode 100644
index 0000000..0397836
--- /dev/null
+++ b/cbuildbot/stages/branch_stages_unittest.py
@@ -0,0 +1,379 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the branch stages."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot import manifest_version_unittest
+from chromite.cbuildbot.stages import branch_stages
+from chromite.cbuildbot.stages import generic_stages_unittest
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+from chromite.lib import git_unittest
+from chromite.lib import osutils
+from chromite.lib import parallel_unittest
+from chromite.lib import partial_mock
+
+
+MANIFEST_CONTENTS = """\
+<?xml version="1.0" encoding="UTF-8"?>
+<manifest>
+  <remote fetch="https://chromium.googlesource.com"
+          name="cros"
+          review="chromium-review.googlesource.com"/>
+
+  <default remote="cros" revision="refs/heads/master" sync-j="8"/>
+
+  <project groups="minilayout,buildtools"
+           name="chromiumos/chromite"
+           path="chromite"
+           revision="refs/heads/special-branch"/>
+
+  <project name="chromiumos/special"
+           path="src/special-new"
+           revision="new-special-branch"/>
+
+  <project name="chromiumos/special"
+           path="src/special-old"
+           revision="old-special-branch" />
+
+  <!-- Test the explicitly specified branching strategy for projects. -->
+  <project name="chromiumos/external-explicitly-pinned"
+           path="explicit-external"
+           revision="refs/heads/master">
+    <annotation name="branch-mode" value="pin" />
+  </project>
+
+  <project name="chromiumos/external-explicitly-unpinned"
+           path="explicit-unpinned"
+           revision="refs/heads/master">
+    <annotation name="branch-mode" value="tot" />
+  </project>
+
+  <project name="chromiumos/external-explicitly-pinned-sha1"
+           path="explicit-external-sha1"
+           revision="12345">
+    <annotation name="branch-mode" value="pin" />
+  </project>
+
+  <project name="chromiumos/external-explicitly-unpinned-sha1"
+           path="explicit-unpinned-sha1"
+           revision="12345">
+    <annotation name="branch-mode" value="tot" />
+  </project>
+
+  <!-- The next two projects test the legacy heuristic to determine branching
+       strategy for projects -->
+  <project name="faraway/external"
+           path="external"
+           revision="refs/heads/master" />
+
+  <project name="faraway/unpinned"
+           path="unpinned"
+           revision="refs/heads/master"
+           pin="False" />
+
+</manifest>"""
+
+CHROMITE_REVISION = "fb46d34d7cd4b9c167b74f494f2a99b68df50b18"
+SPECIAL_REVISION1 = "7bc42f093d644eeaf1c77fab60883881843c3c65"
+SPECIAL_REVISION2 = "6270eb3b4f78d9bffec77df50f374f5aae72b370"
+
+VERSIONED_MANIFEST_CONTENTS = """\
+<?xml version="1.0" encoding="UTF-8"?>
+<manifest revision="fe72f0912776fa4596505e236e39286fb217961b">
+  <remote fetch="https://chrome-internal.googlesource.com" name="chrome"/>
+  <remote fetch="https://chromium.googlesource.com/" name="chromium"/>
+  <remote fetch="https://chromium.googlesource.com" name="cros" \
+review="chromium-review.googlesource.com"/>
+  <remote fetch="https://chrome-internal.googlesource.com" name="cros-internal" \
+review="https://chrome-internal-review.googlesource.com"/>
+  <remote fetch="https://special.googlesource.com/" name="special" \
+review="https://special.googlesource.com/"/>
+
+  <default remote="cros" revision="refs/heads/master" sync-j="8"/>
+
+  <project name="chromeos/manifest-internal" path="manifest-internal" \
+remote="cros-internal" revision="fe72f0912776fa4596505e236e39286fb217961b" \
+upstream="refs/heads/master"/>
+  <project groups="minilayout,buildtools" name="chromiumos/chromite" \
+path="chromite" revision="%(chromite_revision)s" \
+upstream="refs/heads/master"/>
+  <project name="chromiumos/manifest" path="manifest" \
+revision="f24b69176b16bf9153f53883c0cc752df8e07d8b" \
+upstream="refs/heads/master"/>
+  <project groups="minilayout" name="chromiumos/overlays/chromiumos-overlay" \
+path="src/third_party/chromiumos-overlay" \
+revision="3ac713c65b5d18585e606a0ee740385c8ec83e44" \
+upstream="refs/heads/master"/>
+  <project name="chromiumos/special" path="src/special-new" \
+revision="%(special_revision1)s" \
+upstream="new-special-branch"/>
+  <project name="chromiumos/special" path="src/special-old" \
+revision="%(special_revision2)s" \
+upstream="old-special-branch"/>
+</manifest>""" % dict(chromite_revision=CHROMITE_REVISION,
+                      special_revision1=SPECIAL_REVISION1,
+                      special_revision2=SPECIAL_REVISION2)
+
+
+class BranchUtilStageTest(generic_stages_unittest.AbstractStageTestCase,
+                          cros_test_lib.LoggingTestCase):
+  """Tests for branch creation/deletion."""
+
+  BOT_ID = constants.BRANCH_UTIL_CONFIG
+  DEFAULT_VERSION = '111.0.0'
+  RELEASE_BRANCH_NAME = 'release-test-branch'
+
+  def _CreateVersionFile(self, version=None):
+    if version is None:
+      version = self.DEFAULT_VERSION
+    version_file = os.path.join(self.build_root, constants.VERSION_FILE)
+    manifest_version_unittest.VersionInfoTest.WriteFakeVersionFile(
+        version_file, version=version)
+
+  def setUp(self):
+    """Setup patchers for specified bot id."""
+    # Mock out methods as needed.
+    self.StartPatcher(parallel_unittest.ParallelMock())
+    self.StartPatcher(git_unittest.ManifestCheckoutMock())
+    self._CreateVersionFile()
+    self.rc_mock = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
+    self.rc_mock.SetDefaultCmdResult()
+
+    # We have a versioned manifest (generated by ManifestVersionSyncStage) and
+    # the regular, user-maintained manifests.
+    manifests = {
+        '.repo/manifest.xml': VERSIONED_MANIFEST_CONTENTS,
+        'manifest/default.xml': MANIFEST_CONTENTS,
+        'manifest-internal/official.xml': MANIFEST_CONTENTS,
+    }
+    for m_path, m_content in manifests.iteritems():
+      full_path = os.path.join(self.build_root, m_path)
+      osutils.SafeMakedirs(os.path.dirname(full_path))
+      osutils.WriteFile(full_path, m_content)
+
+    self.norm_name = git.NormalizeRef(self.RELEASE_BRANCH_NAME)
+
+  def _Prepare(self, bot_id=None, **kwargs):
+    if 'cmd_args' not in kwargs:
+      # Fill in cmd_args so we do not use the default, which specifies
+      # --branch.  That is incompatible with some branch-util flows.
+      kwargs['cmd_args'] = ['-r', self.build_root]
+    super(BranchUtilStageTest, self)._Prepare(bot_id, **kwargs)
+
+  def ConstructStage(self):
+    return branch_stages.BranchUtilStage(self._run)
+
+  def _VerifyPush(self, new_branch, rename_from=None, delete=False):
+    """Verify that |new_branch| has been created.
+
+    Args:
+      new_branch: The new remote branch to create (or delete).
+      rename_from: If set, |rename_from| is being renamed to |new_branch|.
+      delete: If set, |new_branch| is being deleted.
+    """
+    # Pushes all operate on remote branch refs.
+    new_branch = git.NormalizeRef(new_branch)
+
+    # Calculate source and destination revisions.
+    suffixes = ['', '-new-special-branch', '-old-special-branch']
+    if delete:
+      src_revs = [''] * len(suffixes)
+    elif rename_from is not None:
+      rename_from = git.NormalizeRef(rename_from)
+      rename_from_tracking = git.NormalizeRemoteRef('cros', rename_from)
+      src_revs = [
+          '%s%s' % (rename_from_tracking, suffix) for suffix in suffixes
+      ]
+    else:
+      src_revs = [CHROMITE_REVISION, SPECIAL_REVISION1, SPECIAL_REVISION2]
+    dest_revs = ['%s%s' % (new_branch, suffix) for suffix in suffixes]
+
+    # Verify pushes happened correctly.
+    for src_rev, dest_rev in zip(src_revs, dest_revs):
+      cmd = ['push', '%s:%s' % (src_rev, dest_rev)]
+      self.rc_mock.assertCommandContains(cmd)
+      if rename_from is not None:
+        cmd = ['push', ':%s' % (rename_from,)]
+        self.rc_mock.assertCommandContains(cmd)
+
+  def testRelease(self):
+    """Run-through of branch creation."""
+    self._Prepare(extra_cmd_args=['--branch-name', self.RELEASE_BRANCH_NAME,
+                                  '--version', self.DEFAULT_VERSION])
+    # Simulate branch not existing.
+    self.rc_mock.AddCmdResult(
+        partial_mock.ListRegex('git show-ref .*%s' % self.RELEASE_BRANCH_NAME),
+        returncode=1)
+    # SHA1 of HEAD for pinned branches.
+    self.rc_mock.AddCmdResult(
+        partial_mock.ListRegex('git rev-parse HEAD'),
+        output='12345')
+
+    before = manifest_version.VersionInfo.from_repo(self.build_root)
+    self.RunStage()
+    after = manifest_version.VersionInfo.from_repo(self.build_root)
+    # Verify Chrome version was bumped.
+    self.assertEquals(int(after.chrome_branch) - int(before.chrome_branch), 1)
+    self.assertEquals(int(after.build_number) - int(before.build_number), 1)
+
+    # Verify that manifests were branched properly. Notice that external,
+    # explicit-external are pinned to a SHA1, not an actual branch.
+    branch_names = {
+        'chromite': self.norm_name,
+        'external': '12345',
+        'explicit-external': '12345',
+        'explicit-external-sha1': '12345',
+        'src/special-new': self.norm_name + '-new-special-branch',
+        'src/special-old': self.norm_name + '-old-special-branch',
+        'unpinned': 'refs/heads/master',
+        'explicit-unpinned': 'refs/heads/master',
+        # If all we had was a sha1, there is no way to even guess what the
+        # "master" branch is, so leave it pinned.
+        'explicit-unpinned-sha1': '12345',
+    }
+    # Verify that we correctly transfer branch modes to the branched manifest.
+    branch_modes = {
+        'explicit-external': 'pin',
+        'explicit-external-sha1': 'pin',
+        'explicit-unpinned': 'tot',
+        'explicit-unpinned-sha1': 'tot',
+    }
+    for m in ['manifest/default.xml', 'manifest-internal/official.xml']:
+      manifest = git.Manifest(os.path.join(self.build_root, m))
+      for project_data in manifest.checkouts_by_path.itervalues():
+        path = project_data['path']
+        branch_name = branch_names[path]
+        msg = (
+            'Branch name for %s should be %r, but got %r' %
+            (path, branch_name, project_data['revision'])
+        )
+        self.assertEquals(project_data['revision'], branch_name, msg)
+        if path in branch_modes:
+          self.assertEquals(
+              project_data['branch-mode'],
+              branch_modes[path],
+              'Branch mode for %s should be %r, but got %r' % (
+                  path, branch_modes[path], project_data['branch-mode']))
+
+    self._VerifyPush(self.norm_name)
+
+  def testNonRelease(self):
+    """Non-release branch creation."""
+    self._Prepare(extra_cmd_args=['--branch-name', 'refs/heads/test-branch',
+                                  '--version', self.DEFAULT_VERSION])
+    # Simulate branch not existing.
+    self.rc_mock.AddCmdResult(
+        partial_mock.ListRegex('git show-ref .*test-branch'),
+        returncode=1)
+
+    before = manifest_version.VersionInfo.from_repo(self.build_root)
+    # Disable the new branch increment so that
+    # IncrementVersionOnDiskForSourceBranch detects we need to bump the version.
+    self.PatchObject(branch_stages.BranchUtilStage,
+                     '_IncrementVersionOnDiskForNewBranch', autospec=True)
+    self.RunStage()
+    after = manifest_version.VersionInfo.from_repo(self.build_root)
+    # Verify only branch number is bumped.
+    self.assertEquals(after.chrome_branch, before.chrome_branch)
+    self.assertEquals(int(after.build_number) - int(before.build_number), 1)
+    self._VerifyPush(self._run.options.branch_name)
+
+  def testDeletion(self):
+    """Branch deletion."""
+    self._Prepare(extra_cmd_args=['--branch-name', self.RELEASE_BRANCH_NAME,
+                                  '--delete-branch'])
+    self.rc_mock.AddCmdResult(
+        partial_mock.ListRegex('git show-ref .*release-test-branch.*'),
+        output='SomeSHA1Value'
+    )
+    self.RunStage()
+    self._VerifyPush(self.norm_name, delete=True)
+
+  def testRename(self):
+    """Branch rename."""
+    self._Prepare(extra_cmd_args=['--branch-name', self.RELEASE_BRANCH_NAME,
+                                  '--rename-to', 'refs/heads/release-rename'])
+    # Simulate source branch existing and destination branch not existing.
+    self.rc_mock.AddCmdResult(
+        partial_mock.ListRegex('git show-ref .*%s' % self.RELEASE_BRANCH_NAME),
+        output='SomeSHA1Value')
+    self.rc_mock.AddCmdResult(
+        partial_mock.ListRegex('git show-ref .*release-rename'),
+        returncode=1)
+    self.RunStage()
+    self._VerifyPush(self._run.options.rename_to, rename_from=self.norm_name)
+
+  def testDryRun(self):
+    """Verify we don't push to remote when --debug is set."""
+    # Simulate branch not existing.
+    self.rc_mock.AddCmdResult(
+        partial_mock.ListRegex('git show-ref .*%s' % self.RELEASE_BRANCH_NAME),
+        returncode=1)
+
+    self._Prepare(extra_cmd_args=['--branch-name', self.RELEASE_BRANCH_NAME,
+                                  '--debug',
+                                  '--version', self.DEFAULT_VERSION])
+    self.RunStage()
+    self.rc_mock.assertCommandContains(('push',), expected=False)
+
+  def _DetermineIncrForVersion(self, version):
+    version_info = manifest_version.VersionInfo(version)
+    stage_cls = branch_stages.BranchUtilStage
+    return stage_cls.DetermineBranchIncrParams(version_info)
+
+  def testDetermineIncrBranch(self):
+    """Verify branch increment detection."""
+    incr_type, _ = self._DetermineIncrForVersion(self.DEFAULT_VERSION)
+    self.assertEquals(incr_type, 'branch')
+
+  def testDetermineIncrPatch(self):
+    """Verify patch increment detection."""
+    incr_type, _ = self._DetermineIncrForVersion('111.1.0')
+    self.assertEquals(incr_type, 'patch')
+
+  def testDetermineBranchIncrError(self):
+    """Detect unbranchable version."""
+    self.assertRaises(branch_stages.BranchError, self._DetermineIncrForVersion,
+                      '111.1.1')
+
+  def _SimulateIncrementFailure(self):
+    """Simulates a git push failure during source branch increment."""
+    self._Prepare(extra_cmd_args=['--branch-name', self.RELEASE_BRANCH_NAME,
+                                  '--version', self.DEFAULT_VERSION])
+    overlay_dir = os.path.join(
+        self.build_root, constants.CHROMIUMOS_OVERLAY_DIR)
+    self.rc_mock.AddCmdResult(partial_mock.In('push'), returncode=128)
+    stage = self.ConstructStage()
+    args = (overlay_dir, 'gerrit', 'refs/heads/master')
+    # pylint: disable=protected-access
+    stage._IncrementVersionOnDiskForSourceBranch(*args)
+
+  def testSourceIncrementWarning(self):
+    """Test the warning case for incrementing failure."""
+    # Since all git commands are mocked out, the _FetchAndCheckoutTo function
+    # does nothing, and leaves the chromeos_version.sh file in the bumped state,
+    # so it looks like TOT version was indeed bumped by another bot.
+    with cros_test_lib.LoggingCapturer() as logger:
+      self._SimulateIncrementFailure()
+      self.AssertLogsContain(logger, 'bumped by another')
+
+  def testSourceIncrementFailure(self):
+    """Test the failure case for incrementing failure."""
+    def FetchAndCheckoutTo(*_args, **_kwargs):
+      self._CreateVersionFile()
+
+    # Simulate a git checkout of TOT.
+    self.PatchObject(branch_stages.BranchUtilStage, '_FetchAndCheckoutTo',
+                     side_effect=FetchAndCheckoutTo, autospec=True)
+    self.assertRaises(cros_build_lib.RunCommandError,
+                      self._SimulateIncrementFailure)
diff --git a/cbuildbot/stages/build_stages.py b/cbuildbot/stages/build_stages.py
new file mode 100644
index 0000000..44b721e
--- /dev/null
+++ b/cbuildbot/stages/build_stages.py
@@ -0,0 +1,435 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the build stages."""
+
+from __future__ import print_function
+
+import functools
+import glob
+import os
+
+from chromite.cbuildbot import chroot_lib
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import repository
+from chromite.cbuildbot.stages import generic_stages
+from chromite.cbuildbot.stages import test_stages
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import portage_util
+
+
+class CleanUpStage(generic_stages.BuilderStage):
+  """Stages that cleans up build artifacts from previous runs.
+
+  This stage cleans up previous KVM state, temporary git commits,
+  clobbers, and wipes tmp inside the chroot.
+  """
+
+  option_name = 'clean'
+
+  def _CleanChroot(self):
+    """Scrub transient state inside an existing chroot.
+
+    Removes the Chrome keywords file, the chroot's tmp directory, and the
+    incremental portage build caches (host-level and per-board).
+    """
+    commands.CleanupChromeKeywordsFile(self._boards,
+                                       self._build_root)
+    chroot_dir = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR)
+    chroot_tmpdir = os.path.join(chroot_dir, 'tmp')
+    if os.path.exists(chroot_tmpdir):
+      osutils.RmDir(chroot_tmpdir, ignore_missing=True, sudo=True)
+      # Recreate tmp with the sticky, world-writable mode it normally has.
+      cros_build_lib.SudoRunCommand(['mkdir', '--mode', '1777', chroot_tmpdir],
+                                    print_cmd=False)
+
+    # Clear out the incremental build cache between runs.
+    cache_dir = 'var/cache/portage'
+    d = os.path.join(chroot_dir, cache_dir)
+    osutils.RmDir(d, ignore_missing=True, sudo=True)
+    for board in self._boards:
+      d = os.path.join(chroot_dir, 'build', board, cache_dir)
+      osutils.RmDir(d, ignore_missing=True, sudo=True)
+
+  def _DeleteChroot(self):
+    """Delete the chroot directory outright, without invoking cros_sdk."""
+    chroot = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR)
+    if os.path.exists(chroot):
+      # At this stage, it's not safe to run the cros_sdk inside the buildroot
+      # itself because we haven't sync'd yet, and the version of the chromite
+      # in there might be broken. Since we've already unmounted everything in
+      # there, we can just remove it using rm -rf.
+      osutils.RmDir(chroot, ignore_missing=True, sudo=True)
+
+  def _DeleteArchivedTrybotImages(self):
+    """Clear all previous archive images to save space."""
+    # Clean both the trybot and the non-trybot archive roots.
+    for trybot in (False, True):
+      archive_root = self._run.GetArchive().GetLocalArchiveRoot(trybot=trybot)
+      osutils.RmDir(archive_root, ignore_missing=True)
+
+  def _DeleteArchivedPerfResults(self):
+    """Clear any previously stashed perf results from hw testing."""
+    for result in glob.glob(os.path.join(
+        self._run.options.log_dir,
+        '*.%s' % test_stages.HWTestStage.PERF_RESULTS_EXTENSION)):
+      os.remove(result)
+
+  def _DeleteChromeBuildOutput(self):
+    """Delete Chrome build output directories (src/out_*) under chrome_root."""
+    chrome_src = os.path.join(self._run.options.chrome_root, 'src')
+    for out_dir in glob.glob(os.path.join(chrome_src, 'out_*')):
+      osutils.RmDir(out_dir)
+
+  def _DeleteAutotestSitePackages(self):
+    """Clears any previously downloaded site-packages."""
+    site_packages_dir = os.path.join(self._build_root, 'src', 'third_party',
+                                     'autotest', 'files', 'site-packages')
+    # Note that these shouldn't be recreated but might be around from stale
+    # builders.
+    osutils.RmDir(site_packages_dir, ignore_missing=True)
+
+  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
+  def PerformStage(self):
+    """Clean the buildroot: full clobber, or parallel incremental scrub.
+
+    If no usable manifest is found (or --clobber was requested), the chroot
+    and buildroot are wiped.  Otherwise a set of independent cleanup tasks is
+    run in parallel.
+    """
+    if (not (self._run.options.buildbot or self._run.options.remote_trybot)
+        and self._run.options.clobber):
+      if not commands.ValidateClobber(self._build_root):
+        cros_build_lib.Die("--clobber in local mode must be approved.")
+
+    # If we can't get a manifest out of it, then it's not usable and must be
+    # clobbered.
+    manifest = None
+    if not self._run.options.clobber:
+      try:
+        manifest = git.ManifestCheckout.Cached(self._build_root, search=False)
+      except (KeyboardInterrupt, MemoryError, SystemExit):
+        raise
+      except Exception as e:
+        # Either there is no repo there, or the manifest isn't usable.  If the
+        # directory exists, log the exception for debugging reasons.  Either
+        # way, the checkout needs to be wiped since it's in an unknown
+        # state.
+        if os.path.exists(self._build_root):
+          logging.warning("ManifestCheckout at %s is unusable: %s",
+                          self._build_root, e)
+
+    # Clean mount points first to be safe about deleting.
+    commands.CleanUpMountPoints(self._build_root)
+
+    if manifest is None:
+      self._DeleteChroot()
+      repository.ClearBuildRoot(self._build_root,
+                                self._run.options.preserve_paths)
+    else:
+      tasks = [functools.partial(commands.BuildRootGitCleanup,
+                                 self._build_root),
+               functools.partial(commands.WipeOldOutput, self._build_root),
+               self._DeleteArchivedTrybotImages,
+               self._DeleteArchivedPerfResults,
+               self._DeleteAutotestSitePackages]
+      if self._run.options.chrome_root:
+        tasks.append(self._DeleteChromeBuildOutput)
+      # Replace the chroot when the config asks for it and we are building;
+      # otherwise just scrub its transient state.
+      if self._run.config.chroot_replace and self._run.options.build:
+        tasks.append(self._DeleteChroot)
+      else:
+        tasks.append(self._CleanChroot)
+      parallel.RunParallelSteps(tasks)
+
+
+class InitSDKStage(generic_stages.BuilderStage):
+  """Stage that is responsible for initializing the SDK."""
+
+  option_name = 'build'
+
+  def __init__(self, builder_run, chroot_replace=False, **kwargs):
+    """InitSDK constructor.
+
+    Args:
+      builder_run: Builder run instance for this run.
+      chroot_replace: If True, force the chroot to be replaced.
+    """
+    super(InitSDKStage, self).__init__(builder_run, **kwargs)
+    self.force_chroot_replace = chroot_replace
+
+  def PerformStage(self):
+    """Upgrade or create the chroot and report the version transition."""
+    chroot_path = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR)
+    replace = self._run.config.chroot_replace or self.force_chroot_replace
+    # pre_ver/post_ver track the chroot version before/after this stage so
+    # the buildbot step text can show the transition (e.g. '11->12').
+    pre_ver = post_ver = None
+    if os.path.isdir(self._build_root) and not replace:
+      try:
+        pre_ver = cros_build_lib.GetChrootVersion(chroot=chroot_path)
+        commands.RunChrootUpgradeHooks(
+            self._build_root, chrome_root=self._run.options.chrome_root,
+            extra_env=self._portage_extra_env)
+      except failures_lib.BuildScriptFailure:
+        # NOTE(review): despite this message, an existing chroot is only
+        # rebuilt below when chroot_path is missing or replace is set —
+        # confirm this is the intended behavior.
+        logging.PrintBuildbotStepText('Replacing broken chroot')
+        logging.PrintBuildbotStepWarnings()
+      else:
+        # Clear the chroot manifest version as we are in the middle of building.
+        chroot_manager = chroot_lib.ChrootManager(self._build_root)
+        chroot_manager.ClearChrootVersion()
+
+    if not os.path.isdir(chroot_path) or replace:
+      use_sdk = (self._run.config.use_sdk and not self._run.options.nosdk)
+      # A fresh chroot has no meaningful 'before' version to report.
+      pre_ver = None
+      commands.MakeChroot(
+          buildroot=self._build_root,
+          replace=replace,
+          use_sdk=use_sdk,
+          chrome_root=self._run.options.chrome_root,
+          extra_env=self._portage_extra_env)
+
+    post_ver = cros_build_lib.GetChrootVersion(chroot=chroot_path)
+    if pre_ver is not None and pre_ver != post_ver:
+      logging.PrintBuildbotStepText('%s->%s' % (pre_ver, post_ver))
+    else:
+      logging.PrintBuildbotStepText(post_ver)
+
+    commands.SetSharedUserPassword(
+        self._build_root,
+        password=self._run.config.shared_user_password)
+
+
+class SetupBoardStage(generic_stages.BoardSpecificBuilderStage, InitSDKStage):
+  """Stage that is responsible for building host pkgs and setting up a board."""
+
+  option_name = 'build'
+
+  def PerformStage(self):
+    # We need to run chroot updates on most builders because they uprev after
+    # the InitSDK stage. For the SDK builder, we can skip updates because uprev
+    # is run prior to InitSDK. This is not just an optimization: It helps
+    # workaround http://crbug.com/225509
+    if self._run.config.build_type != constants.CHROOT_BUILDER_TYPE:
+      usepkg_toolchain = (self._run.config.usepkg_toolchain and
+                          not self._latest_toolchain)
+      commands.UpdateChroot(
+          self._build_root, toolchain_boards=[self._current_board],
+          usepkg=usepkg_toolchain)
+
+    # Only update the board if we need to do so.
+    chroot_path = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR)
+    board_path = os.path.join(chroot_path, 'build', self._current_board)
+    if not os.path.isdir(board_path) or self._run.config.board_replace:
+      usepkg = self._run.config.usepkg_build_packages
+      commands.SetupBoard(
+          self._build_root, board=self._current_board, usepkg=usepkg,
+          chrome_binhost_only=self._run.config.chrome_binhost_only,
+          force=self._run.config.board_replace,
+          extra_env=self._portage_extra_env, chroot_upgrade=False,
+          profile=self._run.options.profile or self._run.config.profile)
+
+
+class BuildPackagesStage(generic_stages.BoardSpecificBuilderStage,
+                         generic_stages.ArchivingStageMixin):
+  """Build Chromium OS packages."""
+
+  option_name = 'build'
+  def __init__(self, builder_run, board, suffix=None, afdo_generate_min=False,
+               afdo_use=False, update_metadata=False, **kwargs):
+    """BuildPackagesStage constructor.
+
+    Args:
+      builder_run: Builder run instance for this run.
+      board: Board to build packages for.
+      suffix: Optional suffix appended to the stage name.
+      afdo_generate_min: If True, this build generates an AFDO profile.
+      afdo_use: If True, build with the AFDO USE flag enabled.
+      update_metadata: If True, record firmware versions in the run metadata
+        (and CIDB) after building.
+    """
+    if afdo_use:
+      suffix = self.UpdateSuffix(constants.USE_AFDO_USE, suffix)
+    super(BuildPackagesStage, self).__init__(builder_run, board, suffix=suffix,
+                                             **kwargs)
+    self._afdo_generate_min = afdo_generate_min
+    self._update_metadata = update_metadata
+    # Generating an AFDO profile and consuming one are mutually exclusive.
+    assert not afdo_generate_min or not afdo_use
+
+    useflags = self._portage_extra_env.get('USE', '').split()
+    if afdo_use:
+      useflags.append(constants.USE_AFDO_USE)
+
+    if useflags:
+      self._portage_extra_env['USE'] = ' '.join(useflags)
+
+  def VerifyChromeBinpkg(self, packages):
+    """Verify a Chrome binary package is available when appropriate.
+
+    Args:
+      packages: List of packages that will be built.
+    """
+    # Sanity check: If we didn't check out Chrome (and we're running on ToT),
+    # we should be building Chrome from a binary package.
+    if (not self._run.options.managed_chrome and
+        self._run.manifest_branch == 'master'):
+      commands.VerifyBinpkg(self._build_root,
+                            self._current_board,
+                            constants.CHROME_CP,
+                            packages,
+                            extra_env=self._portage_extra_env)
+
+  def GetListOfPackagesToBuild(self):
+    """Returns a list of packages to build."""
+    if self._run.config.packages:
+      # If the list of packages is set in the config, use it.
+      return self._run.config.packages
+
+    # TODO: the logic below is duplicated from the build_packages
+    # script. Once we switch to `cros build`, we should consolidate
+    # the logic in a shared location.
+    packages = ['virtual/target-os']
+    # Build Dev packages by default.
+    packages += ['virtual/target-os-dev']
+    # Build test packages by default.
+    packages += ['virtual/target-os-test']
+    # Build factory packages if requested by config.
+    if self._run.config.factory:
+      packages += ['chromeos-base/chromeos-installshim',
+                   'chromeos-base/chromeos-factory',
+                   'chromeos-base/chromeos-hwid',
+                   'chromeos-base/autotest-factory-install']
+
+    if self._run.ShouldBuildAutotest():
+      packages += ['chromeos-base/autotest-all']
+
+    return packages
+
+  def RecordPackagesUnderTest(self, packages_to_build):
+    """Records all packages that may affect the board to BuilderRun.
+
+    Args:
+      packages_to_build: List of packages that will be built for the board.
+    """
+    deps = dict()
+    # Include packages that are built in chroot because they can
+    # affect any board.
+    packages = ['virtual/target-sdk']
+    # Include chromite because we are running cbuildbot.
+    packages += ['chromeos-base/chromite']
+    try:
+      deps.update(commands.ExtractDependencies(self._build_root, packages))
+
+      # Include packages that will be built as part of the board.
+      deps.update(commands.ExtractDependencies(self._build_root,
+                                               packages_to_build,
+                                               board=self._current_board))
+    except Exception as e:
+      # Dependency extraction may fail due to bad ebuild changes. Let
+      # the build continue because we have logic to triage build
+      # packages failures separately. Note that we only categorize CLs
+      # on the package-level if dependencies are extracted
+      # successfully, so it is safe to ignore the exception.
+      logging.warning('Unable to gather packages under test: %s', e)
+    else:
+      logging.info('Recording packages under test')
+      self.board_runattrs.SetParallel('packages_under_test', set(deps.keys()))
+
+  def PerformStage(self):
+    """Build the packages, then optionally record firmware metadata."""
+    # If we have rietveld patches, always compile Chrome from source.
+    noworkon = not self._run.options.rietveld_patches
+    packages = self.GetListOfPackagesToBuild()
+    self.VerifyChromeBinpkg(packages)
+    self.RecordPackagesUnderTest(packages)
+
+    commands.Build(self._build_root,
+                   self._current_board,
+                   build_autotest=self._run.ShouldBuildAutotest(),
+                   usepkg=self._run.config.usepkg_build_packages,
+                   chrome_binhost_only=self._run.config.chrome_binhost_only,
+                   packages=packages,
+                   skip_chroot_upgrade=True,
+                   chrome_root=self._run.options.chrome_root,
+                   noworkon=noworkon,
+                   extra_env=self._portage_extra_env)
+
+    if self._update_metadata:
+      # TODO: Consider moving this into its own stage if there are other similar
+      # things to do after build_packages.
+
+      # Extract firmware version information from the newly created updater.
+      main, ec = commands.GetFirmwareVersions(self._build_root,
+                                              self._current_board)
+      update_dict = {'main-firmware-version': main, 'ec-firmware-version': ec}
+      self._run.attrs.metadata.UpdateBoardDictWithDict(
+          self._current_board, update_dict)
+
+      # Write board metadata update to cidb
+      build_id, db = self._run.GetCIDBHandle()
+      if db:
+        db.UpdateBoardPerBuildMetadata(build_id, self._current_board,
+                                       update_dict)
+
+
+class BuildImageStage(BuildPackagesStage):
+  """Build standard Chromium OS images."""
+
+  option_name = 'build'
+  config_name = 'images'
+
+  def _BuildImages(self):
+    """Build the configured disk images, then VM image and au zip in parallel.
+
+    Also repoints the cbuildbot image symlink at the freshly built image and
+    signals 'images_generated' so dependent stages can proceed.
+    """
+    # We only build base, dev, and test images from this stage.
+    if self._afdo_generate_min:
+      images_can_build = set(['test'])
+    else:
+      images_can_build = set(['base', 'dev', 'test'])
+    images_to_build = set(self._run.config.images).intersection(
+        images_can_build)
+
+    version = self._run.attrs.release_tag
+    disk_layout = self._run.config.disk_layout
+    # Tag AFDO-generation builds so their artifacts are distinguishable.
+    if self._afdo_generate_min and version:
+      version = '%s-afdo-generate' % version
+
+    rootfs_verification = self._run.config.rootfs_verification
+    commands.BuildImage(self._build_root,
+                        self._current_board,
+                        sorted(images_to_build),
+                        rootfs_verification=rootfs_verification,
+                        version=version,
+                        disk_layout=disk_layout,
+                        extra_env=self._portage_extra_env)
+
+    # Update link to latest image.
+    latest_image = os.readlink(self.GetImageDirSymlink('latest'))
+    cbuildbot_image_link = self.GetImageDirSymlink()
+    if os.path.lexists(cbuildbot_image_link):
+      os.remove(cbuildbot_image_link)
+
+    os.symlink(latest_image, cbuildbot_image_link)
+
+    self.board_runattrs.SetParallel('images_generated', True)
+
+    parallel.RunParallelSteps(
+        [self._BuildVMImage, lambda: self._GenerateAuZip(cbuildbot_image_link)])
+
+  def _BuildVMImage(self):
+    """Build a VM image for testing, unless generating a minimal AFDO build."""
+    if self._run.config.vm_tests and not self._afdo_generate_min:
+      commands.BuildVMImageForTesting(
+          self._build_root,
+          self._current_board,
+          extra_env=self._portage_extra_env)
+
+  def _GenerateAuZip(self, image_dir):
+    """Create au-generator.zip."""
+    if not self._afdo_generate_min:
+      commands.GenerateAuZip(self._build_root,
+                             image_dir,
+                             extra_env=self._portage_extra_env)
+
+  def _HandleStageException(self, exc_info):
+    """Tell other stages to not wait on us if we die for some reason."""
+    self.board_runattrs.SetParallelDefault('images_generated', False)
+    return super(BuildImageStage, self)._HandleStageException(exc_info)
+
+  def PerformStage(self):
+    """Run the image build."""
+    self._BuildImages()
+
+
+class UprevStage(generic_stages.BuilderStage):
+  """Uprevs Chromium OS packages that the builder intends to validate."""
+
+  config_name = 'uprev'
+  option_name = 'uprev'
+
+  def __init__(self, builder_run, boards=None, **kwargs):
+    super(UprevStage, self).__init__(builder_run, **kwargs)
+    if boards is not None:
+      self._boards = boards
+
+  def PerformStage(self):
+    # Perform other uprevs.
+    overlays, _ = self._ExtractOverlays()
+    commands.UprevPackages(self._build_root,
+                           self._boards,
+                           overlays)
+
+
+class RegenPortageCacheStage(generic_stages.BuilderStage):
+  """Regenerates the Portage ebuild cache."""
+
+  # We only need to run this if we're pushing at least one overlay.
+  config_name = 'push_overlays'
+
+  def PerformStage(self):
+    _, push_overlays = self._ExtractOverlays()
+    inputs = [[overlay] for overlay in push_overlays if os.path.isdir(overlay)]
+    parallel.RunTasksInProcessPool(portage_util.RegenCache, inputs)
diff --git a/cbuildbot/stages/build_stages_unittest b/cbuildbot/stages/build_stages_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/stages/build_stages_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/stages/build_stages_unittest.py b/cbuildbot/stages/build_stages_unittest.py
new file mode 100644
index 0000000..2f1c4a1
--- /dev/null
+++ b/cbuildbot/stages/build_stages_unittest.py
@@ -0,0 +1,342 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for build stages."""
+
+from __future__ import print_function
+
+import contextlib
+import os
+
+from chromite.cbuildbot import cbuildbot_unittest
+from chromite.cbuildbot import chromeos_config
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot.stages import build_stages
+from chromite.cbuildbot.stages import generic_stages_unittest
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import parallel
+from chromite.lib import parallel_unittest
+from chromite.lib import partial_mock
+from chromite.lib import path_util
+from chromite.lib import portage_util
+
+from chromite.cbuildbot.stages.generic_stages_unittest import patch
+from chromite.cbuildbot.stages.generic_stages_unittest import patches
+
+
+# pylint: disable=too-many-ancestors
+
+
+class InitSDKTest(generic_stages_unittest.RunCommandAbstractStageTestCase):
+  """Test building the SDK"""
+
+  # pylint: disable=protected-access
+
+  def setUp(self):
+    # Pretend every chroot reports version '12' so the stage's version
+    # reporting code runs without a real chroot.
+    self.PatchObject(cros_build_lib, 'GetChrootVersion', return_value='12')
+    self.cros_sdk = os.path.join(self.tempdir, 'buildroot',
+                                 constants.CHROMITE_BIN_SUBDIR, 'cros_sdk')
+
+  def ConstructStage(self):
+    return build_stages.InitSDKStage(self._run)
+
+  def testFullBuildWithExistingChroot(self):
+    """Tests whether we create chroots for full builds."""
+    self._PrepareFull()
+    self._Run(dir_exists=True)
+    self.assertCommandContains([self.cros_sdk])
+
+  def testBinBuildWithMissingChroot(self):
+    """Tests whether we create chroots when needed."""
+    self._PrepareBin()
+    # Do not force chroot replacement in build config.
+    self._run._config.chroot_replace = False
+    self._Run(dir_exists=False)
+    self.assertCommandContains([self.cros_sdk])
+
+  def testFullBuildWithMissingChroot(self):
+    """Tests whether we create chroots when needed."""
+    self._PrepareFull()
+    self._Run(dir_exists=True)
+    self.assertCommandContains([self.cros_sdk])
+
+  def testFullBuildWithNoSDK(self):
+    """Tests whether the --nosdk option works."""
+    self._PrepareFull(extra_cmd_args=['--nosdk'])
+    self._Run(dir_exists=False)
+    self.assertCommandContains([self.cros_sdk, '--bootstrap'])
+
+  def testBinBuildWithExistingChroot(self):
+    """Tests that an existing chroot is upgraded in place, not recreated."""
+    self._PrepareFull(extra_cmd_args=['--nosdk'])
+    # Do not force chroot replacement in build config.
+    self._run._config.chroot_replace = False
+    self._run._config.separate_debug_symbols = False
+    self._run.config.useflags = ['foo']
+    self._Run(dir_exists=True)
+    # cros_sdk must not run; the upgrade hooks should run instead, with the
+    # configured USE flags passed through.
+    self.assertCommandContains([self.cros_sdk], expected=False)
+    self.assertCommandContains(['./run_chroot_version_hooks'],
+                               enter_chroot=True, extra_env={'USE': 'foo'})
+
+
+class SetupBoardTest(generic_stages_unittest.RunCommandAbstractStageTestCase):
+  """Test building the board"""
+
+  def ConstructStage(self):
+    return build_stages.SetupBoardStage(self._run, self._current_board)
+
+  def _RunFull(self, dir_exists=False):
+    """Helper for testing a full builder."""
+    self._Run(dir_exists)
+    self.assertCommandContains(['./update_chroot'])
+    # Full builders only run setup_board when the board dir is missing.
+    cmd = ['./setup_board', '--board=%s' % self._current_board, '--nousepkg']
+    self.assertCommandContains(cmd, expected=not dir_exists)
+    cmd = ['./setup_board', '--skip_chroot_upgrade']
+    self.assertCommandContains(cmd)
+
+  def testFullBuildWithProfile(self):
+    """Tests whether full builds add profile flag when requested."""
+    self._PrepareFull(extra_config={'profile': 'foo'})
+    self._RunFull(dir_exists=False)
+    self.assertCommandContains(['./setup_board', '--profile=foo'])
+
+  def testFullBuildWithOverriddenProfile(self):
+    """Tests whether full builds add overridden profile flag when requested."""
+    self._PrepareFull(extra_cmd_args=['--profile', 'smock'])
+    self._RunFull(dir_exists=False)
+    self.assertCommandContains(['./setup_board', '--profile=smock'])
+
+  def _RunBin(self, dir_exists):
+    """Helper for testing a binary builder."""
+    self._Run(dir_exists)
+    # update_chroot gets --nousepkg when binary toolchain packages are
+    # disabled or the latest toolchain was requested.
+    update_nousepkg = (not self._run.config.usepkg_toolchain or
+                       self._run.options.latest_toolchain)
+    self.assertCommandContains(['./update_chroot', '--nousepkg'],
+                               expected=update_nousepkg)
+    # setup_board runs only when the board dir is missing or replacement
+    # was requested.
+    run_setup_board = not dir_exists or self._run.config.board_replace
+    self.assertCommandContains(['./setup_board'], expected=run_setup_board)
+    cmd = ['./setup_board', '--skip_chroot_upgrade']
+    self.assertCommandContains(cmd, expected=run_setup_board)
+    cmd = ['./setup_board', '--nousepkg']
+    self.assertCommandContains(
+        cmd,
+        expected=run_setup_board and not self._run.config.usepkg_build_packages)
+
+  def testBinBuildWithBoard(self):
+    """Tests whether we don't create the board when it's there."""
+    self._PrepareBin()
+    self._RunBin(dir_exists=True)
+
+  def testBinBuildWithBoardReplace(self):
+    """Tests whether we don't create the board when it's there."""
+    self._PrepareBin()
+    self._run.config.board_replace = True
+    self._RunBin(dir_exists=True)
+
+  def testBinBuildWithMissingBoard(self):
+    """Tests whether we create the board when it's missing."""
+    self._PrepareBin()
+    self._RunBin(dir_exists=False)
+
+  def testBinBuildWithLatestToolchain(self):
+    """Tests whether we use --nousepkg for creating the board."""
+    self._PrepareBin()
+    self._run.options.latest_toolchain = True
+    self._RunBin(dir_exists=False)
+
+  def testBinBuildWithLatestToolchainAndDirExists(self):
+    """Tests whether we use --nousepkg for creating the board."""
+    self._PrepareBin()
+    self._run.options.latest_toolchain = True
+    self._RunBin(dir_exists=True)
+
+  def testBinBuildWithNoToolchainPackages(self):
+    """Tests whether we use --nousepkg for creating the board."""
+    self._PrepareBin()
+    self._run.config.usepkg_toolchain = False
+    self._RunBin(dir_exists=False)
+
+  def testSDKBuild(self):
+    """Tests whether we use --skip_chroot_upgrade for SDK builds."""
+    extra_config = {'build_type': constants.CHROOT_BUILDER_TYPE}
+    self._PrepareFull(extra_config=extra_config)
+    self._Run(dir_exists=False)
+    # SDK builders skip update_chroot entirely (uprev runs before InitSDK).
+    self.assertCommandContains(['./update_chroot'], expected=False)
+    self.assertCommandContains(['./setup_board', '--skip_chroot_upgrade'])
+
+
+class UprevStageTest(generic_stages_unittest.AbstractStageTestCase):
+  """Tests for the UprevStage class."""
+
+  def setUp(self):
+    self.uprev_mock = self.PatchObject(commands, 'UprevPackages')
+
+    self._Prepare()
+
+  def ConstructStage(self):
+    return build_stages.UprevStage(self._run)
+
+  def testBuildRev(self):
+    """Uprevving the build without uprevving chrome."""
+    self._run.config['uprev'] = True
+    self.RunStage()
+    self.assertTrue(self.uprev_mock.called)
+
+  def testNoRev(self):
+    """No paths are enabled."""
+    self._run.config['uprev'] = False
+    self.RunStage()
+    self.assertFalse(self.uprev_mock.called)
+
+
+class AllConfigsTestCase(generic_stages_unittest.AbstractStageTestCase,
+                         cros_test_lib.OutputTestCase):
+  """Test case for testing against all bot configs."""
+
+  def ConstructStage(self):
+    """Bypass lint warning"""
+    generic_stages_unittest.AbstractStageTestCase.ConstructStage(self)
+
+  @contextlib.contextmanager
+  def RunStageWithConfig(self, mock_configurator=None):
+    """Run the given config
+
+    Args:
+      mock_configurator: Optional callable given the RunCommandMock so the
+        caller can add command results before the stage runs.
+
+    Yields:
+      The RunCommandMock, so callers can assert on the commands issued.
+    """
+    try:
+      with cros_build_lib_unittest.RunCommandMock() as rc:
+        rc.SetDefaultCmdResult()
+        if mock_configurator:
+          mock_configurator(rc)
+        with self.OutputCapturer():
+          with cros_test_lib.LoggingCapturer():
+            self.RunStage()
+
+        yield rc
+
+    except AssertionError as ex:
+      # Prefix the failure with the bot id so failures are attributable
+      # when iterating over many configs.
+      msg = '%s failed the following test:\n%s' % (self._bot_id, ex)
+      raise AssertionError(msg)
+
+  def RunAllConfigs(self, task, skip_missing=False, site_config=None):
+    """Run |task| against all major configurations
+
+    Args:
+      task: Callable taking a bot_id; run in a background task runner.
+      skip_missing: If True, skip configs whose boards have no local overlay.
+      site_config: Optional config to iterate; defaults to chromeos_config.
+    """
+    with parallel.BackgroundTaskRunner(task) as queue:
+      # Loop through all major configuration types and pick one from each.
+      for bot_type in config_lib.CONFIG_TYPE_DUMP_ORDER:
+        for bot_id in site_config:
+          if bot_id.endswith(bot_type):
+            # Skip any config without a board, since those configs do not
+            # build packages.
+            cfg = site_config[bot_id]
+            if cfg.boards:
+              # Skip boards w/out a local overlay.  Like when running a
+              # public manifest and testing private-only boards.
+              if skip_missing:
+                try:
+                  for b in cfg.boards:
+                    portage_util.FindPrimaryOverlay(constants.BOTH_OVERLAYS, b)
+                except portage_util.MissingOverlayException:
+                  continue
+
+              queue.put([bot_id])
+              # One matching config per bot_type is enough.
+              break
+
+
+class BuildPackagesStageTest(AllConfigsTestCase,
+                             cbuildbot_unittest.SimpleBuilderTestCase):
+  """Tests BuildPackagesStage."""
+
+  def setUp(self):
+    self._release_tag = None
+    # Dependency extraction shells out; stub it to keep the stage hermetic.
+    self.PatchObject(commands, 'ExtractDependencies', return_value=dict())
+
+  def ConstructStage(self):
+    self._run.attrs.release_tag = self._release_tag
+    return build_stages.BuildPackagesStage(self._run, self._current_board)
+
+  def RunTestsWithBotId(self, bot_id, options_tests=True):
+    """Test with the config for the specified bot_id."""
+    self._Prepare(bot_id)
+    self._run.options.tests = options_tests
+
+    with self.RunStageWithConfig() as rc:
+      cfg = self._run.config
+      rc.assertCommandContains(['./build_packages'])
+      rc.assertCommandContains(['./build_packages', '--skip_chroot_upgrade'])
+      rc.assertCommandContains(['./build_packages', '--nousepkg'],
+                               expected=not cfg['usepkg_build_packages'])
+      build_tests = cfg['build_tests'] and self._run.options.tests
+      rc.assertCommandContains(['./build_packages', '--nowithautotest'],
+                               expected=not build_tests)
+
+  def testAllConfigs(self):
+    """Test all major configurations"""
+    self.RunAllConfigs(self.RunTestsWithBotId)
+
+  def testNoTests(self):
+    """Test that self.options.tests = False works."""
+    self.RunTestsWithBotId('x86-generic-paladin', options_tests=False)
+
+  def testIgnoreExtractDependenciesError(self):
+    """Ignore errors when failing to extract dependencies."""
+    self.PatchObject(commands, 'ExtractDependencies',
+                     side_effect=Exception('unmet dependency'))
+    self.RunTestsWithBotId('x86-generic-paladin')
+
+
+class BuildImageStageMock(partial_mock.PartialMock):
+  """Partial mock for BuildImageStage."""
+
+  TARGET = 'chromite.cbuildbot.stages.build_stages.BuildImageStage'
+  ATTRS = ('_BuildImages', '_GenerateAuZip')
+
+  def _BuildImages(self, *args, **kwargs):
+    """Run the real _BuildImages with os.symlink/os.readlink stubbed out."""
+    with patches(
+        patch(os, 'symlink'),
+        patch(os, 'readlink', return_value='foo.txt')):
+      # self.backup holds the original (unmocked) implementation.
+      self.backup['_BuildImages'](*args, **kwargs)
+
+  def _GenerateAuZip(self, *args, **kwargs):
+    """Run the real _GenerateAuZip with a fixed fake chroot path."""
+    with patch(path_util, 'ToChrootPath',
+               return_value='/chroot/path'):
+      self.backup['_GenerateAuZip'](*args, **kwargs)
+
+
+class BuildImageStageTest(BuildPackagesStageTest):
+  """Tests BuildImageStage."""
+
+  def setUp(self):
+    self.StartPatcher(BuildImageStageMock())
+
+  def ConstructStage(self):
+    return build_stages.BuildImageStage(self._run, self._current_board)
+
+  def RunTestsWithReleaseConfig(self, release_tag):
+    """Run the stage and assert on the image/VM/au-zip commands issued.
+
+    Args:
+      release_tag: Release tag to build with (may be None/empty).
+    """
+    self._release_tag = release_tag
+
+    with parallel_unittest.ParallelMock():
+      with self.RunStageWithConfig() as rc:
+        cfg = self._run.config
+        cmd = ['./build_image', '--version=%s' % (self._release_tag or '')]
+        rc.assertCommandContains(cmd, expected=cfg['images'])
+        rc.assertCommandContains(['./image_to_vm.sh'],
+                                 expected=cfg['vm_tests'])
+        cmd = ['./build_library/generate_au_zip.py', '-o', '/chroot/path']
+        rc.assertCommandContains(cmd, expected=cfg['images'])
+
+  def RunTestsWithBotId(self, bot_id, options_tests=True):
+    """Test with the config for the specified bot_id."""
+    release_tag = '0.0.1'
+    self._Prepare(bot_id)
+    self._run.options.tests = options_tests
+    self._run.attrs.release_tag = release_tag
+
+    task = self.RunTestsWithReleaseConfig
+    # TODO: This test is broken atm with tag=None.
+    steps = [lambda tag=x: task(tag) for x in (release_tag,)]
+    parallel.RunParallelSteps(steps)
diff --git a/cbuildbot/stages/chrome_stages.py b/cbuildbot/stages/chrome_stages.py
new file mode 100644
index 0000000..da175ba
--- /dev/null
+++ b/cbuildbot/stages/chrome_stages.py
@@ -0,0 +1,292 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the Chrome stages."""
+
+from __future__ import print_function
+
+import glob
+import multiprocessing
+import platform
+import os
+import sys
+
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot import results_lib
+from chromite.cbuildbot.stages import artifact_stages
+from chromite.cbuildbot.stages import generic_stages
+from chromite.cbuildbot.stages import sync_stages
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import path_util
+
+MASK_CHANGES_ERROR_SNIPPET = 'The following mask changes are necessary'
+CHROMEPIN_MASK_PATH = os.path.join(constants.SOURCE_ROOT,
+                                   constants.CHROMIUMOS_OVERLAY_DIR,
+                                   'profiles', 'default', 'linux',
+                                   'package.mask', 'chromepin')
+
+class SyncChromeStage(generic_stages.BuilderStage,
+                      generic_stages.ArchivingStageMixin):
+  """Stage that syncs Chrome sources if needed."""
+
+  option_name = 'managed_chrome'
+
+  def __init__(self, builder_run, **kwargs):
+    super(SyncChromeStage, self).__init__(builder_run, **kwargs)
+    # PerformStage() will fill this out for us.
+    # TODO(mtennant): Replace with a run param.
+    self.chrome_version = None
+
+  def HandleSkip(self):
+    """Set run.attrs.chrome_version to chrome version in buildroot now."""
+    self._run.attrs.chrome_version = self._run.DetermineChromeVersion()
+    logging.debug('Existing chrome version is %s.',
+                  self._run.attrs.chrome_version)
+    self._WriteChromeVersionToMetadata()
+    super(SyncChromeStage, self).HandleSkip()
+
+  def _GetChromeVersionFromMetadata(self):
+    """Return the Chrome version from metadata; None if is does not exist."""
+    version_dict = self._run.attrs.metadata.GetDict().get('version')
+    return None if not version_dict else version_dict.get('chrome')
+
+  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
+  def PerformStage(self):
+    chrome_atom_to_build = None
+    if self._chrome_rev:
+      if (self._chrome_rev == constants.CHROME_REV_SPEC and
+          self._run.options.chrome_version):
+        self.chrome_version = self._run.options.chrome_version
+        logging.info('Using chrome version from options.chrome_version: %s',
+                     self.chrome_version)
+      else:
+        self.chrome_version = self._GetChromeVersionFromMetadata()
+        if self.chrome_version:
+          logging.info('Using chrome version from the metadata dictionary: %s',
+                       self.chrome_version)
+
+      # Perform chrome uprev.
+      try:
+        chrome_atom_to_build = commands.MarkChromeAsStable(
+            self._build_root, self._run.manifest_branch,
+            self._chrome_rev, self._boards,
+            chrome_version=self.chrome_version)
+      except commands.ChromeIsPinnedUprevError as e:
+        # If uprev failed due to a chrome pin, record that failure (so that the
+        # build ultimately fails) but try again without the pin, to allow the
+        # slave to test the newer chrome anyway.
+        chrome_atom_to_build = e.new_chrome_atom
+        if chrome_atom_to_build:
+          results_lib.Results.Record(self.name, e)
+          logging.PrintBuildbotStepFailure()
+          logging.error('Chrome is pinned. Attempting to continue build for '
+                        'chrome atom %s anyway but build will ultimately fail.',
+                        chrome_atom_to_build)
+          logging.info('Deleting pin file at %s and proceeding.',
+                       CHROMEPIN_MASK_PATH)
+          osutils.SafeUnlink(CHROMEPIN_MASK_PATH)
+        else:
+          raise
+
+    kwargs = {}
+    if self._chrome_rev == constants.CHROME_REV_SPEC:
+      kwargs['revision'] = self.chrome_version
+      logging.PrintBuildbotStepText('revision %s' % kwargs['revision'])
+    else:
+      if not self.chrome_version:
+        self.chrome_version = self._run.DetermineChromeVersion()
+
+      kwargs['tag'] = self.chrome_version
+      logging.PrintBuildbotStepText('tag %s' % kwargs['tag'])
+
+    useflags = self._run.config.useflags
+    commands.SyncChrome(self._build_root, self._run.options.chrome_root,
+                        useflags, **kwargs)
+    if (self._chrome_rev and not chrome_atom_to_build and
+        self._run.options.buildbot and
+        self._run.config.build_type == constants.CHROME_PFQ_TYPE):
+      logging.info('Chrome already uprevved. Nothing else to do.')
+      sys.exit(0)
+
+  def _WriteChromeVersionToMetadata(self):
+    """Write chrome version to metadata and upload partial json file."""
+    self._run.attrs.metadata.UpdateKeyDictWithDict(
+        'version',
+        {'chrome': self._run.attrs.chrome_version})
+    self.UploadMetadata(filename=constants.PARTIAL_METADATA_JSON)
+
+  def _Finish(self):
+    """Provide chrome_version to the rest of the run."""
+    # Even if the stage failed, a None value for chrome_version still
+    # means something.  In other words, this stage tried to run.
+    self._run.attrs.chrome_version = self.chrome_version
+    self._WriteChromeVersionToMetadata()
+    super(SyncChromeStage, self)._Finish()
+
+
+class PatchChromeStage(generic_stages.BuilderStage):
+  """Stage that applies Chrome patches if needed."""
+
+  option_name = 'rietveld_patches'
+
+  URL_BASE = 'https://codereview.chromium.org/%(id)s'
+
+  def PerformStage(self):
+    for spatch in ' '.join(self._run.options.rietveld_patches).split():
+      patch, colon, subdir = spatch.partition(':')
+      if not colon:
+        subdir = 'src'
+      url = self.URL_BASE % {'id': patch}
+      logging.PrintBuildbotLink(spatch, url)
+      commands.PatchChrome(self._run.options.chrome_root, patch, subdir)
+
+
+class ChromeSDKStage(generic_stages.BoardSpecificBuilderStage,
+                     generic_stages.ArchivingStageMixin):
+  """Run through the simple chrome workflow."""
+
+  option_name = 'chrome_sdk'
+  config_name = 'chrome_sdk'
+
+  def __init__(self, *args, **kwargs):
+    super(ChromeSDKStage, self).__init__(*args, **kwargs)
+    self._upload_queue = multiprocessing.Queue()
+    self._pkg_dir = os.path.join(
+        self._build_root, constants.DEFAULT_CHROOT_DIR,
+        'build', self._current_board, 'var', 'db', 'pkg')
+    if self._run.options.chrome_root:
+      self.chrome_src = os.path.join(self._run.options.chrome_root, 'src')
+      self.out_board_dir = os.path.join(
+          self.chrome_src, 'out_%s' % self._current_board)
+
+  def _BuildAndArchiveChromeSysroot(self):
+    """Generate and upload sysroot for building Chrome."""
+    assert self.archive_path.startswith(self._build_root)
+    extra_env = {}
+    if self._run.config.useflags:
+      extra_env['USE'] = ' '.join(self._run.config.useflags)
+    in_chroot_path = path_util.ToChrootPath(self.archive_path)
+    cmd = ['cros_generate_sysroot', '--out-dir', in_chroot_path, '--board',
+           self._current_board, '--package', constants.CHROME_CP]
+    cros_build_lib.RunCommand(cmd, cwd=self._build_root, enter_chroot=True,
+                              extra_env=extra_env)
+    self._upload_queue.put([constants.CHROME_SYSROOT_TAR])
+
+  def _ArchiveChromeEbuildEnv(self):
+    """Generate and upload Chrome ebuild environment."""
+    files = glob.glob(os.path.join(self._pkg_dir, constants.CHROME_CP) + '-*')
+    if not files:
+      raise artifact_stages.NothingToArchiveException(
+          'Failed to find package %s' % constants.CHROME_CP)
+    if len(files) > 1:
+      logging.PrintBuildbotStepWarnings()
+      logging.warning('Expected one package for %s, found %d',
+                      constants.CHROME_CP, len(files))
+
+    chrome_dir = sorted(files)[-1]
+    env_bzip = os.path.join(chrome_dir, 'environment.bz2')
+    with osutils.TempDir(prefix='chrome-sdk-stage') as tempdir:
+      # Convert from bzip2 to tar format.
+      bzip2 = cros_build_lib.FindCompressor(cros_build_lib.COMP_BZIP2)
+      cros_build_lib.RunCommand(
+          [bzip2, '-d', env_bzip, '-c'],
+          log_stdout_to_file=os.path.join(tempdir, constants.CHROME_ENV_FILE))
+      env_tar = os.path.join(self.archive_path, constants.CHROME_ENV_TAR)
+      cros_build_lib.CreateTarball(env_tar, tempdir)
+      self._upload_queue.put([os.path.basename(env_tar)])
+
+  def _VerifyChromeDeployed(self, tempdir):
+    """Check to make sure deploy_chrome ran correctly."""
+    if not os.path.exists(os.path.join(tempdir, 'chrome')):
+      raise AssertionError('deploy_chrome did not run successfully!')
+
+  def _VerifySDKEnvironment(self):
+    """Make sure the SDK environment is set up properly."""
+    # If the environment wasn't set up, then the output directory wouldn't be
+    # created after 'gclient runhooks'.
+    # TODO: Make this check actually look at the environment.
+    if not os.path.exists(self.out_board_dir):
+      raise AssertionError('%s not created!' % self.out_board_dir)
+
+  def _BuildChrome(self, sdk_cmd):
+    """Use the generated SDK to build Chrome."""
+    # Validate fetching of the SDK and setting everything up.
+    sdk_cmd.Run(['true'])
+    # Actually build chromium.
+    sdk_cmd.Run(['gclient', 'runhooks'])
+    self._VerifySDKEnvironment()
+    sdk_cmd.Ninja()
+
+  def _TestDeploy(self, sdk_cmd):
+    """Test SDK deployment."""
+    with osutils.TempDir(prefix='chrome-sdk-stage') as tempdir:
+      # Use the TOT deploy_chrome.
+      script_path = os.path.join(
+          self._build_root, constants.CHROMITE_BIN_SUBDIR, 'deploy_chrome')
+      sdk_cmd.Run([script_path, '--build-dir',
+                   os.path.join(self.out_board_dir, 'Release'),
+                   '--staging-only', '--staging-dir', tempdir])
+      self._VerifyChromeDeployed(tempdir)
+
+  def _GenerateAndUploadMetadata(self):
+    self.UploadMetadata(upload_queue=self._upload_queue,
+                        filename=constants.PARTIAL_METADATA_JSON)
+
+  def PerformStage(self):
+    if platform.dist()[-1] == 'lucid':
+      # Chrome no longer builds on Lucid. See crbug.com/276311
+      print('Ubuntu lucid is no longer supported.')
+      print('Please upgrade to Ubuntu Precise.')
+      logging.PrintBuildbotStepWarnings()
+      return
+
+    steps = [self._BuildAndArchiveChromeSysroot, self._ArchiveChromeEbuildEnv,
+             self._GenerateAndUploadMetadata]
+    with self.ArtifactUploader(self._upload_queue, archive=False):
+      parallel.RunParallelSteps(steps)
+
+      if self._run.config.chrome_sdk_build_chrome:
+        with osutils.TempDir(prefix='chrome-sdk-cache') as tempdir:
+          cache_dir = os.path.join(tempdir, 'cache')
+          extra_args = ['--cwd', self.chrome_src, '--sdk-path',
+                        self.archive_path]
+          sdk_cmd = commands.ChromeSDK(
+              self._build_root, self._current_board, chrome_src=self.chrome_src,
+              goma=self._run.config.chrome_sdk_goma,
+              extra_args=extra_args, cache_dir=cache_dir)
+          self._BuildChrome(sdk_cmd)
+          self._TestDeploy(sdk_cmd)
+
+
+class ChromeLKGMSyncStage(sync_stages.SyncStage):
+  """Stage that syncs to the last known good manifest for Chrome."""
+
+  output_manifest_sha1 = False
+
+  def GetNextManifest(self):
+    """Override: Gets the LKGM from the Chrome tree."""
+    chrome_lkgm = commands.GetChromeLKGM(self._run.options.chrome_version)
+
+    # We need a full buildspecs manager here as we need an initialized manifest
+    # manager with paths to the spec.
+    # TODO(mtennant): Consider registering as manifest_manager run param, for
+    # consistency, but be careful that consumers do not get confused.
+    # Currently only the "manifest_manager" from ManifestVersionedSync (and
+    # subclasses) is used later in the flow.
+    manifest_manager = manifest_version.BuildSpecsManager(
+        source_repo=self.repo,
+        manifest_repo=self._GetManifestVersionsRepoUrl(),
+        build_names=self._run.GetBuilderIds(),
+        incr_type='build',
+        force=False,
+        branch=self._run.manifest_branch)
+
+    manifest_manager.BootstrapFromVersion(chrome_lkgm)
+    return manifest_manager.GetLocalManifest(chrome_lkgm)
diff --git a/cbuildbot/stages/chrome_stages_unittest b/cbuildbot/stages/chrome_stages_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/stages/chrome_stages_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/stages/chrome_stages_unittest.py b/cbuildbot/stages/chrome_stages_unittest.py
new file mode 100644
index 0000000..b1504a0
--- /dev/null
+++ b/cbuildbot/stages/chrome_stages_unittest.py
@@ -0,0 +1,129 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for chrome stages."""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot import cbuildbot_unittest
+from chromite.cbuildbot.stages import chrome_stages
+from chromite.cbuildbot.stages import generic_stages_unittest
+from chromite.lib import cidb
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import parallel_unittest
+
+
+# pylint: disable=too-many-ancestors
+
+
+class ChromeSDKStageTest(cbuildbot_unittest.SimpleBuilderTestCase,
+                         generic_stages_unittest.AbstractStageTestCase,
+                         cros_test_lib.LoggingTestCase):
+  """Verify stage that creates the chrome-sdk and builds chrome with it."""
+  BOT_ID = 'link-paladin'
+  RELEASE_TAG = ''
+
+  # pylint: disable=protected-access
+
+  def setUp(self):
+    self.StartPatcher(parallel_unittest.ParallelMock())
+
+    # Set up a general purpose cidb mock. Tests with more specific
+    # mock requirements can replace this with a separate call to
+    # SetupMockCidb
+    cidb.CIDBConnectionFactory.SetupMockCidb(mock.MagicMock())
+
+    self._Prepare()
+
+  def _Prepare(self, bot_id=None, **kwargs):
+    super(ChromeSDKStageTest, self)._Prepare(bot_id, **kwargs)
+
+    self._run.options.chrome_root = '/tmp/non-existent'
+    self._run.attrs.metadata.UpdateWithDict({'toolchain-tuple': ['target'],
+                                             'toolchain-url' : 'some-url'})
+
+  def ConstructStage(self):
+    self._run.GetArchive().SetupArchivePath()
+    return chrome_stages.ChromeSDKStage(self._run, self._current_board)
+
+  def testIt(self):
+    """A simple run-through test."""
+    rc_mock = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
+    rc_mock.SetDefaultCmdResult()
+    self.PatchObject(chrome_stages.ChromeSDKStage, '_ArchiveChromeEbuildEnv',
+                     autospec=True)
+    self.PatchObject(chrome_stages.ChromeSDKStage, '_VerifyChromeDeployed',
+                     autospec=True)
+    self.PatchObject(chrome_stages.ChromeSDKStage, '_VerifySDKEnvironment',
+                     autospec=True)
+    self.RunStage()
+
+  def testChromeEnvironment(self):
+    """Test that the Chrome environment is built."""
+    # Create the chrome environment compressed file.
+    stage = self.ConstructStage()
+    chrome_env_dir = os.path.join(
+        stage._pkg_dir, constants.CHROME_CP + '-25.3643.0_rc1')
+    env_file = os.path.join(chrome_env_dir, 'environment')
+    osutils.Touch(env_file, makedirs=True)
+
+    cros_build_lib.RunCommand(['bzip2', env_file])
+
+    # Run the code.
+    stage._ArchiveChromeEbuildEnv()
+
+    env_tar_base = stage._upload_queue.get()[0]
+    env_tar = os.path.join(stage.archive_path, env_tar_base)
+    self.assertTrue(os.path.exists(env_tar))
+    cros_test_lib.VerifyTarball(env_tar, ['./', 'environment'])
+
+
+class PatchChromeStageTest(generic_stages_unittest.AbstractStageTestCase):
+  """Tests for PatchChromeStage."""
+
+  def setUp(self):
+    self._Prepare(cmd_args=[
+        '-r', self.build_root,
+        '--rietveld-patches=1234',
+        '--rietveld-patches=555:adir',
+    ])
+    self.PatchObject(commands, 'PatchChrome')
+
+  def ConstructStage(self):
+    return chrome_stages.PatchChromeStage(self._run)
+
+  def testBasic(self):
+    """Verify requested patches are applied."""
+    stage = self.ConstructStage()
+    stage.PerformStage()
+
+
+class SyncChromeStageTest(generic_stages_unittest.AbstractStageTestCase,
+                          cros_build_lib_unittest.RunCommandTestCase):
+  """Tests for SyncChromeStage."""
+
+  # pylint: disable-msg=protected-access
+  def setUp(self):
+    self._Prepare()
+    self.PatchObject(cbuildbot_run._BuilderRunBase, 'DetermineChromeVersion',
+                     return_value='35.0.1863.0')
+    self.PatchObject(commands, 'SyncChrome')
+
+  def ConstructStage(self):
+    return chrome_stages.SyncChromeStage(self._run)
+
+  def testBasic(self):
+    """Basic syntax sanity test."""
+    stage = self.ConstructStage()
+    stage.PerformStage()
+
diff --git a/cbuildbot/stages/completion_stages.py b/cbuildbot/stages/completion_stages.py
new file mode 100644
index 0000000..2c6b08d
--- /dev/null
+++ b/cbuildbot/stages/completion_stages.py
@@ -0,0 +1,852 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the completion stages."""
+
+from __future__ import print_function
+
+from chromite.cbuildbot import chroot_lib
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import results_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot import tree_status
+from chromite.cbuildbot.stages import generic_stages
+from chromite.cbuildbot.stages import sync_stages
+from chromite.lib import clactions
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import patch as cros_patch
+from chromite.lib import portage_util
+
+
+def GetBuilderSuccessMap(builder_run, overall_success):
+  """Get the pass/fail status of all builders.
+
+  A builder is marked as passed if all of its steps ran all of the way to
+  completion. We determine this by looking at whether all of the steps for
+  all of the constituent boards ran to completion.
+
+  In cases where a builder does not have any boards, or has child boards, we
+  fall back and instead just look at whether the entire build was successful.
+
+  Args:
+    builder_run: The builder run we wish to get the status of.
+    overall_success: The overall status of the build.
+
+  Returns:
+    A dict, mapping the builder names to whether they succeeded.
+  """
+  success_map = {}
+  for run in [builder_run] + builder_run.GetChildren():
+    if run.config.boards and not run.config.child_configs:
+      success_map[run.config.name] = True
+      for board in run.config.boards:
+        board_runattrs = run.GetBoardRunAttrs(board)
+        if not board_runattrs.HasParallel('success'):
+          success_map[run.config.name] = False
+    else:
+      # If a builder does not have boards, or if it has child configs, we
+      # will just use the overall status instead.
+      success_map[run.config.name] = overall_success
+  return success_map
+
+
+def CreateBuildFailureMessage(overlays, builder_name, dashboard_url):
+  """Creates a message summarizing the failures.
+
+  Args:
+    overlays: The overlays used for the build.
+    builder_name: The name of the builder.
+    dashboard_url: The URL of the build.
+
+  Returns:
+    A failures_lib.BuildFailureMessage object.
+  """
+  internal = overlays in [constants.PRIVATE_OVERLAYS,
+                          constants.BOTH_OVERLAYS]
+  details = []
+  tracebacks = tuple(results_lib.Results.GetTracebacks())
+  for x in tracebacks:
+    if isinstance(x.exception, failures_lib.CompoundFailure):
+      # We do not want the textual tracebacks included in the
+      # stringified CompoundFailure instance because this will be
+      # printed on the waterfall.
+      ex_str = x.exception.ToSummaryString()
+    else:
+      ex_str = str(x.exception)
+    # Truncate displayed failure reason to 200 characters.
+    ex_str = ex_str[:200]
+    details.append('The %s stage failed: %s' % (x.failed_stage, ex_str))
+  if not details:
+    details = ['cbuildbot failed']
+
+  # reason does not include builder name or URL. This is mainly for
+  # populating the "failure message" column in the stats sheet.
+  reason = ' '.join(details)
+  details.append('in %s' % dashboard_url)
+  msg = '%s: %s' % (builder_name, ' '.join(details))
+
+  return failures_lib.BuildFailureMessage(msg, tracebacks, internal, reason,
+                                          builder_name)
+
+
+class ManifestVersionedSyncCompletionStage(
+    generic_stages.ForgivingBuilderStage):
+  """Stage that records board specific results for a unique manifest file."""
+
+  option_name = 'sync'
+
+  def __init__(self, builder_run, sync_stage, success, **kwargs):
+    super(ManifestVersionedSyncCompletionStage, self).__init__(
+        builder_run, **kwargs)
+    self.sync_stage = sync_stage
+    self.success = success
+    # Message that can be set that will be sent along with the status in
+    # UpdateStatus.
+    self.message = None
+
+  def GetBuildFailureMessage(self):
+    """Returns message summarizing the failures."""
+    return CreateBuildFailureMessage(self._run.config.overlays,
+                                     self._run.config.name,
+                                     self._run.ConstructDashboardURL())
+
+  def PerformStage(self):
+    if not self.success:
+      self.message = self.GetBuildFailureMessage()
+
+    if not config_lib.IsPFQType(self._run.config.build_type):
+      # Update the pass/fail status in the manifest-versions
+      # repo. Suite scheduler checks the build status to schedule
+      # suites.
+      self._run.attrs.manifest_manager.UpdateStatus(
+          success_map=GetBuilderSuccessMap(self._run, self.success),
+          message=self.message, dashboard_url=self.ConstructDashboardURL())
+
+
+class ImportantBuilderFailedException(failures_lib.StepFailure):
+  """Exception thrown when an important build fails to build."""
+
+
+class MasterSlaveSyncCompletionStage(ManifestVersionedSyncCompletionStage):
+  """Stage that records whether we passed or failed to build/test manifest."""
+
+  def __init__(self, *args, **kwargs):
+    super(MasterSlaveSyncCompletionStage, self).__init__(*args, **kwargs)
+    self._slave_statuses = {}
+
+  def _GetLocalBuildStatus(self):
+    """Return the status for this build as a dictionary."""
+    status = manifest_version.BuilderStatus.GetCompletedStatus(self.success)
+    status_obj = manifest_version.BuilderStatus(status, self.message)
+    return {self._bot_id: status_obj}
+
+  def _FetchSlaveStatuses(self):
+    """Fetch and return build status for slaves of this build.
+
+    If this build is not a master then return just the status of this build.
+
+    Returns:
+      A dict of build_config name -> BuilderStatus objects, for all important
+      slave build configs. Build configs that never started will have a
+      BuilderStatus of MISSING.
+    """
+    # Wait for slaves if we're a master, in production or mock-production.
+    # Otherwise just look at our own status.
+    slave_statuses = self._GetLocalBuildStatus()
+    if not self._run.config.master:
+      # The slave build returns its own status.
+      logging.warning('The build is not a master.')
+    elif self._run.options.mock_slave_status or not self._run.options.debug:
+      # The master build.
+      builders = self._GetSlaveConfigs()
+      builder_names = [b.name for b in builders]
+      timeout = None
+      build_id, db = self._run.GetCIDBHandle()
+      if db:
+        timeout = db.GetTimeToDeadline(build_id)
+      if timeout is None:
+        # Catch-all: This could happen if cidb is not setup, or the deadline
+        # query fails.
+        timeout = constants.MASTER_BUILD_TIMEOUT_DEFAULT_SECONDS
+
+      if self._run.options.debug:
+        # For debug runs, wait for three minutes to ensure most code
+        # paths are executed.
+        logging.info('Waiting for 3 minutes only for debug run. '
+                     'Would have waited for %s seconds.', timeout)
+        timeout = 3 * 60
+
+      manager = self._run.attrs.manifest_manager
+      if sync_stages.MasterSlaveLKGMSyncStage.sub_manager:
+        manager = sync_stages.MasterSlaveLKGMSyncStage.sub_manager
+      slave_statuses.update(manager.GetBuildersStatus(
+          self._run.attrs.metadata.GetValue('build_id'),
+          builder_names,
+          timeout=timeout))
+    return slave_statuses
+
+  def _HandleStageException(self, exc_info):
+    """Decide whether an exception should be treated as fatal."""
+    # Besides the master, the completion stages also run on slaves, to report
+    # their status back to the master. If the build failed, they throw an
+    # exception here. For slave builders, marking this stage 'red' would be
+    # redundant, since the build itself would already be red. In this case,
+    # report a warning instead.
+    # pylint: disable=protected-access
+    exc_type = exc_info[0]
+    if (issubclass(exc_type, ImportantBuilderFailedException) and
+        not self._run.config.master):
+      return self._HandleExceptionAsWarning(exc_info)
+    else:
+      # In all other cases, exceptions should be treated as fatal. To
+      # implement this, we bypass ForgivingStage and call
+      # generic_stages.BuilderStage._HandleStageException explicitly.
+      return generic_stages.BuilderStage._HandleStageException(self, exc_info)
+
+  def HandleSuccess(self):
+    """Handle a successful build.
+
+    This function is called whenever the cbuildbot run is successful.
+    For the master, this will only be called when all slave builders
+    are also successful. This function may be overridden by subclasses.
+    """
+    # We only promote for the pfq, not chrome pfq.
+    # TODO(build): Run this logic in debug mode too.
+    if (not self._run.options.debug and
+        config_lib.IsPFQType(self._run.config.build_type) and
+        self._run.config.master and
+        self._run.manifest_branch == 'master' and
+        self._run.config.build_type != constants.CHROME_PFQ_TYPE):
+      self._run.attrs.manifest_manager.PromoteCandidate()
+      if sync_stages.MasterSlaveLKGMSyncStage.sub_manager:
+        sync_stages.MasterSlaveLKGMSyncStage.sub_manager.PromoteCandidate()
+
+  def HandleFailure(self, failing, inflight, no_stat):
+    """Handle a build failure.
+
+    This function is called whenever the cbuildbot run fails.
+    For the master, this will be called when any slave fails or times
+    out. This function may be overridden by subclasses.
+
+    Args:
+      failing: The names of the failing builders.
+      inflight: The names of the builders that are still running.
+      no_stat: Set of builder names of slave builders that had status None.
+    """
+    if failing or inflight or no_stat:
+      logging.PrintBuildbotStepWarnings()
+
+    if failing:
+      logging.warning('\n'.join([
+          'The following builders failed with this manifest:',
+          ', '.join(sorted(failing)),
+          'Please check the logs of the failing builders for details.']))
+
+    if inflight:
+      logging.warning('\n'.join([
+          'The following builders took too long to finish:',
+          ', '.join(sorted(inflight)),
+          'Please check the logs of these builders for details.']))
+
+    if no_stat:
+      logging.warning('\n'.join([
+          'The following builders did not start or failed prematurely:',
+          ', '.join(sorted(no_stat)),
+          'Please check the logs of these builders for details.']))
+
+  def PerformStage(self):
+    super(MasterSlaveSyncCompletionStage, self).PerformStage()
+
+    # Upload our pass/fail status to Google Storage.
+    self._run.attrs.manifest_manager.UploadStatus(
+        success=self.success, message=self.message,
+        dashboard_url=self.ConstructDashboardURL())
+
+    statuses = self._FetchSlaveStatuses()
+    self._slave_statuses = statuses
+    no_stat = set(builder for builder, status in statuses.iteritems()
+                  if status.Missing())
+    failing = set(builder for builder, status in statuses.iteritems()
+                  if status.Failed())
+    inflight = set(builder for builder, status in statuses.iteritems()
+                   if status.Inflight())
+
+    # If all the failing or inflight builders were sanity checkers
+    # then ignore the failure.
+    fatal = self._IsFailureFatal(failing, inflight, no_stat)
+
+    if fatal:
+      self._AnnotateFailingBuilders(failing, inflight, no_stat, statuses)
+      self.HandleFailure(failing, inflight, no_stat)
+      raise ImportantBuilderFailedException()
+    else:
+      self.HandleSuccess()
+
+  def _IsFailureFatal(self, failing, inflight, no_stat):
+    """Returns a boolean indicating whether the build should fail.
+
+    Args:
+      failing: Set of builder names of slave builders that failed.
+      inflight: Set of builder names of slave builders that are inflight
+      no_stat: Set of builder names of slave builders that had status None.
+
+    Returns:
+      True if any of the failing or inflight builders are not sanity check
+      builders for this master, or if there were any non-sanity-check builders
+      with status None.
+    """
+    sanity_builders = self._run.config.sanity_check_slaves or []
+    sanity_builders = set(sanity_builders)
+    return not sanity_builders.issuperset(failing | inflight | no_stat)
+
+  def _AnnotateFailingBuilders(self, failing, inflight, no_stat, statuses):
+    """Add annotations that link to either failing or inflight builders.
+
+    Adds buildbot links to failing builder dashboards. If no builders are
+    failing, adds links to inflight builders. Adds step text for builders
+    with status None.
+
+    Args:
+      failing: Set of builder names of slave builders that failed.
+      inflight: Set of builder names of slave builders that are inflight.
+      no_stat: Set of builder names of slave builders that had status None.
+      statuses: A builder-name->status dictionary, which will provide
+                the dashboard_url values for any links.
+    """
+    builders_to_link = set.union(failing, inflight)
+    for builder in builders_to_link:
+      if statuses[builder].dashboard_url:
+        if statuses[builder].message:
+          text = '%s: %s' % (builder, statuses[builder].message.reason)
+        else:
+          text = '%s: timed out' % builder
+
+        logging.PrintBuildbotLink(text, statuses[builder].dashboard_url)
+
+    for builder in no_stat:
+      logging.PrintBuildbotStepText('%s did not start.' % builder)
+
+  def GetSlaveStatuses(self):
+    """Returns cached slave status results.
+
+    Cached results are populated during PerformStage, so this function
+    should only be called after PerformStage has returned.
+
+    Returns:
+      A dictionary from build names to manifest_version.BuilderStatus
+      builder status objects.
+    """
+    return self._slave_statuses
+
+  def _GetFailedMessages(self, failing):
+    """Gathers the BuildFailureMessages from the |failing| builders.
+
+    Args:
+      failing: Names of the builders that failed.
+
+    Returns:
+      A list of BuildFailureMessage or NoneType objects.
+    """
+    return [self._slave_statuses[x].message for x in failing]
+
+  def _GetBuildersWithNoneMessages(self, failing):
+    """Returns a list of failed builders with NoneType failure message.
+
+    Args:
+      failing: Names of the builders that failed.
+
+    Returns:
+      A list of builder names.
+    """
+    return [x for x in failing if self._slave_statuses[x].message is None]
+
+
+class CanaryCompletionStage(MasterSlaveSyncCompletionStage):
+  """Collect build slave statuses and handle the failures."""
+
+  def HandleFailure(self, failing, inflight, no_stat):
+    """Handle a build failure or timeout in the Canary builders.
+
+    Args:
+      failing: Names of the builders that failed.
+      inflight: Names of the builders that timed out.
+      no_stat: Set of builder names of slave builders that had status None.
+    """
+    # Print out the status about what builds failed or not.
+    MasterSlaveSyncCompletionStage.HandleFailure(
+        self, failing, inflight, no_stat)
+
+    # Only the master takes further action (alert email, tree closure);
+    # slaves are done once they have reported their status above.
+    if self._run.config.master:
+      self.CanaryMasterHandleFailure(failing, inflight, no_stat)
+
+  def SendCanaryFailureAlert(self, failing, inflight, no_stat):
+    """Send an alert email to summarize canary failures.
+
+    Args:
+      failing: The names of the failing builders.
+      inflight: The names of the builders that are still running.
+      no_stat: The names of the builders that had status None.
+    """
+    builder_name = 'Canary Master'
+    title = '%s has detected build failures:' % builder_name
+    # One paragraph per failure; builders with a None message get a
+    # generic "unknown reason" line instead.
+    msgs = [str(x) for x in self._GetFailedMessages(failing)]
+    slaves = self._GetBuildersWithNoneMessages(failing)
+    msgs += ['%s failed with unknown reason.' % x for x in slaves]
+    msgs += ['%s timed out' % x for x in inflight]
+    msgs += ['%s did not start' % x for x in no_stat]
+    msgs.insert(0, title)
+    msgs.append('You can also view the summary of the slave failures from '
+                'the %s stage of %s. Click on the failure message to go '
+                'to an individual slave\'s build status page: %s' % (
+                    self.name, builder_name, self.ConstructDashboardURL()))
+    msg = '\n\n'.join(msgs)
+    logging.warning(msg)
+    # The X-cbuildbot-alert header lets mail filters route canary alerts.
+    extra_fields = {'X-cbuildbot-alert': 'canary-fail-alert'}
+    tree_status.SendHealthAlert(self._run, 'Canary builder failures', msg,
+                                extra_fields=extra_fields)
+
+  def _ComposeTreeStatusMessage(self, failing, inflight, no_stat):
+    """Composes a tree status message.
+
+    Args:
+      failing: Names of the builders that failed.
+      inflight: Names of the builders that timed out.
+      no_stat: Set of builder names of slave builders that had status None.
+
+    Returns:
+      A string summarizing slave failures, e.g.
+      'foo did not start; bar,baz and 1 others failed'.
+    """
+    slave_status_list = [
+        ('did not start', list(no_stat)),
+        ('timed out', list(inflight)),
+        ('failed', list(failing)),]
+    # Print maximum 2 slaves for each category to not clutter the
+    # message.
+    max_num = 2
+    messages = []
+    for status, slaves in slave_status_list:
+      if not slaves:
+        continue
+      slaves_str = ','.join(slaves[:max_num])
+      if len(slaves) <= max_num:
+        messages.append('%s %s' % (slaves_str, status))
+      else:
+        messages.append('%s and %d others %s' % (slaves_str,
+                                                 len(slaves) - max_num,
+                                                 status))
+    return '; '.join(messages)
+
+  def CanaryMasterHandleFailure(self, failing, inflight, no_stat):
+    """Handles the failure by sending out an alert email.
+
+    Args:
+      failing: Names of the builders that failed.
+      inflight: Names of the builders that timed out.
+      no_stat: Set of builder names of slave builders that had status None.
+    """
+    # Only alert and close/throttle the tree for failures on the master
+    # branch; branched canaries do not gate the tree.
+    if self._run.manifest_branch == 'master':
+      self.SendCanaryFailureAlert(failing, inflight, no_stat)
+      tree_status.ThrottleOrCloseTheTree(
+          '"Canary master"',
+          self._ComposeTreeStatusMessage(failing, inflight, no_stat),
+          internal=self._run.config.internal,
+          buildnumber=self._run.buildnumber,
+          dryrun=self._run.debug)
+
+  def _HandleStageException(self, exc_info):
+    """Decide whether an exception should be treated as fatal."""
+    # Canary master already updates the tree status for slave
+    # failures. There is no need to mark this stage red. For slave
+    # builders, the build itself would already be red. In this case,
+    # report a warning instead.
+    # pylint: disable=protected-access
+    exc_type = exc_info[0]
+    if issubclass(exc_type, ImportantBuilderFailedException):
+      return self._HandleExceptionAsWarning(exc_info)
+    else:
+      # In all other cases, exceptions should be treated as fatal.
+      return super(CanaryCompletionStage, self)._HandleStageException(exc_info)
+
+
+class CommitQueueCompletionStage(MasterSlaveSyncCompletionStage):
+  """Commits or reports errors to CL's that failed to be validated."""
+
+  # These stages are required to have run at least once and to never have
+  # failed, on each important slave. Otherwise, we may have incomplete
+  # information on which CLs affect which builders, and thus skip all
+  # board-aware submission.
+  _CRITICAL_STAGES = ('CommitQueueSync',)
+
+  def HandleSuccess(self):
+    if self._run.config.master:
+      self.sync_stage.pool.SubmitPool(reason=constants.STRATEGY_CQ_SUCCESS)
+      # After submitting the pool, update the commit hashes for uprevved
+      # ebuilds.
+      manifest = git.ManifestCheckout.Cached(self._build_root)
+      portage_util.EBuild.UpdateCommitHashesForChanges(
+          self.sync_stage.pool.changes, self._build_root, manifest)
+      if config_lib.IsPFQType(self._run.config.build_type):
+        super(CommitQueueCompletionStage, self).HandleSuccess()
+
+    manager = self._run.attrs.manifest_manager
+    version = manager.current_version
+    if version:
+      chroot_manager = chroot_lib.ChrootManager(self._build_root)
+      chroot_manager.SetChrootVersion(version)
+
+  def HandleFailure(self, failing, inflight, no_stat):
+    """Handle a build failure or timeout in the Commit Queue.
+
+    This function performs any tasks that need to happen when the Commit Queue
+    fails:
+      - Abort the HWTests if necessary.
+      - Push any CLs that indicate that they don't care about this failure.
+      - Determine what CLs to reject.
+
+    See MasterSlaveSyncCompletionStage.HandleFailure.
+
+    Args:
+      failing: Names of the builders that failed.
+      inflight: Names of the builders that timed out.
+      no_stat: Set of builder names of slave builders that had status None.
+    """
+    # Print out the status about what builds failed or not.
+    MasterSlaveSyncCompletionStage.HandleFailure(
+        self, failing, inflight, no_stat)
+
+    if self._run.config.master:
+      self.CQMasterHandleFailure(failing, inflight, no_stat)
+
+  def _GetSlaveMappingAndCLActions(self, changes):
+    """Query CIDB to for slaves and CL actions.
+
+    Args:
+      changes: A list of GerritPatch instances to examine.
+
+    Returns:
+      A tuple of (config_map, action_history), where the config_map
+      is a dictionary mapping build_id to config name for all slaves
+      in this run plus the master, and action_history is a list of all
+      CL actions associated with |changes|.
+    """
+    # build_id is the master build id for the run.
+    build_id, db = self._run.GetCIDBHandle()
+    assert db, 'No database connection to use.'
+    slave_list = db.GetSlaveStatuses(build_id)
+    # TODO(akeshet): We are getting the full action history for all changes that
+    # were in this CQ run. It would make more sense to only get the actions from
+    # build_ids of this master and its slaves.
+    action_history = db.GetActionsForChanges(changes)
+
+    config_map = dict()
+
+    # Build the build_id to config_name mapping. Note that if add the
+    # "relaunch" feature in cbuildbot, there may be multiple build ids
+    # for the same slave config. We will have to make sure
+    # GetSlaveStatuses() returns only the valid slaves (e.g. with
+    # latest start time).
+    for d in slave_list:
+      config_map[d['id']] = d['build_config']
+
+    # TODO(akeshet): We are giving special treatment to the CQ master, which
+    # makes this logic CQ specific. We only use this logic in the CQ anyway at
+    # the moment, but may need to reconsider if we need to generalize to other
+    # master-slave builds.
+    assert self._run.config.name == constants.CQ_MASTER
+    config_map[build_id] = constants.CQ_MASTER
+
+    return config_map, action_history
+
+  def GetRelevantChangesForSlaves(self, changes, no_stat):
+    """Compile a set of relevant changes for each slave.
+
+    Args:
+      changes: A list of GerritPatch instances to examine.
+      no_stat: Set of builder names of slave builders that had status None.
+
+    Returns:
+      A dictionary mapping a slave config name to a set of relevant changes.
+    """
+    # Retrieve the slaves and clactions from CIDB.
+    config_map, action_history = self._GetSlaveMappingAndCLActions(changes)
+    changes_by_build_id = clactions.GetRelevantChangesForBuilds(
+        changes, action_history, config_map.keys())
+
+    # Convert index from build_ids to config names.
+    changes_by_config = dict()
+    for k, v in changes_by_build_id.iteritems():
+      changes_by_config[config_map[k]] = v
+
+    for config in no_stat:
+      # If a slave is in |no_stat|, it means that the slave never
+      # finished applying the changes in the sync stage. Hence the CL
+      # pickup actions for this slave may be
+      # inaccurate. Conservatively assume all changes are relevant.
+      changes_by_config[config] = set(changes)
+
+    return changes_by_config
+
+  def _ShouldSubmitPartialPool(self):
+    """Determine whether we should attempt or skip SubmitPartialPool.
+
+    Returns:
+      True if all important, non-sanity-check slaves ran and completed all
+      critical stages, and hence it is safe to attempt SubmitPartialPool. False
+      otherwise.
+    """
+    # sanity_check_slaves should not block board-aware submission, since they do
+    # not actually apply test patches.
+    sanity_check_slaves = set(self._run.config.sanity_check_slaves)
+    all_slaves = set([x.name for x in self._GetSlaveConfigs()])
+    all_slaves -= sanity_check_slaves
+    assert self._run.config.name not in all_slaves
+
+    # Get slave stages.
+    build_id, db = self._run.GetCIDBHandle()
+    assert db, 'No database connection to use.'
+    slave_stages = db.GetSlaveStages(build_id)
+
+    should_submit = True
+    ACCEPTED_STATUSES = (constants.BUILDER_STATUS_PASSED,
+                         constants.BUILDER_STATUS_SKIPPED,)
+
+    # Configs that have passed critical stages.
+    configs_per_stage = {stage: set() for stage in self._CRITICAL_STAGES}
+
+    for stage in slave_stages:
+      if (stage['name'] in self._CRITICAL_STAGES and
+          stage['status'] in ACCEPTED_STATUSES):
+        configs_per_stage[stage['name']].add(stage['build_config'])
+
+    for stage in self._CRITICAL_STAGES:
+      missing_configs = all_slaves - configs_per_stage[stage]
+      if missing_configs:
+        logging.warning('Config(s) %s did not complete critical stage %s.',
+                        ' '.join(missing_configs), stage)
+        should_submit = False
+
+    return should_submit
+
+  def CQMasterHandleFailure(self, failing, inflight, no_stat):
+    """Handle changes in the validation pool upon build failure or timeout.
+
+    This function determines whether to reject CLs and what CLs to
+    reject based on the category of the failures and whether the
+    sanity check builder(s) passed.
+
+    Args:
+      failing: Names of the builders that failed.
+      inflight: Names of the builders that timed out.
+      no_stat: Set of builder names of slave builders that had status None.
+    """
+    messages = self._GetFailedMessages(failing)
+    self.SendInfraAlertIfNeeded(failing, inflight, no_stat)
+
+    changes = self.sync_stage.pool.changes
+
+    do_partial_submission = self._ShouldSubmitPartialPool()
+
+    if do_partial_submission:
+      changes_by_config = self.GetRelevantChangesForSlaves(changes, no_stat)
+
+      # Even if there was a failure, we can submit the changes that indicate
+      # that they don't care about this failure.
+      changes = self.sync_stage.pool.SubmitPartialPool(
+          changes, messages, changes_by_config, failing, inflight, no_stat,
+          reason=constants.STRATEGY_CQ_PARTIAL)
+    else:
+      logging.warning('Not doing any partial submission, due to critical stage '
+                      'failure(s).')
+      title = 'CQ encountered a critical failure.'
+      msg = ('CQ encountered a critical failure, and hence skipped '
+             'board-aware submission. See %s' % self.ConstructDashboardURL())
+      tree_status.SendHealthAlert(self._run, title, msg)
+
+    sanity_check_slaves = set(self._run.config.sanity_check_slaves)
+    tot_sanity = self._ToTSanity(sanity_check_slaves, self._slave_statuses)
+
+    if not tot_sanity:
+      # Sanity check slave failure may have been caused by bug(s)
+      # in ToT or broken infrastructure. In any of those cases, we
+      # should not reject any changes.
+      logging.warning('Detected that a sanity-check builder failed. '
+                      'Will not reject any changes.')
+
+    # If the tree was not open when we acquired a pool, do not assume that
+    # tot was sane.
+    if not self.sync_stage.pool.tree_was_open:
+      logging.info('The tree was not open when changes were acquired so we are '
+                   'attributing failures to the broken tree rather than the '
+                   'changes.')
+      tot_sanity = False
+
+    if inflight:
+      # Some slave(s) timed out due to unknown causes, so only reject infra
+      # changes (probably just chromite changes).
+      self.sync_stage.pool.HandleValidationTimeout(sanity=tot_sanity,
+                                                   changes=changes)
+      return
+
+    # Some builder failed, or some builder did not report stats, or
+    # the intersection of both. Let HandleValidationFailure decide
+    # what changes to reject.
+    self.sync_stage.pool.HandleValidationFailure(
+        messages, sanity=tot_sanity, changes=changes, no_stat=no_stat)
+
+  def _GetInfraFailMessages(self, failing):
+    """Returns a list of messages containing infra failures.
+
+    Args:
+      failing: The names of the failing builders.
+
+    Returns:
+      A list of BuildFailureMessage objects.
+    """
+    msgs = self._GetFailedMessages(failing)
+    # Filter out None messages because we cannot analyze them.
+    return [x for x in msgs if x and
+            x.HasFailureType(failures_lib.InfrastructureFailure)]
+
+  def SendInfraAlertIfNeeded(self, failing, inflight, no_stat):
+    """Send infra alerts if needed.
+
+    Args:
+      failing: The names of the failing builders.
+      inflight: The names of the builders that are still running.
+      no_stat: The names of the builders that had status None.
+    """
+    msgs = [str(x) for x in self._GetInfraFailMessages(failing)]
+    # Failed to report a non-None messages is an infra failure.
+    slaves = self._GetBuildersWithNoneMessages(failing)
+    msgs += ['%s failed with unknown reason.' % x for x in slaves]
+    msgs += ['%s timed out' % x for x in inflight]
+    msgs += ['%s did not start' % x for x in no_stat]
+    if msgs:
+      builder_name = self._run.config.name
+      title = '%s has encountered infra failures:' % (builder_name,)
+      msgs.insert(0, title)
+      msgs.append('See %s' % self.ConstructDashboardURL())
+      msg = '\n\n'.join(msgs)
+      subject = '%s infra failures' % (builder_name,)
+      extra_fields = {'X-cbuildbot-alert': 'cq-infra-alert'}
+      tree_status.SendHealthAlert(self._run, subject, msg,
+                                  extra_fields=extra_fields)
+
+  @staticmethod
+  def _ToTSanity(sanity_check_slaves, slave_statuses):
+    """Returns False if any sanity check slaves failed.
+
+    Args:
+      sanity_check_slaves: Names of slave builders that are "sanity check"
+        builders for the current master.
+      slave_statuses: Dict of BuilderStatus objects by builder name keys.
+
+    Returns:
+      True if no sanity builders ran and failed.
+    """
+    sanity_check_slaves = sanity_check_slaves or []
+    return not any([x in slave_statuses and slave_statuses[x].Failed() for
+                    x in sanity_check_slaves])
+
+  def GetIrrelevantChanges(self, board_metadata):
+    """Calculates irrelevant changes.
+
+    Args:
+      board_metadata: A dictionary of board specific metadata.
+
+    Returns:
+      A set of irrelevant changes to the build.
+    """
+    if not board_metadata:
+      return set()
+    # changes irrelevant to all the boards are irrelevant to the build
+    changeset_per_board_list = list()
+    for v in board_metadata.values():
+      changes_dict_list = v.get('irrelevant_changes', None)
+      if changes_dict_list:
+        changes_set = set(cros_patch.GerritFetchOnlyPatch.FromAttrDict(d) for d
+                          in changes_dict_list)
+        changeset_per_board_list.append(changes_set)
+      else:
+        # If any board has no irrelevant change, the whole build not have also.
+        return set()
+
+    return set.intersection(*changeset_per_board_list)
+
+  def PerformStage(self):
+    """Run CommitQueueCompletionStage."""
+    if (not self._run.config.master and
+        not self._run.config.do_not_apply_cq_patches):
+      # Slave needs to record what change are irrelevant to this build.
+      board_metadata = self._run.attrs.metadata.GetDict().get('board-metadata')
+      irrelevant_changes = self.GetIrrelevantChanges(board_metadata)
+      self.sync_stage.pool.RecordIrrelevantChanges(irrelevant_changes)
+
+    super(CommitQueueCompletionStage, self).PerformStage()
+
+
+class PreCQCompletionStage(generic_stages.BuilderStage):
+  """Reports the status of a trybot run to Google Storage and Gerrit."""
+
+  def __init__(self, builder_run, sync_stage, success, **kwargs):
+    super(PreCQCompletionStage, self).__init__(builder_run, **kwargs)
+    self.sync_stage = sync_stage
+    self.success = success
+
+  def GetBuildFailureMessage(self):
+    """Returns message summarizing the failures."""
+    return CreateBuildFailureMessage(self._run.config.overlays,
+                                     self._run.config.name,
+                                     self._run.ConstructDashboardURL())
+
+  def PerformStage(self):
+    # Update Gerrit and Google Storage with the Pre-CQ status.
+    if self.success:
+      self.sync_stage.pool.HandlePreCQPerConfigSuccess()
+    else:
+      message = self.GetBuildFailureMessage()
+      self.sync_stage.pool.HandleValidationFailure([message])
+
+
+class PublishUprevChangesStage(generic_stages.BuilderStage):
+  """Makes uprev changes from pfq live for developers."""
+
+  def __init__(self, builder_run, success, **kwargs):
+    """Constructor.
+
+    Args:
+      builder_run: BuilderRun object.
+      success: Boolean indicating whether the build succeeded.
+    """
+    super(PublishUprevChangesStage, self).__init__(builder_run, **kwargs)
+    self.success = success
+
+  def PerformStage(self):
+    overlays, push_overlays = self._ExtractOverlays()
+    assert push_overlays, 'push_overlays must be set to run this stage'
+
+    # If the build failed, we don't want to push our local changes, because
+    # they might include some CLs that failed. Instead, clean up our local
+    # changes and do a fresh uprev.
+    if not self.success:
+      # Clean up our root and sync down the latest changes that were
+      # submitted.
+      commands.BuildRootGitCleanup(self._build_root)
+
+      # Sync down the latest changes we have submitted.
+      if self._run.options.sync:
+        next_manifest = self._run.config.manifest
+        repo = self.GetRepoRepository()
+        repo.Sync(next_manifest)
+
+      # Commit an uprev locally.
+      if self._run.options.uprev and self._run.config.uprev:
+        commands.UprevPackages(self._build_root, self._boards, overlays)
+
+    # Push the uprev commit.
+    commands.UprevPush(self._build_root, push_overlays, self._run.options.debug)
diff --git a/cbuildbot/stages/completion_stages_unittest b/cbuildbot/stages/completion_stages_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/stages/completion_stages_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/stages/completion_stages_unittest.py b/cbuildbot/stages/completion_stages_unittest.py
new file mode 100644
index 0000000..7e900be
--- /dev/null
+++ b/cbuildbot/stages/completion_stages_unittest.py
@@ -0,0 +1,701 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for completion stages."""
+
+from __future__ import print_function
+
+import itertools
+import mock
+import sys
+
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot import results_lib
+from chromite.cbuildbot.stages import completion_stages
+from chromite.cbuildbot.stages import generic_stages_unittest
+from chromite.cbuildbot.stages import sync_stages_unittest
+from chromite.cbuildbot.stages import sync_stages
+from chromite.lib import alerts
+from chromite.lib import cidb
+from chromite.lib import clactions
+from chromite.lib import patch as cros_patch
+from chromite.lib import patch_unittest
+
+
+# pylint: disable=protected-access
+
+
+class ManifestVersionedSyncCompletionStageTest(
+    sync_stages_unittest.ManifestVersionedSyncStageTest):
+  """Tests the ManifestVersionedSyncCompletion stage."""
+
+  # pylint: disable=abstract-method
+
+  BOT_ID = 'x86-mario-release'
+
+  def testManifestVersionedSyncCompletedSuccess(self):
+    """Tests basic ManifestVersionedSyncStageCompleted on success"""
+    board_runattrs = self._run.GetBoardRunAttrs('x86-mario')
+    board_runattrs.SetParallel('success', True)
+    update_status_mock = self.PatchObject(
+        manifest_version.BuildSpecsManager, 'UpdateStatus')
+
+    stage = completion_stages.ManifestVersionedSyncCompletionStage(
+        self._run, self.sync_stage, success=True)
+
+    stage.Run()
+    update_status_mock.assert_called_once_with(
+        message=None, success_map={self.BOT_ID: True}, dashboard_url=mock.ANY)
+
+  def testManifestVersionedSyncCompletedFailure(self):
+    """Tests basic ManifestVersionedSyncStageCompleted on failure"""
+    stage = completion_stages.ManifestVersionedSyncCompletionStage(
+        self._run, self.sync_stage, success=False)
+    message = 'foo'
+    self.PatchObject(stage, 'GetBuildFailureMessage', return_value=message)
+    update_status_mock = self.PatchObject(
+        manifest_version.BuildSpecsManager, 'UpdateStatus')
+
+    stage.Run()
+    update_status_mock.assert_called_once_with(
+        message='foo', success_map={self.BOT_ID: False},
+        dashboard_url=mock.ANY)
+
+  def testManifestVersionedSyncCompletedIncomplete(self):
+    """Tests basic ManifestVersionedSyncStageCompleted on incomplete build."""
+    stage = completion_stages.ManifestVersionedSyncCompletionStage(
+        self._run, self.sync_stage, success=False)
+    stage.Run()
+
+  def testMeaningfulMessage(self):
+    """Tests that all essential components are in the message."""
+    stage = completion_stages.ManifestVersionedSyncCompletionStage(
+        self._run, self.sync_stage, success=False)
+
+    exception = Exception('failed!')
+    traceback = results_lib.RecordedTraceback(
+        'TacoStage', 'Taco', exception, 'traceback_str')
+    self.PatchObject(
+        results_lib.Results, 'GetTracebacks', return_value=[traceback])
+
+    msg = stage.GetBuildFailureMessage()
+    self.assertTrue(stage._run.config.name in msg.message)
+    self.assertTrue(stage._run.ConstructDashboardURL() in msg.message)
+    self.assertTrue('TacoStage' in msg.message)
+    self.assertTrue(str(exception) in msg.message)
+
+    self.assertTrue('TacoStage' in msg.reason)
+    self.assertTrue(str(exception) in msg.reason)
+
+  def testGetBuilderSuccessMap(self):
+    """Tests that the builder success map is properly created."""
+    board_runattrs = self._run.GetBoardRunAttrs('x86-mario')
+    board_runattrs.SetParallel('success', True)
+    builder_success_map = completion_stages.GetBuilderSuccessMap(
+        self._run, True)
+    expected_map = {self.BOT_ID: True}
+    self.assertEqual(expected_map, builder_success_map)
+
+
+class MasterSlaveSyncCompletionStageMockConfigTest(
+    generic_stages_unittest.AbstractStageTestCase):
+  """Tests MasterSlaveSyncCompletionStage with ManifestVersionedSyncStage."""
+  BOT_ID = 'master'
+
+  def setUp(self):
+    self.source_repo = 'ssh://source/repo'
+    self.manifest_version_url = 'fake manifest url'
+    self.branch = 'master'
+    self.build_type = constants.PFQ_TYPE
+
+    # Use our mocked out SiteConfig for all tests.
+    self.test_config = self._GetTestConfig()
+    self._Prepare(site_config=self.test_config)
+
+  def ConstructStage(self):
+    sync_stage = sync_stages.ManifestVersionedSyncStage(self._run)
+    return completion_stages.MasterSlaveSyncCompletionStage(
+        self._run, sync_stage, success=True)
+
+  def _GetTestConfig(self):
+    test_config = config_lib.SiteConfig()
+    test_config.AddConfigWithoutTemplate(
+        'master',
+        boards=[],
+        build_type=self.build_type,
+        master=True,
+        manifest_version=True,
+    )
+    test_config.AddConfigWithoutTemplate(
+        'test1',
+        boards=['x86-generic'],
+        manifest_version=True,
+        build_type=constants.PFQ_TYPE,
+        overlays='public',
+        important=False,
+        chrome_rev=None,
+        branch=False,
+        internal=False,
+        master=False,
+    )
+    test_config.AddConfigWithoutTemplate(
+        'test2',
+        boards=['x86-generic'],
+        manifest_version=False,
+        build_type=constants.PFQ_TYPE,
+        overlays='public',
+        important=True,
+        chrome_rev=None,
+        branch=False,
+        internal=False,
+        master=False,
+    )
+    test_config.AddConfigWithoutTemplate(
+        'test3',
+        boards=['x86-generic'],
+        manifest_version=True,
+        build_type=constants.PFQ_TYPE,
+        overlays='both',
+        important=True,
+        chrome_rev=None,
+        branch=False,
+        internal=True,
+        master=False,
+    )
+    test_config.AddConfigWithoutTemplate(
+        'test4',
+        boards=['x86-generic'],
+        manifest_version=True,
+        build_type=constants.PFQ_TYPE,
+        overlays='both',
+        important=True,
+        chrome_rev=None,
+        branch=True,
+        internal=True,
+        master=False,
+    )
+    test_config.AddConfigWithoutTemplate(
+        'test5',
+        boards=['x86-generic'],
+        manifest_version=True,
+        build_type=constants.PFQ_TYPE,
+        overlays='public',
+        important=True,
+        chrome_rev=None,
+        branch=False,
+        internal=False,
+        master=False,
+    )
+    return test_config
+
+  def testGetSlavesForMaster(self):
+    """Tests that we get the slaves for a fake unified master configuration."""
+    self.maxDiff = None
+    stage = self.ConstructStage()
+    p = stage._GetSlaveConfigs()
+    self.assertEqual([self.test_config['test3'], self.test_config['test5']], p)
+
+
+class MasterSlaveSyncCompletionStageTest(
+    generic_stages_unittest.AbstractStageTestCase):
+  """Tests MasterSlaveSyncCompletionStage with ManifestVersionedSyncStage."""
+  BOT_ID = 'x86-generic-paladin'
+
+  def setUp(self):
+    self.source_repo = 'ssh://source/repo'
+    self.manifest_version_url = 'fake manifest url'
+    self.branch = 'master'
+    self.build_type = constants.PFQ_TYPE
+
+    self._Prepare()
+
+  def _Prepare(self, bot_id=None, **kwargs):
+    super(MasterSlaveSyncCompletionStageTest, self)._Prepare(bot_id, **kwargs)
+
+    self._run.config['manifest_version'] = True
+    self._run.config['build_type'] = self.build_type
+    self._run.config['master'] = True
+
+  def ConstructStage(self):
+    sync_stage = sync_stages.ManifestVersionedSyncStage(self._run)
+    return completion_stages.MasterSlaveSyncCompletionStage(
+        self._run, sync_stage, success=True)
+
+  def testIsFailureFatal(self):
+    """Tests the correctness of the _IsFailureFatal method"""
+    stage = self.ConstructStage()
+
+    # Test behavior when there are no sanity check builders
+    self.assertFalse(stage._IsFailureFatal(set(), set(), set()))
+    self.assertTrue(stage._IsFailureFatal(set(['test3']), set(), set()))
+    self.assertTrue(stage._IsFailureFatal(set(), set(['test5']), set()))
+    self.assertTrue(stage._IsFailureFatal(set(), set(), set(['test1'])))
+
+    # Test behavior where there is a sanity check builder
+    stage._run.config.sanity_check_slaves = ['sanity']
+    self.assertTrue(stage._IsFailureFatal(set(['test5']), set(['sanity']),
+                                          set()))
+    self.assertFalse(stage._IsFailureFatal(set(), set(['sanity']), set()))
+    self.assertTrue(stage._IsFailureFatal(set(), set(['sanity']),
+                                          set(['test1'])))
+    self.assertFalse(stage._IsFailureFatal(set(), set(),
+                                           set(['sanity'])))
+
+  def testAnnotateFailingBuilders(self):
+    """Tests that _AnnotateFailingBuilders is free of syntax errors."""
+    stage = self.ConstructStage()
+
+    failing = {'a'}
+    inflight = {}
+    failed_msg = failures_lib.BuildFailureMessage(
+        'message', [], True, 'reason', 'bot')
+    status = manifest_version.BuilderStatus('failed', failed_msg, 'url')
+
+    statuses = {'a' : status}
+    no_stat = set()
+    stage._AnnotateFailingBuilders(failing, inflight, no_stat, statuses)
+
+  def testExceptionHandler(self):
+    """Verify _HandleStageException is sane."""
+    stage = self.ConstructStage()
+    e = ValueError('foo')
+    try:
+      raise e
+    except ValueError:
+      ret = stage._HandleStageException(sys.exc_info())
+      self.assertTrue(isinstance(ret, tuple))
+      self.assertEqual(len(ret), 3)
+      self.assertEqual(ret[0], e)
+
+
+class MasterSlaveSyncCompletionStageTestWithLKGMSync(
+    MasterSlaveSyncCompletionStageTest):
+  """Tests the MasterSlaveSyncCompletionStage with MasterSlaveLKGMSyncStage."""
+  BOT_ID = 'x86-generic-paladin'
+
+  def ConstructStage(self):
+    sync_stage = sync_stages.MasterSlaveLKGMSyncStage(self._run)
+    return completion_stages.MasterSlaveSyncCompletionStage(
+        self._run, sync_stage, success=True)
+
+
+class CanaryCompletionStageTest(
+    generic_stages_unittest.AbstractStageTestCase):
+  """Tests how canary master handles failures in CanaryCompletionStage."""
+  BOT_ID = 'master-release'
+
+  def _Prepare(self, bot_id=BOT_ID, **kwargs):
+    super(CanaryCompletionStageTest, self)._Prepare(bot_id, **kwargs)
+
+  def setUp(self):
+    self.build_type = constants.CANARY_TYPE
+    self._Prepare()
+
+  def ConstructStage(self):
+    """Returns a CanaryCompletionStage object."""
+    sync_stage = sync_stages.ManifestVersionedSyncStage(self._run)
+    return completion_stages.CanaryCompletionStage(
+        self._run, sync_stage, success=True)
+
+  def testComposeTreeStatusMessage(self):
+    """Tests that the status message is constructed as expected."""
+    failing = ['foo1', 'foo2', 'foo3', 'foo4', 'foo5']
+    inflight = ['bar']
+    no_stat = []
+    stage = self.ConstructStage()
+    self.assertEqual(
+        stage._ComposeTreeStatusMessage(failing, inflight, no_stat),
+        'bar timed out; foo1,foo2 and 3 others failed')
+
+
+class BaseCommitQueueCompletionStageTest(
+    generic_stages_unittest.AbstractStageTestCase,
+    patch_unittest.MockPatchBase):
+  """Tests how CQ handles changes in CommitQueueCompletionStage."""
+
+  def setUp(self):
+    self.build_type = constants.PFQ_TYPE
+    self._Prepare()
+
+    self.partial_submit_changes = ['C', 'D']
+    self.other_changes = ['A', 'B']
+    self.changes = self.other_changes + self.partial_submit_changes
+    self.tot_sanity_mock = self.PatchObject(
+        completion_stages.CommitQueueCompletionStage,
+        '_ToTSanity',
+        return_value=True)
+
+    self.alert_email_mock = self.PatchObject(alerts, 'SendEmail')
+    self.PatchObject(cbuildbot_run._BuilderRunBase,
+                     'InProduction', return_value=True)
+    self.PatchObject(completion_stages.MasterSlaveSyncCompletionStage,
+                     'HandleFailure')
+    self.PatchObject(completion_stages.CommitQueueCompletionStage,
+                     '_GetFailedMessages')
+    self.PatchObject(completion_stages.CommitQueueCompletionStage,
+                     '_GetSlaveMappingAndCLActions',
+                     return_value=(dict(), []))
+    self.PatchObject(clactions, 'GetRelevantChangesForBuilds')
+
+  # pylint: disable=W0221
+  def ConstructStage(self, tree_was_open=True):
+    """Returns a CommitQueueCompletionStage object.
+
+    Args:
+      tree_was_open: If not true, tree was not open when we acquired changes.
+    """
+    sync_stage = sync_stages.CommitQueueSyncStage(self._run)
+    sync_stage.pool = mock.MagicMock()
+    sync_stage.pool.changes = self.changes
+    sync_stage.pool.tree_was_open = tree_was_open
+
+    sync_stage.pool.handle_failure_mock = self.PatchObject(
+        sync_stage.pool, 'HandleValidationFailure')
+    sync_stage.pool.handle_timeout_mock = self.PatchObject(
+        sync_stage.pool, 'HandleValidationTimeout')
+    return completion_stages.CommitQueueCompletionStage(
+        self._run, sync_stage, success=True)
+
+  def VerifyStage(self, failing, inflight, handle_failure=True,
+                  handle_timeout=False, sane_tot=True, submit_partial=False,
+                  alert=False, stage=None, all_slaves=None, slave_stages=None,
+                  do_submit_partial=True, build_passed=False):
+    """Runs and Verifies PerformStage.
+
+    Args:
+      failing: The names of the builders that failed.
+      inflight: The names of the buiders that timed out.
+      handle_failure: If True, calls HandleValidationFailure.
+      handle_timeout: If True, calls HandleValidationTimeout.
+      sane_tot: If not true, assumes TOT is not sane.
+      submit_partial: If True, submit partial pool will submit some changes.
+      alert: If True, sends out an alert email for infra failures.
+      stage: If set, use this constructed stage, otherwise create own.
+      all_slaves: Optional set of all slave configs.
+      slave_stages: Optional list of slave stages.
+      do_submit_partial: If True, assert that there was no call to
+                         SubmitPartialPool.
+      build_passed: Whether the build passed or failed.
+    """
+    if not stage:
+      stage = self.ConstructStage()
+
+    # Setup the stage to look at the specified configs.
+    all_slaves = list(all_slaves or set(failing + inflight))
+    configs = [config_lib.BuildConfig(name=x) for x in all_slaves]
+    self.PatchObject(stage, '_GetSlaveConfigs', return_value=configs)
+
+    # Setup builder statuses.
+    stage._run.attrs.manifest_manager = mock.MagicMock()
+    statuses = {}
+    for x in failing:
+      statuses[x] = manifest_version.BuilderStatus(
+          constants.BUILDER_STATUS_FAILED, message=None)
+    for x in inflight:
+      statuses[x] = manifest_version.BuilderStatus(
+          constants.BUILDER_STATUS_INFLIGHT, message=None)
+    if self._run.config.master:
+      self.PatchObject(stage._run.attrs.manifest_manager, 'GetBuildersStatus',
+                       return_value=statuses)
+    else:
+      self.PatchObject(stage, '_GetLocalBuildStatus', return_value=statuses)
+
+    # Setup DB and provide list of slave stages.
+    mock_cidb = mock.MagicMock()
+    cidb.CIDBConnectionFactory.SetupMockCidb(mock_cidb)
+    if slave_stages is None:
+      slave_stages = []
+      critical_stages = (
+          completion_stages.CommitQueueCompletionStage._CRITICAL_STAGES)
+      for stage_name, slave in itertools.product(critical_stages, all_slaves):
+        slave_stages.append({'name': stage_name,
+                             'build_config': slave,
+                             'status': constants.BUILDER_STATUS_PASSED})
+    self.PatchObject(mock_cidb, 'GetSlaveStages', return_value=slave_stages)
+
+
+    # Set up SubmitPartialPool to provide a list of changes to look at.
+    if submit_partial:
+      spmock = self.PatchObject(stage.sync_stage.pool, 'SubmitPartialPool',
+                                return_value=self.other_changes)
+      handlefailure_changes = self.other_changes
+    else:
+      spmock = self.PatchObject(stage.sync_stage.pool, 'SubmitPartialPool',
+                                return_value=self.changes)
+      handlefailure_changes = self.changes
+
+    # Track whether 'HandleSuccess' is called.
+    success_mock = self.PatchObject(stage, 'HandleSuccess')
+
+    # Actually run the stage.
+    if build_passed:
+      stage.PerformStage()
+    else:
+      with self.assertRaises(completion_stages.ImportantBuilderFailedException):
+        stage.PerformStage()
+
+    # Verify the calls.
+    self.assertEqual(success_mock.called, build_passed)
+
+    if not build_passed and self._run.config.master:
+      self.tot_sanity_mock.assert_called_once_with(mock.ANY, mock.ANY)
+
+      if alert:
+        self.alert_email_mock.called_once_with(
+            mock.ANY, mock.ANY, mock.ANY, mock.ANY)
+
+      self.assertEqual(do_submit_partial, spmock.called)
+
+      if handle_failure:
+        stage.sync_stage.pool.handle_failure_mock.assert_called_once_with(
+            mock.ANY, no_stat=set([]), sanity=sane_tot,
+            changes=handlefailure_changes)
+
+      if handle_timeout:
+        stage.sync_stage.pool.handle_timeout_mock.assert_called_once_with(
+            sanity=mock.ANY, changes=self.changes)
+
+
+# pylint: disable=too-many-ancestors
+class SlaveCommitQueueCompletionStageTest(BaseCommitQueueCompletionStageTest):
+  """Tests how CQ a slave handles changes in CommitQueueCompletionStage."""
+  BOT_ID = 'x86-mario-paladin'
+
+  def testSuccess(self):
+    """Test the slave succeeding."""
+    self.VerifyStage([], [], build_passed=True)
+
+  def testFail(self):
+    """Test the slave failing."""
+    self.VerifyStage(['foo'], [], build_passed=False)
+
+  def testTimeout(self):
+    """Test the slave timing out."""
+    self.VerifyStage([], ['foo'], build_passed=False)
+
+
+class MasterCommitQueueCompletionStageTest(BaseCommitQueueCompletionStageTest):
+  """Tests how CQ master handles changes in CommitQueueCompletionStage."""
+  BOT_ID = 'master-paladin'
+
+  def _Prepare(self, bot_id=BOT_ID, **kwargs):
+    super(MasterCommitQueueCompletionStageTest, self)._Prepare(bot_id, **kwargs)
+    self.assertTrue(self._run.config['master'])
+
+  def testNoInflightBuildersWithInfraFail(self):
+    """Test case where there are no inflight builders but are infra failures."""
+    failing = ['foo']
+    inflight = []
+
+    self.PatchObject(completion_stages.CommitQueueCompletionStage,
+                     '_GetInfraFailMessages', return_value=['msg'])
+    self.PatchObject(completion_stages.CommitQueueCompletionStage,
+                     '_GetBuildersWithNoneMessages', return_value=[])
+    # An alert is sent, since there are infra failures.
+    self.VerifyStage(failing, inflight, submit_partial=True, alert=True)
+
+  def testMissingCriticalStage(self):
+    """Test case where a slave failed to run a critical stage."""
+    self.VerifyStage(['foo'], [], slave_stages=[],
+                     do_submit_partial=False)
+
+  def testFailedCriticalStage(self):
+    """Test case where a slave failed a critical stage."""
+    fake_stages = [{'name': 'CommitQueueSync', 'build_config': 'foo',
+                    'status': constants.BUILDER_STATUS_FAILED}]
+    self.VerifyStage(['foo'], [],
+                     slave_stages=fake_stages, do_submit_partial=False)
+
+  def testMissingCriticalStageOnSanitySlave(self):
+    """Test case where a sanity slave failed to run a critical stage."""
+    # Sanity slaves missing critical stages must not block partial submission.
+    stage = self.ConstructStage()
+    fake_stages = [{'name': 'CommitQueueSync', 'build_config': 'foo',
+                    'status': constants.BUILDER_STATUS_PASSED}]
+    stage._run.config.sanity_check_slaves = ['sanity']
+    self.VerifyStage(['sanity', 'foo'], [], slave_stages=fake_stages,
+                     do_submit_partial=True, stage=stage)
+
+  def testMissingCriticalStageOnTimedOutSanitySlave(self):
+    """Test case where a timed-out sanity slave missed a critical stage."""
+    stage = self.ConstructStage()
+    fake_stages = [{'name': 'CommitQueueSync', 'build_config': 'foo',
+                    'status': constants.BUILDER_STATUS_PASSED}]
+    stage._run.config.sanity_check_slaves = ['sanity']
+    self.VerifyStage(['foo'], ['sanity'], slave_stages=fake_stages,
+                     do_submit_partial=True, stage=stage,
+                     handle_failure=False, handle_timeout=True)
+
+  def testNoInflightBuildersWithNoneFailureMessages(self):
+    """Test case where failed builders reported NoneType messages."""
+    failing = ['foo']
+    inflight = []
+
+    self.PatchObject(completion_stages.CommitQueueCompletionStage,
+                     '_GetInfraFailMessages', return_value=[])
+    self.PatchObject(completion_stages.CommitQueueCompletionStage,
+                     '_GetBuildersWithNoneMessages', return_value=['foo'])
+    # An alert is sent, since NoneType messages are considered infra failures.
+    self.VerifyStage(failing, inflight, submit_partial=True, alert=True)
+
+  def testWithInflightBuildersNoInfraFail(self):
+    """Tests that we don't submit partial pool on non-empty inflight."""
+    failing = ['foo', 'bar']
+    inflight = ['inflight']
+
+    self.PatchObject(completion_stages.CommitQueueCompletionStage,
+                     '_GetInfraFailMessages', return_value=[])
+    self.PatchObject(completion_stages.CommitQueueCompletionStage,
+                     '_GetBuildersWithNoneMessages', return_value=[])
+
+    # An alert is sent, since we have an inflight build still.
+    self.VerifyStage(failing, inflight, handle_failure=False,
+                     handle_timeout=True, alert=True)
+
+  def testSanityFailed(self):
+    """Test case where the sanity builder failed."""
+    # A failing sanity builder alone must not fail the master build.
+    stage = self.ConstructStage()
+    stage._run.config.sanity_check_slaves = ['sanity']
+    self.VerifyStage(['sanity'], [], build_passed=True)
+
+  def testSanityTimeout(self):
+    """Test case where the sanity builder timed out."""
+    # A timed-out sanity builder alone must not fail the master build.
+    stage = self.ConstructStage()
+    stage._run.config.sanity_check_slaves = ['sanity']
+    self.VerifyStage([], ['sanity'], build_passed=True)
+
+  def testGetRelevantChangesForSlave(self):
+    """Tests the logic of GetRelevantChangesForSlaves()."""
+    change_set1 = set(self.GetPatches(how_many=2))
+    change_set2 = set(self.GetPatches(how_many=3))
+    changes = set.union(change_set1, change_set2)
+    no_stat = ['no_stat-paladin']
+    config_map = {'123': 'foo-paladin',
+                  '124': 'bar-paladin',
+                  '125': 'no_stat-paladin'}
+    changes_by_build_id = {'123': change_set1,
+                           '124': change_set2}
+    # If a slave did not report status (no_stat), assume all changes
+    # are relevant.
+    expected = {'foo-paladin': change_set1,
+                'bar-paladin': change_set2,
+                'no_stat-paladin': changes}
+    self.PatchObject(completion_stages.CommitQueueCompletionStage,
+                     '_GetSlaveMappingAndCLActions',
+                     return_value=(config_map, []))
+    self.PatchObject(clactions, 'GetRelevantChangesForBuilds',
+                     return_value=changes_by_build_id)
+
+    stage = self.ConstructStage()
+    results = stage.GetRelevantChangesForSlaves(changes, no_stat)
+    self.assertEqual(results, expected)
+
+  def testWithExponentialFallbackApplied(self):
+    """Tests that we don't treat TOT as sane when it isn't."""
+    # tree_was_open=False models the exponential-fallback case.
+    failing = ['foo', 'bar']
+    inflight = ['inflight']
+    stage = self.ConstructStage(tree_was_open=False)
+    self.PatchObject(completion_stages.CommitQueueCompletionStage,
+                     '_GetInfraFailMessages', return_value=[])
+    self.PatchObject(completion_stages.CommitQueueCompletionStage,
+                     '_GetBuildersWithNoneMessages', return_value=['foo'])
+
+    # An alert is sent, since we have an inflight build still.
+    self.VerifyStage(failing, inflight, handle_failure=False,
+                     handle_timeout=False, sane_tot=False, alert=True,
+                     stage=stage)
+
+  def testGetIrrelevantChanges(self):
+    """Tests the logic of GetIrrelevantChanges()."""
+    change_dict_1 = {
+        cros_patch.ATTR_PROJECT_URL: 'https://host/chromite/tacos',
+        cros_patch.ATTR_PROJECT: 'chromite/tacos',
+        cros_patch.ATTR_REF: 'refs/changes/11/12345/4',
+        cros_patch.ATTR_BRANCH: 'master',
+        cros_patch.ATTR_REMOTE: 'cros-internal',
+        cros_patch.ATTR_COMMIT: '7181e4b5e182b6f7d68461b04253de095bad74f9',
+        cros_patch.ATTR_CHANGE_ID: 'I47ea30385af60ae4cc2acc5d1a283a46423bc6e1',
+        cros_patch.ATTR_GERRIT_NUMBER: '12345',
+        cros_patch.ATTR_PATCH_NUMBER: '4',
+        cros_patch.ATTR_OWNER_EMAIL: 'foo@chromium.org',
+        cros_patch.ATTR_FAIL_COUNT: 1,
+        cros_patch.ATTR_PASS_COUNT: 1,
+        cros_patch.ATTR_TOTAL_FAIL_COUNT: 3}
+    change_dict_2 = {
+        cros_patch.ATTR_PROJECT_URL: 'https://host/chromite/foo',
+        cros_patch.ATTR_PROJECT: 'chromite/foo',
+        cros_patch.ATTR_REF: 'refs/changes/11/12344/3',
+        cros_patch.ATTR_BRANCH: 'master',
+        cros_patch.ATTR_REMOTE: 'cros-internal',
+        cros_patch.ATTR_COMMIT: 'cf23df2207d99a74fbe169e3eba035e633b65d94',
+        cros_patch.ATTR_CHANGE_ID: 'Iab9bf08b9b9bd4f72721cfc36e843ed302aca11a',
+        cros_patch.ATTR_GERRIT_NUMBER: '12344',
+        cros_patch.ATTR_PATCH_NUMBER: '3',
+        cros_patch.ATTR_OWNER_EMAIL: 'foo@chromium.org',
+        cros_patch.ATTR_FAIL_COUNT: 0,
+        cros_patch.ATTR_PASS_COUNT: 0,
+        cros_patch.ATTR_TOTAL_FAIL_COUNT: 1}
+    change_1 = cros_patch.GerritFetchOnlyPatch.FromAttrDict(change_dict_1)
+    change_2 = cros_patch.GerritFetchOnlyPatch.FromAttrDict(change_dict_2)
+
+    # change_1 irrelevant on both boards; change_2 only on board-1.
+    board_metadata_1 = {
+        'board-1': {'info':'foo', 'irrelevant_changes': [change_dict_1,
+                                                         change_dict_2]},
+        'board-2': {'info':'foo', 'irrelevant_changes': [change_dict_1]}
+    }
+    # Disjoint per-board sets: nothing is irrelevant everywhere.
+    board_metadata_2 = {
+        'board-1': {'info':'foo', 'irrelevant_changes': [change_dict_1]},
+        'board-2': {'info':'foo', 'irrelevant_changes': [change_dict_2]}
+    }
+    # board-2 finds nothing irrelevant, so the intersection is empty.
+    board_metadata_3 = {
+        'board-1': {'info':'foo', 'irrelevant_changes': [change_dict_1,
+                                                         change_dict_2]},
+        'board-2': {'info':'foo', 'irrelevant_changes': []}
+    }
+    # board-2 reports no 'irrelevant_changes' key at all.
+    board_metadata_4 = {
+        'board-1': {'info':'foo', 'irrelevant_changes': [change_dict_1,
+                                                         change_dict_2]},
+        'board-2': {'info':'foo'}
+    }
+    board_metadata_5 = {}
+    # Single board: its whole set is the intersection.
+    board_metadata_6 = {
+        'board-1': {'info':'foo', 'irrelevant_changes': [change_dict_1,
+                                                         change_dict_2]},
+    }
+    stage = self.ConstructStage()
+    self.assertEqual(stage.GetIrrelevantChanges(board_metadata_1), {change_1})
+    self.assertEqual(stage.GetIrrelevantChanges(board_metadata_2), set())
+    self.assertEqual(stage.GetIrrelevantChanges(board_metadata_3), set())
+    self.assertEqual(stage.GetIrrelevantChanges(board_metadata_4), set())
+    self.assertEqual(stage.GetIrrelevantChanges(board_metadata_5), set())
+    self.assertEqual(stage.GetIrrelevantChanges(board_metadata_6), {change_1,
+                                                                    change_2})
+
+class PublishUprevChangesStageTest(
+    generic_stages_unittest.AbstractStageTestCase):
+  """Tests for the PublishUprevChanges stage."""
+
+  def setUp(self):
+    self.PatchObject(completion_stages.PublishUprevChangesStage,
+                     '_GetPortageEnvVar')
+    self.PatchObject(completion_stages.PublishUprevChangesStage,
+                     '_ExtractOverlays', return_value=[['foo'], ['bar']])
+    self.push_mock = self.PatchObject(commands, 'UprevPush')
+
+  def ConstructStage(self):
+    return completion_stages.PublishUprevChangesStage(self._run, success=True)
+
+  def testPush(self):
+    """Test values for PublishUprevChanges."""
+    self._Prepare(extra_config={'build_type': constants.BUILD_FROM_SOURCE_TYPE,
+                                'push_overlays': constants.PUBLIC_OVERLAYS,
+                                'master': True},
+                  extra_cmd_args=['--chrome_rev', constants.CHROME_REV_TOT])
+    self._run.options.prebuilts = True
+    self.RunStage()
+    self.push_mock.assert_called_once_with(self.build_root, ['bar'], False)
diff --git a/cbuildbot/stages/generic_stages.py b/cbuildbot/stages/generic_stages.py
new file mode 100644
index 0000000..5fb58c9
--- /dev/null
+++ b/cbuildbot/stages/generic_stages.py
@@ -0,0 +1,889 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the generic stages."""
+
+from __future__ import print_function
+
+import contextlib
+import fnmatch
+import json
+import os
+import re
+import sys
+import time
+import traceback
+
+# We import mox so that we can identify mox exceptions and pass them through
+# in our exception handling code.
+try:
+  import mox
+except ImportError:
+  mox = None
+
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import results_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import repository
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import portage_util
+from chromite.lib import retry_util
+from chromite.lib import timeout_util
+
+
+class BuilderStage(object):
+  """Parent class for stages to be performed by a builder."""
+  # Used to remove 'Stage' suffix of stage class when generating stage name.
+  name_stage_re = re.compile(r'(\w+)Stage')
+
+  # TODO(sosa): Remove these once we have a SEND/RECIEVE IPC mechanism
+  # implemented.
+  overlays = None
+  push_overlays = None
+
+  # Class should set this if they have a corresponding no<stage> option that
+  # skips their stage.
+  # TODO(mtennant): Rename this something like skip_option_name.
+  option_name = None
+
+  # Class should set this if they have a corresponding setting in
+  # the build_config that skips their stage.
+  # TODO(mtennant): Rename this something like skip_config_name.
+  config_name = None
+
+  @classmethod
+  def StageNamePrefix(cls):
+    """Return cls.__name__ with any 'Stage' suffix removed."""
+    match = cls.name_stage_re.match(cls.__name__)
+    assert match, 'Class name %s does not end with Stage' % cls.__name__
+    return match.group(1)
+
+  def __init__(self, builder_run, suffix=None, attempt=None, max_retry=None):
+    """Create a builder stage.
+
+    Args:
+      builder_run: The BuilderRun object for the run this stage is part of.
+      suffix: The suffix to append to the buildbot name. Defaults to None.
+      attempt: If this build is to be retried, the current attempt number
+        (starting from 1). Defaults to None. Is only valid if |max_retry| is
+        also specified.
+      max_retry: The maximum number of retries. Defaults to None. Is only valid
+        if |attempt| is also specified.
+    """
+    self._run = builder_run
+
+    self._attempt = attempt
+    self._max_retry = max_retry
+    # cidb row id for this stage; stays None until _InsertBuildStageInCIDB.
+    self._build_stage_id = None
+
+    # Construct self.name, the name string for this stage instance.
+    self.name = self._prefix = self.StageNamePrefix()
+    if suffix:
+      self.name += suffix
+
+    # TODO(mtennant): Phase this out and use self._run.bot_id directly.
+    self._bot_id = self._run.bot_id
+
+    # self._boards holds list of boards involved in this run.
+    # TODO(mtennant): Replace self._boards with a self._run.boards?
+    self._boards = self._run.config.boards
+
+    # TODO(mtennant): Try to rely on just self._run.buildroot directly, if
+    # the os.path.abspath can be applied there instead.
+    self._build_root = os.path.abspath(self._run.buildroot)
+    self._prebuilt_type = None
+    if self._run.ShouldUploadPrebuilts():
+      self._prebuilt_type = self._run.config.build_type
+
+    # Determine correct chrome_rev: the command-line option wins over config.
+    self._chrome_rev = self._run.config.chrome_rev
+    if self._run.options.chrome_rev:
+      self._chrome_rev = self._run.options.chrome_rev
+
+    # USE and environment variable settings.
+    self._portage_extra_env = {}
+    useflags = self._run.config.useflags[:]
+
+    # Clobber builds ignore the preflight binhost.
+    if self._run.options.clobber:
+      self._portage_extra_env['IGNORE_PREFLIGHT_BINHOST'] = '1'
+
+    if self._run.options.chrome_root:
+      self._portage_extra_env['CHROME_ORIGIN'] = 'LOCAL_SOURCE'
+
+    self._latest_toolchain = (self._run.config.latest_toolchain or
+                              self._run.options.latest_toolchain)
+    if self._latest_toolchain and self._run.config.gcc_githash:
+      useflags.append('git_gcc')
+      self._portage_extra_env['GCC_GITHASH'] = self._run.config.gcc_githash
+
+    if useflags:
+      self._portage_extra_env['USE'] = ' '.join(useflags)
+
+    if self._run.config.separate_debug_symbols:
+      self._portage_extra_env['FEATURES'] = 'separatedebug'
+
+    # Note: BuildStartStage is a special case: Since it is created before we
+    # have a valid |build_id|, it is not logged in cidb.
+    self._InsertBuildStageInCIDB(name=self.name)
+
+  def GetStageNames(self):
+    """Get a list of the places where this stage has recorded results."""
+    return [self.name]
+
+  def GetBuildStageIDs(self):
+    """Get a list of build stage ids in cidb corresponding to this stage."""
+    return [self._build_stage_id] if self._build_stage_id is not None else []
+
+  def UpdateSuffix(self, tag, child_suffix):
+    """Update the suffix arg for the init call.
+
+    Use this function to concatenate the tag for the current class with the
+    suffix passed in by a child class.
+    This function is expected to be called before __init__, and as such should
+    not use any object attributes.
+
+    Args:
+      tag: The tag for this class. Should not be None.
+      child_suffix: The suffix passed up by the child class. May be None.
+
+    Returns:
+      Extended suffix that incoroporates the tag, to be passed up to the parent
+      class's __init__.
+    """
+    if child_suffix is None:
+      child_suffix = ''
+    return ' [%s]%s' % (tag, child_suffix)
+
+  # TODO(akeshet): Eliminate this method and update the callers to use
+  # builder run directly.
+  def ConstructDashboardURL(self, stage=None):
+    """Return the dashboard URL
+
+    This is the direct link to buildbot logs as seen in build.chromium.org
+
+    Args:
+      stage: Link to a specific |stage|, otherwise the general buildbot log
+
+    Returns:
+      The fully formed URL
+    """
+    return self._run.ConstructDashboardURL(stage=stage)
+
+  def _InsertBuildStageInCIDB(self, **kwargs):
+    """Insert a build stage in cidb.
+
+    Expected arguments are the same as cidb.InsertBuildStage, except
+    |build_id|, which is populated here.
+    """
+    build_id, db = self._run.GetCIDBHandle()
+    # If there is no db handle, self._build_stage_id stays None and the
+    # stage is simply not recorded.
+    if db:
+      kwargs['build_id'] = build_id
+      self._build_stage_id = db.InsertBuildStage(**kwargs)
+
+  def _FinishBuildStageInCIDB(self, status):
+    """Mark the stage as finished in cidb.
+
+    Args:
+      status: The finish status of the build. Enum type
+          constants.BUILDER_COMPLETED_STATUSES
+    """
+    _, db = self._run.GetCIDBHandle()
+    if self._build_stage_id is not None and db is not None:
+      db.FinishBuildStage(self._build_stage_id, status)
+
+  def _TranslateResultToCIDBStatus(self, result):
+    """Translates the different result_lib.Result results to builder statuses.
+
+    Args:
+      result: Same as the result passed to results_lib.Result.Record()
+
+    Returns:
+      A value in the enum constants.BUILDER_ALL_STATUSES.
+    """
+    if result == results_lib.Results.SUCCESS:
+      return constants.BUILDER_STATUS_PASSED
+    elif result == results_lib.Results.FORGIVEN:
+      return constants.BUILDER_STATUS_FORGIVEN
+    elif result == results_lib.Results.SKIPPED:
+      return constants.BUILDER_STATUS_SKIPPED
+    else:
+      return constants.BUILDER_STATUS_FAILED
+
+  def _ExtractOverlays(self):
+    """Extracts list of overlays into class."""
+    overlays = portage_util.FindOverlays(
+        self._run.config.overlays, buildroot=self._build_root)
+    push_overlays = portage_util.FindOverlays(
+        self._run.config.push_overlays, buildroot=self._build_root)
+
+    # Sanity checks.
+    # We cannot push to overlays that we don't rev.
+    assert set(push_overlays).issubset(set(overlays))
+    # Either has to be a master or not have any push overlays.
+    assert self._run.config.master or not push_overlays
+
+    return overlays, push_overlays
+
+  def GetRepoRepository(self, **kwargs):
+    """Create a new repo repository object."""
+    manifest_url = self._run.options.manifest_repo_url
+    if manifest_url is None:
+      manifest_url = self._run.config.manifest_repo_url
+
+    manifest_branch = self._run.config.manifest_branch
+    if manifest_branch is None:
+      manifest_branch = self._run.manifest_branch
+
+    kwargs.setdefault('referenced_repo', self._run.options.reference_repo)
+    kwargs.setdefault('branch', manifest_branch)
+    kwargs.setdefault('manifest', self._run.config.manifest)
+
+    return repository.RepoRepository(manifest_url, self._build_root, **kwargs)
+
+  def _Print(self, msg):
+    """Prints a msg to stderr."""
+    sys.stdout.flush()
+    print(msg, file=sys.stderr)
+    sys.stderr.flush()
+
+  def _PrintLoudly(self, msg):
+    """Prints a msg with loudly."""
+
+    border_line = '*' * 60
+    edge = '*' * 2
+
+    sys.stdout.flush()
+    print(border_line, file=sys.stderr)
+
+    msg_lines = msg.split('\n')
+
+    # If the last line is whitespace only drop it.
+    if not msg_lines[-1].rstrip():
+      del msg_lines[-1]
+
+    for msg_line in msg_lines:
+      print('%s %s' % (edge, msg_line), file=sys.stderr)
+
+    print(border_line, file=sys.stderr)
+    sys.stderr.flush()
+
+  def _GetPortageEnvVar(self, envvar, board):
+    """Get a portage environment variable for the configuration's board.
+
+    Args:
+      envvar: The environment variable to get. E.g. 'PORTAGE_BINHOST'.
+      board: The board to apply, if any.  Specify None to use host.
+
+    Returns:
+      The value of the environment variable, as a string. If no such variable
+      can be found, return the empty string.
+    """
+    cwd = os.path.join(self._build_root, 'src', 'scripts')
+    if board:
+      portageq = 'portageq-%s' % board
+    else:
+      portageq = 'portageq'
+    binhost = cros_build_lib.RunCommand(
+        [portageq, 'envvar', envvar], cwd=cwd, redirect_stdout=True,
+        enter_chroot=True, error_code_ok=True)
+    return binhost.output.rstrip('\n')
+
+  def _GetSlaveConfigs(self):
+    """Get the slave configs for the current build config.
+
+    This assumes self._run.config is a master config.
+
+    Returns:
+      A list of build configs corresponding to the slaves for the master
+        build config at self._run.config.
+
+    Raises:
+      See config_lib.Config.GetSlavesForMaster for details.
+    """
+    return self._run.site_config.GetSlavesForMaster(
+        self._run.config, self._run.options)
+
+  def _Begin(self):
+    """Can be overridden.  Called before a stage is performed."""
+
+    # Tell the buildbot we are starting a new step for the waterfall
+    logging.PrintBuildbotStepName(self.name)
+
+    self._PrintLoudly('Start Stage %s - %s\n\n%s' % (
+        self.name, cros_build_lib.UserDateTimeFormat(), self.__doc__))
+
+  def _Finish(self):
+    """Can be overridden.  Called after a stage has been performed."""
+    self._PrintLoudly('Finished Stage %s - %s' %
+                      (self.name, cros_build_lib.UserDateTimeFormat()))
+
+  def PerformStage(self):
+    """Run the actual commands needed for this stage.
+
+    Subclassed stages must override this function.  The base implementation
+    is intentionally a no-op.
+    """
+
+  def _HandleExceptionAsSuccess(self, _exc_info):
+    """Use instead of HandleStageException to ignore an exception."""
+    return results_lib.Results.SUCCESS, None
+
+  @staticmethod
+  def _StringifyException(exc_info):
+    """Convert an exception into a string.
+
+    Args:
+      exc_info: A (type, value, traceback) tuple as returned by sys.exc_info().
+
+    Returns:
+      A string description of the exception.
+    """
+    exc_type, exc_value = exc_info[:2]
+    if issubclass(exc_type, failures_lib.StepFailure):
+      return str(exc_value)
+    else:
+      return ''.join(traceback.format_exception(*exc_info))
+
+  @classmethod
+  def _HandleExceptionAsWarning(cls, exc_info, retrying=False):
+    """Use instead of HandleStageException to treat an exception as a warning.
+
+    This is used by the ForgivingBuilderStage's to treat any exceptions as
+    warnings instead of stage failures.
+
+    Args:
+      exc_info: A (type, value, traceback) tuple as returned by sys.exc_info().
+      retrying: Whether the stage will be retried after this warning.
+
+    Returns:
+      Result tuple of (result, description, retrying).
+    """
+    description = cls._StringifyException(exc_info)
+    # Mark the buildbot step as a warning, not a failure.
+    logging.PrintBuildbotStepWarnings()
+    logging.warning(description)
+    return (results_lib.Results.FORGIVEN, description, retrying)
+
+  @classmethod
+  def _HandleExceptionAsError(cls, exc_info):
+    """Handle an exception as an error, but ignore stage retry settings.
+
+    Meant as a helper for _HandleStageException code only.
+
+    Args:
+      exc_info: A (type, value, traceback) tuple as returned by sys.exc_info().
+
+    Returns:
+      Result tuple of (exception, description, retrying).
+    """
+    # Tell the user about the exception, and record it.
+    retrying = False
+    description = cls._StringifyException(exc_info)
+    logging.PrintBuildbotStepFailure()
+    logging.error(description)
+    return (exc_info[1], description, retrying)
+
+  def _HandleStageException(self, exc_info):
+    """Called when PerformStage throws an exception.  Can be overriden.
+
+    Args:
+      exc_info: A (type, value, traceback) tuple as returned by sys.exc_info().
+
+    Returns:
+      Result tuple of (exception, description, retrying).  If it isn't an
+      exception, then description will be None.
+    """
+    if self._attempt and self._max_retry and self._attempt <= self._max_retry:
+      return self._HandleExceptionAsWarning(exc_info, retrying=True)
+    else:
+      return self._HandleExceptionAsError(exc_info)
+
  def _TopHandleStageException(self):
    """Called when PerformStage throws an unhandled exception.

    Should only be called by the Run function.  Provides a wrapper around
    _HandleStageException to handle buggy handlers.  We must go deeper...

    Returns:
      Result tuple of (exception, description, retrying) from the handler,
      falling back to _HandleExceptionAsError if the handler itself throws.
    """
    exc_info = sys.exc_info()
    try:
      return self._HandleStageException(exc_info)
    except Exception:
      # A subclass's handler is itself broken.  Log both the original
      # exception and the handler's exception, then fall back to default
      # error handling so the stage still reports a result.
      logging.error(
          'An exception was thrown while running _HandleStageException')
      logging.error('The original exception was:', exc_info=exc_info)
      logging.error('The new exception is:', exc_info=True)
      return self._HandleExceptionAsError(exc_info)
+
+  def HandleSkip(self):
+    """Run if the stage is skipped."""
+    pass
+
  def _RecordResult(self, *args, **kwargs):
    """Record a successful or failed result in the global results log.

    Thin wrapper around results_lib.Results.Record; subclasses (e.g.
    BoardSpecificBuilderStage) override it to inject extra keyword args.
    """
    results_lib.Results.Record(*args, **kwargs)
+
  def Run(self):
    """Have the builder execute the stage."""
    # Mark the stage as started in cidb, if we have both a connection and a
    # previously-inserted stage row.
    _, db = self._run.GetCIDBHandle()
    if self._build_stage_id is not None and db is not None:
      db.StartBuildStage(self._build_stage_id)

    # See if this stage should be skipped.  Parses as
    # (option set but disabled) or (config key set but disabled).
    if (self.option_name and not getattr(self._run.options, self.option_name) or
        self.config_name and not getattr(self._run.config, self.config_name)):
      self._PrintLoudly('Not running Stage %s' % self.name)
      self.HandleSkip()
      self._RecordResult(self.name, results_lib.Results.SKIPPED,
                         prefix=self._prefix)
      self._FinishBuildStageInCIDB(constants.BUILDER_STATUS_SKIPPED)
      return

    record = results_lib.Results.PreviouslyCompletedRecord(self.name)
    if record:
      # Success is stored in the results log for a stage that completed
      # successfully in a previous run.
      self._PrintLoudly('Stage %s processed previously' % self.name)
      self.HandleSkip()
      self._RecordResult(self.name, results_lib.Results.SUCCESS,
                         prefix=self._prefix, board=record.board,
                         time=float(record.time))
      self._FinishBuildStageInCIDB(constants.BUILDER_STATUS_SKIPPED)
      return

    start_time = time.time()

    # Set default values
    result = results_lib.Results.SUCCESS
    description = None

    sys.stdout.flush()
    sys.stderr.flush()
    self._Begin()
    try:
      # TODO(davidjames): Verify that PerformStage always returns None. See
      # crbug.com/264781
      self.PerformStage()
    except SystemExit as e:
      # Only a non-zero exit code counts as a failure; sys.exit(0) is a
      # normal termination and is re-raised without recording an error.
      if e.code != 0:
        result, description, retrying = self._TopHandleStageException()

      raise
    except Exception as e:
      # Never swallow mox (unit-test framework) errors; re-raise untouched.
      if mox is not None and isinstance(e, mox.Error):
        raise

      # Tell the build bot this step failed for the waterfall.
      result, description, retrying = self._TopHandleStageException()
      if result not in (results_lib.Results.FORGIVEN,
                        results_lib.Results.SUCCESS):
        raise failures_lib.StepFailure()
      elif retrying:
        raise failures_lib.RetriableStepFailure()
    except BaseException:
      # e.g. KeyboardInterrupt: record it, then let it propagate.
      result, description, retrying = self._TopHandleStageException()
      raise
    finally:
      # Always record the outcome (even when an exception is propagating)
      # so the results log and cidb reflect what actually happened.
      elapsed_time = time.time() - start_time
      self._RecordResult(self.name, result, description, prefix=self._prefix,
                         time=elapsed_time)
      self._FinishBuildStageInCIDB(self._TranslateResultToCIDBStatus(result))
      if isinstance(result, BaseException) and self._build_stage_id is not None:
        _, db = self._run.GetCIDBHandle()
        if db:
          failures_lib.ReportStageFailureToCIDB(db,
                                                self._build_stage_id,
                                                result)
      self._Finish()
      sys.stdout.flush()
      sys.stderr.flush()
+
+
class NonHaltingBuilderStage(BuilderStage):
  """Build stage that fails a build but finishes the other steps."""

  def Run(self):
    """Run the stage, logging (rather than propagating) StepFailures."""
    try:
      super(NonHaltingBuilderStage, self).Run()
    except failures_lib.StepFailure:
      # The failure is already recorded in the results log by the base
      # class; suppress it here so later stages still run.
      logging.error('Ignoring StepFailure in %s', self.__class__.__name__)
+
+
class ForgivingBuilderStage(BuilderStage):
  """Build stage that turns a build step red but not a build."""

  def _HandleStageException(self, exc_info):
    """Report any exception as FORGIVEN (warning) instead of FAIL."""
    return self._HandleExceptionAsWarning(exc_info)
+
+
class RetryStage(object):
  """Retry a given stage multiple times to see if it passes."""

  def __init__(self, builder_run, max_retry, stage, *args, **kwargs):
    """Create a RetryStage object.

    Args:
      builder_run: See arguments to BuilderStage.__init__()
      max_retry: The number of times to try the given stage.
      stage: The stage class to create.
      *args: A list of arguments to pass to the stage constructor.
      **kwargs: A list of keyword arguments to pass to the stage constructor.
    """
    self._run = builder_run
    self.max_retry = max_retry
    self.stage = stage
    # The wrapped stage always receives the builder_run as its first arg.
    self.args = (builder_run,) + args
    self.kwargs = kwargs
    self.names = []
    self._build_stage_ids = []
    self.attempt = None

  def GetStageNames(self):
    """Get a list of the places where this stage has recorded results."""
    return list(self.names)

  def GetBuildStageIDs(self):
    """Get a list of build stage ids in cidb corresponding to this stage."""
    return list(self._build_stage_ids)

  def _PerformStage(self):
    """Run the stage once, incrementing the attempt number as needed."""
    attempt_stage = self.stage(
        *self.args, attempt=self.attempt, max_retry=self.max_retry,
        suffix=' (attempt %d)' % (self.attempt,), **self.kwargs)
    # Record names/ids before running, so they are captured even if the
    # attempt raises.
    self.names.extend(attempt_stage.GetStageNames())
    self._build_stage_ids.extend(attempt_stage.GetBuildStageIDs())
    self.attempt += 1
    attempt_stage.Run()

  def Run(self):
    """Retry the given stage multiple times to see if it passes."""
    self.attempt = 1
    retry_util.RetryException(
        failures_lib.RetriableStepFailure, self.max_retry, self._PerformStage)
+
+
class RepeatStage(object):
  """Run a given stage multiple times to see if it fails."""

  def __init__(self, builder_run, count, stage, *args, **kwargs):
    """Create a RepeatStage object.

    Args:
      builder_run: See arguments to BuilderStage.__init__()
      count: The number of times to try the given stage.
      stage: The stage class to create.
      *args: A list of arguments to pass to the stage constructor.
      **kwargs: A list of keyword arguments to pass to the stage constructor.
    """
    self._run = builder_run
    self.count = count
    self.stage = stage
    # The wrapped stage always receives the builder_run as its first arg.
    self.args = (builder_run,) + args
    self.kwargs = kwargs
    self.names = []
    self._build_stage_ids = []
    self.attempt = None

  def GetStageNames(self):
    """Get a list of the places where this stage has recorded results."""
    return list(self.names)

  def GetBuildStageIDs(self):
    """Get a list of build stage ids in cidb corresponding to this stage."""
    return list(self._build_stage_ids)

  def _PerformStage(self):
    """Run the stage once."""
    repeat_stage = self.stage(
        *self.args, attempt=self.attempt,
        suffix=' (attempt %d)' % (self.attempt,), **self.kwargs)
    self.names.extend(repeat_stage.GetStageNames())
    self._build_stage_ids.extend(repeat_stage.GetBuildStageIDs())
    repeat_stage.Run()

  def Run(self):
    """Run the given stage self.count times, numbering attempts from 1."""
    for attempt in range(1, self.count + 1):
      self.attempt = attempt
      self._PerformStage()
+
+
class BoardSpecificBuilderStage(BuilderStage):
  """Builder stage that is specific to a board.

  The following attributes are provided on self:
    _current_board: The active board for this stage.
    board_runattrs: BoardRunAttributes object for this stage.
  """

  def __init__(self, builder_run, board, suffix=None, **kwargs):
    if not isinstance(board, basestring):
      raise TypeError('Expected string, got %r' % (board,))

    self._current_board = board
    self.board_runattrs = builder_run.GetBoardRunAttrs(board)

    # When a single builder covers several boards (or grouped configs), tag
    # the stage name with the board so results stay distinguishable.
    multi_board = len(builder_run.config.boards) > 1
    if multi_board or builder_run.config.grouped:
      suffix = self.UpdateSuffix(board, suffix)

    super(BoardSpecificBuilderStage, self).__init__(builder_run, suffix=suffix,
                                                    **kwargs)

  def _RecordResult(self, *args, **kwargs):
    """Record a result, defaulting the board to self._current_board."""
    kwargs.setdefault('board', self._current_board)
    super(BoardSpecificBuilderStage, self)._RecordResult(*args, **kwargs)

  def _InsertBuildStageInCIDB(self, **kwargs):
    """Insert a build stage in cidb, tagged with this stage's board."""
    kwargs.setdefault('board', self._current_board)
    super(BoardSpecificBuilderStage, self)._InsertBuildStageInCIDB(**kwargs)

  def GetParallel(self, board_attr, timeout=None, pretty_name=None):
    """Wait for given |board_attr| to show up.

    Args:
      board_attr: A valid board runattribute name.
      timeout: Timeout in seconds.  None value means wait forever.
      pretty_name: Optional name to use instead of raw board_attr in
        log messages.

    Returns:
      Value of board_attr found.

    Raises:
      AttrTimeoutError if timeout occurs.
    """
    if timeout is None:
      timeout_str = 'forever'
    else:
      # Round seconds to the nearest whole minute for the log message.
      timeout_str = '%d minutes' % int((timeout / 60) + 0.5)

    logging.info('Waiting up to %s for %s ...', timeout_str,
                 board_attr if pretty_name is None else pretty_name)
    return self.board_runattrs.GetParallel(board_attr, timeout=timeout)

  def GetImageDirSymlink(self, pointer='latest-cbuildbot'):
    """Get the location of the current image."""
    return os.path.join(self._run.buildroot, 'src', 'build', 'images',
                        self._current_board, pointer)
+
+
class ArchivingStageMixin(object):
  """Stage with utilities for uploading artifacts.

  This provides functionality for doing archiving.  All it needs is access
  to the BuilderRun object at self._run.  No __init__ needed.

  Attributes:
    acl: GS ACL to use for uploads.
    archive: Archive object.
    archive_path: Local path where archives are kept for this run.  Also copy
      of self.archive.archive_path.
    download_url: The URL where artifacts for this run can be downloaded.
      Also copy of self.archive.download_url.
    upload_url: The Google Storage location where artifacts for this run should
      be uploaded.  Also copy of self.archive.upload_url.
    version: Copy of self.archive.version.
  """

  # Number of background worker processes used by ArtifactUploader.
  PROCESSES = 10

  @property
  def archive(self):
    """Retrieve the Archive object to use."""
    # pylint: disable=W0201
    # Lazily cache the Archive object on first access; this mixin has no
    # __init__ of its own to set it up.
    if not hasattr(self, '_archive'):
      self._archive = self._run.GetArchive()

    return self._archive

  @property
  def acl(self):
    """Retrieve GS ACL to use for uploads."""
    return self.archive.upload_acl

  # TODO(mtennant): Get rid of this property.
  @property
  def version(self):
    """Retrieve the ChromeOS version for the archiving."""
    return self.archive.version

  @property
  def archive_path(self):
    """Local path where archives are kept for this run."""
    return self.archive.archive_path

  # TODO(mtennant): Rename base_archive_path.
  @property
  def bot_archive_root(self):
    """Path of directory one level up from self.archive_path."""
    return os.path.dirname(self.archive_path)

  @property
  def upload_url(self):
    """The GS location where artifacts should be uploaded for this run."""
    return self.archive.upload_url

  @property
  def download_url(self):
    """The URL where artifacts for this run can be downloaded."""
    return self.archive.download_url

  @contextlib.contextmanager
  def ArtifactUploader(self, queue=None, archive=True, strict=True):
    """Upload each queued input in the background.

    This context manager starts a set of workers in the background, who each
    wait for input on the specified queue. These workers run
    self.UploadArtifact(*args, archive=archive) for each input in the queue.

    Args:
      queue: Queue to use. Add artifacts to this queue, and they will be
        uploaded in the background.  If None, one will be created on the fly.
      archive: Whether to automatically copy files to the archive dir.
      strict: Whether to treat upload errors as fatal.

    Returns:
      The queue to use. This is only useful if you did not supply a queue.
    """
    upload = lambda path: self.UploadArtifact(path, archive, strict)
    with parallel.BackgroundTaskRunner(upload, queue=queue,
                                       processes=self.PROCESSES) as bg_queue:
      yield bg_queue

  def PrintDownloadLink(self, filename, prefix='', text_to_display=None):
    """Print a link to an artifact in Google Storage.

    Args:
      filename: The filename of the uploaded file.
      prefix: The prefix to put in front of the filename.
      text_to_display: Text to display. If None, use |prefix| + |filename|.
    """
    url = '%s/%s' % (self.download_url.rstrip('/'), filename)
    if not text_to_display:
      text_to_display = '%s%s' % (prefix, filename)
    logging.PrintBuildbotLink(text_to_display, url)

  def _IsInUploadBlacklist(self, filename):
    """Check if this file is blacklisted to go into a board's extra buckets.

    Args:
      filename: The filename of the file we want to check is in the blacklist.

    Returns:
      True if the file is blacklisted, False otherwise.
    """
    # Blacklist entries are fnmatch-style glob patterns.
    for blacklisted_file in constants.EXTRA_BUCKETS_FILES_BLACKLIST:
      if fnmatch.fnmatch(filename, blacklisted_file):
        return True
    return False

  def _GetUploadUrls(self, filename, builder_run=None):
    """Returns a list of all urls for which to upload filename to.

    Args:
      filename: The filename of the file we want to upload.
      builder_run: builder_run object from which to get the board, base upload
                   url, and bot_id. If none, this stage's values.

    Returns:
      List of URL strings: the primary upload_url, plus any board-specific
      extra upload urls declared in the overlay's scripts/artifacts.json.
    """
    board = None
    urls = [self.upload_url]
    bot_id = self._bot_id
    if builder_run:
      # Use the other run's archive location instead of this stage's.
      urls = [builder_run.GetArchive().upload_url]
      bot_id = builder_run.GetArchive().bot_id
      if (builder_run.config['boards'] and
          len(builder_run.config['boards']) == 1):
        board = builder_run.config['boards'][0]
    if (not self._IsInUploadBlacklist(filename) and
        (hasattr(self, '_current_board') or board)):
      if self._run.config.pre_cq:
        # Do not load artifacts.json for pre-cq configs. This is a
        # workaround for crbug.com/440167.
        return urls

      board = board or self._current_board
      custom_artifacts_file = portage_util.ReadOverlayFile(
          'scripts/artifacts.json', board=board)
      if custom_artifacts_file is not None:
        # NOTE(review): despite the name, json_file is the parsed dict.
        json_file = json.loads(custom_artifacts_file)
        for url in json_file.get('extra_upload_urls', []):
          urls.append('/'.join([url, bot_id, self.version]))
    return urls

  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
  def UploadArtifact(self, path, archive=True, strict=True):
    """Upload generated artifact to Google Storage.

    Args:
      path: Path of local file to upload to Google Storage
        if |archive| is True. Otherwise, this is the name of the file
        in self.archive_path.
      archive: Whether to automatically copy files to the archive dir.
      strict: Whether to treat upload errors as fatal.
    """
    filename = path
    if archive:
      filename = commands.ArchiveFile(path, self.archive_path)
    upload_urls = self._GetUploadUrls(filename)
    try:
      commands.UploadArchivedFile(
          self.archive_path, upload_urls, filename, self._run.debug,
          update_list=True, acl=self.acl)
    except failures_lib.GSUploadFailure as e:
      logging.PrintBuildbotStepText('Upload failed')
      # Whitelisted (infrastructure) GS failures are always fatal; others
      # are fatal only in strict mode.
      if e.HasFatalFailure(
          whitelist=[gs.GSContextException, timeout_util.TimeoutError]):
        raise
      elif strict:
        raise
      else:
        # Treat gsutil flake as a warning if it's the only problem.
        self._HandleExceptionAsWarning(sys.exc_info())

  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
  def UploadMetadata(self, upload_queue=None, filename=None):
    """Create and upload JSON file of the builder run's metadata, and to cidb.

    This uses the existing metadata stored in the builder run. The default
    metadata.json file should only be uploaded once, at the end of the run,
    and considered immutable. During the build, intermediate metadata snapshots
    can be uploaded to other files, such as partial-metadata.json.

    This method also updates the metadata in the cidb database, if there is a
    valid cidb connection set up.

    Args:
      upload_queue: If specified then put the artifact file to upload on
        this queue.  If None then upload it directly now.
      filename: Name of file to dump metadata to.
                Defaults to constants.METADATA_JSON
    """
    filename = filename or constants.METADATA_JSON

    metadata_json = os.path.join(self.archive_path, filename)

    # Stages may run in parallel, so we have to do atomic updates on this.
    logging.info('Writing metadata to %s.', metadata_json)
    osutils.WriteFile(metadata_json, self._run.attrs.metadata.GetJSON(),
                      atomic=True, makedirs=True)

    if upload_queue is not None:
      logging.info('Adding metadata file %s to upload queue.', metadata_json)
      upload_queue.put([filename])
    else:
      logging.info('Uploading metadata file %s now.', metadata_json)
      self.UploadArtifact(filename, archive=False)

    build_id, db = self._run.GetCIDBHandle()
    if db:
      logging.info('Writing updated metadata to database for build_id %s.',
                   build_id)
      db.UpdateMetadata(build_id, self._run.attrs.metadata)
    else:
      logging.info('Skipping database update, no database or build_id.')
diff --git a/cbuildbot/stages/generic_stages_unittest b/cbuildbot/stages/generic_stages_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/stages/generic_stages_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/stages/generic_stages_unittest.py b/cbuildbot/stages/generic_stages_unittest.py
new file mode 100644
index 0000000..4266bc3
--- /dev/null
+++ b/cbuildbot/stages/generic_stages_unittest.py
@@ -0,0 +1,509 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for generic stages."""
+
+from __future__ import print_function
+
+import contextlib
+import copy
+import mock
+import os
+import sys
+import unittest
+
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import chromeos_config
+from chromite.cbuildbot import results_lib
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot.stages import generic_stages
+from chromite.lib import cidb
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import partial_mock
+from chromite.lib import portage_util
+from chromite.scripts import cbuildbot
+
+
# Arbitrary fixed identifiers used to mock buildbot/cidb values in tests.
DEFAULT_BUILD_NUMBER = 1234321
DEFAULT_BUILD_ID = 31337
DEFAULT_BUILD_STAGE_ID = 313377
+
+
+# pylint: disable=protected-access
+
+
# The inheritance order ensures the patchers are stopped before
# cleaning up the temporary directories.
class StageTestCase(cros_test_lib.MockOutputTestCase,
                    cros_test_lib.TempDirTestCase):
  """Test running a single stage in isolation."""

  TARGET_MANIFEST_BRANCH = 'ooga_booga'
  BUILDROOT = 'buildroot'

  # Subclass should override this to default to a different build config
  # for its tests.
  BOT_ID = 'x86-generic-paladin'

  # Subclasses can override this.  If non-None, value is inserted into
  # self._run.attrs.release_tag.
  RELEASE_TAG = None

  def setUp(self):
    # Prepare a fake build root in self.tempdir, save at self.build_root.
    self.build_root = os.path.join(self.tempdir, self.BUILDROOT)
    osutils.SafeMakedirs(os.path.join(self.build_root, '.repo'))

    # The manager lives for the whole test; tearDown exits it.
    self._manager = parallel.Manager()
    self._manager.__enter__()

    # These are here to make pylint happy.  Values filled in by _Prepare.
    self._bot_id = None
    self._current_board = None
    self._boards = None
    self._run = None

  def _Prepare(self, bot_id=None, extra_config=None, cmd_args=None,
               extra_cmd_args=None, build_id=DEFAULT_BUILD_ID,
               waterfall=constants.WATERFALL_INTERNAL,
               waterfall_url=constants.BUILD_INT_DASHBOARD,
               master_build_id=None,
               site_config=None):
    """Prepare a BuilderRun at self._run for this test.

    This method must allow being called more than once.  Subclasses can
    override this method, but those subclass methods should also call this one.

    The idea is that all test preparation that falls out from the choice of
    build config and cbuildbot options should go in _Prepare.

    This will populate the following attributes on self:
      run: A BuilderRun object.
      bot_id: The bot id (name) that was used from the site_config.
      self._boards: Same as self._run.config.boards.  TODO(mtennant): remove.
      self._current_board: First board in list, if there is one.

    Args:
      bot_id: Name of build config to use, defaults to self.BOT_ID.
      extra_config: Dict used to add to the build config for the given
        bot_id.  Example: {'push_image': True}.
      cmd_args: List to override the default cbuildbot command args.
      extra_cmd_args: List to add to default cbuildbot command args.  This
        is a good way to adjust an options value for your test.
        Example: ['branch-name', 'some-branch-name'] will effectively cause
        self._run.options.branch_name to be set to 'some-branch-name'.
      build_id: mock build id
      waterfall: Name of the current waterfall.
                 Possibly from constants.CIDB_KNOWN_WATERFALLS.
      waterfall_url: Url for the current waterfall.
      master_build_id: mock build id of master build.
      site_config: SiteConfig to use (or MockSiteConfig)
    """
    # Use cbuildbot parser to create options object and populate default values.
    parser = cbuildbot._CreateParser()
    if not cmd_args:
      # Fill in default command args.
      cmd_args = [
          '-r', self.build_root, '--buildbot', '--noprebuilts',
          '--buildnumber', str(DEFAULT_BUILD_NUMBER),
          '--branch', self.TARGET_MANIFEST_BRANCH,
      ]
    if extra_cmd_args:
      cmd_args += extra_cmd_args
    (options, args) = parser.parse_args(cmd_args)

    # The bot_id can either be specified as arg to _Prepare method or in the
    # cmd_args (as cbuildbot normally accepts it from command line).
    if args:
      self._bot_id = args[0]
      if bot_id:
        # This means bot_id was specified as _Prepare arg and in cmd_args.
        # Make sure they are the same.
        self.assertEquals(self._bot_id, bot_id)
    else:
      self._bot_id = bot_id or self.BOT_ID
      args = [self._bot_id]
    cbuildbot._FinishParsing(options, args)

    if site_config is None:
      site_config = chromeos_config.GetConfig()

    # Populate build_config corresponding to self._bot_id.
    build_config = copy.deepcopy(site_config[self._bot_id])
    build_config['manifest_repo_url'] = 'fake_url'
    if extra_config:
      build_config.update(extra_config)
    if options.remote_trybot:
      build_config = config_lib.OverrideConfigForTrybot(
          build_config, options)
    options.managed_chrome = build_config['sync_chrome']

    self._boards = build_config['boards']
    self._current_board = self._boards[0] if self._boards else None

    # Some preliminary sanity checks.
    self.assertEquals(options.buildroot, self.build_root)

    # Construct a real BuilderRun using options and build_config.
    self._run = cbuildbot_run.BuilderRun(
        options, site_config, build_config, self._manager)

    if build_id is not None:
      self._run.attrs.metadata.UpdateWithDict({'build_id': build_id})

    if master_build_id is not None:
      self._run.options.master_build_id = master_build_id

    self._run.attrs.metadata.UpdateWithDict({'buildbot-master-name': waterfall})
    self._run.attrs.metadata.UpdateWithDict({'buildbot-url': waterfall_url})

    if self.RELEASE_TAG is not None:
      self._run.attrs.release_tag = self.RELEASE_TAG

    # Short-circuit the overlay listing command so tests never shell out.
    portage_util._OVERLAY_LIST_CMD = '/bin/true'

  def tearDown(self):
    # Mimic exiting with statement for self._manager.
    self._manager.__exit__(None, None, None)

  def AutoPatch(self, to_patch):
    """Patch a list of objects with autospec=True.

    Args:
      to_patch: A list of tuples in the form (target, attr) to patch.  Will be
      directly passed to mock.patch.object.
    """
    for item in to_patch:
      self.PatchObject(*item, autospec=True)

  def GetHWTestSuite(self):
    """Get the HW test suite for the current bot."""
    hw_tests = self._run.config['hw_tests']
    if not hw_tests:
      # TODO(milleral): Add HWTests back to lumpy-chrome-perf.
      raise unittest.SkipTest('Missing HWTest for %s' % (self._bot_id,))

    return hw_tests[0]

  def assertRaisesStringifyable(self, exception, functor, *args, **kwargs):
    """assertRaises replacement that also verifies exception is Stringifyable.

    This helper is intended to be used anywhere assertRaises can be used, but
    will also verify the exception raised can pass through
    BuilderStage._StringifyException.

    Args:
      exception: See unittest.TestCase.assertRaises.
      functor: See unittest.TestCase.assertRaises.
      args: See unittest.TestCase.assertRaises.
      kwargs: See unittest.TestCase.assertRaises.

    Raises:
      Unittest failures if the expected exception is not raised, or
      _StringifyException exceptions if that process fails.
    """
    try:
      functor(*args, **kwargs)

      # We didn't get the exception, fail the test.
      self.fail('%s was not raised.' % exception)

    except exception:
      # Ensure that this exception can be converted properly.
      # Verifies fix for crbug.com/418358 and related.
      generic_stages.BuilderStage._StringifyException(sys.exc_info())

    except Exception as e:
      # A different exception than expected was raised; fail the test.
      self.fail('%s raised instead of %s' % (e, exception))
+
+
class AbstractStageTestCase(StageTestCase):
  """Base class for tests that test a particular build stage.

  Abstract base class that sets up the build config and options with some
  default values for testing BuilderStage and its derivatives.
  """

  def ConstructStage(self):
    """Returns an instance of the stage to be tested.

    Note: Must be implemented in subclasses.
    """
    raise NotImplementedError(self, "ConstructStage: Implement in your test")

  def RunStage(self):
    """Creates and runs an instance of the stage to be tested.

    Note: Requires ConstructStage() to be implemented.

    Raises:
      NotImplementedError: ConstructStage() was not implemented.
    """

    # Stage construction is usually done as late as possible because the tests
    # set up the build configuration and options used in constructing the stage.
    results_lib.Results.Clear()
    stage = self.ConstructStage()
    stage.Run()
    # The stage must not have recorded any failure in the results log.
    self.assertTrue(results_lib.Results.BuildSucceededSoFar())
+
+
def patch(*args, **kwargs):
  """Thin wrapper around mock.patch.object that defaults autospec=True."""
  if 'autospec' not in kwargs:
    kwargs['autospec'] = True
  return mock.patch.object(*args, **kwargs)
+
+
@contextlib.contextmanager
def patches(*args):
  """Context manager for a list of patch objects."""
  with cros_build_lib.ContextManagerStack() as stack:
    for arg in args:
      # Bind |arg| as a default argument so each factory returns its own
      # patcher rather than the loop's final value (late-binding pitfall).
      stack.Add(lambda ret=arg: ret)
    yield
+
+
+class BuilderStageTest(AbstractStageTestCase):
+  """Tests for BuilderStage class."""
+
  def setUp(self):
    # Run against the external waterfall, and route all cidb calls to a
    # MagicMock so individual tests can assert on them.
    self._Prepare(waterfall=constants.WATERFALL_EXTERNAL)
    self.mock_cidb = mock.MagicMock()
    cidb.CIDBConnectionFactory.SetupMockCidb(self.mock_cidb)
+
  def tearDown(self):
    # Undo SetupMockCidb from setUp so later tests see a clean factory.
    cidb.CIDBConnectionFactory.ClearMock()
+
+  def _ConstructStageWithExpectations(self, stage_class):
+    """Construct an instance of the stage, verifying expectations from init.
+
+    Args:
+      stage_class: The class to instantitate.
+
+    Returns:
+      The instantiated class instance.
+    """
+    if stage_class is None:
+      stage_class = generic_stages.BuilderStage
+
+    self.PatchObject(self.mock_cidb, 'InsertBuildStage',
+                     return_value=DEFAULT_BUILD_STAGE_ID)
+    stage = stage_class(self._run)
+    self.mock_cidb.InsertBuildStage.assert_called_once_with(
+        build_id=DEFAULT_BUILD_ID,
+        name=mock.ANY)
+    return stage
+
+  def ConstructStage(self):
+    return self._ConstructStageWithExpectations(generic_stages.BuilderStage)
+
+  def testGetPortageEnvVar(self):
+    """Basic test case for _GetPortageEnvVar function."""
+    stage = self.ConstructStage()
+    board = self._current_board
+
+    envvar = 'EXAMPLE'
+    rc_mock = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
+    rc_mock.AddCmdResult(['portageq-%s' % board, 'envvar', envvar],
+                         output='RESULT\n')
+
+    result = stage._GetPortageEnvVar(envvar, board)
+    self.assertEqual(result, 'RESULT')
+
+  def testStageNamePrefixSmoke(self):
+    """Basic test for the StageNamePrefix() function."""
+    stage = self.ConstructStage()
+    self.assertEqual(stage.StageNamePrefix(), 'Builder')
+
+  def testGetStageNamesSmoke(self):
+    """Basic test for the GetStageNames() function."""
+    stage = self.ConstructStage()
+    self.assertEqual(stage.GetStageNames(), ['Builder'])
+
+  def testConstructDashboardURLSmoke(self):
+    """Basic test for the ConstructDashboardURL() function."""
+    stage = self.ConstructStage()
+
+    exp_url = ('https://uberchromegw.corp.google.com/i/chromeos/builders/'
+               'x86-generic-paladin/builds/%s' % DEFAULT_BUILD_NUMBER)
+    self.assertEqual(stage.ConstructDashboardURL(), exp_url)
+
+    stage_name = 'Archive'
+    exp_url = '%s/steps/%s/logs/stdio' % (exp_url, stage_name)
+    self.assertEqual(stage.ConstructDashboardURL(stage=stage_name), exp_url)
+
+  def test_ExtractOverlaysSmoke(self):
+    """Basic test for the _ExtractOverlays() function."""
+    stage = self.ConstructStage()
+    self.assertEqual(stage._ExtractOverlays(), ([], []))
+
+  def test_PrintSmoke(self):
+    """Basic test for the _Print() function."""
+    stage = self.ConstructStage()
+    with self.OutputCapturer():
+      stage._Print('hi there')
+    self.AssertOutputContainsLine('hi there', check_stderr=True)
+
+  def test_PrintLoudlySmoke(self):
+    """Basic test for the _PrintLoudly() function."""
+    stage = self.ConstructStage()
+    with self.OutputCapturer():
+      stage._PrintLoudly('hi there')
+    self.AssertOutputContainsLine(r'\*{10}', check_stderr=True)
+    self.AssertOutputContainsLine('hi there', check_stderr=True)
+
+  def testRunSmoke(self):
+    """Basic passing test for the Run() function."""
+    stage = self.ConstructStage()
+    with self.OutputCapturer():
+      stage.Run()
+
+  def _RunCapture(self, stage):
+    """Helper method to run Run() with captured output."""
+    output = self.OutputCapturer()
+    output.StartCapturing()
+    try:
+      stage.Run()
+    finally:
+      output.StopCapturing()
+
  def testRunException(self):
    """Verify stage exceptions are handled."""
    class TestError(Exception):
      """Unique test exception"""

    # Make PerformStage raise our unique exception type.
    perform_mock = self.PatchObject(generic_stages.BuilderStage, 'PerformStage')
    perform_mock.side_effect = TestError('fail!')

    stage = self.ConstructStage()
    results_lib.Results.Clear()
    # Run() surfaces stage failures to callers as StepFailure.
    self.assertRaises(failures_lib.StepFailure, self._RunCapture, stage)

    # The original exception (and its message) is recorded in Results.
    results = results_lib.Results.Get()[0]
    self.assertTrue(isinstance(results.result, TestError))
    self.assertEqual(str(results.result), 'fail!')
    # cidb should see the stage start and then finish with FAILED status.
    self.mock_cidb.StartBuildStage.assert_called_once_with(
        DEFAULT_BUILD_STAGE_ID)
    self.mock_cidb.FinishBuildStage.assert_called_once_with(
        DEFAULT_BUILD_STAGE_ID,
        constants.BUILDER_STATUS_FAILED)
+
  def testHandleExceptionException(self):
    """Verify exceptions in HandleException handlers are themselves handled."""
    class TestError(Exception):
      """Unique test exception"""

    class BadStage(generic_stages.BuilderStage):
      """Stage that throws an exception when PerformStage is called."""

      # Shared across instances; records the messages seen by the handler.
      handled_exceptions = []

      def PerformStage(self):
        raise TestError('first fail')

      def _HandleStageException(self, exc_info):
        # Record the original failure, then fail again from the handler.
        self.handled_exceptions.append(str(exc_info[1]))
        raise TestError('nested')

    stage = self._ConstructStageWithExpectations(BadStage)
    results_lib.Results.Clear()
    self.assertRaises(failures_lib.StepFailure, self._RunCapture, stage)

    # Verify the results tracked the original exception.
    results = results_lib.Results.Get()[0]
    self.assertTrue(isinstance(results.result, TestError))
    self.assertEqual(str(results.result), 'first fail')

    # The handler saw the original exception before raising its own.
    self.assertEqual(stage.handled_exceptions, ['first fail'])

    # Verify the stage is still marked as failed in cidb.
    self.mock_cidb.StartBuildStage.assert_called_once_with(
        DEFAULT_BUILD_STAGE_ID)
    self.mock_cidb.FinishBuildStage.assert_called_once_with(
        DEFAULT_BUILD_STAGE_ID,
        constants.BUILDER_STATUS_FAILED)
+
+
class BoardSpecificBuilderStageTest(AbstractStageTestCase):
  """Tests option/config settings on board-specific stages."""

  # Arbitrary board name; only used to verify name propagation below.
  DEFAULT_BOARD_NAME = 'my_shiny_test_board'

  def setUp(self):
    self._Prepare()

  def ConstructStage(self):
    """Create a BoardSpecificBuilderStage for DEFAULT_BOARD_NAME."""
    return generic_stages.BoardSpecificBuilderStage(self._run,
                                                    self.DEFAULT_BOARD_NAME)

  def testBuilderNameContainsBoardName(self):
    """Grouped configs should embed the board name in the stage name."""
    self._run.config.grouped = True
    stage = self.ConstructStage()
    self.assertTrue(self.DEFAULT_BOARD_NAME in stage.name)

  # TODO (yjhong): Fix this test.
  # def testCheckOptions(self):
  #   """Makes sure options/config settings are setup correctly."""
  #   parser = cbuildbot._CreateParser()
  #   (options, _) = parser.parse_args([])

  #   for attr in dir(stages):
  #     obj = eval('stages.' + attr)
  #     if not hasattr(obj, '__base__'):
  #       continue
  #     if not obj.__base__ is stages.BoardSpecificBuilderStage:
  #       continue
  #     if obj.option_name:
  #       self.assertTrue(getattr(options, obj.option_name))
  #     if obj.config_name:
  #       if not obj.config_name in config._settings:
  #         self.fail(('cbuildbot_stages.%s.config_name "%s" is missing from '
  #                    'cbuildbot_config._settings') % (attr, obj.config_name))
+
+
class RunCommandAbstractStageTestCase(
    AbstractStageTestCase, cros_build_lib_unittest.RunCommandTestCase):
  """Base test class for testing a stage and mocking RunCommand."""

  # pylint: disable=abstract-method

  FULL_BOT_ID = 'x86-generic-full'
  BIN_BOT_ID = 'x86-generic-paladin'

  def _Prepare(self, bot_id, **kwargs):
    # NOTE: this pass-through override makes |bot_id| a required positional
    # argument; subclasses are expected to go through _PrepareFull() or
    # _PrepareBin() rather than rely on a default bot id.
    super(RunCommandAbstractStageTestCase, self)._Prepare(bot_id, **kwargs)

  def _PrepareFull(self, **kwargs):
    """Prepare using the full (build-from-source) builder config."""
    self._Prepare(self.FULL_BOT_ID, **kwargs)

  def _PrepareBin(self, **kwargs):
    """Prepare using the binary (paladin) builder config."""
    self._Prepare(self.BIN_BOT_ID, **kwargs)

  def _Run(self, dir_exists):
    """Helper for running the build.

    Args:
      dir_exists: What os.path.isdir should report to the stage under test.
    """
    with patch(os.path, 'isdir', return_value=dir_exists):
      self.RunStage()
+
+
class ArchivingStageMixinMock(partial_mock.PartialMock):
  """Partial mock for ArchivingStageMixin."""

  TARGET = 'chromite.cbuildbot.stages.generic_stages.ArchivingStageMixin'
  ATTRS = ('UploadArtifact',)

  def UploadArtifact(self, *args, **kwargs):
    # Run the real UploadArtifact, but stub out the expensive archive and
    # upload helpers it calls so no files are actually moved.
    with patch(commands, 'ArchiveFile', return_value='foo.txt'):
      with patch(commands, 'UploadArchivedFile'):
        self.backup['UploadArtifact'](*args, **kwargs)
diff --git a/cbuildbot/stages/release_stages.py b/cbuildbot/stages/release_stages.py
new file mode 100644
index 0000000..c382724
--- /dev/null
+++ b/cbuildbot/stages/release_stages.py
@@ -0,0 +1,385 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the various stages that a builder runs."""
+
+from __future__ import print_function
+
+import json
+import os
+
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot.stages import artifact_stages
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import timeout_util
+from chromite.lib.paygen import gspaths
+from chromite.lib.paygen import paygen_build_lib
+
+
class InvalidTestConditionException(Exception):
  """Raised when pre-conditions for a test aren't met.

  E.g. SignerTestStage raises this when no recovery image is available.
  """
+
+
class SignerTestStage(artifact_stages.ArchivingStage):
  """Run signer related tests."""

  option_name = 'tests'
  config_name = 'signer_tests'

  # If the signer tests take longer than 30 minutes, abort. They usually take
  # five minutes to run.
  SIGNER_TEST_TIMEOUT = 30 * 60

  def PerformStage(self):
    # The recovery image is produced by the archive stage; the signer tests
    # cannot run without it.
    if not self.archive_stage.WaitForRecoveryImage():
      raise InvalidTestConditionException('Missing recovery image.')
    with timeout_util.Timeout(self.SIGNER_TEST_TIMEOUT):
      commands.RunSignerTests(self._build_root, self._current_board)
+
+
class SignerResultsTimeout(failures_lib.StepFailure):
  """The signer did not produce any results inside the expected time.

  Raised by PaygenStage._WaitForSigningResults when polling times out.
  """
+
+
class SignerFailure(failures_lib.StepFailure):
  """The signer returned an error result.

  Raised after logging, when any collected signer result isn't 'passed'.
  """
+
+
class MissingInstructionException(failures_lib.StepFailure):
  """We didn't receive the list of signing instructions PushImage uploaded.

  Raised when push_image reports failure (a None result); see
  PaygenStage._WaitForPushImage.
  """
+
+
class MalformedResultsException(failures_lib.StepFailure):
  """The Signer results aren't formatted as we expect.

  Carries the unparseable signer text as its message.
  """
+
+
class PaygenSigningRequirementsError(failures_lib.StepFailure):
  """Paygen stage can't run if signing failed."""
  # NOTE(review): not raised anywhere in this module; presumably raised by
  # callers/other stages — confirm before removing.
+
+
class PaygenCrostoolsNotAvailableError(failures_lib.StepFailure):
  """Paygen stage can't run because the crostools repo isn't available."""
  # NOTE(review): the original docstring was a copy-paste of the signing
  # failure above; reworded to match the class name. Not raised in this
  # module — presumably raised by callers.
+
+
class PaygenNoPaygenConfigForBoard(failures_lib.StepFailure):
  """Paygen can't run without a release.conf config for the board.

  Treated as a warning (not a failure) by PaygenStage, since it is common
  during new board bring up.
  """
+
+
class PaygenStage(artifact_stages.ArchivingStage):
  """Stage that generates release payloads.

  If this stage is created with a 'channels' argument, it can run
  independently. Otherwise, it's dependent on values queued up by
  the ArchiveStage (push_image).
  """
  option_name = 'paygen'
  config_name = 'paygen'

  # Poll for new results every 30 seconds.
  SIGNING_PERIOD = 30

  # Timeout for the signing process. 2 hours in seconds.
  SIGNING_TIMEOUT = 2 * 60 * 60

  # NOTE(review): not referenced in this module; presumably consumed by
  # callers — confirm before removing.
  FINISHED = 'finished'

  def __init__(self, builder_run, board, archive_stage, channels=None,
               **kwargs):
    """Init that accepts the channels argument, if present.

    Args:
      builder_run: See builder_run on ArchivingStage.
      board: See board on ArchivingStage.
      archive_stage: See archive_stage on ArchivingStage.
      channels: Explicit list of channels to generate payloads for.
                If empty, will instead wait on values from push_image.
                Channels is normally None in release builds, and normally set
                for trybot 'payloads' builds.
    """
    super(PaygenStage, self).__init__(builder_run, board, archive_stage,
                                      **kwargs)
    # Maps channel -> {result_url: parsed signer json} for every completed
    # signer result collected so far; filled in by _CheckForResults.
    self.signing_results = {}
    self.channels = channels

  def _HandleStageException(self, exc_info):
    """Override and don't set status to FAIL but FORGIVEN instead.

    Args:
      exc_info: (exc_type, exc_value, traceback) tuple for the failure.
    """
    exc_type, exc_value, _exc_tb = exc_info

    # If Paygen fails to find anything needed in release.conf, treat it
    # as a warning, not a failure. This is common during new board bring up.
    if issubclass(exc_type, PaygenNoPaygenConfigForBoard):
      return self._HandleExceptionAsWarning(exc_info)

    # Warn so people look at ArchiveStage for the real error.
    if issubclass(exc_type, MissingInstructionException):
      return self._HandleExceptionAsWarning(exc_info)

    # If the exception is a TestLabFailure that means we couldn't schedule the
    # test. We don't fail the build for that. We do the CompoundFailure dance,
    # because that's how we'll get failures from background processes returned
    # to us.
    if (issubclass(exc_type, failures_lib.TestLabFailure) or
        (issubclass(exc_type, failures_lib.CompoundFailure) and
         exc_value.MatchesFailureType(failures_lib.TestLabFailure))):
      return self._HandleExceptionAsWarning(exc_info)

    return super(PaygenStage, self)._HandleStageException(exc_info)

  def _JsonFromUrl(self, gs_ctx, url):
    """Fetch a GS Url, and parse it as Json.

    Args:
      gs_ctx: GS Context.
      url: Url to fetch and parse.

    Returns:
      None if the Url doesn't exist.
      Parsed Json structure if it did.

    Raises:
      MalformedResultsException if it failed to parse.
    """
    try:
      signer_txt = gs_ctx.Cat(url)
    except gs.GSNoSuchKey:
      # A missing file just means the signer hasn't published a result yet.
      return None

    try:
      return json.loads(signer_txt)
    except ValueError:
      # We should never see malformed Json, even for intermediate statuses.
      raise MalformedResultsException(signer_txt)

  def _SigningStatusFromJson(self, signer_json):
    """Extract a signing status from a signer result Json DOM.

    Args:
      signer_json: The parsed json status from a signer operation.

    Returns:
      string with a simple status: 'passed', 'failed', 'downloading', etc,
      or '' if the json doesn't contain a status.
    """
    return (signer_json or {}).get('status', {}).get('status', '')

  def _CheckForResults(self, gs_ctx, instruction_urls_per_channel,
                       channel_notifier):
    """timeout_util.WaitForReturnTrue func to check a list of signer results.

    Polled periodically; accumulates completed results in
    self.signing_results and notifies each channel the first time all of its
    results have arrived with 'passed' status.

    Args:
      gs_ctx: Google Storage Context.
      instruction_urls_per_channel: Urls of the signer result files
                                    we're expecting.
      channel_notifier: BackgroundTaskRunner into which we push channels for
                        processing.

    Returns:
      True if all expected signer results have been collected, else False.
    """
    COMPLETED_STATUS = ('passed', 'failed')

    # Assume we are done, then try to prove otherwise.
    results_completed = True

    for channel in instruction_urls_per_channel.keys():
      self.signing_results.setdefault(channel, {})

      # Skip channels for which we already hold every expected result
      # (they were handled on an earlier poll).
      if (len(self.signing_results[channel]) ==
          len(instruction_urls_per_channel[channel])):
        continue

      for url in instruction_urls_per_channel[channel]:
        # Convert from instructions URL to instructions result URL.
        url += '.json'

        # We already have a result for this URL.
        if url in self.signing_results[channel]:
          continue

        try:
          signer_json = self._JsonFromUrl(gs_ctx, url)
        except MalformedResultsException as e:
          # Tolerate bad data for now; we'll retry on the next poll.
          logging.warning('Received malformed json: %s', e)
          continue

        if self._SigningStatusFromJson(signer_json) in COMPLETED_STATUS:
          # If we find a completed result, remember it.
          self.signing_results[channel][url] = signer_json

      # If we don't have full results for this channel, we aren't done
      # waiting.
      if (len(self.signing_results[channel]) !=
          len(instruction_urls_per_channel[channel])):
        results_completed = False
        continue

      # If we reach here, the channel has just been completed for the first
      # time.

      # If all results 'passed' the channel was successfully signed.
      channel_success = True
      for signer_result in self.signing_results[channel].values():
        if self._SigningStatusFromJson(signer_result) != 'passed':
          channel_success = False

      # If we successfully completed the channel, inform paygen.
      if channel_success:
        channel_notifier(channel)

    return results_completed

  def _WaitForPushImage(self):
    """Block until push_image data is ready.

    Returns:
      Push_image results, expected to be of the form:
      { 'channel': ['gs://instruction_uri1', 'gs://signer_instruction_uri2'] }

    Raises:
      MissingInstructionException: If push_image sent us an error, or timed out.
    """
    # This call will NEVER time out.
    instruction_urls_per_channel = self.board_runattrs.GetParallel(
        'instruction_urls_per_channel', timeout=None)

    # A value of None signals an error in PushImage.
    if instruction_urls_per_channel is None:
      raise MissingInstructionException(
          'ArchiveStage: PushImage failed. No images means no Paygen.')

    return instruction_urls_per_channel

  def _WaitForSigningResults(self,
                             instruction_urls_per_channel,
                             channel_notifier):
    """Do the work of waiting for signer results and logging them.

    Args:
      instruction_urls_per_channel: push_image data (see _WaitForPushImage).
      channel_notifier: BackgroundTaskRunner into which we push channels for
                        processing.

    Raises:
      SignerResultsTimeout: If the signing process times out.
      SignerFailure: If any signer result reports a non-'passed' status.
      ValueError: If the signer result isn't valid json.
      RunCommandError: If we are unable to download signer results.
    """
    gs_ctx = gs.GSContext(dry_run=self._run.debug)

    try:
      logging.info('Waiting for signer results.')
      timeout_util.WaitForReturnTrue(
          self._CheckForResults,
          func_args=(gs_ctx, instruction_urls_per_channel, channel_notifier),
          timeout=self.SIGNING_TIMEOUT, period=self.SIGNING_PERIOD)
    except timeout_util.TimeoutError:
      msg = 'Image signing timed out.'
      logging.error(msg)
      logging.PrintBuildbotStepText(msg)
      raise SignerResultsTimeout(msg)

    # Log all signer results, then handle any signing failures.
    failures = []
    # NOTE: iteritems() is Python 2 only; this module predates python 3.
    for url_results in self.signing_results.values():
      for url, signer_result in url_results.iteritems():
        result_description = os.path.basename(url)
        logging.PrintBuildbotStepText(result_description)
        logging.info('Received results for: %s', result_description)
        logging.info(json.dumps(signer_result, indent=4))

        status = self._SigningStatusFromJson(signer_result)
        if status != 'passed':
          failures.append(result_description)
          logging.error('Signing failed for: %s', result_description)

    if failures:
      logging.error('Failure summary:')
      for failure in failures:
        logging.error('  %s', failure)
      raise SignerFailure(', '.join([str(f) for f in failures]))

  def PerformStage(self):
    """Do the work of generating our release payloads."""
    # Convert to release tools naming for boards.
    board = self._current_board.replace('_', '-')
    version = self._run.attrs.release_tag

    assert version, "We can't generate payloads without a release_tag."
    logging.info("Generating payloads for: %s, %s", board, version)

    # Test to see if the current board has a Paygen configuration. We do
    # this here, not in the sub-process, so we don't have to pass back a
    # failure reason.
    try:
      paygen_build_lib.ValidateBoardConfig(board)
    except paygen_build_lib.BoardNotConfigured:
      raise PaygenNoPaygenConfigForBoard(
          'No release.conf entry was found for board %s. Get a TPM to fix.' %
          board)

    with parallel.BackgroundTaskRunner(self._RunPaygenInProcess) as per_channel:
      def channel_notifier(channel):
        # Queue up one _RunPaygenInProcess invocation per signed channel.
        per_channel.put((channel, board, version, self._run.debug,
                         self._run.config.paygen_skip_testing,
                         self._run.config.paygen_skip_delta_payloads))

      if self.channels:
        logging.info("Using explicit channels: %s", self.channels)
        # If we have an explicit list of channels, use it.
        for channel in self.channels:
          channel_notifier(channel)
      else:
        # Release builds: wait on push_image data, then notify paygen of
        # each channel as the signer completes it.
        instruction_urls_per_channel = self._WaitForPushImage()
        self._WaitForSigningResults(instruction_urls_per_channel,
                                    channel_notifier)

  def _RunPaygenInProcess(self, channel, board, version, debug,
                          disable_tests, skip_delta_payloads):
    """Helper for PaygenStage that invokes payload generation.

    This method is intended to be safe to invoke inside a process.

    Args:
      channel: Channel of payloads to generate ('stable', 'beta', etc)
      board: Board of payloads to generate ('x86-mario', 'x86-alex-he', etc)
      version: Version of payloads to generate.
      debug: Flag telling if this is a real run, or a test run.
      disable_tests: Do not generate test artifacts or run payload tests.
      skip_delta_payloads: Skip generating delta payloads.
    """
    # Convert to release tools naming for channels.
    if not channel.endswith('-channel'):
      channel += '-channel'

    with osutils.TempDir(sudo_rm=True) as tempdir:
      # Create the definition of the build to generate payloads for.
      build = gspaths.Build(channel=channel,
                            board=board,
                            version=version)

      try:
        # Generate the payloads.
        self._PrintLoudly('Starting %s, %s, %s' % (channel, version, board))
        paygen_build_lib.CreatePayloads(build,
                                        work_dir=tempdir,
                                        site_config=self._run.site_config,
                                        dry_run=debug,
                                        run_parallel=True,
                                        run_on_builder=True,
                                        skip_delta_payloads=skip_delta_payloads,
                                        disable_tests=disable_tests)
      except (paygen_build_lib.BuildFinished,
              paygen_build_lib.BuildLocked,
              paygen_build_lib.BuildSkip) as e:
        # These errors are normal if it's possible for another process to
        # work on the same build. This process could be a Paygen server, or
        # another builder (perhaps by a trybot generating payloads on request).
        #
        # This means the build was finished by the other process, is already
        # being processed (so the build is locked), or that it's been marked
        # to skip (probably done manually).
        logging.info('Paygen skipped because: %s', e)
diff --git a/cbuildbot/stages/release_stages_unittest b/cbuildbot/stages/release_stages_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/stages/release_stages_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/stages/release_stages_unittest.py b/cbuildbot/stages/release_stages_unittest.py
new file mode 100644
index 0000000..89dbe09
--- /dev/null
+++ b/cbuildbot/stages/release_stages_unittest.py
@@ -0,0 +1,448 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for build stages."""
+
+from __future__ import print_function
+
+import mock
+
+from chromite.cbuildbot import cbuildbot_unittest
+from chromite.cbuildbot.stages import artifact_stages
+from chromite.cbuildbot.stages import generic_stages_unittest
+from chromite.cbuildbot.stages import release_stages
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import results_lib
+from chromite.lib import timeout_util
+
+from chromite.cbuildbot.stages.generic_stages_unittest import patch
+
+from chromite.lib.paygen import gspaths
+from chromite.lib.paygen import paygen_build_lib
+
+
+# pylint: disable=protected-access
+
+
+class PaygenStageTest(generic_stages_unittest.AbstractStageTestCase,
+                      cbuildbot_unittest.SimpleBuilderTestCase):
+  """Test the PaygenStageStage."""
+
+  BOT_ID = 'x86-mario-release'
+  RELEASE_TAG = '0.0.1'
+
+  SIGNER_RESULT = """
+    { "status": { "status": "passed" }, "board": "link",
+    "keyset": "link-mp-v4", "type": "recovery", "channel": "stable" }
+    """
+
+  INSNS_URLS_PER_CHANNEL = {
+      'chan1': ['chan1_uri1', 'chan1_uri2'],
+      'chan2': ['chan2_uri1'],
+  }
+
  def setUp(self):
    # Load the standard BOT_ID ('x86-mario-release') config for every test.
    self._Prepare()
+
+  def ConstructStage(self):
+    archive_stage = artifact_stages.ArchiveStage(self._run, self._current_board)
+    return release_stages.PaygenStage(self._run, self._current_board,
+                                      archive_stage)
+
+  def testWaitForPushImageSuccess(self):
+    """Test waiting for input from PushImage."""
+    stage = self.ConstructStage()
+    stage.board_runattrs.SetParallel(
+        'instruction_urls_per_channel', self.INSNS_URLS_PER_CHANNEL)
+
+    self.assertEqual(stage._WaitForPushImage(), self.INSNS_URLS_PER_CHANNEL)
+
+  def testWaitForPushImageError(self):
+    """Test WaitForPushImageError with an error output from pushimage."""
+    stage = self.ConstructStage()
+    stage.board_runattrs.SetParallel(
+        'instruction_urls_per_channel', None)
+
+    self.assertRaises(release_stages.MissingInstructionException,
+                      stage._WaitForPushImage)
+
+  def testWaitForSigningResultsSuccess(self):
+    """Test that _WaitForSigningResults works when signing works."""
+    results = ['chan1_uri1.json', 'chan1_uri2.json', 'chan2_uri1.json']
+
+    with patch(release_stages.gs, 'GSContext') as mock_gs_ctx_init:
+      mock_gs_ctx = mock_gs_ctx_init.return_value
+      mock_gs_ctx.Cat.return_value = self.SIGNER_RESULT
+      notifier = mock.Mock()
+
+      stage = self.ConstructStage()
+      stage._WaitForSigningResults(self.INSNS_URLS_PER_CHANNEL, notifier)
+
+      self.assertEqual(notifier.mock_calls,
+                       [mock.call('chan1'),
+                        mock.call('chan2')])
+
+      for result in results:
+        mock_gs_ctx.Cat.assert_any_call(result)
+
+  def testWaitForSigningResultsSuccessNothingSigned(self):
+    """Test _WaitForSigningResults when there are no signed images."""
+    with patch(release_stages.gs, 'GSContext') as mock_gs_ctx_init:
+      mock_gs_ctx = mock_gs_ctx_init.return_value
+      mock_gs_ctx.Cat.return_value = self.SIGNER_RESULT
+      notifier = mock.Mock()
+
+      stage = self.ConstructStage()
+      stage._WaitForSigningResults({}, notifier)
+
+      self.assertEqual(notifier.mock_calls, [])
+      self.assertEqual(mock_gs_ctx.Cat.mock_calls, [])
+
  def testWaitForSigningResultsFailure(self):
    """Test _WaitForSigningResults when the signers report an error."""
    with patch(release_stages.gs, 'GSContext') as mock_gs_ctx_init:
      mock_gs_ctx = mock_gs_ctx_init.return_value
      # A single result, with a 'failed' signer status.
      mock_gs_ctx.Cat.return_value = """
          { "status": { "status": "failed" }, "board": "link",
            "keyset": "link-mp-v4", "type": "recovery", "channel": "stable" }
          """
      notifier = mock.Mock()

      stage = self.ConstructStage()

      # A failed result must surface as a SignerFailure.
      self.assertRaisesStringifyable(
          release_stages.SignerFailure,
          stage._WaitForSigningResults,
          {'chan1': ['chan1_uri1']}, notifier)

      # Ensure we didn't notify anyone of success.
      self.assertEqual(notifier.mock_calls, [])
      # Only the one result file should have been fetched.
      self.assertEqual(mock_gs_ctx.Cat.mock_calls,
                       [mock.call('chan1_uri1.json')])
+
+  def testWaitForSigningResultsTimeout(self):
+    """Test that _WaitForSigningResults reports timeouts correctly."""
+    with patch(release_stages.timeout_util, 'WaitForSuccess') as mock_wait:
+      mock_wait.side_effect = timeout_util.TimeoutError
+      notifier = mock.Mock()
+
+      stage = self.ConstructStage()
+
+      self.assertRaises(release_stages.SignerResultsTimeout,
+                        stage._WaitForSigningResults,
+                        {'chan1': ['chan1_uri1']}, notifier)
+
+      self.assertEqual(notifier.mock_calls, [])
+
+  def testCheckForResultsSuccess(self):
+    """Test that _CheckForResults works when signing works."""
+    with patch(release_stages.gs, 'GSContext') as mock_gs_ctx_init:
+      mock_gs_ctx = mock_gs_ctx_init.return_value
+      mock_gs_ctx.Cat.return_value = self.SIGNER_RESULT
+      notifier = mock.Mock()
+
+      stage = self.ConstructStage()
+      self.assertTrue(
+          stage._CheckForResults(mock_gs_ctx,
+                                 self.INSNS_URLS_PER_CHANNEL,
+                                 notifier))
+      self.assertEqual(notifier.mock_calls,
+                       [mock.call('chan1'), mock.call('chan2')])
+
+  def testCheckForResultsSuccessNoChannels(self):
+    """Test that _CheckForResults works when there is nothing to check for."""
+    with patch(release_stages.gs, 'GSContext') as mock_gs_ctx_init:
+      mock_gs_ctx = mock_gs_ctx_init.return_value
+      notifier = mock.Mock()
+
+      stage = self.ConstructStage()
+
+      # Ensure we find that we are ready if there are no channels to look for.
+      self.assertTrue(stage._CheckForResults(mock_gs_ctx, {}, notifier))
+
+      # Ensure we didn't contact GS while checking for no channels.
+      self.assertFalse(mock_gs_ctx.Cat.called)
+      self.assertEqual(notifier.mock_calls, [])
+
  def testCheckForResultsPartialComplete(self):
    """Verify _CheckForResults handles partial signing results."""
    def catChan2Success(url):
      # Only chan2 results exist; chan1 fetches raise GSNoSuchKey.
      if url.startswith('chan2'):
        return self.SIGNER_RESULT
      else:
        raise release_stages.gs.GSNoSuchKey()

    with patch(release_stages.gs, 'GSContext') as mock_gs_ctx_init:
      mock_gs_ctx = mock_gs_ctx_init.return_value
      mock_gs_ctx.Cat.side_effect = catChan2Success
      notifier = mock.Mock()

      stage = self.ConstructStage()
      # chan1 is still pending, so overall completion must be False.
      self.assertFalse(
          stage._CheckForResults(mock_gs_ctx,
                                 self.INSNS_URLS_PER_CHANNEL,
                                 notifier))
      # Only chan2's (parsed) result should have been collected.
      self.assertEqual(stage.signing_results, {
          'chan1': {},
          'chan2': {
              'chan2_uri1.json': {
                  'board': 'link',
                  'channel': 'stable',
                  'keyset': 'link-mp-v4',
                  'status': {'status': 'passed'},
                  'type': 'recovery'
              }
          }
      })
      # And only chan2 is announced as successfully signed.
      self.assertEqual(notifier.mock_calls, [mock.call('chan2')])
+
+  def testCheckForResultsUnexpectedJson(self):
+    """Verify _CheckForResults handles unexpected Json values."""
+    with patch(release_stages.gs, 'GSContext') as mock_gs_ctx_init:
+      mock_gs_ctx = mock_gs_ctx_init.return_value
+      mock_gs_ctx.Cat.return_value = '{}'
+      notifier = mock.Mock()
+
+      stage = self.ConstructStage()
+      self.assertFalse(
+          stage._CheckForResults(mock_gs_ctx,
+                                 self.INSNS_URLS_PER_CHANNEL,
+                                 notifier))
+      self.assertEqual(stage.signing_results, {
+          'chan1': {}, 'chan2': {}
+      })
+      self.assertEqual(notifier.mock_calls, [])
+
+  def testCheckForResultsMalformedJson(self):
+    """Verify _CheckForResults handles unexpected Json values."""
+    with patch(release_stages.gs, 'GSContext') as mock_gs_ctx_init:
+      mock_gs_ctx = mock_gs_ctx_init.return_value
+      mock_gs_ctx.Cat.return_value = '{'
+      notifier = mock.Mock()
+
+      stage = self.ConstructStage()
+      self.assertFalse(
+          stage._CheckForResults(mock_gs_ctx,
+                                 self.INSNS_URLS_PER_CHANNEL,
+                                 notifier))
+      self.assertEqual(stage.signing_results, {
+          'chan1': {}, 'chan2': {}
+      })
+      self.assertEqual(notifier.mock_calls, [])
+
+  def testCheckForResultsNoResult(self):
+    """Verify _CheckForResults handles missing signer results."""
+    with patch(release_stages.gs, 'GSContext') as mock_gs_ctx_init:
+      mock_gs_ctx = mock_gs_ctx_init.return_value
+      mock_gs_ctx.Cat.side_effect = release_stages.gs.GSNoSuchKey
+      notifier = mock.Mock()
+
+      stage = self.ConstructStage()
+      self.assertFalse(
+          stage._CheckForResults(mock_gs_ctx,
+                                 self.INSNS_URLS_PER_CHANNEL,
+                                 notifier))
+      self.assertEqual(stage.signing_results, {
+          'chan1': {}, 'chan2': {}
+      })
+      self.assertEqual(notifier.mock_calls, [])
+
+  def testCheckForResultsFailed(self):
+    """Verify _CheckForResults handles missing signer results."""
+    with patch(release_stages.gs, 'GSContext') as mock_gs_ctx_init:
+      mock_gs_ctx = mock_gs_ctx_init.return_value
+      mock_gs_ctx.Cat.side_effect = release_stages.gs.GSNoSuchKey
+      notifier = mock.Mock()
+
+      stage = self.ConstructStage()
+      self.assertFalse(
+          stage._CheckForResults(mock_gs_ctx,
+                                 self.INSNS_URLS_PER_CHANNEL,
+                                 notifier))
+      self.assertEqual(stage.signing_results, {
+          'chan1': {}, 'chan2': {}
+      })
+      self.assertEqual(notifier.mock_calls, [])
+
+  def generateNotifyCalls(self, channels):
+    def side_effect(_, notifier):
+      for channel in channels:
+        notifier(channel)
+    return side_effect
+
  def testPerformStageSuccess(self):
    """Test that PaygenStage works when signing works."""

    with patch(release_stages.parallel, 'BackgroundTaskRunner') as background:
      queue = background().__enter__()

      # This patch is only required for external builds with no config data.
      with patch(paygen_build_lib, 'ValidateBoardConfig'):

        stage = self.ConstructStage()

        # Fake PushImage/signing so that 'stable' and 'beta' are announced
        # as successfully signed channels.
        with patch(stage, '_WaitForPushImage') as wait_push:
          with patch(stage, '_WaitForSigningResults') as wait_signing:
            wait_push.return_value = self.INSNS_URLS_PER_CHANNEL
            wait_signing.side_effect = self.generateNotifyCalls(('stable',
                                                                 'beta'))
            stage.PerformStage()

        # Verify that we queue up work; tuples are
        # (channel, board, version, debug, skip_testing, skip_delta_payloads).
        self.assertEqual(
            queue.put.call_args_list,
            [mock.call(('stable', 'x86-mario', '0.0.1', False, False, False)),
             mock.call(('beta', 'x86-mario', '0.0.1', False, False, False))])
+
+  def testPerformStageSuccessVarientBoard(self):
+    """Test that SignerResultsStage works with variant boards.
+
+    Variant boards need some name conversion. Make sure that's okay.
+    """
+    self._current_board = 'x86-alex_he'
+
+    with patch(release_stages.parallel, 'BackgroundTaskRunner') as background:
+      queue = background().__enter__()
+
+      # This patch is only required for external builds with no config data.
+      with patch(paygen_build_lib, 'ValidateBoardConfig'):
+        stage = self.ConstructStage()
+
+        with patch(stage, '_WaitForPushImage') as wait_push:
+          with patch(stage, '_WaitForSigningResults') as wait_signing:
+            wait_push.return_value = self.INSNS_URLS_PER_CHANNEL
+            wait_signing.side_effect = self.generateNotifyCalls(('stable',
+                                                                 'beta'))
+            stage.PerformStage()
+
+        # Verify that we queue up work. Note the underscore in
+        # 'x86-alex_he' must have been converted to a dash.
+        self.assertEqual(
+            queue.put.call_args_list,
+            [mock.call(('stable', 'x86-alex-he', '0.0.1', False, False, False)),
+             mock.call(('beta', 'x86-alex-he', '0.0.1', False, False, False))])
+
+  def testPerformStageSigningFailed(self):
+    """Test that PaygenStage raises SignerFailure when signing fails."""
+    with patch(release_stages.parallel, 'BackgroundTaskRunner') as background:
+      queue = background().__enter__()
+
+      # This patch is only required for external builds with no config data.
+      with patch(paygen_build_lib, 'ValidateBoardConfig'):
+        stage = self.ConstructStage()
+
+        with patch(stage, '_WaitForPushImage') as wait_push:
+          with patch(stage, '_WaitForSigningResults') as wait_signing:
+            wait_push.return_value = self.INSNS_URLS_PER_CHANNEL
+            # Simulate the signers reporting a failure.
+            wait_signing.side_effect = release_stages.SignerFailure
+
+            self.assertRaises(release_stages.SignerFailure,
+                              stage.PerformStage)
+
+        # Ensure no work was queued up.
+        self.assertFalse(queue.put.called)
+
+  def testPerformStageBackgroundFail(self):
+    """Test that exception from background processes are properly handled."""
+    with patch(paygen_build_lib, 'CreatePayloads') as create_payloads:
+      # Simulate a lab failure escaping from the background payload process.
+      create_payloads.side_effect = failures_lib.TestLabFailure
+
+      # This patch is only required for external builds with no config data.
+      with patch(paygen_build_lib, 'ValidateBoardConfig'):
+        stage = release_stages.PaygenStage(
+            self._run, self._current_board,
+            archive_stage=None, channels=['foo', 'bar'])
+
+        with patch(stage, '_HandleExceptionAsWarning') as warning_handler:
+          warning_handler.return_value = (results_lib.Results.FORGIVEN,
+                                          'description',
+                                          0)
+
+          stage.Run()
+
+          # This proves the exception was turned into a warning.
+          self.assertTrue(warning_handler.called)
+
+  def testPerformStageTrybot(self):
+    """Test the PerformStage alternate behavior for trybot runs."""
+    with patch(release_stages.parallel, 'BackgroundTaskRunner') as background:
+      queue = background().__enter__()
+
+      # This patch is only required for external builds with no config data.
+      with patch(paygen_build_lib, 'ValidateBoardConfig'):
+        # The stage is constructed differently for trybots, so don't use
+        # ConstructStage. Passing explicit channels is what marks this run
+        # as a trybot-style invocation.
+        stage = release_stages.PaygenStage(
+            self._run, self._current_board, archive_stage=None,
+            channels=['foo', 'bar'])
+        with patch(stage, '_WaitForPushImage') as wait_push:
+          with patch(stage, '_WaitForSigningResults') as wait_signing:
+            stage.PerformStage()
+
+          # Make sure we don't wait on push_image or signing in this case.
+          self.assertEqual(wait_push.mock_calls, [])
+          self.assertEqual(wait_signing.mock_calls, [])
+
+        # Notice that we didn't put anything in _wait_for_channel_signing, but
+        # still got results right away.
+        self.assertEqual(
+            queue.put.call_args_list,
+            [mock.call(('foo', 'x86-mario', '0.0.1', False, False, False)),
+             mock.call(('bar', 'x86-mario', '0.0.1', False, False, False))])
+
+  def testPerformStageUnknownBoard(self):
+    """Test that PaygenStage exits when an unknown board is specified."""
+    self._current_board = 'unknown-board-name'
+
+    badBoardException = paygen_build_lib.BoardNotConfigured(self._current_board)
+
+    # This patch is only required for external builds with no config data.
+    with patch(paygen_build_lib, 'ValidateBoardConfig') as validate_boards:
+      # Board validation rejects the board before any payload work starts.
+      validate_boards.side_effect = badBoardException
+
+      stage = self.ConstructStage()
+
+      # The low-level BoardNotConfigured is expected to be re-raised as the
+      # stage-level PaygenNoPaygenConfigForBoard error.
+      self.assertRaises(release_stages.PaygenNoPaygenConfigForBoard,
+                        stage.PerformStage)
+
+  def testRunPaygenInProcess(self):
+    """Test that _RunPaygenInProcess works in the simple case."""
+    with patch(paygen_build_lib, 'CreatePayloads') as create_payloads:
+      # Call the method under test.
+      stage = self.ConstructStage()
+      stage._RunPaygenInProcess('foo', 'foo-board', 'foo-version',
+                                False, False, False)
+
+      # Ensure arguments are properly converted and passed along. Note the
+      # bare channel name 'foo' is expected to be expanded to 'foo-channel'.
+      create_payloads.assert_called_with(gspaths.Build(version='foo-version',
+                                                       board='foo-board',
+                                                       channel='foo-channel'),
+                                         work_dir=mock.ANY,
+                                         site_config=stage._run.site_config,
+                                         dry_run=False,
+                                         run_parallel=True,
+                                         run_on_builder=True,
+                                         skip_delta_payloads=False,
+                                         disable_tests=False)
+
+  def testRunPaygenInProcessComplex(self):
+    """Test that _RunPaygenInProcess with arguments that are more unusual."""
+    with patch(paygen_build_lib, 'CreatePayloads') as create_payloads:
+      # Call the method under test.
+      # Use release tools channel naming, and a board name including a variant.
+      stage = self.ConstructStage()
+      stage._RunPaygenInProcess('foo-channel', 'foo-board-variant',
+                                'foo-version', True, True, True)
+
+      # Ensure arguments are properly converted and passed along. A name
+      # already ending in '-channel' must pass through unchanged, and all
+      # three boolean flags must map to their keyword arguments.
+      create_payloads.assert_called_with(
+          gspaths.Build(version='foo-version',
+                        board='foo-board-variant',
+                        channel='foo-channel'),
+          dry_run=True,
+          work_dir=mock.ANY,
+          site_config=stage._run.site_config,
+          run_parallel=True,
+          run_on_builder=True,
+          skip_delta_payloads=True,
+          disable_tests=True)
diff --git a/cbuildbot/stages/report_stages.py b/cbuildbot/stages/report_stages.py
new file mode 100644
index 0000000..c89a38f
--- /dev/null
+++ b/cbuildbot/stages/report_stages.py
@@ -0,0 +1,777 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the report stages."""
+
+
+from __future__ import print_function
+
+import os
+import sys
+
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import metadata_lib
+from chromite.cbuildbot import results_lib
+from chromite.cbuildbot import tree_status
+from chromite.cbuildbot import triage_lib
+from chromite.cbuildbot import validation_pool
+from chromite.cbuildbot.stages import completion_stages
+from chromite.cbuildbot.stages import generic_stages
+from chromite.lib import cidb
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import graphite
+from chromite.lib import git
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import patch as cros_patch
+from chromite.lib import portage_util
+from chromite.lib import retry_stats
+from chromite.lib import toolchain
+
+
+site_config = config_lib.GetConfig()
+
+
+def WriteBasicMetadata(builder_run):
+  """Writes basic metadata that should be known at start of execution.
+
+  This method writes to |builder_run|'s metadata instance the basic metadata
+  values that should be known at the beginning of the first cbuildbot
+  execution, prior to any reexecutions.
+
+  In particular, this method does not write any metadata values that depend
+  on the builder config, as the config may be modified by patches that are
+  applied before the final reexecution.
+
+  This method is safe to run more than once (for instance, once per cbuildbot
+  execution) because it will write the same data each time.
+
+  Args:
+    builder_run: The BuilderRun instance for this build.
+  """
+  start_time = results_lib.Results.start_time
+  start_time_stamp = cros_build_lib.UserDateTimeFormat(timeval=start_time)
+
+  metadata = {
+      # Data for this build.
+      'bot-hostname': cros_build_lib.GetHostName(fully_qualified=True),
+      'build-number': builder_run.buildnumber,
+      'builder-name': builder_run.GetBuilderName(),
+      'buildbot-url': os.environ.get('BUILDBOT_BUILDBOTURL', ''),
+      'buildbot-master-name':
+          os.environ.get('BUILDBOT_MASTERNAME', ''),
+      'bot-config': builder_run.config['name'],
+      'time': {
+          'start': start_time_stamp,
+      },
+      'master_build_id': builder_run.options.master_build_id,
+  }
+
+  builder_run.attrs.metadata.UpdateWithDict(metadata)
+
+
+def GetChildConfigListMetadata(child_configs, config_status_map):
+  """Creates a list for the child configs metadata.
+
+  This creates a list of child config dictionaries from the given child
+  configs, optionally adding the final status if the success map is
+  specified.
+
+  Args:
+    child_configs: The list of child configs for this build.
+    config_status_map: The map of config name to final build status.
+
+  Returns:
+    List of child config dictionaries, with optional final status
+  """
+  child_config_list = []
+  for c in child_configs:
+    pass_fail_status = None
+    if config_status_map:
+      if config_status_map[c['name']]:
+        pass_fail_status = constants.FINAL_STATUS_PASSED
+      else:
+        pass_fail_status = constants.FINAL_STATUS_FAILED
+    child_config_list.append({'name': c['name'],
+                              'boards': c['boards'],
+                              'status': pass_fail_status})
+  return child_config_list
+
+
+class BuildStartStage(generic_stages.BuilderStage):
+  """The first stage to run.
+
+  This stage writes a few basic metadata values that are known at the start of
+  build, and inserts the build into the database, if appropriate.
+  """
+
+  def _GetBuildTimeoutSeconds(self):
+    """Get the overall build timeout to be published to cidb.
+
+    Returns:
+      Timeout in seconds. None if no sensible timeout can be inferred.
+    """
+    timeout_seconds = self._run.options.timeout
+    if self._run.config.master:
+      # Master builds get a build-type-specific default timeout; an explicit
+      # positive --timeout can only shorten it, never extend it.
+      master_timeout = constants.MASTER_BUILD_TIMEOUT_SECONDS.get(
+          self._run.config.build_type,
+          constants.MASTER_BUILD_TIMEOUT_DEFAULT_SECONDS)
+      if timeout_seconds > 0:
+        master_timeout = min(master_timeout, timeout_seconds)
+      return master_timeout
+
+    return timeout_seconds if timeout_seconds > 0 else None
+
+  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
+  def PerformStage(self):
+    """Write start-of-build metadata and insert the build into cidb."""
+    if self._run.config['doc']:
+      logging.PrintBuildbotLink('Builder documentation',
+                                self._run.config['doc'])
+
+    WriteBasicMetadata(self._run)
+    d = self._run.attrs.metadata.GetDict()
+
+    # BuildStartStage should only run once per build. But just in case it
+    # is somehow running a second time, we do not want to insert an additional
+    # database entry. Detect if a database entry has been inserted already
+    # and if so quit the stage.
+    if 'build_id' in d:
+      logging.info('Already have build_id %s, not inserting an entry.',
+                   d['build_id'])
+      return
+
+    graphite.StatsFactory.GetInstance().Counter('build_started').increment(
+        self._run.config['name'] or 'NO_CONFIG')
+
+    # Note: In other build stages we use self._run.GetCIDBHandle to fetch
+    # a cidb handle. However, since we don't yet have a build_id, we can't
+    # do that here.
+    if cidb.CIDBConnectionFactory.IsCIDBSetup():
+      db_type = cidb.CIDBConnectionFactory.GetCIDBConnectionType()
+      db = cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()
+      if db:
+        waterfall = d['buildbot-master-name']
+        assert waterfall in constants.CIDB_KNOWN_WATERFALLS
+        build_id = db.InsertBuild(
+            builder_name=d['builder-name'],
+            waterfall=waterfall,
+            build_number=d['build-number'],
+            build_config=d['bot-config'],
+            bot_hostname=d['bot-hostname'],
+            master_build_id=d['master_build_id'],
+            timeout_seconds=self._GetBuildTimeoutSeconds())
+        # Remember both the id and which database produced it, so later
+        # (re)executions can verify they talk to the same database
+        # (see HandleSkip below).
+        self._run.attrs.metadata.UpdateWithDict({'build_id': build_id,
+                                                 'db_type': db_type})
+        logging.info('Inserted build_id %s into cidb database type %s.',
+                     build_id, db_type)
+        logging.PrintBuildbotStepText('database: %s, build_id: %s' %
+                                      (db_type, build_id))
+
+        master_build_id = d['master_build_id']
+        if master_build_id is not None:
+          # Slave builds link back to their master's dashboard page.
+          master_build_status = db.GetBuildStatus(master_build_id)
+          master_waterfall_url = constants.WATERFALL_TO_DASHBOARD[
+              master_build_status['waterfall']]
+
+          master_url = tree_status.ConstructDashboardURL(
+              master_waterfall_url,
+              master_build_status['builder_name'],
+              master_build_status['build_number'])
+          logging.PrintBuildbotLink('Link to master build', master_url)
+
+  def HandleSkip(self):
+    """Ensure that re-executions use the same db instance as initial db."""
+    metadata_dict = self._run.attrs.metadata.GetDict()
+    if 'build_id' in metadata_dict:
+      db_type = cidb.CIDBConnectionFactory.GetCIDBConnectionType()
+      if not 'db_type' in metadata_dict:
+        # This will only execute while this CL is in the commit queue. After
+        # this CL lands, this block can be removed.
+        self._run.attrs.metadata.UpdateWithDict({'db_type': db_type})
+        return
+
+      if db_type != metadata_dict['db_type']:
+        cidb.CIDBConnectionFactory.InvalidateCIDBSetup()
+        raise AssertionError('Invalid attempt to switch from database %s to '
+                             '%s.' % (metadata_dict['db_type'], db_type))
+
+
+class BuildReexecutionFinishedStage(generic_stages.BuilderStage,
+                                    generic_stages.ArchivingStageMixin):
+  """The first stage to run after the final cbuildbot reexecution.
+
+  This stage is the first stage run after the final cbuildbot
+  bootstrap/reexecution. By the time this stage is run, the sync stages
+  are complete and version numbers of chromeos are known (though chrome
+  version may not be known until SyncChrome).
+
+  This stage writes metadata values that are first known after the final
+  reexecution (such as those that come from the config). This stage also
+  updates the build's cidb entry if appropriate.
+
+  Where possible, metadata that is already known at this time should be
+  written at this time rather than in ReportStage.
+  """
+
+  def _AbortPreviousHWTestSuites(self):
+    """Abort any outstanding synchronous hwtest suites from this builder."""
+    # Only try to clean up previous HWTests if this is really running on one of
+    # our builders in a non-trybot build.
+    debug = (self._run.options.remote_trybot or
+             (not self._run.options.buildbot) or
+             self._run.options.debug)
+    build_id, db = self._run.GetCIDBHandle()
+    if db:
+      # Look at the two most recent builds of this config (excluding this
+      # build) and abort their synchronous suites, if any.
+      builds = db.GetBuildHistory(self._run.config.name, 2,
+                                  ignore_build_id=build_id)
+      for build in builds:
+        old_version = build['full_version']
+        if old_version is None:
+          continue
+        for suite_config in self._run.config.hw_tests:
+          if not suite_config.async:
+            commands.AbortHWTests(self._run.config.name, old_version,
+                                  debug, suite_config.suite)
+
+  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
+  def PerformStage(self):
+    """Write config-derived metadata, upload it, and sync it to cidb."""
+    config = self._run.config
+    build_root = self._build_root
+
+    # Flat list of all child config boards. Since child configs
+    # are not allowed to have children, it is not necessary to search
+    # deeper than one generation.
+    child_configs = GetChildConfigListMetadata(
+        child_configs=config['child_configs'], config_status_map=None)
+
+    sdk_verinfo = cros_build_lib.LoadKeyValueFile(
+        os.path.join(build_root, constants.SDK_VERSION_FILE),
+        ignore_missing=True)
+
+    verinfo = self._run.GetVersionInfo()
+    platform_tag = getattr(self._run.attrs, 'release_tag')
+    if not platform_tag:
+      platform_tag = verinfo.VersionString()
+
+    version = {
+        'full': self._run.GetVersion(),
+        'milestone': verinfo.chrome_branch,
+        'platform': platform_tag,
+    }
+
+    metadata = {
+        # Version of the metadata format.
+        'metadata-version': '2',
+        'boards': config['boards'],
+        'child-configs': child_configs,
+        'build_type': config['build_type'],
+
+        # Data for the toolchain used.
+        'sdk-version': sdk_verinfo.get('SDK_LATEST_VERSION', '<unknown>'),
+        'toolchain-url': sdk_verinfo.get('TC_PATH', '<unknown>'),
+    }
+
+    if len(config['boards']) == 1:
+      toolchains = toolchain.GetToolchainsForBoard(config['boards'][0],
+                                                   buildroot=build_root)
+      metadata['toolchain-tuple'] = (
+          toolchain.FilterToolchains(toolchains, 'default', True).keys() +
+          toolchain.FilterToolchains(toolchains, 'default', False).keys())
+
+    logging.info('Metadata being written: %s', metadata)
+    self._run.attrs.metadata.UpdateWithDict(metadata)
+    # Update 'version' separately to avoid overwriting the existing
+    # entries in it (e.g. PFQ builders may have written the Chrome
+    # version to uprev).
+    logging.info("Metadata 'version' being written: %s", version)
+    self._run.attrs.metadata.UpdateKeyDictWithDict('version', version)
+
+    # Ensure that all boards and child config boards have a per-board
+    # metadata subdict.
+    for b in config['boards']:
+      self._run.attrs.metadata.UpdateBoardDictWithDict(b, {})
+
+    for cc in child_configs:
+      for b in cc['boards']:
+        self._run.attrs.metadata.UpdateBoardDictWithDict(b, {})
+
+    # Upload build metadata (and write it to database if necessary)
+    self.UploadMetadata(filename=constants.PARTIAL_METADATA_JSON)
+
+    # Write child-per-build and board-per-build rows to database
+    build_id, db = self._run.GetCIDBHandle()
+    if db:
+      # TODO(akeshet): replace this with a GetValue call once crbug.com/406522
+      # is resolved
+      per_board_dict = self._run.attrs.metadata.GetDict()['board-metadata']
+      for board, board_metadata in per_board_dict.items():
+        db.InsertBoardPerBuild(build_id, board)
+        if board_metadata:
+          db.UpdateBoardPerBuildMetadata(build_id, board, board_metadata)
+      for child_config in self._run.attrs.metadata.GetValue('child-configs'):
+        db.InsertChildConfigPerBuild(build_id, child_config['name'])
+
+      # If this build has a master build, ensure that the master full_version
+      # is the same as this build's full_version. This is a sanity check to
+      # avoid bugs in master-slave logic.
+      master_id = self._run.attrs.metadata.GetDict().get('master_build_id')
+      if master_id is not None:
+        master_full_version = db.GetBuildStatus(master_id)['full_version']
+        my_full_version = self._run.attrs.metadata.GetValue('version').get(
+            'full')
+        if master_full_version != my_full_version:
+          raise failures_lib.MasterSlaveVersionMismatchFailure(
+              'Master build id %s has full_version %s, while slave version is '
+              '%s.' % (master_id, master_full_version, my_full_version))
+
+    # Abort previous hw test suites. This happens after reexecution as it
+    # requires chromite/third_party/swarming.client, which is not available
+    # until after reexecution.
+    self._AbortPreviousHWTestSuites()
+
+
+class ReportStage(generic_stages.BuilderStage,
+                  generic_stages.ArchivingStageMixin):
+  """Summarize all the builds."""
+
+  # Template for the uploaded archive index page; filled in with
+  # board/version/config via %-interpolation in _UploadArchiveIndex.
+  _HTML_HEAD = """<html>
+<head>
+ <title>Archive Index: %(board)s / %(version)s</title>
+</head>
+<body>
+<h2>Artifacts Index: %(board)s / %(version)s (%(config)s config)</h2>"""
+
+  def __init__(self, builder_run, completion_instance, **kwargs):
+    """Initialize the report stage.
+
+    Args:
+      builder_run: BuilderRun for this run.
+      completion_instance: Stage instance that was used to wait for slave
+        completion (may be None); used when building report metadata.
+      **kwargs: Passed through to BuilderStage.
+    """
+    super(ReportStage, self).__init__(builder_run, **kwargs)
+
+    # TODO(mtennant): All these should be retrieved from builder_run instead.
+    # Or, more correctly, the info currently retrieved from these stages should
+    # be stored and retrieved from builder_run instead.
+    self._completion_instance = completion_instance
+
+  def _UpdateRunStreak(self, builder_run, final_status):
+    """Update the streak counter for this builder, if applicable, and notify.
+
+    Update the pass/fail streak counter for the builder.  If the new
+    streak should trigger a notification email then send it now.
+
+    Args:
+      builder_run: BuilderRun for this run.
+      final_status: Final status string for this run.
+    """
+    if builder_run.InProduction():
+      streak_value = self._UpdateStreakCounter(
+          final_status=final_status, counter_name=builder_run.config.name,
+          dry_run=self._run.debug)
+      # Positive streak == consecutive passes, negative == consecutive fails.
+      verb = 'passed' if streak_value > 0 else 'failed'
+      logging.info('Builder %s has %s %s time(s) in a row.',
+                   builder_run.config.name, verb, abs(streak_value))
+      # See if updated streak should trigger a notification email.
+      if (builder_run.config.health_alert_recipients and
+          builder_run.config.health_threshold > 0 and
+          streak_value <= -builder_run.config.health_threshold):
+        logging.info('Builder failed %i consecutive times, sending health '
+                     'alert email to %s.', -streak_value,
+                     builder_run.config.health_alert_recipients)
+
+        subject = '%s health alert' % builder_run.config.name
+        body = self._HealthAlertMessage(-streak_value)
+        extra_fields = {'X-cbuildbot-alert': 'cq-health'}
+        tree_status.SendHealthAlert(builder_run, subject, body,
+                                    extra_fields=extra_fields)
+
+  def _UpdateStreakCounter(self, final_status, counter_name,
+                           dry_run=False):
+    """Update the given streak counter based on the final status of build.
+
+    A streak counter counts the number of consecutive passes or failures of
+    a particular builder. Consecutive passes are indicated by a positive value,
+    consecutive failures by a negative value.
+
+    Args:
+      final_status: String indicating final status of build,
+                    constants.FINAL_STATUS_PASSED indicating success.
+      counter_name: Name of counter to increment, typically the name of the
+                    build config.
+      dry_run: Pretend to update counter only. Default: False.
+
+    Returns:
+      The new value of the streak counter.
+    """
+    gs_ctx = gs.GSContext(dry_run=dry_run)
+    counter_url = os.path.join(site_config.params.MANIFEST_VERSIONS_GS_URL,
+                               constants.STREAK_COUNTERS,
+                               counter_name)
+    gs_counter = gs.GSCounter(gs_ctx, counter_url)
+
+    if final_status == constants.FINAL_STATUS_PASSED:
+      streak_value = gs_counter.StreakIncrement()
+    else:
+      streak_value = gs_counter.StreakDecrement()
+
+    return streak_value
+
+  def _HealthAlertMessage(self, fail_count):
+    """Returns the body of a health alert email message."""
+    return 'The builder named %s has failed %i consecutive times. See %s' % (
+        self._run.config['name'], fail_count, self.ConstructDashboardURL())
+
+  def _SendPreCQInfraAlertMessageIfNeeded(self):
+    """Send alerts on Pre-CQ infra failures."""
+    msg = completion_stages.CreateBuildFailureMessage(
+        self._run.config.overlays,
+        self._run.config.name,
+        self._run.ConstructDashboardURL())
+    # Only alert for pre-cq configs, and only when the failure was an
+    # infrastructure failure (not a legitimate test/build failure).
+    pre_cq = self._run.config.pre_cq
+    if pre_cq and msg.HasFailureType(failures_lib.InfrastructureFailure):
+      name = self._run.config.name
+      title = 'pre-cq infra failures'
+      body = ['%s failed on %s' % (name, cros_build_lib.GetHostName()),
+              '%s' % msg]
+      extra_fields = {'X-cbuildbot-alert': 'pre-cq-infra-alert'}
+      tree_status.SendHealthAlert(self._run, title, '\n\n'.join(body),
+                                  extra_fields=extra_fields)
+
+  def _UploadMetadataForRun(self, final_status):
+    """Upload metadata.json for this entire run.
+
+    Args:
+      final_status: Final status string for this run.
+    """
+    self._run.attrs.metadata.UpdateWithDict(
+        self.GetReportMetadata(final_status=final_status,
+                               completion_instance=self._completion_instance))
+    self.UploadMetadata()
+
+  def _UploadArchiveIndex(self, builder_run):
+    """Upload an HTML index for the artifacts at remote archive location.
+
+    If there are no artifacts in the archive then do nothing.
+
+    Args:
+      builder_run: BuilderRun object for this run.
+
+    Returns:
+      If an index file is uploaded then a dict is returned where each value
+        is the same (the URL for the uploaded HTML index) and the keys are
+        the boards it applies to, including None if applicable.  If no index
+        file is uploaded then this returns None.
+    """
+    archive = builder_run.GetArchive()
+    archive_path = archive.archive_path
+
+    config = builder_run.config
+    boards = config.boards
+    if boards:
+      board_names = ' '.join(boards)
+    else:
+      # Boardless runs still get an index, keyed by None.
+      boards = [None]
+      board_names = '<no board>'
+
+    # See if there are any artifacts found for this run.
+    uploaded = os.path.join(archive_path, commands.UPLOADED_LIST_FILENAME)
+    if not os.path.exists(uploaded):
+      # UPLOADED doesn't exist.  Normal if Archive stage never ran, which
+      # is possibly normal.  Regardless, no archive index is needed.
+      # Falls through to an implicit None return.
+      logging.info('No archived artifacts found for %s run (%s)',
+                   builder_run.config.name, board_names)
+
+    else:
+      # Prepare html head.
+      head_data = {
+          'board': board_names,
+          'config': config.name,
+          'version': builder_run.GetVersion(),
+      }
+      head = self._HTML_HEAD % head_data
+
+      # Add navigation entries on top of the list of uploaded artifacts.
+      files = osutils.ReadFile(uploaded).splitlines() + [
+          '.|Google Storage Index',
+          '..|',
+      ]
+      index = os.path.join(archive_path, 'index.html')
+      # TODO (sbasi) crbug.com/362776: Rework the way we do uploading to
+      # multiple buckets. Currently this can only be done in the Archive Stage
+      # therefore index.html will only end up in the normal Chrome OS bucket.
+      commands.GenerateHtmlIndex(index, files, url_base=archive.download_url,
+                                 head=head)
+      commands.UploadArchivedFile(
+          archive_path, [archive.upload_url], os.path.basename(index),
+          debug=self._run.debug, acl=self.acl)
+      return dict((b, archive.download_url + '/index.html') for b in boards)
+
+  def GetReportMetadata(self, config=None, stage=None, final_status=None,
+                        completion_instance=None):
+    """Generate ReportStage metadata.
+
+    Args:
+      config: The build config for this run.  Defaults to self._run.config.
+      stage: The stage name that this metadata file is being uploaded for.
+      final_status: Whether the build passed or failed. If None, the build
+        will be treated as still running.
+      completion_instance: The stage instance that was used to wait for slave
+        completion. Used to add slave build information to master builder's
+        metadata. If None, no such status information will be included. If not
+        None, this should be a derivative of MasterSlaveSyncCompletionStage.
+
+    Returns:
+      A JSON-able dictionary representation of the metadata object.
+    """
+    builder_run = self._run
+    config = config or builder_run.config
+
+    # Only a master with a suitable completion stage can report slave status.
+    get_statuses_from_slaves = (
+        config['master'] and
+        completion_instance and
+        isinstance(completion_instance,
+                   completion_stages.MasterSlaveSyncCompletionStage)
+    )
+
+    child_configs_list = GetChildConfigListMetadata(
+        child_configs=config['child_configs'],
+        config_status_map=completion_stages.GetBuilderSuccessMap(self._run,
+                                                                 final_status))
+
+    return metadata_lib.CBuildbotMetadata.GetReportMetadataDict(
+        builder_run, get_statuses_from_slaves,
+        config, stage, final_status, completion_instance,
+        child_configs_list)
+
+  def ArchiveResults(self, final_status):
+    """Archive our build results.
+
+    Args:
+      final_status: constants.FINAL_STATUS_PASSED or
+                    constants.FINAL_STATUS_FAILED
+
+    Returns:
+      A dictionary with the aggregated _UploadArchiveIndex results.
+    """
+    # Make sure local archive directory is prepared, if it was not already.
+    if not os.path.exists(self.archive_path):
+      self.archive.SetupArchivePath()
+
+    # Upload metadata, and update the pass/fail streak counter for the main
+    # run only. These aren't needed for the child builder runs.
+    self._UploadMetadataForRun(final_status)
+    self._UpdateRunStreak(self._run, final_status)
+
+    # Alert if the Pre-CQ has infra failures.
+    if final_status == constants.FINAL_STATUS_FAILED:
+      self._SendPreCQInfraAlertMessageIfNeeded()
+
+    # Iterate through each builder run, whether there is just the main one
+    # or multiple child builder runs.
+    archive_urls = {}
+    for builder_run in self._run.GetUngroupedBuilderRuns():
+      # Generate an index for archived artifacts if there are any.  All the
+      # archived artifacts for one run/config are in one location, so the index
+      # is only specific to each run/config.  In theory multiple boards could
+      # share that archive, but in practice it is usually one board.  A
+      # run/config without a board will also usually not have artifacts to
+      # archive, but that restriction is not assumed here.
+      run_archive_urls = self._UploadArchiveIndex(builder_run)
+      if run_archive_urls:
+        archive_urls.update(run_archive_urls)
+        # Check if the builder_run is tied to any boards and if so get all
+        # upload urls.
+        if final_status == constants.FINAL_STATUS_PASSED:
+          # Update the LATEST files if the build passed.
+          try:
+            upload_urls = self._GetUploadUrls(
+                'LATEST-*', builder_run=builder_run)
+          except portage_util.MissingOverlayException as e:
+            # If the build failed prematurely, some overlays might be
+            # missing. Ignore them in this stage.
+            logging.warning(e)
+          else:
+            archive = builder_run.GetArchive()
+            archive.UpdateLatestMarkers(builder_run.manifest_branch,
+                                        builder_run.debug,
+                                        upload_urls=upload_urls)
+
+    return archive_urls
+
+  def PerformStage(self):
+    """Perform the actual work for this stage.
+
+    This includes final metadata archival, and update CIDB with our final status
+    as well as producting a logged build result summary.
+    """
+    if results_lib.Results.BuildSucceededSoFar():
+      final_status = constants.FINAL_STATUS_PASSED
+    else:
+      final_status = constants.FINAL_STATUS_FAILED
+
+    if not hasattr(self._run.attrs, 'release_tag'):
+      # If, for some reason, sync stage was not completed and
+      # release_tag was not set. Set it to None here because
+      # ArchiveResults() depends the existence of this attr.
+      self._run.attrs.release_tag = None
+
+    # Some operations can only be performed if a valid version is available.
+    try:
+      self._run.GetVersionInfo()
+      archive_urls = self.ArchiveResults(final_status)
+      metadata_url = os.path.join(self.upload_url, constants.METADATA_JSON)
+    except cbuildbot_run.VersionNotSetError:
+      logging.error('A valid version was never set for this run. '
+                    'Can not archive results.')
+      archive_urls = ''
+      metadata_url = ''
+
+
+    results_lib.Results.Report(
+        sys.stdout, archive_urls=archive_urls,
+        current_version=(self._run.attrs.release_tag or ''))
+
+    retry_stats.ReportStats(sys.stdout)
+
+    build_id, db = self._run.GetCIDBHandle()
+    if db:
+      # TODO(akeshet): Eliminate this status string translate once
+      # these differing status strings are merged, crbug.com/318930
+      translateStatus = lambda s: (constants.BUILDER_STATUS_PASSED
+                                   if s == constants.FINAL_STATUS_PASSED
+                                   else constants.BUILDER_STATUS_FAILED)
+      status_for_db = translateStatus(final_status)
+
+      child_metadatas = self._run.attrs.metadata.GetDict().get(
+          'child-configs', [])
+      for child_metadata in child_metadatas:
+        db.FinishChildConfig(build_id, child_metadata['name'],
+                             translateStatus(child_metadata['status']))
+
+      # TODO(pprabhu): After BuildData and CBuildbotMetdata are merged, remove
+      # this extra temporary object creation.
+      # XXX:HACK We're creating a BuildData with an empty URL. Don't try to
+      # MarkGathered this object.
+      build_data = metadata_lib.BuildData("",
+                                          self._run.attrs.metadata.GetDict())
+      # TODO(akeshet): Find a clearer way to get the "primary upload url" for
+      # the metadata.json file. One alternative is _GetUploadUrls(...)[0].
+      # Today it seems that element 0 of its return list is the primary upload
+      # url, but there is no guarantee or unit test coverage of that.
+      db.FinishBuild(build_id, status=status_for_db,
+                     summary=build_data.failure_message,
+                     metadata_url=metadata_url)
+
+
+class RefreshPackageStatusStage(generic_stages.BuilderStage):
+  """Stage for refreshing Portage package status in online spreadsheet."""
+
+  def PerformStage(self):
+    commands.RefreshPackageStatus(buildroot=self._build_root,
+                                  boards=self._boards,
+                                  debug=self._run.options.debug)
+
+
+class DetectIrrelevantChangesStage(generic_stages.BoardSpecificBuilderStage):
+  """Stage to detect irrelevant changes for slave per board base.
+
+  This stage will get the irrelevant changes for the current board of the build,
+  and record the irrelevant changes and the subsystem of the relevant changes
+  test to board_metadata.
+  """
+
+  def __init__(self, builder_run, board, changes, suffix=None, **kwargs):
+    """Initialize the stage.
+
+    Args:
+      builder_run: BuilderRun object for this run.
+      board: Name of the board this stage operates on.
+      changes: List of GerritPatch instances to classify.
+      suffix: Optional stage name suffix, passed to the parent class.
+      **kwargs: Passed through to BoardSpecificBuilderStage.
+    """
+    super(DetectIrrelevantChangesStage, self).__init__(builder_run, board,
+                                                       suffix=suffix, **kwargs)
+    # changes is a list of GerritPatch instances.
+    self.changes = changes
+
+  def _GetIrrelevantChangesBoardBase(self, changes):
+    """Calculates irrelevant changes to the current board.
+
+    Args:
+      changes: List of GerritPatch instances to classify.
+
+    Returns:
+      A subset of |changes| which are irrelevant to current board.
+    """
+    manifest = git.ManifestCheckout.Cached(self._build_root)
+    packages = self._GetPackagesUnderTestForCurrentBoard()
+
+    irrelevant_changes = triage_lib.CategorizeChanges.GetIrrelevantChanges(
+        changes, self._run.config, self._build_root, manifest, packages)
+    return irrelevant_changes
+
+  def _GetPackagesUnderTestForCurrentBoard(self):
+    """Get a list of packages used in this build for current board.
+
+    Returns:
+      A set of packages used in this build. E.g.,
+      set(['chromeos-base/chromite-0.0.1-r1258']); returns None if
+      the information is missing for any board in the current config.
+    """
+    packages_under_test = set()
+
+    # Aggregate package lists from the main run and all child runs; if any
+    # run failed to record its packages, the result cannot be trusted.
+    for run in [self._run] + self._run.GetChildren():
+      board_runattrs = run.GetBoardRunAttrs(self._current_board)
+      if not board_runattrs.HasParallel('packages_under_test'):
+        logging.warning('Packages under test were not recorded correctly')
+        return None
+      packages_under_test.update(
+          board_runattrs.GetParallel('packages_under_test'))
+
+    return packages_under_test
+
+  def GetSubsystemToTest(self, relevant_changes):
+    """Get subsystems from relevant cls for current board, write to BOARD_ATTRS.
+
+    Args:
+      relevant_changes: A set of changes that are relevant to current board.
+
+    Returns:
+      A set of the subsystems. An empty set indicates that all subsystems should
+      be tested.
+    """
+    # Go through all the relevant changes, collect subsystem info from them. If
+    # there exists a change without subsystem info, we assume it affects all
+    # subsystems. Then set the superset of all the subsystems to be empty, which
+    # means that need to test all subsystems.
+    subsystem_set = set()
+    for change in relevant_changes:
+      sys_lst = triage_lib.GetTestSubsystemForChange(self._build_root, change)
+      if sys_lst:
+        subsystem_set = subsystem_set.union(sys_lst)
+      else:
+        # A change with no subsystem info affects everything, so collapse the
+        # answer to "test all subsystems" (the empty set) and stop early.
+        subsystem_set = set()
+        break
+
+    return subsystem_set
+
+  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
+  def PerformStage(self):
+    """Run DetectIrrelevantChangesStage."""
+    irrelevant_changes = None
+    if not self._run.config.master:
+      # Slave writes the irrelevant changes to current board to metadata.
+      irrelevant_changes = self._GetIrrelevantChangesBoardBase(self.changes)
+      change_dict_list = [c.GetAttributeDict() for c in irrelevant_changes]
+      # Sort so the recorded metadata has a deterministic ordering.
+      change_dict_list = sorted(change_dict_list,
+                                key=lambda x: (x[cros_patch.ATTR_GERRIT_NUMBER],
+                                               x[cros_patch.ATTR_PATCH_NUMBER],
+                                               x[cros_patch.ATTR_REMOTE]))
+
+      self._run.attrs.metadata.UpdateBoardDictWithDict(
+          self._current_board, {'irrelevant_changes': change_dict_list})
+
+    if irrelevant_changes:
+      relevant_changes = list(set(self.changes) - irrelevant_changes)
+      logging.info('Below are the irrelevant changes for board: %s.',
+                   self._current_board)
+      (validation_pool.ValidationPool.
+       PrintLinksToChanges(list(irrelevant_changes)))
+    else:
+      relevant_changes = self.changes
+
+    subsystem_set = self.GetSubsystemToTest(relevant_changes)
+    logging.info('Subsystems need to be tested: %s. Empty set represents '
+                 'testing all subsystems.', subsystem_set)
+    # Record subsystems to metadata
+    self._run.attrs.metadata.UpdateBoardDictWithDict(
+        self._current_board, {'subsystems_to_test': list(subsystem_set)})
diff --git a/cbuildbot/stages/report_stages_unittest b/cbuildbot/stages/report_stages_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/stages/report_stages_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/stages/report_stages_unittest.py b/cbuildbot/stages/report_stages_unittest.py
new file mode 100644
index 0000000..c6a4609
--- /dev/null
+++ b/cbuildbot/stages/report_stages_unittest.py
@@ -0,0 +1,309 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for report stages."""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot import cbuildbot_unittest
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot import metadata_lib
+from chromite.cbuildbot import results_lib
+from chromite.cbuildbot import triage_lib
+from chromite.cbuildbot.stages import generic_stages_unittest
+from chromite.cbuildbot.stages import report_stages
+from chromite.lib import alerts
+from chromite.lib import cidb
+from chromite.lib import cros_build_lib
+from chromite.lib import fake_cidb
+from chromite.lib import gs_unittest
+from chromite.lib import osutils
+from chromite.lib import patch_unittest
+from chromite.lib import retry_stats
+from chromite.lib import toolchain
+
+
+# pylint: disable=protected-access
+# pylint: disable=too-many-ancestors
+
+
+class BuildReexecutionStageTest(generic_stages_unittest.AbstractStageTestCase):
+  """Tests that BuildReexecutionFinishedStage behaves as expected."""
+  def setUp(self):
+    self.fake_db = fake_cidb.FakeCIDBConnection()
+    cidb.CIDBConnectionFactory.SetupMockCidb(self.fake_db)
+    build_id = self.fake_db.InsertBuild(
+        'builder name', 'waterfall', 1, 'build config', 'bot hostname')
+
+    self._Prepare(build_id=build_id)
+
+    release_tag = '4815.0.0-rc1'
+    self._run.attrs.release_tag = '4815.0.0-rc1'
+    fake_versioninfo = manifest_version.VersionInfo(release_tag, '39')
+    self.gs_mock = self.StartPatcher(gs_unittest.GSContextMock())
+    self.gs_mock.SetDefaultCmdResult()
+    self.PatchObject(cbuildbot_run._BuilderRunBase, 'GetVersionInfo',
+                     return_value=fake_versioninfo)
+    self.PatchObject(toolchain, 'GetToolchainsForBoard')
+
+  def tearDown(self):
+    cidb.CIDBConnectionFactory.SetupMockCidb()
+
+  def testPerformStage(self):
+    """Test that a normal runs completes without error."""
+    self.RunStage()
+
+  def testMasterSlaveVersionMismatch(self):
+    """Test that master/slave version mismatch causes failure."""
+    master_release_tag = '9999.0.0-rc1'
+    master_build_id = self.fake_db.InsertBuild(
+        'master', constants.WATERFALL_INTERNAL, 2, 'master config',
+        'master hostname')
+    master_metadata = metadata_lib.CBuildbotMetadata()
+    master_metadata.UpdateKeyDictWithDict(
+        'version', {'full' : 'R39-9999.0.0-rc1',
+                    'milestone': '39',
+                    'platform': master_release_tag})
+    self._run.attrs.metadata.UpdateWithDict(
+        {'master_build_id': master_build_id})
+    self.fake_db.UpdateMetadata(master_build_id, master_metadata)
+
+    stage = self.ConstructStage()
+    with self.assertRaises(failures_lib.StepFailure):
+      stage.Run()
+
+  def ConstructStage(self):
+    return report_stages.BuildReexecutionFinishedStage(self._run)
+
+class BuildStartStageTest(generic_stages_unittest.AbstractStageTestCase):
+  """Tests that BuildStartStage behaves as expected."""
+
+  def setUp(self):
+    """Set up a fake CIDB with a master build for the stage to reference."""
+    self.db = fake_cidb.FakeCIDBConnection()
+    cidb.CIDBConnectionFactory.SetupMockCidb(self.db)
+    retry_stats.SetupStats()
+    # The stage reads the waterfall name from this environment variable.
+    os.environ['BUILDBOT_MASTERNAME'] = constants.WATERFALL_EXTERNAL
+
+    master_build_id = self.db.InsertBuild(
+        'master_build', constants.WATERFALL_EXTERNAL, 1,
+        'master_build_config', 'bot_hostname')
+
+    self._Prepare(build_id=None, master_build_id=master_build_id)
+
+  def testUnknownWaterfall(self):
+    """Test that an assertion is thrown if master name is not valid."""
+    os.environ['BUILDBOT_MASTERNAME'] = 'gibberish'
+    self.assertRaises(failures_lib.StepFailure, self.RunStage)
+
+  def testPerformStage(self):
+    """Test that a normal run of the stage does a database insert."""
+    self.RunStage()
+
+    build_id = self._run.attrs.metadata.GetValue('build_id')
+    self.assertGreater(build_id, 0)
+    self.assertEqual(self._run.attrs.metadata.GetValue('db_type'),
+                     cidb.CONNECTION_TYPE_MOCK)
+
+  def testHandleSkipWithInstanceChange(self):
+    """Test that HandleSkip disables cidb and dies when necessary."""
+    # This test verifies that switching to a 'mock' database type once
+    # metadata already has an id in 'previous_db_type' will fail.
+    self._run.attrs.metadata.UpdateWithDict({'build_id': 31337,
+                                             'db_type': 'previous_db_type'})
+    stage = self.ConstructStage()
+    self.assertRaises(AssertionError, stage.HandleSkip)
+    self.assertEqual(cidb.CIDBConnectionFactory.GetCIDBConnectionType(),
+                     cidb.CONNECTION_TYPE_INV)
+    # The above test has the side effect of invalidating CIDBConnectionFactory.
+    # Undo that side effect so other unit tests can run.
+    cidb.CIDBConnectionFactory.SetupMockCidb()
+
+  def testHandleSkipWithNoDbType(self):
+    """Test that HandleSkip passes when db_type is missing."""
+    self._run.attrs.metadata.UpdateWithDict({'build_id': 31337})
+    stage = self.ConstructStage()
+    stage.HandleSkip()
+
+  def testHandleSkipWithDbType(self):
+    """Test that HandleSkip passes when db_type is specified."""
+    self._run.attrs.metadata.UpdateWithDict(
+        {'build_id': 31337,
+         'db_type': cidb.CONNECTION_TYPE_MOCK})
+    stage = self.ConstructStage()
+    stage.HandleSkip()
+
+  def ConstructStage(self):
+    """Build the stage under test."""
+    return report_stages.BuildStartStage(self._run)
+
+
+class AbstractReportStageTestCase(
+    generic_stages_unittest.AbstractStageTestCase,
+    cbuildbot_unittest.SimpleBuilderTestCase):
+  """Base class for testing the Report stage."""
+
+  def setUp(self):
+    """Patch out I/O-heavy helpers and set up a mock CIDB connection."""
+    for cmd in ((osutils, 'WriteFile'),
+                (commands, 'UploadArchivedFile'),
+                (alerts, 'SendEmail')):
+      self.StartPatcher(mock.patch.object(*cmd, autospec=True))
+    retry_stats.SetupStats()
+
+    # Set up a general purpose cidb mock. Tests with more specific
+    # mock requirements can replace this with a separate call to
+    # SetupMockCidb
+    cidb.CIDBConnectionFactory.SetupMockCidb(mock.MagicMock())
+
+    self._Prepare()
+
+  def _SetupUpdateStreakCounter(self, counter_value=-1):
+    """Patch _UpdateStreakCounter to always return |counter_value|."""
+    self.PatchObject(report_stages.ReportStage, '_UpdateStreakCounter',
+                     autospec=True, return_value=counter_value)
+
+  def ConstructStage(self):
+    """Build the stage under test."""
+    return report_stages.ReportStage(self._run, None)
+
+
+class ReportStageTest(AbstractReportStageTestCase):
+  """Test the Report stage."""
+
+  RELEASE_TAG = ''
+
+  def testCheckResults(self):
+    """Basic sanity check for results stage functionality"""
+    self._SetupUpdateStreakCounter()
+    self.PatchObject(report_stages.ReportStage, '_UploadArchiveIndex',
+                     return_value={'any': 'dict'})
+    self.RunStage()
+    filenames = (
+        'LATEST-%s' % self.TARGET_MANIFEST_BRANCH,
+        'LATEST-%s' % self.VERSION,
+    )
+    calls = [mock.call(mock.ANY, mock.ANY, 'metadata.json', False,
+                       update_list=True, acl=mock.ANY)]
+    calls += [mock.call(mock.ANY, mock.ANY, filename, False,
+                        acl=mock.ANY) for filename in filenames]
+    self.assertEquals(calls, commands.UploadArchivedFile.call_args_list)
+
+  def testDoNotUpdateLATESTMarkersWhenBuildFailed(self):
+    """Check that we do not update the latest markers on failed build."""
+    self._SetupUpdateStreakCounter()
+    self.PatchObject(report_stages.ReportStage, '_UploadArchiveIndex',
+                     return_value={'any': 'dict'})
+    self.PatchObject(results_lib.Results, 'BuildSucceededSoFar',
+                     return_value=False)
+    stage = self.ConstructStage()
+    stage.Run()
+    calls = [mock.call(mock.ANY, mock.ANY, 'metadata.json', False,
+                       update_list=True, acl=mock.ANY)]
+    self.assertEquals(calls, commands.UploadArchivedFile.call_args_list)
+
+  def testAlertEmail(self):
+    """Send out alerts when streak counter reaches the threshold."""
+    self.PatchObject(cbuildbot_run._BuilderRunBase,
+                     'InProduction', return_value=True)
+    self.PatchObject(cros_build_lib, 'HostIsCIBuilder', return_value=True)
+    self._Prepare(extra_config={'health_threshold': 3,
+                                'health_alert_recipients': ['foo@bar.org']})
+    self._SetupUpdateStreakCounter(counter_value=-3)
+    self.RunStage()
+    # The mocking logic gets confused with SendEmail.
+    # pylint: disable=no-member
+    self.assertGreater(alerts.SendEmail.call_count, 0,
+                       'CQ health alerts emails were not sent.')
+
+  def testAlertEmailOnFailingStreak(self):
+    """Continue sending out alerts when streak counter exceeds the threshold."""
+    self.PatchObject(cbuildbot_run._BuilderRunBase,
+                     'InProduction', return_value=True)
+    self.PatchObject(cros_build_lib, 'HostIsCIBuilder', return_value=True)
+    self._Prepare(extra_config={'health_threshold': 3,
+                                'health_alert_recipients': ['foo@bar.org']})
+    self._SetupUpdateStreakCounter(counter_value=-5)
+    self.RunStage()
+    # The mocking logic gets confused with SendEmail.
+    # pylint: disable=no-member
+    self.assertGreater(alerts.SendEmail.call_count, 0,
+                       'CQ health alerts emails were not sent.')
+
+  def testWriteBasicMetadata(self):
+    """Test that WriteBasicMetadata writes expected keys correctly."""
+    report_stages.WriteBasicMetadata(self._run)
+    metadata_dict = self._run.attrs.metadata.GetDict()
+    self.assertEqual(metadata_dict['build-number'],
+                     generic_stages_unittest.DEFAULT_BUILD_NUMBER)
+    self.assertTrue(metadata_dict.has_key('builder-name'))
+    self.assertTrue(metadata_dict.has_key('bot-hostname'))
+
+  def testGetChildConfigsMetadataList(self):
+    """Test that GetChildConfigListMetadata generates child config metadata."""
+    child_configs = [{'name': 'config1', 'boards': ['board1']},
+                     {'name': 'config2', 'boards': ['board2']}]
+    config_status_map = {'config1': True,
+                         'config2': False}
+    expected = [{'name': 'config1', 'boards': ['board1'],
+                 'status': constants.FINAL_STATUS_PASSED},
+                {'name': 'config2', 'boards': ['board2'],
+                 'status': constants.FINAL_STATUS_FAILED}]
+    child_config_list = report_stages.GetChildConfigListMetadata(
+        child_configs, config_status_map)
+    self.assertEqual(expected, child_config_list)
+
+
+class ReportStageNoSyncTest(AbstractReportStageTestCase):
+  """Test the Report stage if SyncStage didn't complete.
+
+  If SyncStage doesn't complete, we don't know the release tag, and can't
+  archive results.
+  """
+  # A None release tag simulates a run where sync never set one.
+  RELEASE_TAG = None
+
+  def testCommitQueueResults(self):
+    """Check that we can run with a RELEASE_TAG of None."""
+    self._SetupUpdateStreakCounter()
+    self.RunStage()
+
+
+class DetectIrrelevantChangesStageTest(
+    generic_stages_unittest.AbstractStageTestCase,
+    patch_unittest.MockPatchBase):
+  """Test the DetectIrrelevantChangesStage."""
+
+  def setUp(self):
+    """Create fake Gerrit patches used as the stage's |changes| input."""
+    self.changes = self.GetPatches(how_many=2)
+
+    self._Prepare()
+
+  def testGetSubsystemsWithoutEmptyEntry(self):
+    """Tests the logic of GetSubsystemTobeTested() under normal case."""
+    relevant_changes = self.changes
+    # Each change reports subsystem info; expect the union of both lists.
+    self.PatchObject(triage_lib, 'GetTestSubsystemForChange',
+                     side_effect=[['light'], ['light', 'power']])
+
+    expected = {'light', 'power'}
+    stage = self.ConstructStage()
+    results = stage.GetSubsystemToTest(relevant_changes)
+    self.assertEqual(results, expected)
+
+  def testGetSubsystemsWithEmptyEntry(self):
+    """Tests whether return empty set when have empty entry in subsystems."""
+    relevant_changes = self.changes
+    # The second change has no subsystem info, which collapses the result
+    # to the empty set ("test all subsystems").
+    self.PatchObject(triage_lib, 'GetTestSubsystemForChange',
+                     side_effect=[['light'], []])
+
+    expected = set()
+    stage = self.ConstructStage()
+    results = stage.GetSubsystemToTest(relevant_changes)
+    self.assertEqual(results, expected)
+
+  def ConstructStage(self):
+    """Build the stage under test."""
+    return report_stages.DetectIrrelevantChangesStage(self._run,
+                                                      self._current_board,
+                                                      self.changes)
diff --git a/cbuildbot/stages/sdk_stages.py b/cbuildbot/stages/sdk_stages.py
new file mode 100644
index 0000000..bfe8c7e
--- /dev/null
+++ b/cbuildbot/stages/sdk_stages.py
@@ -0,0 +1,314 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing SDK stages."""
+
+from __future__ import print_function
+
+import glob
+import json
+import os
+
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import constants
+from chromite.cbuildbot.stages import generic_stages
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import perf_uploader
+from chromite.lib import portage_util
+from chromite.lib import retry_util
+from chromite.lib import toolchain
+
+
+# Version of the Manifest file being generated for SDK artifacts. Should be
+# incremented for major format changes.
+PACKAGE_MANIFEST_VERSION = '1'
+
+# Paths excluded when packaging SDK artifacts. These are relative to the target
+# build root where SDK packages are being installed (e.g. /build/amd64-host).
+PACKAGE_EXCLUDED_PATHS = (
+    'usr/lib/debug',
+    'usr/lib64/debug',
+    constants.AUTOTEST_BUILD_PATH,
+    'packages',
+    'tmp'
+)
+
+# Names of various packaged artifacts.
+SDK_TARBALL_NAME = 'built-sdk.tar.xz'
+TOOLCHAINS_OVERLAY_TARBALL_TEMPLATE = \
+    'built-sdk-overlay-toolchains-%(toolchains)s.tar.xz'
+
+
+def SdkPerfPath(buildroot):
+  """Return the path to the perf file for sdk stages."""
+  return os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR, 'tmp',
+                      'cros-sdk.perf')
+
+
+def CreateTarball(source_root, tarball_path, exclude_paths=None):
+  """Packs |source_root| into |tarball_path|.
+
+  Args:
+    source_root: Path to the directory we want to package.
+    tarball_path: Path of the tarball that should be created.
+    exclude_paths: Subdirectories to exclude.
+  """
+  # TODO(zbehan): We cannot use xz from the chroot unless it's
+  # statically linked.
+  extra_args = None
+  if exclude_paths is not None:
+    extra_args = ['--exclude=%s/*' % path for path in exclude_paths]
+  # Options for maximum compression.
+  extra_env = {'XZ_OPT': '-e9'}
+  cros_build_lib.CreateTarball(
+      tarball_path, source_root, sudo=True, extra_args=extra_args,
+      debug_level=logging.INFO, extra_env=extra_env)
+  # Make sure the regular user has the permission to read.
+  cmd = ['chmod', 'a+r', tarball_path]
+  cros_build_lib.SudoRunCommand(cmd)
+
+
+class SDKBuildToolchainsStage(generic_stages.BuilderStage):
+  """Stage that builds all the cross-compilers we care about"""
+
+  def PerformStage(self):
+    """Build the toolchains, then package them for redistribution."""
+    chroot_location = os.path.join(self._build_root,
+                                   constants.DEFAULT_CHROOT_DIR)
+
+    # Build the toolchains first.  Since we're building & installing the
+    # compilers, need to run as root.
+    self.CrosSetupToolchains(['--nousepkg'], sudo=True)
+
+    # Create toolchain packages.
+    self.CreateRedistributableToolchains(chroot_location)
+
+  def CrosSetupToolchains(self, cmd_args, **kwargs):
+    """Wrapper around cros_setup_toolchains to simplify things.
+
+    Args:
+      cmd_args: Extra command line arguments for cros_setup_toolchains.
+      **kwargs: Passed through to commands.RunBuildScript.
+    """
+    commands.RunBuildScript(self._build_root,
+                            ['cros_setup_toolchains'] + list(cmd_args),
+                            chromite_cmd=True, enter_chroot=True, **kwargs)
+
+  def CreateRedistributableToolchains(self, chroot_location):
+    """Create the toolchain packages
+
+    Args:
+      chroot_location: Path to the chroot that holds the built toolchains.
+    """
+    # Start from a clean output directory.
+    osutils.RmDir(os.path.join(chroot_location,
+                               constants.SDK_TOOLCHAINS_OUTPUT),
+                  ignore_missing=True)
+
+    # We need to run this as root because the tool creates hard links to root
+    # owned files and our bots enable security features which disallow that.
+    # Specifically, these features cause problems:
+    #  /proc/sys/kernel/yama/protected_nonaccess_hardlinks
+    #  /proc/sys/fs/protected_hardlinks
+    self.CrosSetupToolchains([
+        '--create-packages',
+        '--output-dir', os.path.join('/', constants.SDK_TOOLCHAINS_OUTPUT),
+    ], sudo=True)
+
+
+class SDKPackageStage(generic_stages.BuilderStage):
+  """Stage that performs preparing and packaging SDK files"""
+
+  def __init__(self, builder_run, version=None, **kwargs):
+    """Initialize the stage.
+
+    Args:
+      builder_run: BuilderRun object for this run.
+      version: Optional SDK version string; used when uploading perf data.
+      **kwargs: Passed through to BuilderStage.
+    """
+    self.sdk_version = version
+    super(SDKPackageStage, self).__init__(builder_run, **kwargs)
+
+  def PerformStage(self):
+    """Tar up the built SDK, write its manifest, and upload perf data."""
+    tarball_location = os.path.join(self._build_root, SDK_TARBALL_NAME)
+    chroot_location = os.path.join(self._build_root,
+                                   constants.DEFAULT_CHROOT_DIR)
+    board_location = os.path.join(chroot_location, 'build/amd64-host')
+    manifest_location = tarball_location + '.Manifest'
+
+    # Create a tarball of the latest SDK.
+    CreateTarball(board_location, tarball_location)
+
+    # Create a package manifest for the tarball.
+    self.CreateManifestFromSDK(board_location, manifest_location)
+
+    self.SendPerfValues(tarball_location)
+
+  def CreateManifestFromSDK(self, sdk_path, dest_manifest):
+    """Creates a manifest from a given source chroot.
+
+    Args:
+      sdk_path: Path to the root of the SDK to describe.
+      dest_manifest: Path to the manifest that should be generated.
+    """
+    logging.info('Generating manifest for new sdk')
+    package_data = {}
+    for key, version in portage_util.ListInstalledPackages(sdk_path):
+      package_data.setdefault(key, []).append((version, {}))
+    self._WriteManifest(package_data, dest_manifest)
+
+  def _WriteManifest(self, data, manifest):
+    """Encode manifest into a json file.
+
+    Args:
+      data: Mapping of package name to a list of (version, metadata) tuples.
+      manifest: Path of the manifest file to write.
+    """
+    json_input = dict(version=PACKAGE_MANIFEST_VERSION, packages=data)
+    osutils.WriteFile(manifest, json.dumps(json_input))
+
+  @staticmethod
+  def _SendPerfValues(buildroot, sdk_tarball, buildbot_uri_log, version,
+                      platform_name):
+    """Generate & upload perf data for the build
+
+    Args:
+      buildroot: Path to the build root.
+      sdk_tarball: Path to the packaged SDK tarball.
+      buildbot_uri_log: URL recorded in the perf data's stdio_uri field.
+      version: Dotted version string, e.g. '1234.0.0'.
+      platform_name: Platform name reported to the perf dashboard.
+    """
+    perf_path = SdkPerfPath(buildroot)
+    test_name = 'sdk'
+    units = 'bytes'
+
+    # Make sure the file doesn't contain previous data.
+    osutils.SafeUnlink(perf_path)
+
+    common_kwargs = {
+        'higher_is_better': False,
+        'graph': 'cros-sdk-size',
+        'stdio_uri': buildbot_uri_log,
+    }
+
+    sdk_size = os.path.getsize(sdk_tarball)
+    perf_uploader.OutputPerfValue(perf_path, 'base', sdk_size, units,
+                                  **common_kwargs)
+
+    # Also report each toolchain tarball, alone and combined with the base.
+    for tarball in glob.glob(os.path.join(
+        buildroot, constants.DEFAULT_CHROOT_DIR,
+        constants.SDK_TOOLCHAINS_OUTPUT, '*.tar.*')):
+      name = os.path.basename(tarball).rsplit('.', 2)[0]
+      size = os.path.getsize(tarball)
+      perf_uploader.OutputPerfValue(perf_path, name, size, units,
+                                    **common_kwargs)
+      perf_uploader.OutputPerfValue(perf_path, 'base_plus_%s' % name,
+                                    sdk_size + size, units, **common_kwargs)
+
+    # Due to limitations in the perf dashboard, we have to create an integer
+    # based on the current timestamp.  This field only accepts integers, and
+    # the perf dashboard accepts this or CrOS+Chrome official versions.
+    revision = int(version.replace('.', ''))
+    perf_values = perf_uploader.LoadPerfValues(perf_path)
+    retry_util.RetryException(perf_uploader.PerfUploadingError, 3,
+                              perf_uploader.UploadPerfValues,
+                              perf_values, platform_name, test_name,
+                              revision=revision)
+
+  def SendPerfValues(self, sdk_tarball):
+    """Generate & upload perf data for the build"""
+    self._SendPerfValues(self._build_root, sdk_tarball,
+                         self.ConstructDashboardURL(), self.sdk_version,
+                         self._run.bot_id)
+
+
+class SDKPackageToolchainOverlaysStage(generic_stages.BuilderStage):
+  """Stage that creates and packages per-board toolchain overlays."""
+
+  def __init__(self, builder_run, version=None, **kwargs):
+    """Initialize the stage.
+
+    Args:
+      builder_run: BuilderRun object for this run.
+      version: Optional SDK version string.
+      **kwargs: Passed through to BuilderStage.
+    """
+    self.sdk_version = version
+    super(SDKPackageToolchainOverlaysStage, self).__init__(builder_run,
+                                                           **kwargs)
+
+  def PerformStage(self):
+    """Build and tar one toolchain overlay per unique toolchain combination."""
+    chroot_dir = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR)
+    sdk_dir = os.path.join(chroot_dir, 'build/amd64-host')
+    tmp_dir = os.path.join(chroot_dir, 'tmp')
+    osutils.SafeMakedirs(tmp_dir, mode=0o777, sudo=True)
+    overlay_output_dir = os.path.join(chroot_dir,
+                                      constants.SDK_OVERLAYS_OUTPUT)
+    # Start from a clean output directory.
+    osutils.RmDir(overlay_output_dir, ignore_missing=True, sudo=True)
+    osutils.SafeMakedirs(overlay_output_dir, mode=0o777, sudo=True)
+    overlay_tarball_template = os.path.join(
+        overlay_output_dir, TOOLCHAINS_OVERLAY_TARBALL_TEMPLATE)
+
+    # Generate an overlay tarball for each unique toolchain combination. We
+    # restrict ourselves to (a) board configs that are available to the builder
+    # (naturally), and (b) toolchains that are part of the 'sdk' set.
+    sdk_toolchains = set(toolchain.GetToolchainsForBoard('sdk'))
+    generated = set()
+    for board in self._run.site_config.GetBoards():
+      try:
+        toolchains = set(toolchain.GetToolchainsForBoard(board).iterkeys())
+      except portage_util.MissingOverlayException:
+        # The board overlay may not exist, e.g. on external builders.
+        continue
+
+      toolchains_str = '-'.join(sorted(toolchains))
+      # Skip combinations outside the SDK toolchain set, and combinations
+      # already packaged for an earlier board.
+      if not toolchains.issubset(sdk_toolchains) or toolchains_str in generated:
+        continue
+
+      with osutils.TempDir(prefix='toolchains-overlay-%s.' % toolchains_str,
+                           base_dir=tmp_dir, sudo_rm=True) as overlay_dir:
+        # NOTE: We let MountOverlayContext remove the mount point created by
+        # the TempDir context below, because it has built-in retries for rmdir
+        # EBUSY errors that are due to unmount lag.
+        with osutils.TempDir(prefix='amd64-host-%s.' % toolchains_str,
+                             base_dir=tmp_dir, delete=False) as merged_dir:
+          with osutils.MountOverlayContext(sdk_dir, overlay_dir, merged_dir,
+                                           cleanup=True):
+            # Strip the chroot prefix so the path is valid inside the chroot.
+            sysroot = merged_dir[len(chroot_dir):]
+            cmd = ['cros_setup_toolchains', '--targets=boards',
+                   '--include-boards=%s' % board,
+                   '--sysroot=%s' % sysroot]
+            commands.RunBuildScript(self._build_root, cmd, chromite_cmd=True,
+                                    enter_chroot=True, sudo=True,
+                                    extra_env=self._portage_extra_env)
+
+        # NOTE: Make sure that the overlay directory is owned root:root and has
+        # 0o755 perms; apparently, these things are preserved through
+        # tarring/untarring and might cause havoc if overlooked.
+        os.chmod(overlay_dir, 0o755)
+        cros_build_lib.SudoRunCommand(['chown', 'root:root', overlay_dir])
+        CreateTarball(overlay_dir,
+                      overlay_tarball_template % {'toolchains': toolchains_str})
+
+      generated.add(toolchains_str)
+
+
+class SDKTestStage(generic_stages.BuilderStage):
+  """Stage that performs testing an SDK created in a previous stage"""
+
+  option_name = 'tests'
+
+  def PerformStage(self):
+    """Bootstrap a chroot from the new SDK tarball and build all boards."""
+    new_chroot_dir = 'new-sdk-chroot'
+    tarball_location = os.path.join(self._build_root, SDK_TARBALL_NAME)
+    new_chroot_args = ['--chroot', new_chroot_dir]
+    if self._run.options.chrome_root:
+      new_chroot_args += ['--chrome_root', self._run.options.chrome_root]
+
+    # Build a new SDK using the provided tarball.
+    chroot_args = new_chroot_args + ['--download', '--replace', '--nousepkg',
+                                     '--url', 'file://' + tarball_location]
+    cros_build_lib.RunCommand(
+        [], cwd=self._build_root, enter_chroot=True, chroot_args=chroot_args,
+        extra_env=self._portage_extra_env)
+
+    # Inject the toolchain binpkgs from the previous sdk build.  On end user
+    # systems, they'd be fetched from the binpkg mirror, but we don't have one
+    # set up for this local build.
+    pkgdir = os.path.join('var', 'lib', 'portage', 'pkgs')
+    old_pkgdir = os.path.join(self._build_root, constants.DEFAULT_CHROOT_DIR,
+                              pkgdir)
+    new_pkgdir = os.path.join(self._build_root, new_chroot_dir, pkgdir)
+    osutils.SafeMakedirs(new_pkgdir, sudo=True)
+    cros_build_lib.SudoRunCommand(
+        ['cp', '-r'] + glob.glob(os.path.join(old_pkgdir, 'cross-*')) +
+        [new_pkgdir])
+
+    # Now install those toolchains in the new chroot.  We skip the chroot
+    # upgrade below which means we need to install the toolchain manually.
+    cmd = ['cros_setup_toolchains', '--targets=boards',
+           '--include-boards=%s' % ','.join(self._boards)]
+    commands.RunBuildScript(self._build_root, cmd, chromite_cmd=True,
+                            enter_chroot=True, sudo=True,
+                            chroot_args=new_chroot_args,
+                            extra_env=self._portage_extra_env)
+
+    # Build all the boards with the new sdk.
+    for board in self._boards:
+      logging.PrintBuildbotStepText(board)
+      cmd = ['./setup_board', '--board', board, '--skip_chroot_upgrade']
+      cros_build_lib.RunCommand(
+          cmd, cwd=self._build_root, enter_chroot=True,
+          chroot_args=new_chroot_args, extra_env=self._portage_extra_env)
+      cmd = ['./build_packages', '--board', board, '--nousepkg',
+             '--skip_chroot_upgrade']
+      cros_build_lib.RunCommand(cmd, cwd=self._build_root, enter_chroot=True,
+                                chroot_args=new_chroot_args,
+                                extra_env=self._portage_extra_env)
diff --git a/cbuildbot/stages/sdk_stages_unittest b/cbuildbot/stages/sdk_stages_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/stages/sdk_stages_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/stages/sdk_stages_unittest.py b/cbuildbot/stages/sdk_stages_unittest.py
new file mode 100644
index 0000000..96e3d11
--- /dev/null
+++ b/cbuildbot/stages/sdk_stages_unittest.py
@@ -0,0 +1,272 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for SDK stages."""
+
+from __future__ import print_function
+
+import json
+import os
+
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import constants
+from chromite.cbuildbot.stages import sdk_stages
+from chromite.cbuildbot.stages import generic_stages_unittest
+from chromite.lib import cros_build_lib
+from chromite.lib import osutils
+from chromite.lib import perf_uploader
+from chromite.lib import portage_util
+from chromite.lib import toolchain
+
+
+class SDKBuildToolchainsStageTest(
+    generic_stages_unittest.AbstractStageTestCase):
+  """Tests SDK toolchain building."""
+
+  def setUp(self):
+    # This code has its own unit tests, so no need to go testing it here.
+    self.run_mock = self.PatchObject(commands, 'RunBuildScript')
+
+  def ConstructStage(self):
+    return sdk_stages.SDKBuildToolchainsStage(self._run)
+
+  def testNormal(self):
+    """Basic run through the main code."""
+    self._Prepare('chromiumos-sdk')
+    self.RunStage()
+    self.assertEqual(self.run_mock.call_count, 2)
+
+    # Sanity check args passed to RunBuildScript.
+    for call in self.run_mock.call_args_list:
+      buildroot, cmd = call[0]
+      self.assertTrue(isinstance(buildroot, basestring))
+      self.assertTrue(isinstance(cmd, (tuple, list)))
+      for ele in cmd:
+        self.assertTrue(isinstance(ele, basestring))
+
+
+class SDKPackageStageTest(generic_stages_unittest.AbstractStageTestCase):
+  """Tests SDK package and Manifest creation."""
+
+  fake_packages = (('cat1/package', '1'), ('cat1/package', '2'),
+                   ('cat2/package', '3'), ('cat2/package', '4'))
+
+  def setUp(self):
+    # Replace SudoRunCommand, since we don't care about sudo.
+    self.PatchObject(cros_build_lib, 'SudoRunCommand',
+                     wraps=cros_build_lib.RunCommand)
+
+    # Prepare a fake chroot.
+    self.fake_chroot = os.path.join(self.build_root, 'chroot/build/amd64-host')
+    self.fake_json_data = {}
+    osutils.SafeMakedirs(self.fake_chroot)
+    osutils.Touch(os.path.join(self.fake_chroot, 'file'))
+    for package, v in self.fake_packages:
+      cpv = portage_util.SplitCPV('%s-%s' % (package, v))
+      key = '%s/%s' % (cpv.category, cpv.package)
+      self.fake_json_data.setdefault(key, []).append([v, {}])
+
+  def ConstructStage(self):
+    return sdk_stages.SDKPackageStage(self._run)
+
+  def testTarballCreation(self):
+    """Tests whether we package the tarball and correctly create a Manifest."""
+    # We'll test this separately.
+    self.PatchObject(sdk_stages.SDKPackageStage, '_SendPerfValues')
+
+    self._Prepare('chromiumos-sdk')
+    fake_tarball = os.path.join(self.build_root, 'built-sdk.tar.xz')
+    fake_manifest = os.path.join(self.build_root,
+                                 'built-sdk.tar.xz.Manifest')
+
+    self.PatchObject(portage_util, 'ListInstalledPackages',
+                     return_value=self.fake_packages)
+
+    self.RunStage()
+
+    # Check tarball for the correct contents.
+    output = cros_build_lib.RunCommand(
+        ['tar', '-I', 'xz', '-tvf', fake_tarball],
+        capture_output=True).output.splitlines()
+    # First line is './', use it as an anchor, count the chars, and strip as
+    # much from all other lines.
+    stripchars = len(output[0]) - 1
+    tar_lines = [x[stripchars:] for x in output]
+    self.assertNotIn('/build/amd64-host/', tar_lines)
+    self.assertIn('/file', tar_lines)
+    # Verify manifest contents.
+    real_json_data = json.loads(osutils.ReadFile(fake_manifest))
+    self.assertEqual(real_json_data['packages'],
+                     self.fake_json_data)
+
+  def testPerf(self):
+    """Check perf data points are generated/uploaded."""
+    m = self.PatchObject(perf_uploader, 'UploadPerfValues')
+
+    sdk_data = 'asldjfasf'
+    sdk_size = len(sdk_data)
+    sdk_tarball = os.path.join(self.tempdir, 'sdk.tar.xz')
+    osutils.WriteFile(sdk_tarball, sdk_data)
+
+    tarball_dir = os.path.join(self.tempdir, constants.DEFAULT_CHROOT_DIR,
+                               constants.SDK_TOOLCHAINS_OUTPUT)
+    arm_tar = os.path.join(tarball_dir, 'arm-cros-linux-gnu.tar.xz')
+    x86_tar = os.path.join(tarball_dir, 'i686-pc-linux-gnu.tar.xz')
+    osutils.Touch(arm_tar, makedirs=True)
+    osutils.Touch(x86_tar, makedirs=True)
+
+    # pylint: disable=protected-access
+    sdk_stages.SDKPackageStage._SendPerfValues(
+        self.tempdir, sdk_tarball, 'http://some/log', '123.4.5.6', 'sdk-bot')
+    # pylint: enable=protected-access
+
+    perf_values = m.call_args[0][0]
+    exp = perf_uploader.PerformanceValue(
+        description='base',
+        value=sdk_size,
+        units='bytes',
+        higher_is_better=False,
+        graph='cros-sdk-size',
+        stdio_uri='http://some/log',
+    )
+    self.assertEqual(exp, perf_values[0])
+
+    exp = set((
+        perf_uploader.PerformanceValue(
+            description='arm-cros-linux-gnu',
+            value=0,
+            units='bytes',
+            higher_is_better=False,
+            graph='cros-sdk-size',
+            stdio_uri='http://some/log',
+        ),
+        perf_uploader.PerformanceValue(
+            description='i686-pc-linux-gnu',
+            value=0,
+            units='bytes',
+            higher_is_better=False,
+            graph='cros-sdk-size',
+            stdio_uri='http://some/log',
+        ),
+        perf_uploader.PerformanceValue(
+            description='base_plus_arm-cros-linux-gnu',
+            value=sdk_size,
+            units='bytes',
+            higher_is_better=False,
+            graph='cros-sdk-size',
+            stdio_uri='http://some/log',
+        ),
+        perf_uploader.PerformanceValue(
+            description='base_plus_i686-pc-linux-gnu',
+            value=sdk_size,
+            units='bytes',
+            higher_is_better=False,
+            graph='cros-sdk-size',
+            stdio_uri='http://some/log',
+        ),
+    ))
+    self.assertEqual(exp, set(perf_values[1:]))
+
+    platform_name = m.call_args[0][1]
+    self.assertEqual(platform_name, 'sdk-bot')
+
+    test_name = m.call_args[0][2]
+    self.assertEqual(test_name, 'sdk')
+
+    kwargs = m.call_args[1]
+    self.assertEqual(kwargs['revision'], 123456)
+
+
+class SDKPackageToolchainOverlaysStageTest(
+    generic_stages_unittest.AbstractStageTestCase):
+  """Tests board toolchain overlay installation and packaging."""
+
+  def setUp(self):
+    # Mock out running of cros_setup_toolchains.
+    self.PatchObject(commands, 'RunBuildScript', wraps=self.FakeRunBuildScript)
+    self._setup_toolchain_cmds = []
+
+    # Prepare a fake chroot.
+    self.fake_chroot = os.path.join(self.build_root, 'chroot/build/amd64-host')
+    osutils.SafeMakedirs(self.fake_chroot)
+    osutils.Touch(os.path.join(self.fake_chroot, 'file'))
+
+  def FakeRunBuildScript(self, build_root, cmd, chromite_cmd=False, **kwargs):
+    if cmd[0] == 'cros_setup_toolchains':
+      self.assertEqual(self.build_root, build_root)
+      self.assertTrue(chromite_cmd)
+      self.assertTrue(kwargs.get('enter_chroot', False))
+      self.assertTrue(kwargs.get('sudo', False))
+
+      # Drop a uniquely named file in the toolchain overlay merged location.
+      sysroot = None
+      board = None
+      targets = None
+      for opt in cmd[1:]:
+        if opt.startswith('--sysroot='):
+          sysroot = opt[len('--sysroot='):]
+        elif opt.startswith('--include-boards='):
+          board = opt[len('--include-boards='):]
+        elif opt.startswith('--targets='):
+          targets = opt[len('--targets='):]
+
+      self.assertTrue(sysroot)
+      self.assertTrue(board)
+      self.assertEqual('boards', targets)
+      merged_dir = os.path.join(self.build_root, constants.DEFAULT_CHROOT_DIR,
+                                sysroot.lstrip(os.path.sep))
+      osutils.Touch(os.path.join(merged_dir, board + '.tmp'))
+
+  def ConstructStage(self):
+    return sdk_stages.SDKPackageToolchainOverlaysStage(self._run)
+
+  def testTarballCreation(self):
+    """Tests that tarballs are created for all board toolchains."""
+    self._Prepare('chromiumos-sdk')
+    self.RunStage()
+
+    # Check that a tarball was created correctly for all toolchain sets.
+    sdk_toolchains = set(toolchain.GetToolchainsForBoard('sdk'))
+    all_toolchain_combos = set()
+    for board in self._run.site_config.GetBoards():
+      try:
+        toolchains = set(toolchain.GetToolchainsForBoard(board).iterkeys())
+        if toolchains.issubset(sdk_toolchains):
+          all_toolchain_combos.add('-'.join(sorted(toolchains)))
+      except portage_util.MissingOverlayException:
+        pass
+
+    for toolchains in all_toolchain_combos:
+      overlay_tarball = os.path.join(
+          self.build_root, constants.DEFAULT_CHROOT_DIR,
+          constants.SDK_OVERLAYS_OUTPUT,
+          'built-sdk-overlay-toolchains-%s.tar.xz' % toolchains)
+      output = cros_build_lib.RunCommand(
+          ['tar', '-I', 'xz', '-tf', overlay_tarball],
+          capture_output=True).output.splitlines()
+      # Check that the overlay tarball contains a marker file and that the
+      # board recorded by this marker file indeed uses the toolchains for which
+      # the tarball was built.
+      tmp_files = [os.path.basename(x) for x in output if x.endswith('.tmp')]
+      self.assertEqual(1, len(tmp_files))
+      board = tmp_files[0][:-len('.tmp')]
+      board_toolchains = '-'.join(sorted(
+          toolchain.GetToolchainsForBoard(board).iterkeys()))
+      self.assertEqual(toolchains, board_toolchains)
+
+
+class SDKTestStageTest(generic_stages_unittest.AbstractStageTestCase):
+  """Tests SDK test phase."""
+
+  def setUp(self):
+    # This code has its own unit tests, so no need to go testing it here.
+    self.run_mock = self.PatchObject(cros_build_lib, 'RunCommand')
+
+  def ConstructStage(self):
+    return sdk_stages.SDKTestStage(self._run)
+
+  def testNormal(self):
+    """Basic run through the main code."""
+    self._Prepare('chromiumos-sdk')
+    self.RunStage()
diff --git a/cbuildbot/stages/stage_results_unittest b/cbuildbot/stages/stage_results_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/stages/stage_results_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/stages/stage_results_unittest.py b/cbuildbot/stages/stage_results_unittest.py
new file mode 100644
index 0000000..e971e4f
--- /dev/null
+++ b/cbuildbot/stages/stage_results_unittest.py
@@ -0,0 +1,482 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the stage results."""
+
+from __future__ import print_function
+
+import mock
+import os
+import signal
+import StringIO
+import time
+
+from chromite.cbuildbot import config_lib_unittest
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import results_lib
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot.builders import simple_builders
+from chromite.cbuildbot.stages import generic_stages
+from chromite.cbuildbot.stages import sync_stages
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import cros_test_lib
+from chromite.lib import parallel
+
+
+class PassStage(generic_stages.BuilderStage):
+  """PassStage always works"""
+
+
+class Pass2Stage(generic_stages.BuilderStage):
+  """Pass2Stage always works"""
+
+
+class FailStage(generic_stages.BuilderStage):
+  """FailStage always throws an exception"""
+
+  FAIL_EXCEPTION = failures_lib.StepFailure("Fail stage needs to fail.")
+
+  def PerformStage(self):
+    """Throw the exception to make us fail."""
+    raise self.FAIL_EXCEPTION
+
+
+class SkipStage(generic_stages.BuilderStage):
+  """SkipStage is skipped."""
+  config_name = 'signer_tests'
+
+
+class SneakyFailStage(generic_stages.BuilderStage):
+  """SneakyFailStage exits with an error."""
+
+  def PerformStage(self):
+    """Exit without reporting back."""
+    # pylint: disable=protected-access
+    os._exit(1)
+
+
+class SuicideStage(generic_stages.BuilderStage):
+  """SuicideStage kills itself with kill -9."""
+
+  def PerformStage(self):
+    """Exit without reporting back."""
+    os.kill(os.getpid(), signal.SIGKILL)
+
+
+class SetAttrStage(generic_stages.BuilderStage):
+  """Stage that sets requested run attribute to a value."""
+
+  DEFAULT_ATTR = 'unittest_value'
+  VALUE = 'HereTakeThis'
+
+  def __init__(self, builder_run, delay=2, attr=DEFAULT_ATTR, *args, **kwargs):
+    super(SetAttrStage, self).__init__(builder_run, *args, **kwargs)
+    self.delay = delay
+    self.attr = attr
+
+  def PerformStage(self):
+    """Wait self.delay seconds then set requested run attribute."""
+    time.sleep(self.delay)
+    self._run.attrs.SetParallel(self.attr, self.VALUE)
+
+  def QueueableException(self):
+    return cbuildbot_run.ParallelAttributeError(self.attr)
+
+
+class GetAttrStage(generic_stages.BuilderStage):
+  """Stage that accesses requested run attribute and confirms value."""
+
+  DEFAULT_ATTR = 'unittest_value'
+
+  def __init__(self, builder_run, tester=None, timeout=5, attr=DEFAULT_ATTR,
+               *args, **kwargs):
+    super(GetAttrStage, self).__init__(builder_run, *args, **kwargs)
+    self.tester = tester
+    self.timeout = timeout
+    self.attr = attr
+
+  def PerformStage(self):
+    """Wait for attrs.test value to show up."""
+    assert not self._run.attrs.HasParallel(self.attr)
+    value = self._run.attrs.GetParallel(self.attr, self.timeout)
+    if self.tester:
+      self.tester(value)
+
+  def QueueableException(self):
+    return cbuildbot_run.ParallelAttributeError(self.attr)
+
+  def TimeoutException(self):
+    return cbuildbot_run.AttrTimeoutError(self.attr)
+
+
+class BuildStagesResultsTest(cros_test_lib.TestCase):
+  """Tests for stage results and reporting."""
+
+  def setUp(self):
+    # Always stub RunCommand out as we use it in every method.
+    self._bot_id = 'x86-generic-paladin'
+    site_config = config_lib_unittest.MockSiteConfig()
+    build_config = site_config[self._bot_id]
+    self.build_root = '/fake_root'
+    # This test compares log output from the stages, so turn on buildbot
+    # logging.
+    logging.EnableBuildbotMarkers()
+
+    # Create a class to hold option values.
+    class Options(object):
+      """Dummy class to hold option values."""
+
+    options = Options()
+    options.archive_base = 'gs://dontcare'
+    options.buildroot = self.build_root
+    options.debug = False
+    options.prebuilts = False
+    options.clobber = False
+    options.nosdk = False
+    options.remote_trybot = False
+    options.latest_toolchain = False
+    options.buildnumber = 1234
+    options.chrome_rev = None
+    options.branch = 'dontcare'
+    options.chrome_root = False
+
+    self._manager = parallel.Manager()
+    self._manager.__enter__()
+
+    self._run = cbuildbot_run.BuilderRun(
+        options, site_config, build_config, self._manager)
+
+    results_lib.Results.Clear()
+
+  def tearDown(self):
+    # Mimic exiting with statement for self._manager.
+    if hasattr(self, '_manager') and self._manager is not None:
+      self._manager.__exit__(None, None, None)
+
+  def _runStages(self):
+    """Run a couple of stages so we can capture the results"""
+    # Run two pass stages, and one fail stage.
+    PassStage(self._run).Run()
+    Pass2Stage(self._run).Run()
+    self.assertRaises(
+        failures_lib.StepFailure,
+        FailStage(self._run).Run)
+
+  def _verifyRunResults(self, expectedResults, max_time=2.0):
+    actualResults = results_lib.Results.Get()
+
+    # Break out the asserts to be per item to make debugging easier
+    self.assertEqual(len(expectedResults), len(actualResults))
+    for i in xrange(len(expectedResults)):
+      entry = actualResults[i]
+      xname, xresult = expectedResults[i]
+
+      if entry.result not in results_lib.Results.NON_FAILURE_TYPES:
+        self.assertTrue(isinstance(entry.result, BaseException))
+        if isinstance(entry.result, failures_lib.StepFailure):
+          self.assertEqual(str(entry.result), entry.description)
+
+      self.assertTrue(entry.time >= 0 and entry.time < max_time)
+      self.assertEqual(xname, entry.name)
+      self.assertEqual(type(xresult), type(entry.result))
+      self.assertEqual(repr(xresult), repr(entry.result))
+
+  def _PassString(self):
+    record = results_lib.Result('Pass', results_lib.Results.SUCCESS, 'None',
+                                'Pass', '', '0')
+    return results_lib.Results.SPLIT_TOKEN.join(record) + '\n'
+
+  def testRunStages(self):
+    """Run some stages and verify the captured results"""
+
+    self.assertEqual(results_lib.Results.Get(), [])
+
+    self._runStages()
+
+    # Verify that the results are what we expect.
+    expectedResults = [
+        ('Pass', results_lib.Results.SUCCESS),
+        ('Pass2', results_lib.Results.SUCCESS),
+        ('Fail', FailStage.FAIL_EXCEPTION),
+    ]
+    self._verifyRunResults(expectedResults)
+
+  def testSuccessTest(self):
+    """Run some stages and verify the captured results"""
+
+    results_lib.Results.Record('Pass', results_lib.Results.SUCCESS)
+
+    self.assertTrue(results_lib.Results.BuildSucceededSoFar())
+
+    results_lib.Results.Record('Fail', FailStage.FAIL_EXCEPTION, time=1)
+
+    self.assertFalse(results_lib.Results.BuildSucceededSoFar())
+
+    results_lib.Results.Record('Pass2', results_lib.Results.SUCCESS)
+
+    self.assertFalse(results_lib.Results.BuildSucceededSoFar())
+
+  def _TestParallelStages(self, stage_objs):
+    builder = simple_builders.SimpleBuilder(self._run)
+    error = None
+    # pylint: disable=protected-access
+    with mock.patch.multiple(parallel._BackgroundTask, PRINT_INTERVAL=0.01):
+      try:
+        builder._RunParallelStages(stage_objs)
+      except parallel.BackgroundFailure as ex:
+        error = ex
+
+    return error
+
+  def testParallelStages(self):
+    stage_objs = [stage(self._run) for stage in
+                  (PassStage, SneakyFailStage, FailStage, SuicideStage,
+                   Pass2Stage)]
+    error = self._TestParallelStages(stage_objs)
+    self.assertTrue(error)
+    expectedResults = [
+        ('Pass', results_lib.Results.SUCCESS),
+        ('Fail', FailStage.FAIL_EXCEPTION),
+        ('Pass2', results_lib.Results.SUCCESS),
+        ('SneakyFail', error),
+        ('Suicide', error),
+    ]
+    self._verifyRunResults(expectedResults)
+
+  def testParallelStageCommunicationOK(self):
+    """Test run attr communication betweeen parallel stages."""
+    def assert_test(value):
+      self.assertEqual(value, SetAttrStage.VALUE,
+                       'Expected value %r to be passed between stages, but'
+                       ' got %r.' % (SetAttrStage.VALUE, value))
+    stage_objs = [
+        SetAttrStage(self._run),
+        GetAttrStage(self._run, assert_test, timeout=30),
+        GetAttrStage(self._run, assert_test, timeout=30),
+    ]
+    error = self._TestParallelStages(stage_objs)
+    self.assertFalse(error)
+    expectedResults = [
+        ('SetAttr', results_lib.Results.SUCCESS),
+        ('GetAttr', results_lib.Results.SUCCESS),
+        ('GetAttr', results_lib.Results.SUCCESS),
+    ]
+    self._verifyRunResults(expectedResults, max_time=30.0)
+
+    # Make sure run attribute propagated up to the top, too.
+    value = self._run.attrs.GetParallel('unittest_value')
+    self.assertEqual(SetAttrStage.VALUE, value)
+
+  def testParallelStageCommunicationTimeout(self):
+    """Test run attr communication between parallel stages that times out."""
+    def assert_test(value):
+      self.assertEqual(value, SetAttrStage.VALUE,
+                       'Expected value %r to be passed between stages, but'
+                       ' got %r.' % (SetAttrStage.VALUE, value))
+    stage_objs = [SetAttrStage(self._run, delay=11),
+                  GetAttrStage(self._run, assert_test, timeout=1),
+                 ]
+    error = self._TestParallelStages(stage_objs)
+    self.assertTrue(error)
+    expectedResults = [
+        ('SetAttr', results_lib.Results.SUCCESS),
+        ('GetAttr', stage_objs[1].TimeoutException()),
+    ]
+    self._verifyRunResults(expectedResults, max_time=12.0)
+
+  def testParallelStageCommunicationNotQueueable(self):
+    """Test setting non-queueable run attr in parallel stage."""
+    stage_objs = [SetAttrStage(self._run, attr='release_tag'),
+                  GetAttrStage(self._run, timeout=2),
+                 ]
+    error = self._TestParallelStages(stage_objs)
+    self.assertTrue(error)
+    expectedResults = [
+        ('SetAttr', stage_objs[0].QueueableException()),
+        ('GetAttr', stage_objs[1].TimeoutException()),
+    ]
+    self._verifyRunResults(expectedResults, max_time=12.0)
+
+  def testStagesReportSuccess(self):
+    """Tests Stage reporting."""
+
+    sync_stages.ManifestVersionedSyncStage.manifest_manager = None
+
+    # Store off a known set of results and generate a report
+    results_lib.Results.Record('Sync', results_lib.Results.SUCCESS, time=1)
+    results_lib.Results.Record('Build', results_lib.Results.SUCCESS, time=2)
+    results_lib.Results.Record('Test', FailStage.FAIL_EXCEPTION, time=3)
+    results_lib.Results.Record('SignerTests', results_lib.Results.SKIPPED)
+    result = cros_build_lib.CommandResult(cmd=['/bin/false', '/nosuchdir'],
+                                          returncode=2)
+    results_lib.Results.Record(
+        'Archive',
+        cros_build_lib.RunCommandError(
+            'Command "/bin/false /nosuchdir" failed.\n',
+            result), time=4)
+
+    results = StringIO.StringIO()
+
+    results_lib.Results.Report(results)
+
+    expectedResults = (
+        "************************************************************\n"
+        "** Stage Results\n"
+        "************************************************************\n"
+        "** PASS Sync (0:00:01)\n"
+        "************************************************************\n"
+        "** PASS Build (0:00:02)\n"
+        "************************************************************\n"
+        "** FAIL Test (0:00:03) with StepFailure\n"
+        "************************************************************\n"
+        "** FAIL Archive (0:00:04) in /bin/false\n"
+        "************************************************************\n"
+    )
+
+    expectedLines = expectedResults.split('\n')
+    actualLines = results.getvalue().split('\n')
+
+    # Break out the asserts to be per item to make debugging easier
+    for i in xrange(min(len(actualLines), len(expectedLines))):
+      self.assertEqual(expectedLines[i], actualLines[i])
+    self.assertEqual(len(expectedLines), len(actualLines))
+
+  def testStagesReportError(self):
+    """Tests Stage reporting with exceptions."""
+
+    sync_stages.ManifestVersionedSyncStage.manifest_manager = None
+
+    # Store off a known set of results and generate a report
+    results_lib.Results.Record('Sync', results_lib.Results.SUCCESS, time=1)
+    results_lib.Results.Record('Build', results_lib.Results.SUCCESS, time=2)
+    results_lib.Results.Record('Test', FailStage.FAIL_EXCEPTION,
+                               'failException Msg\nLine 2', time=3)
+    result = cros_build_lib.CommandResult(cmd=['/bin/false', '/nosuchdir'],
+                                          returncode=2)
+    results_lib.Results.Record(
+        'Archive',
+        cros_build_lib.RunCommandError(
+            'Command "/bin/false /nosuchdir" failed.\n',
+            result),
+        'FailRunCommand msg', time=4)
+
+    results = StringIO.StringIO()
+
+    results_lib.Results.Report(results)
+
+    expectedResults = (
+        "************************************************************\n"
+        "** Stage Results\n"
+        "************************************************************\n"
+        "** PASS Sync (0:00:01)\n"
+        "************************************************************\n"
+        "** PASS Build (0:00:02)\n"
+        "************************************************************\n"
+        "** FAIL Test (0:00:03) with StepFailure\n"
+        "************************************************************\n"
+        "** FAIL Archive (0:00:04) in /bin/false\n"
+        "************************************************************\n"
+        "\n"
+        "Failed in stage Test:\n"
+        "\n"
+        "failException Msg\n"
+        "Line 2\n"
+        "\n"
+        "Failed in stage Archive:\n"
+        "\n"
+        "FailRunCommand msg\n"
+    )
+
+    expectedLines = expectedResults.split('\n')
+    actualLines = results.getvalue().split('\n')
+
+    # Break out the asserts to be per item to make debugging easier
+    for i in xrange(min(len(actualLines), len(expectedLines))):
+      self.assertEqual(expectedLines[i], actualLines[i])
+    self.assertEqual(len(expectedLines), len(actualLines))
+
+  def testStagesReportReleaseTag(self):
+    """Tests Release Tag entry in stages report."""
+
+    current_version = "release_tag_string"
+    archive_urls = {
+        'board1': 'http://foo.com/bucket/bot-id1/version/index.html',
+        'board2': 'http://foo.com/bucket/bot-id2/version/index.html',}
+    # Store off a known set of results and generate a report
+    results_lib.Results.Record('Pass', results_lib.Results.SUCCESS, time=1)
+
+    results = StringIO.StringIO()
+
+    results_lib.Results.Report(results, archive_urls, current_version)
+
+    expectedResults = (
+        "************************************************************\n"
+        "** RELEASE VERSION: release_tag_string\n"
+        "************************************************************\n"
+        "** Stage Results\n"
+        "************************************************************\n"
+        "** PASS Pass (0:00:01)\n"
+        "************************************************************\n"
+        "** BUILD ARTIFACTS FOR THIS BUILD CAN BE FOUND AT:\n"
+        "**  board1: %s\n"
+        "@@@STEP_LINK@Artifacts[board1]: bot-id1/version@%s@@@\n"
+        "**  board2: %s\n"
+        "@@@STEP_LINK@Artifacts[board2]: bot-id2/version@%s@@@\n"
+        "************************************************************\n"
+        % (archive_urls['board1'], archive_urls['board1'],
+           archive_urls['board2'], archive_urls['board2']))
+
+    expectedLines = expectedResults.split('\n')
+    actualLines = results.getvalue().split('\n')
+
+    # Break out the asserts to be per item to make debugging easier
+    for i in xrange(len(expectedLines)):
+      self.assertEqual(expectedLines[i], actualLines[i])
+    self.assertEqual(len(expectedLines), len(actualLines))
+
+  def testSaveCompletedStages(self):
+    """Tests that we can save out completed stages."""
+
+    # Run this again to make sure we have the expected results stored
+    results_lib.Results.Record('Pass', results_lib.Results.SUCCESS)
+    results_lib.Results.Record('Fail', FailStage.FAIL_EXCEPTION)
+    results_lib.Results.Record('Pass2', results_lib.Results.SUCCESS)
+
+    saveFile = StringIO.StringIO()
+    results_lib.Results.SaveCompletedStages(saveFile)
+    self.assertEqual(saveFile.getvalue(), self._PassString())
+
+  def testRestoreCompletedStages(self):
+    """Tests that we can read in completed stages."""
+
+    results_lib.Results.RestoreCompletedStages(
+        StringIO.StringIO(self._PassString()))
+
+    previous = results_lib.Results.GetPrevious()
+    self.assertEqual(previous.keys(), ['Pass'])
+
+  def testRunAfterRestore(self):
+    """Tests that we skip previously completed stages."""
+
+    # Fake results_lib.Results.RestoreCompletedStages
+    results_lib.Results.RestoreCompletedStages(
+        StringIO.StringIO(self._PassString()))
+
+    self._runStages()
+
+    # Verify that the results are what we expect.
+    expectedResults = [
+        ('Pass', results_lib.Results.SUCCESS),
+        ('Pass2', results_lib.Results.SUCCESS),
+        ('Fail', FailStage.FAIL_EXCEPTION),
+    ]
+    self._verifyRunResults(expectedResults)
+
+  def testFailedButForgiven(self):
+    """Tests that warnings are flagged as such."""
+    results_lib.Results.Record('Warn', results_lib.Results.FORGIVEN, time=1)
+    results = StringIO.StringIO()
+    results_lib.Results.Report(results)
+    self.assertTrue('@@@STEP_WARNINGS@@@' in results.getvalue())
diff --git a/cbuildbot/stages/sync_stages.py b/cbuildbot/stages/sync_stages.py
new file mode 100644
index 0000000..dc2df54
--- /dev/null
+++ b/cbuildbot/stages/sync_stages.py
@@ -0,0 +1,1637 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the sync stages."""
+
+from __future__ import print_function
+
+import collections
+import ConfigParser
+import contextlib
+import datetime
+import itertools
+import os
+import re
+import sys
+import time
+from xml.etree import ElementTree
+from xml.dom import minidom
+
+from chromite.cbuildbot import chroot_lib
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import lkgm_manager
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot import repository
+from chromite.cbuildbot import tree_status
+from chromite.cbuildbot import triage_lib
+from chromite.cbuildbot import trybot_patch_pool
+from chromite.cbuildbot import validation_pool
+from chromite.cbuildbot.stages import generic_stages
+from chromite.cbuildbot.stages import build_stages
+from chromite.lib import clactions
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import graphite
+from chromite.lib import osutils
+from chromite.lib import patch as cros_patch
+from chromite.lib import timeout_util
+from chromite.scripts import cros_mark_chrome_as_stable
+
+
+# Module-level handle to the site configuration, shared by every stage here.
+site_config = config_lib.GetConfig()
+
+
+# Identifier for the pre-commit-queue, re-exported from validation_pool.
+PRE_CQ = validation_pool.PRE_CQ
+
+# Posted to a CL when the pre-CQ cannot launch its trybot in time.
+# Format args: (trybot config name, timeout in minutes).
+PRECQ_LAUNCH_TIMEOUT_MSG = (
+    'We were not able to launch a %s trybot for your change within '
+    '%s minutes.\n\n'
+    'This problem can happen if the trybot waterfall is very '
+    'busy, or if there is an infrastructure issue. Please '
+    'notify the sheriff and mark your change as ready again. If '
+    'this problem occurs multiple times in a row, please file a '
+    'bug.')
+# Posted to a CL when a launched trybot runs past its deadline.
+# Format args: (trybot config name, timeout in minutes).
+PRECQ_INFLIGHT_TIMEOUT_MSG = (
+    'The %s trybot for your change timed out after %s minutes.'
+    '\n\n'
+    'This problem can happen if your change causes the builder '
+    'to hang, or if there is some infrastructure issue. If your '
+    'change is not at fault you may mark your change as ready '
+    'again. If this problem occurs multiple times please notify '
+    'the sheriff and file a bug.')
+# Posted to a CL when its pre-CQ verification status expires.
+# Format args: (timeout in minutes,).
+PRECQ_EXPIRY_MSG = (
+    'The pre-cq verification for this change expired after %s minutes. No '
+    'action is required on your part.'
+    '\n\n'
+    'In order to protect the CQ from picking up stale changes, the pre-cq '
+    'status for changes are cleared after a generous timeout. This change '
+    'will be re-tested by the pre-cq before the CQ picks it up.')
+
+class PatchChangesStage(generic_stages.BuilderStage):
+  """Stage that patches a set of Gerrit changes to the buildroot source tree."""
+
+  def __init__(self, builder_run, patch_pool, **kwargs):
+    """Construct a PatchChangesStage.
+
+    Args:
+      builder_run: BuilderRun object.
+      patch_pool: A TrybotPatchPool object containing the different types of
+                  patches to apply.
+    """
+    super(PatchChangesStage, self).__init__(builder_run, **kwargs)
+    self.patch_pool = patch_pool
+
+  @staticmethod
+  def _CheckForDuplicatePatches(_series, changes):
+    conflicts = {}
+    duplicates = []
+    for change in changes:
+      if change.id is None:
+        logging.warning(
+            "Change %s lacks a usable ChangeId; duplicate checking cannot "
+            "be done for this change.  If cherry-picking fails, this is a "
+            "potential cause.", change)
+        continue
+      conflicts.setdefault(change.id, []).append(change)
+
+    duplicates = [x for x in conflicts.itervalues() if len(x) > 1]
+    if not duplicates:
+      return changes
+
+    for conflict in duplicates:
+      logging.error(
+          "Changes %s conflict with each other- they have same id %s., "
+          .join(map(str, conflict)), conflict[0].id)
+
+    cros_build_lib.Die("Duplicate patches were encountered: %s", duplicates)
+
+  def _PatchSeriesFilter(self, series, changes):
+    return self._CheckForDuplicatePatches(series, changes)
+
+  def _ApplyPatchSeries(self, series, patch_pool, **kwargs):
+    """Applies a patch pool using a patch series."""
+    kwargs.setdefault('frozen', False)
+    # Honor the given ordering, so that if a gerrit/remote patch
+    # conflicts w/ a local patch, the gerrit/remote patch are
+    # blamed rather than local (patch ordering is typically
+    # local, gerrit, then remote).
+    kwargs.setdefault('honor_ordering', True)
+    kwargs['changes_filter'] = self._PatchSeriesFilter
+
+    _applied, failed_tot, failed_inflight = series.Apply(
+        list(patch_pool), **kwargs)
+
+    failures = failed_tot + failed_inflight
+    if failures:
+      self.HandleApplyFailures(failures)
+
+  def HandleApplyFailures(self, failures):
+    cros_build_lib.Die("Failed applying patches: %s",
+                       "\n".join(map(str, failures)))
+
+  def PerformStage(self):
+    class NoisyPatchSeries(validation_pool.PatchSeries):
+      """Custom PatchSeries that adds links to buildbot logs for remote trys."""
+
+      def ApplyChange(self, change):
+        if isinstance(change, cros_patch.GerritPatch):
+          logging.PrintBuildbotLink(str(change), change.url)
+        elif isinstance(change, cros_patch.UploadedLocalPatch):
+          logging.PrintBuildbotStepText(str(change))
+
+        return validation_pool.PatchSeries.ApplyChange(self, change)
+
+    # If we're an external builder, ignore internal patches.
+    helper_pool = validation_pool.HelperPool.SimpleCreate(
+        cros_internal=self._run.config.internal, cros=True)
+
+    # Limit our resolution to non-manifest patches.
+    patch_series = NoisyPatchSeries(
+        self._build_root,
+        helper_pool=helper_pool,
+        deps_filter_fn=lambda p: not trybot_patch_pool.ManifestFilter(p))
+
+    self._ApplyPatchSeries(patch_series, self.patch_pool)
+
+
+class BootstrapStage(PatchChangesStage):
+  """Stage that patches a chromite repo and re-executes inside it.
+
+  Attributes:
+    returncode - the returncode of the cbuildbot re-execution.  Valid after
+                 calling stage.Run().
+  """
+  option_name = 'bootstrap'
+
+  def __init__(self, builder_run, patch_pool, **kwargs):
+    super(BootstrapStage, self).__init__(
+        builder_run, trybot_patch_pool.TrybotPatchPool(), **kwargs)
+
+    self.patch_pool = patch_pool
+    self.config_repo = self._run.options.config_repo
+    self.returncode = None
+    self.tempdir = None
+
+  def _ApplyManifestPatches(self, patch_pool):
+    """Apply a pool of manifest patches to a temp manifest checkout.
+
+    Args:
+      patch_pool: The pool to apply.
+
+    Returns:
+      The path to the patched manifest checkout.
+
+    Raises:
+      Exception, if the new patched manifest cannot be parsed.
+    """
+    checkout_dir = os.path.join(self.tempdir, 'manfest-checkout')
+    repository.CloneGitRepo(checkout_dir,
+                            self._run.config.manifest_repo_url)
+
+    patch_series = validation_pool.PatchSeries.WorkOnSingleRepo(
+        checkout_dir, tracking_branch=self._run.manifest_branch)
+
+    self._ApplyPatchSeries(patch_series, patch_pool)
+    # Create the branch that 'repo init -b <target_branch> -u <patched_repo>'
+    # will look for.
+    cmd = ['branch', '-f', self._run.manifest_branch,
+           constants.PATCH_BRANCH]
+    git.RunGit(checkout_dir, cmd)
+
+    # Verify that the patched manifest loads properly. Propagate any errors as
+    # exceptions.
+    manifest = os.path.join(checkout_dir, self._run.config.manifest)
+    git.Manifest.Cached(manifest, manifest_include_dir=checkout_dir)
+    return checkout_dir
+
+  @staticmethod
+  def _FilterArgsForApi(parsed_args, api_minor):
+    """Remove arguments that are introduced after an api version."""
+    def filter_fn(passed_arg):
+      return passed_arg.opt_inst.api_version <= api_minor
+
+    accepted, removed = commandline.FilteringParser.FilterArgs(
+        parsed_args, filter_fn)
+
+    if removed:
+      logging.warning("The following arguments were removed due to api: '%s'"
+                      % ' '.join(removed))
+    return accepted
+
+  @classmethod
+  def FilterArgsForTargetCbuildbot(cls, buildroot, cbuildbot_path, options):
+    _, minor = cros_build_lib.GetTargetChromiteApiVersion(buildroot)
+    args = [cbuildbot_path]
+    args.extend(options.build_targets)
+    args.extend(cls._FilterArgsForApi(options.parsed_args, minor))
+
+    # Only pass down --cache-dir if it was specified. By default, we want
+    # the cache dir to live in the root of each checkout, so this means that
+    # each instance of cbuildbot needs to calculate the default separately.
+    if minor >= 2 and options.cache_dir_specified:
+      args += ['--cache-dir', options.cache_dir]
+
+    return args
+
+  @classmethod
+  def BootstrapPatchesNeeded(cls, builder_run, patch_pool):
+    """See if bootstrapping is needed for any of the given patches.
+
+    Does NOT determine if they have already been applied.
+
+    Args:
+      builder_run: BuilderRun object for this build.
+      patch_pool: All patches to be applied this run.
+
+    Returns:
+      boolean True if bootstrapping is needed.
+    """
+    chromite_pool = patch_pool.Filter(project=constants.CHROMITE_PROJECT)
+    if builder_run.config.internal:
+      manifest_pool = patch_pool.FilterIntManifest()
+    else:
+      manifest_pool = patch_pool.FilterExtManifest()
+
+    return bool(chromite_pool or manifest_pool)
+
+  def HandleApplyFailures(self, failures):
+    """Handle the case where patches fail to apply."""
+    if self._run.config.pre_cq:
+      # Let the PreCQSync stage handle this failure. The PreCQSync stage will
+      # comment on CLs with the appropriate message when they fail to apply.
+      #
+      # WARNING: For manifest patches, the Pre-CQ attempts to apply external
+      # patches to the internal manifest, and this means we may flag a conflict
+      # here even if the patch applies cleanly. TODO(davidjames): Fix this.
+      logging.PrintBuildbotStepWarnings()
+      logging.error('Failed applying patches: %s\n'.join(map(str, failures)))
+    else:
+      PatchChangesStage.HandleApplyFailures(self, failures)
+
+  def _PerformStageInTempDir(self):
+    # The plan for the builders is to use master branch to bootstrap other
+    # branches. Now, if we wanted to test patches for both the bootstrap code
+    # (on master) and the branched chromite (say, R20), we need to filter the
+    # patches by branch.
+    filter_branch = self._run.manifest_branch
+    if self._run.options.test_bootstrap:
+      filter_branch = 'master'
+
+    # Filter all requested patches for the branch.
+    branch_pool = self.patch_pool.FilterBranch(filter_branch)
+
+    # Checkout the new version of chromite, and patch it.
+    chromite_dir = os.path.join(self.tempdir, 'chromite')
+    reference_repo = os.path.join(constants.CHROMITE_DIR, '.git')
+    repository.CloneGitRepo(chromite_dir, constants.CHROMITE_URL,
+                            reference=reference_repo)
+    git.RunGit(chromite_dir, ['checkout', filter_branch])
+
+    chromite_pool = branch_pool.Filter(project=constants.CHROMITE_PROJECT)
+    if chromite_pool:
+      patch_series = validation_pool.PatchSeries.WorkOnSingleRepo(
+          chromite_dir, filter_branch)
+      self._ApplyPatchSeries(patch_series, chromite_pool)
+
+    # Checkout the new version of site config (no patching logic, yet).
+    if self.config_repo:
+      site_config_dir = os.path.join(chromite_dir, 'config')
+      site_config_reference_repo = os.path.join(constants.SITE_CONFIG_DIR,
+                                                '.git')
+      repository.CloneGitRepo(site_config_dir, self.config_repo,
+                              reference=site_config_reference_repo)
+      git.RunGit(site_config_dir, ['checkout', filter_branch])
+
+      site_config_pool = branch_pool.FilterGitRemoteUrl(self.config_repo)
+      if site_config_pool:
+        site_patch_series = validation_pool.PatchSeries.WorkOnSingleRepo(
+            site_config_dir, filter_branch)
+        self._ApplyPatchSeries(site_patch_series, site_config_pool)
+
+    # Re-exec into new instance of cbuildbot, with proper command line args.
+    cbuildbot_path = constants.PATH_TO_CBUILDBOT
+    if not os.path.exists(os.path.join(self.tempdir, cbuildbot_path)):
+      cbuildbot_path = 'chromite/cbuildbot/cbuildbot'
+    cmd = self.FilterArgsForTargetCbuildbot(self.tempdir, cbuildbot_path,
+                                            self._run.options)
+
+    extra_params = ['--sourceroot', self._run.options.sourceroot]
+    extra_params.extend(self._run.options.bootstrap_args)
+    if self._run.options.test_bootstrap:
+      # We don't want re-executed instance to see this.
+      cmd = [a for a in cmd if a != '--test-bootstrap']
+    else:
+      # If we've already done the desired number of bootstraps, disable
+      # bootstrapping for the next execution.  Also pass in the patched manifest
+      # repository.
+      extra_params.append('--nobootstrap')
+      if self._run.config.internal:
+        manifest_pool = branch_pool.FilterIntManifest()
+      else:
+        manifest_pool = branch_pool.FilterExtManifest()
+
+      if manifest_pool:
+        manifest_dir = self._ApplyManifestPatches(manifest_pool)
+        extra_params.extend(['--manifest-repo-url', manifest_dir])
+
+    cmd += extra_params
+    result_obj = cros_build_lib.RunCommand(
+        cmd, cwd=self.tempdir, kill_timeout=30, error_code_ok=True)
+    self.returncode = result_obj.returncode
+
+  def PerformStage(self):
+    with osutils.TempDir(base_dir=self._run.options.bootstrap_dir) as tempdir:
+      self.tempdir = tempdir
+      self._PerformStageInTempDir()
+    self.tempdir = None
+
+
+class SyncStage(generic_stages.BuilderStage):
+  """Stage that performs syncing for the builder."""
+
+  option_name = 'sync'
+  # When True, the manifest printed at the end of ManifestCheckout pins each
+  # project to its current revision (mark_revision flag to ExportManifest).
+  output_manifest_sha1 = True
+
+  def __init__(self, builder_run, **kwargs):
+    """Construct a SyncStage.
+
+    Args:
+      builder_run: BuilderRun object.
+    """
+    super(SyncStage, self).__init__(builder_run, **kwargs)
+    # RepoRepository handle; populated by _InitializeRepo().
+    self.repo = None
+    # Subclasses set this to skip the actual sync in ManifestCheckout.
+    self.skip_sync = False
+
+    # TODO(mtennant): Why keep a duplicate copy of this config value
+    # at self.internal when it can always be retrieved from config?
+    self.internal = self._run.config.internal
+
+  def _GetManifestVersionsRepoUrl(self, internal=None, test=False):
+    """Return the manifest-versions repository URL to use.
+
+    Args:
+      internal: Use the internal repo URL; defaults to the config's
+        internal setting when None.
+      test: If True, return the test-instance URL instead.
+
+    Returns:
+      A manifest-versions repository URL string.
+    """
+    if internal is None:
+      internal = self._run.config.internal
+
+    if internal:
+      if test:
+        return site_config.params.MANIFEST_VERSIONS_INT_GOB_URL_TEST
+      else:
+        return site_config.params.MANIFEST_VERSIONS_INT_GOB_URL
+    else:
+      if test:
+        return site_config.params.MANIFEST_VERSIONS_GOB_URL_TEST
+      else:
+        return site_config.params.MANIFEST_VERSIONS_GOB_URL
+
+  def Initialize(self):
+    """Set up stage state; subclasses extend this with manifest managers."""
+    self._InitializeRepo()
+
+  def _InitializeRepo(self):
+    """Set up the RepoRepository object."""
+    self.repo = self.GetRepoRepository()
+
+  def GetNextManifest(self):
+    """Returns the manifest to use."""
+    return self._run.config.manifest
+
+  def ManifestCheckout(self, next_manifest):
+    """Checks out the repository to the given manifest."""
+    self._Print('\n'.join(['BUILDROOT: %s' % self.repo.directory,
+                           'TRACKING BRANCH: %s' % self.repo.branch,
+                           'NEXT MANIFEST: %s' % next_manifest]))
+
+    if not self.skip_sync:
+      self.repo.Sync(next_manifest)
+
+    # Emit the (possibly revision-pinned) manifest to stderr for the logs.
+    print(self.repo.ExportManifest(mark_revision=self.output_manifest_sha1),
+          file=sys.stderr)
+
+  def RunPrePatchBuild(self):
+    """Run through a pre-patch build to prepare for incremental build.
+
+    This function runs though the InitSDKStage, SetupBoardStage, and
+    BuildPackagesStage. It is intended to be called before applying
+    any patches under test, to prepare the chroot and sysroot in a state
+    corresponding to ToT prior to an incremental build.
+
+    Returns:
+      True if all stages were successful, False if any of them failed.
+    """
+    suffix = ' (pre-Patch)'
+    try:
+      build_stages.InitSDKStage(
+          self._run, chroot_replace=True, suffix=suffix).Run()
+      for builder_run in self._run.GetUngroupedBuilderRuns():
+        for board in builder_run.config.boards:
+          build_stages.SetupBoardStage(
+              builder_run, board=board, suffix=suffix).Run()
+          build_stages.BuildPackagesStage(
+              builder_run, board=board, suffix=suffix).Run()
+    except failures_lib.StepFailure:
+      return False
+
+    return True
+
+  def WriteChangesToMetadata(self, changes):
+    """Write the changes under test into the metadata.
+
+    Args:
+      changes: A list of GerritPatch instances.
+    """
+    changes_list = self._run.attrs.metadata.GetDict().get('changes', [])
+    changes_list = changes_list + [c.GetAttributeDict() for c in set(changes)]
+    # Sort for a deterministic ordering in the stored metadata.
+    changes_list = sorted(changes_list,
+                          key=lambda x: (x[cros_patch.ATTR_GERRIT_NUMBER],
+                                         x[cros_patch.ATTR_PATCH_NUMBER],
+                                         x[cros_patch.ATTR_REMOTE]))
+    self._run.attrs.metadata.UpdateWithDict({'changes': changes_list})
+
+  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
+  def PerformStage(self):
+    """Sync the tree, printing a blamelist against the previous manifest."""
+    self.Initialize()
+    with osutils.TempDir() as tempdir:
+      # Save off the last manifest.
+      fresh_sync = True
+      if os.path.exists(self.repo.directory) and not self._run.options.clobber:
+        old_filename = os.path.join(tempdir, 'old.xml')
+        try:
+          old_contents = self.repo.ExportManifest()
+        except cros_build_lib.RunCommandError as e:
+          logging.warning(str(e))
+        else:
+          osutils.WriteFile(old_filename, old_contents)
+          fresh_sync = False
+
+      # Sync.
+      self.ManifestCheckout(self.GetNextManifest())
+
+      # Print the blamelist.
+      if fresh_sync:
+        logging.PrintBuildbotStepText('(From scratch)')
+      elif self._run.options.buildbot:
+        # old_filename is guaranteed to be set here: fresh_sync is only
+        # False after the old manifest was written out above.
+        lkgm_manager.GenerateBlameList(self.repo, old_filename)
+
+      # Incremental builds request an additional build before patching changes.
+      if self._run.config.build_before_patching:
+        pre_build_passed = self.RunPrePatchBuild()
+        if not pre_build_passed:
+          logging.PrintBuildbotStepText('Pre-patch build failed.')
+
+
+class LKGMSyncStage(SyncStage):
+  """Stage that syncs to the last known good manifest blessed by builders."""
+
+  output_manifest_sha1 = False
+
+  def GetNextManifest(self):
+    """Override: Gets the LKGM."""
+    # TODO(sosa):  Should really use an initialized manager here.
+    if self.internal:
+      mv_dir = site_config.params.INTERNAL_MANIFEST_VERSIONS_PATH
+    else:
+      mv_dir = site_config.params.EXTERNAL_MANIFEST_VERSIONS_PATH
+
+    manifest_path = os.path.join(self._build_root, mv_dir)
+    manifest_repo = self._GetManifestVersionsRepoUrl()
+    manifest_version.RefreshManifestCheckout(manifest_path, manifest_repo)
+    return os.path.join(manifest_path, self._run.config.lkgm_manifest)
+
+
+class ManifestVersionedSyncStage(SyncStage):
+  """Stage that generates a unique manifest file, and sync's to it."""
+
+  # TODO(mtennant): Make this into a builder run value.
+  output_manifest_sha1 = False
+
+  def __init__(self, builder_run, **kwargs):
+    """Construct a ManifestVersionedSyncStage.
+
+    Args:
+      builder_run: BuilderRun object.
+    """
+    # Perform the sync at the end of the stage to the given manifest.
+    super(ManifestVersionedSyncStage, self).__init__(builder_run, **kwargs)
+    self.repo = None
+    # BuildSpecsManager; set via RegisterManifestManager() in Initialize().
+    self.manifest_manager = None
+
+    # If a builder pushes changes (even with dryrun mode), we need a writable
+    # repository. Otherwise, the push will be rejected by the server.
+    self.manifest_repo = self._GetManifestVersionsRepoUrl()
+
+    # 1. If we're uprevving Chrome, Chrome might have changed even if the
+    #    manifest has not, so we should force a build to double check. This
+    #    means that we'll create a new manifest, even if there are no changes.
+    # 2. If we're running with --debug, we should always run through to
+    #    completion, so as to ensure a complete test.
+    self._force = self._chrome_rev or self._run.options.debug
+
+  def HandleSkip(self):
+    """Initializes a manifest manager to the specified version if skipped."""
+    super(ManifestVersionedSyncStage, self).HandleSkip()
+    if self._run.options.force_version:
+      self.Initialize()
+      self.ForceVersion(self._run.options.force_version)
+
+  def ForceVersion(self, version):
+    """Creates a manifest manager from given version and returns manifest."""
+    logging.PrintBuildbotStepText(version)
+    return self.manifest_manager.BootstrapFromVersion(version)
+
+  def VersionIncrementType(self):
+    """Return which part of the version number should be incremented."""
+    if self._run.manifest_branch == 'master':
+      return 'build'
+
+    return 'branch'
+
+  def RegisterManifestManager(self, manifest_manager):
+    """Save the given manifest manager for later use in this run.
+
+    Args:
+      manifest_manager: Expected to be a BuildSpecsManager.
+    """
+    self._run.attrs.manifest_manager = self.manifest_manager = manifest_manager
+
+  def Initialize(self):
+    """Initializes a manager that manages manifests for associated stages."""
+
+    dry_run = self._run.options.debug
+
+    self._InitializeRepo()
+
+    # If chrome_rev is somehow set, fail.
+    assert not self._chrome_rev, \
+        'chrome_rev is unsupported on release builders.'
+
+    self.RegisterManifestManager(manifest_version.BuildSpecsManager(
+        source_repo=self.repo,
+        manifest_repo=self.manifest_repo,
+        manifest=self._run.config.manifest,
+        build_names=self._run.GetBuilderIds(),
+        incr_type=self.VersionIncrementType(),
+        force=self._force,
+        branch=self._run.manifest_branch,
+        dry_run=dry_run,
+        master=self._run.config.master))
+
+  def _SetChromeVersionIfApplicable(self, manifest):
+    """If 'chrome' is in |manifest|, write the version to the BuilderRun object.
+
+    Args:
+      manifest: Path to the manifest.
+    """
+    manifest_dom = minidom.parse(manifest)
+    elements = manifest_dom.getElementsByTagName(lkgm_manager.CHROME_ELEMENT)
+
+    if elements:
+      chrome_version = elements[0].getAttribute(
+          lkgm_manager.CHROME_VERSION_ATTR)
+      logging.info(
+          'Chrome version was found in the manifest: %s', chrome_version)
+      # Update the metadata dictionary. This is necessary because the
+      # metadata dictionary is preserved through re-executions, so
+      # SyncChromeStage can read the version from the dictionary
+      # later. This is easier than parsing the manifest again after
+      # the re-execution.
+      self._run.attrs.metadata.UpdateKeyDictWithDict(
+          'version', {'chrome': chrome_version})
+
+  def GetNextManifest(self):
+    """Uses the initialized manifest manager to get the next manifest."""
+    assert self.manifest_manager, \
+        'Must run GetStageManager before checkout out build.'
+
+    build_id = self._run.attrs.metadata.GetDict().get('build_id')
+
+    to_return = self.manifest_manager.GetNextBuildSpec(build_id=build_id)
+    previous_version = self.manifest_manager.GetLatestPassingSpec()
+    target_version = self.manifest_manager.current_version
+
+    # Print the Blamelist here.
+    url_prefix = 'http://chromeos-images.corp.google.com/diff/report?'
+    url = url_prefix + 'from=%s&to=%s' % (previous_version, target_version)
+    logging.PrintBuildbotLink('Blamelist', url)
+    # The testManifestVersionedSyncOnePartBranch interacts badly with this
+    # function.  It doesn't fully initialize self.manifest_manager which
+    # causes target_version to be None.  Since there isn't a clean fix in
+    # either direction, just throw this through str().  In the normal case,
+    # it's already a string anyways.
+    logging.PrintBuildbotStepText(str(target_version))
+
+    return to_return
+
+  @contextlib.contextmanager
+  def LocalizeManifest(self, manifest, filter_cros=False):
+    """Remove restricted checkouts from the manifest if needed.
+
+    Args:
+      manifest: The manifest to localize.
+      filter_cros: If set, then only checkouts with a remote of 'cros' or
+        'cros-internal' are kept, and the rest are filtered out.
+    """
+    if filter_cros:
+      with osutils.TempDir() as tempdir:
+        filtered_manifest = os.path.join(tempdir, 'filtered.xml')
+        doc = ElementTree.parse(manifest)
+        root = doc.getroot()
+        for node in root.findall('project'):
+          remote = node.attrib.get('remote')
+          if remote and remote not in site_config.params.GIT_REMOTES:
+            root.remove(node)
+        doc.write(filtered_manifest)
+        yield filtered_manifest
+    else:
+      yield manifest
+
+  def _GetMasterVersion(self, master_id, timeout=5 * 60):
+    """Get the platform version associated with the master_build_id.
+
+    Args:
+      master_id: Our master build id.
+      timeout: How long to wait for the platform version to show up
+        in the database. This is needed because the slave builders are
+        triggered slightly before the platform version is written. Default
+        is 5 minutes.
+
+    Returns:
+      The master build's platform_version, as read from CIDB.
+    """
+    # TODO(davidjames): Remove the wait loop here once we've updated slave
+    # builders to only get triggered after the platform version is written.
+    def _PrintRemainingTime(remaining):
+      logging.info('%s until timeout...', remaining)
+
+    # |db| is bound below; this closure only reads it when called.
+    def _GetPlatformVersion():
+      return db.GetBuildStatus(master_id)['platform_version']
+
+    # Retry until non-None version is returned.
+    def _ShouldRetry(x):
+      return not x
+
+    _, db = self._run.GetCIDBHandle()
+    return timeout_util.WaitForSuccess(_ShouldRetry,
+                                       _GetPlatformVersion,
+                                       timeout,
+                                       period=constants.SLEEP_TIMEOUT,
+                                       side_effect_func=_PrintRemainingTime)
+
+  def _VerifyMasterId(self, master_id):
+    """Verify that our master id is current and valid.
+
+    Args:
+      master_id: Our master build id.
+    """
+    _, db = self._run.GetCIDBHandle()
+    if db and master_id:
+      assert not self._run.options.force_version
+      master_build_status = db.GetBuildStatus(master_id)
+      latest = db.GetBuildHistory(master_build_status['build_config'], 1)
+      if latest and latest[0]['id'] != master_id:
+        raise failures_lib.MasterSlaveVersionMismatchFailure(
+            'This slave\'s master (id=%s) has been supplanted by a newer '
+            'master (id=%s). Aborting.' % (master_id, latest[0]['id']))
+
+  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
+  def PerformStage(self):
+    """Pick (or force) the next build spec and sync the tree to it."""
+    self.Initialize()
+
+    self._VerifyMasterId(self._run.options.master_build_id)
+    version = self._run.options.force_version
+    # A slave follows its master's version rather than picking its own.
+    if self._run.options.master_build_id:
+      version = self._GetMasterVersion(self._run.options.master_build_id)
+
+    next_manifest = None
+    if version:
+      next_manifest = self.ForceVersion(version)
+    else:
+      # The manifest manager leaves the tree synced to the spec it returns,
+      # so the explicit sync in ManifestCheckout would be redundant.
+      self.skip_sync = True
+      try:
+        next_manifest = self.GetNextManifest()
+      except validation_pool.TreeIsClosedException as e:
+        logging.warning(str(e))
+
+    if not next_manifest:
+      logging.info('Found no work to do.')
+      if self._run.attrs.manifest_manager.DidLastBuildFail():
+        raise failures_lib.StepFailure('The previous build failed.')
+      else:
+        sys.exit(0)
+
+    # Log this early on for the release team to grep out before we finish.
+    if self.manifest_manager:
+      self._Print('\nRELEASETAG: %s\n' % (
+          self.manifest_manager.current_version))
+
+    self._SetChromeVersionIfApplicable(next_manifest)
+    # To keep local trybots working, remove restricted checkouts from the
+    # official manifest we get from manifest-versions.
+    with self.LocalizeManifest(
+        next_manifest, filter_cros=self._run.options.local) as new_manifest:
+      self.ManifestCheckout(new_manifest)
+
+    # Set the status inflight at the end of the ManifestVersionedSync
+    # stage. This guarantees that all syncing has completed.
+    if self.manifest_manager:
+      self.manifest_manager.SetInFlight(
+          self.manifest_manager.current_version,
+          dashboard_url=self.ConstructDashboardURL())
+
+
+class MasterSlaveLKGMSyncStage(ManifestVersionedSyncStage):
+  """Stage that generates a unique manifest file candidate, and sync's to it.
+
+  This stage uses an LKGM manifest manager that handles LKGM
+  candidates and their states.
+  """
+
+  # TODO(mtennant): Turn this into self._run.attrs.sub_manager or similar.
+  # An instance of lkgm_manager.LKGMManager for slave builds.
+  sub_manager = None
+  MAX_BUILD_HISTORY_LENGTH = 10
+  MilestoneVersion = collections.namedtuple(
+      'MilestoneVersion', ['milestone', 'platform'])
+
+  def __init__(self, builder_run, **kwargs):
+    """Construct a MasterSlaveLKGMSyncStage.
+
+    Args:
+      builder_run: BuilderRun object.
+    """
+    super(MasterSlaveLKGMSyncStage, self).__init__(builder_run, **kwargs)
+    # lkgm_manager deals with making sure we're synced to whatever manifest
+    # we get back in GetNextManifest so syncing again is redundant.
+    # Chrome version to include in new candidate manifests; stays None
+    # unless set before GetNextManifest is called.
+    self._chrome_version = None
+
+  def _GetInitializedManager(self, internal):
+    """Returns an initialized lkgm manager.
+
+    Args:
+      internal: Boolean.  True if this is using an internal manifest.
+
+    Returns:
+      lkgm_manager.LKGMManager.
+    """
+    increment = self.VersionIncrementType()
+    return lkgm_manager.LKGMManager(
+        source_repo=self.repo,
+        manifest_repo=self._GetManifestVersionsRepoUrl(internal=internal),
+        manifest=self._run.config.manifest,
+        build_names=self._run.GetBuilderIds(),
+        build_type=self._run.config.build_type,
+        incr_type=increment,
+        force=self._force,
+        branch=self._run.manifest_branch,
+        dry_run=self._run.options.debug,
+        master=self._run.config.master)
+
+  def Initialize(self):
+    """Override: Creates an LKGMManager rather than a ManifestManager."""
+    self._InitializeRepo()
+    self.RegisterManifestManager(self._GetInitializedManager(self.internal))
+    if self._run.config.master and self._GetSlaveConfigs():
+      assert self.internal, 'Unified masters must use an internal checkout.'
+      # A master with slaves also keeps an external manager so candidates
+      # can be published to the external manifest-versions repository.
+      MasterSlaveLKGMSyncStage.sub_manager = self._GetInitializedManager(False)
+
+  def ForceVersion(self, version):
+    manifest = super(MasterSlaveLKGMSyncStage, self).ForceVersion(version)
+    if MasterSlaveLKGMSyncStage.sub_manager:
+      MasterSlaveLKGMSyncStage.sub_manager.BootstrapFromVersion(version)
+
+    return manifest
+
+  def _VerifyMasterId(self, master_id):
+    """Verify that our master id is current and valid."""
+    super(MasterSlaveLKGMSyncStage, self)._VerifyMasterId(master_id)
+    if not self._run.config.master and not master_id:
+      raise failures_lib.StepFailure(
+          'Cannot start build without a master_build_id. Did you hit force '
+          'build on a slave? Please hit force build on the master instead.')
+
+  def GetNextManifest(self):
+    """Gets the next manifest using LKGM logic."""
+    assert self.manifest_manager, \
+        'Must run Initialize before we can get a manifest.'
+    assert isinstance(self.manifest_manager, lkgm_manager.LKGMManager), \
+        'Manifest manager instantiated with wrong class.'
+    # Only the master creates candidate manifests; slaves sync to them.
+    assert self._run.config.master
+
+    build_id = self._run.attrs.metadata.GetDict().get('build_id')
+    logging.info('Creating new candidate manifest, including chrome version '
+                 '%s.', self._chrome_version)
+    manifest = self.manifest_manager.CreateNewCandidate(
+        chrome_version=self._chrome_version,
+        build_id=build_id)
+    if MasterSlaveLKGMSyncStage.sub_manager:
+      # Publish the same candidate through the external sub manager too.
+      MasterSlaveLKGMSyncStage.sub_manager.CreateFromManifest(
+          manifest, build_id=build_id)
+
+    return manifest
+
+  def GetLatestChromeVersion(self):
+    """Returns the version of Chrome to uprev.
+
+    Returns:
+      Latest Chrome release version string, as reported by the public
+      Chromium GoB host.
+    """
+    return cros_mark_chrome_as_stable.GetLatestRelease(
+        constants.CHROMIUM_GOB_URL)
+
+  def GetLastChromeOSVersion(self):
+    """Fetching ChromeOS version from the last run.
+
+    Fetching the chromeos version from the last run that published a manifest
+    by querying CIDB. Master builds that failed before publishing a manifest
+    will be ignored.
+
+    Returns:
+      A namedtuple MilestoneVersion,
+      e.g. MilestoneVersion(milestone='44', platform='7072.0.0-rc4')
+      or None if failed to retrieve milestone and platform versions.
+    """
+    build_id, db = self._run.GetCIDBHandle()
+
+    if db is None:
+      return None
+
+    builds = db.GetBuildHistory(
+        build_config=self._run.config.name,
+        num_results=self.MAX_BUILD_HISTORY_LENGTH,
+        ignore_build_id=build_id)
+    full_versions = [b.get('full_version') for b in builds]
+    old_version = next(itertools.ifilter(bool, full_versions), None)
+    if old_version:
+      pattern = r'^R(\d+)-(\d+.\d+.\d+(-rc\d+)*)'
+      m = re.match(pattern, old_version)
+      if m:
+        milestone = m.group(1)
+        platform = m.group(2)
+      return self.MilestoneVersion(
+          milestone=milestone, platform=platform)
+    return None
+
+  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
+  def PerformStage(self):
+    """Performs the stage."""
+    if (self._chrome_rev == constants.CHROME_REV_LATEST and
+        self._run.config.master):
+      # PFQ master needs to determine what version of Chrome to build
+      # for all slaves.
+      logging.info('I am a master running with CHROME_REV_LATEST, '
+                   'therefore getting latest chrome version.')
+      self._chrome_version = self.GetLatestChromeVersion()
+      logging.info('Latest chrome version is: %s', self._chrome_version)
+
+    ManifestVersionedSyncStage.PerformStage(self)
+
+    # Generate blamelist against the build spec of the last run that
+    # published a manifest (looked up via CIDB).
+    cros_version = self.GetLastChromeOSVersion()
+    if cros_version:
+      old_filename = self.manifest_manager.GetBuildSpecFilePath(
+          cros_version.milestone, cros_version.platform)
+      if not os.path.exists(old_filename):
+        # Best effort: a missing old manifest only costs us the blamelist.
+        logging.error('Could not generate blamelist, '
+                      'manifest file does not exist: %s', old_filename)
+      else:
+        logging.debug('Generate blamelist against: %s', old_filename)
+        lkgm_manager.GenerateBlameList(self.repo, old_filename)
+
+class CommitQueueSyncStage(MasterSlaveLKGMSyncStage):
+  """Commit Queue Sync stage that handles syncing and applying patches.
+
+  Similar to the MasterSlaveLKGMSyncStage, this stage handles syncing
+  to a manifest, passing around that manifest to other builders.
+
+  What makes this stage different is that the CQ master finds the
+  patches on Gerrit which are ready to be committed, applies them, and
+  includes the patches in the new manifest. The slaves sync to the
+  manifest, and apply the patches written in the manifest.
+  """
+
+  # The amount of time we wait before assuming that the Pre-CQ is down and
+  # that we should start testing changes that haven't been tested by the Pre-CQ.
+  PRE_CQ_TIMEOUT = 2 * 60 * 60
+
+  def __init__(self, builder_run, **kwargs):
+    """Initialize the sync stage.
+
+    Args:
+      builder_run: BuilderRun object for this run.
+    """
+    super(CommitQueueSyncStage, self).__init__(builder_run, **kwargs)
+
+    # The pool of patches to be picked up by the commit queue.
+    # - For the master commit queue, it's initialized in GetNextManifest.
+    # - For slave commit queues, it's initialized in _SetPoolFromManifest.
+    #
+    # In all cases, the pool is saved to disk.
+    self.pool = None
+
+  def HandleSkip(self):
+    """Handles skip and initializes validation pool from manifest."""
+    super(CommitQueueSyncStage, self).HandleSkip()
+    # Prefer a pool previously pickled to disk (--validation-pool option);
+    # otherwise reconstruct one from the local manifest.
+    filename = self._run.options.validation_pool
+    if filename:
+      self.pool = validation_pool.ValidationPool.Load(
+          filename, builder_run=self._run)
+    else:
+      self._SetPoolFromManifest(self.manifest_manager.GetLocalManifest())
+
+  def _ChangeFilter(self, _pool, changes, non_manifest_changes):
+    """Choose which |changes| the CQ should test this run.
+
+    Prefers changes already verified by the Pre-CQ; otherwise falls back
+    to Commit-Ready=+2 changes, and finally (if the Pre-CQ appears to be
+    down) to all changes.
+
+    Args:
+      _pool: Unused; present to satisfy the change_filter callback API.
+      changes: List of candidate GerritPatch changes.
+      non_manifest_changes: Changes outside the manifest; passed through.
+
+    Returns:
+      Tuple of (changes to test, non_manifest_changes).
+    """
+    # First, look for changes that were tested by the Pre-CQ.
+    changes_to_test = []
+
+    _, db = self._run.GetCIDBHandle()
+    actions_for_changes = db.GetActionsForChanges(changes)
+    for change in changes:
+      status = clactions.GetCLPreCQStatus(change, actions_for_changes)
+      if status == constants.CL_STATUS_PASSED:
+        changes_to_test.append(change)
+
+    # Allow Commit-Ready=+2 changes to bypass the Pre-CQ, if there are no other
+    # changes.
+    if not changes_to_test:
+      changes_to_test = [x for x in changes if x.HasApproval('COMR', '2')]
+
+    # If we only see changes that weren't verified by Pre-CQ, and some of them
+    # are really old changes, try all of the changes. This ensures that the CQ
+    # continues to work (albeit slowly) even if the Pre-CQ is down.
+    if changes and not changes_to_test:
+      oldest = min(x.approval_timestamp for x in changes)
+      if time.time() > oldest + self.PRE_CQ_TIMEOUT:
+        # It's safest to try all changes here because some of the old changes
+        # might depend on newer changes (e.g. via CQ-DEPEND).
+        changes_to_test = changes
+
+    return changes_to_test, non_manifest_changes
+
+  def _SetPoolFromManifest(self, manifest):
+    """Sets validation pool based on manifest path passed in.
+
+    Args:
+      manifest: Path to the manifest from which to reconstruct the pool.
+    """
+    # Note that this function is only called after the repo is already
+    # sync'd, so AcquirePoolFromManifest does not need to sync.
+    self.pool = validation_pool.ValidationPool.AcquirePoolFromManifest(
+        manifest, self._run.config.overlays, self.repo,
+        self._run.buildnumber, self._run.GetBuilderName(),
+        self._run.config.master, self._run.options.debug,
+        builder_run=self._run)
+
+  def _GetLGKMVersionFromManifest(self, manifest):
+    manifest_dom = minidom.parse(manifest)
+    elements = manifest_dom.getElementsByTagName(lkgm_manager.LKGM_ELEMENT)
+    if elements:
+      lkgm_version = elements[0].getAttribute(lkgm_manager.LKGM_VERSION_ATTR)
+      logging.info(
+          'LKGM version was found in the manifest: %s', lkgm_version)
+      return lkgm_version
+
+  def GetNextManifest(self):
+    """Gets the next manifest using LKGM logic.
+
+    Returns:
+      Path to the new candidate manifest, or None if the tree is closed.
+    """
+    assert self.manifest_manager, \
+        'Must run Initialize before we can get a manifest.'
+    assert isinstance(self.manifest_manager, lkgm_manager.LKGMManager), \
+        'Manifest manager instantiated with wrong class.'
+    assert self._run.config.master
+
+    build_id = self._run.attrs.metadata.GetDict().get('build_id')
+
+    try:
+      # In order to acquire a pool, we need an initialized buildroot.
+      if not git.FindRepoDir(self.repo.directory):
+        self.repo.Initialize()
+
+      query = constants.CQ_READY_QUERY
+      if self._run.options.cq_gerrit_override:
+        query = (self._run.options.cq_gerrit_override, None)
+
+      self.pool = pool = validation_pool.ValidationPool.AcquirePool(
+          self._run.config.overlays, self.repo,
+          self._run.buildnumber, self._run.GetBuilderName(),
+          query,
+          dryrun=self._run.options.debug,
+          check_tree_open=(not self._run.options.debug or
+                           self._run.options.mock_tree_status),
+          change_filter=self._ChangeFilter, builder_run=self._run)
+    except validation_pool.TreeIsClosedException as e:
+      # A closed tree is not an error; we simply produce no manifest.
+      logging.warning(str(e))
+      return None
+
+    # We must extend the builder deadline before publishing a new manifest to
+    # ensure that slaves have enough time to complete the builds about to
+    # start.
+    build_id, db = self._run.GetCIDBHandle()
+    if db:
+      timeout = constants.MASTER_BUILD_TIMEOUT_SECONDS.get(
+          self._run.config.build_type,
+          constants.MASTER_BUILD_TIMEOUT_DEFAULT_SECONDS)
+      db.ExtendDeadline(build_id, timeout)
+
+    logging.info('Creating new candidate manifest.')
+    manifest = self.manifest_manager.CreateNewCandidate(validation_pool=pool,
+                                                        build_id=build_id)
+    # Unified masters mirror the candidate into the external repo.
+    if MasterSlaveLKGMSyncStage.sub_manager:
+      MasterSlaveLKGMSyncStage.sub_manager.CreateFromManifest(
+          manifest, build_id=build_id)
+
+    return manifest
+
+  def ManifestCheckout(self, next_manifest):
+    """Checks out the repository to the given manifest.
+
+    Args:
+      next_manifest: Path to the manifest to check out.
+    """
+    if self._run.config.build_before_patching:
+      assert not self._run.config.master
+      pre_build_passed = self.RunPrePatchBuild()
+      logging.PrintBuildbotStepName('CommitQueueSync : Apply Patches')
+      if not pre_build_passed:
+        logging.PrintBuildbotStepText('Pre-patch build failed.')
+
+    # Make sure the chroot version is valid.
+    lkgm_version = self._GetLGKMVersionFromManifest(next_manifest)
+    chroot_manager = chroot_lib.ChrootManager(self._build_root)
+    chroot_manager.EnsureChrootAtVersion(lkgm_version)
+
+    # Clear the chroot version as we are in the middle of building it.
+    chroot_manager.ClearChrootVersion()
+
+    # Syncing to a pinned manifest ensures that we have the specified
+    # revisions, but, unfortunately, repo won't bother to update branches.
+    # Sync with an unpinned manifest first to ensure that branches are updated
+    # (e.g. in case somebody adds a new branch to a repo.) See crbug.com/482077
+    if not self.skip_sync:
+      self.repo.Sync(self._run.config.manifest, network_only=True)
+
+    # Sync to the provided manifest on slaves. On the master, we're
+    # already synced to this manifest, so self.skip_sync is set and
+    # this is a no-op.
+    super(CommitQueueSyncStage, self).ManifestCheckout(next_manifest)
+
+    # On slaves, initialize our pool and apply patches. On the master,
+    # we've already done that in GetNextManifest, so this is a no-op.
+    if not self._run.config.master:
+      # Print the list of CHUMP changes since the LKGM, then apply changes and
+      # print the list of applied changes.
+      self.manifest_manager.GenerateBlameListSinceLKGM()
+      self._SetPoolFromManifest(next_manifest)
+      self.pool.ApplyPoolIntoRepo()
+
+  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
+  def PerformStage(self):
+    """Performs normal stage and prints blamelist at end."""
+    if self._run.options.force_version:
+      # When pinned to a specific version, reuse the skip path, which
+      # bootstraps the pool from disk or from the local manifest.
+      self.HandleSkip()
+    else:
+      ManifestVersionedSyncStage.PerformStage(self)
+
+    # Record the changes under test so later stages/reports can see them.
+    self.WriteChangesToMetadata(self.pool.changes)
+
+
+class PreCQSyncStage(SyncStage):
+  """Sync and apply patches to test if they compile."""
+
+  def __init__(self, builder_run, patches, **kwargs):
+    """Initialize the stage.
+
+    Args:
+      builder_run: BuilderRun object for this run.
+      patches: GerritPatch instances that were requested for testing.
+    """
+    super(PreCQSyncStage, self).__init__(builder_run, **kwargs)
+
+    # As a workaround for crbug.com/432706, we scan patches to see if they
+    # are already being merged. If they are, we don't test them in the PreCQ.
+    self.patches = [p for p in patches if not p.IsBeingMerged()]
+
+    if patches and not self.patches:
+      cros_build_lib.Die('No patches that still need testing.')
+
+    # The ValidationPool of patches to test. Initialized in PerformStage, and
+    # refreshed after bootstrapping by HandleSkip.
+    self.pool = None
+
+  def HandleSkip(self):
+    """Handles skip and loads validation pool from disk."""
+    super(PreCQSyncStage, self).HandleSkip()
+    filename = self._run.options.validation_pool
+    if filename:
+      self.pool = validation_pool.ValidationPool.Load(
+          filename, builder_run=self._run)
+
+  def PerformStage(self):
+    """Sync, then acquire a Pre-CQ pool and apply the patches under test."""
+    super(PreCQSyncStage, self).PerformStage()
+    self.pool = validation_pool.ValidationPool.AcquirePreCQPool(
+        self._run.config.overlays, self._build_root,
+        self._run.buildnumber, self._run.config.name,
+        dryrun=self._run.options.debug_forced, changes=self.patches,
+        builder_run=self._run)
+    self.pool.ApplyPoolIntoRepo()
+
+    # If we were asked to test patches but none applied, something is wrong.
+    if len(self.pool.changes) == 0 and self.patches:
+      cros_build_lib.Die('No changes have been applied.')
+
+    # Fall back to the requested patches if the pool recorded none.
+    changes = self.pool.changes or self.patches
+    self.WriteChangesToMetadata(changes)
+
+class PreCQLauncherStage(SyncStage):
+  """Scans for CLs and automatically launches Pre-CQ jobs to test them."""
+
+  # The number of minutes we wait before launching Pre-CQ jobs. This measures
+  # the idle time of a given patch series, so, for example, if a user takes
+  # 20 minutes to mark a series of 20 patches as ready, we won't launch a
+  # tryjob on any of the patches until the user has been idle for 2 minutes.
+  LAUNCH_DELAY = 2
+
+  # The number of minutes we allow before considering a launch attempt failed.
+  LAUNCH_TIMEOUT = 30
+
+  # The number of minutes we allow before considering an in-flight job failed.
+  INFLIGHT_TIMEOUT = 240
+
+  # The number of minutes we allow before expiring a pre-cq PASSED or
+  # FULLY_VERIFIED status. After this timeout is hit, a CL's status will be
+  # reset to None. This prevents very stale CLs from entering the CQ.
+  STATUS_EXPIRY_TIMEOUT = 60 * 24 * 7
+
+  # The maximum number of patches we will allow in a given trybot run. This is
+  # needed because our trybot infrastructure can only handle so many patches at
+  # once.
+  MAX_PATCHES_PER_TRYBOT_RUN = 50
+
+  # The maximum derivative of the number of tryjobs we will launch in a given
+  # cycle of ProcessChanges. Used to rate-limit the launcher when reopening the
+  # tree after building up a large backlog.
+  MAX_LAUNCHES_PER_CYCLE_DERIVATIVE = 20
+
+  def __init__(self, builder_run, **kwargs):
+    """Initialize the launcher stage.
+
+    Args:
+      builder_run: BuilderRun object for this run.
+    """
+    super(PreCQLauncherStage, self).__init__(builder_run, **kwargs)
+    # The launcher operates on Gerrit state only; no source sync needed.
+    self.skip_sync = True
+    # Number of tryjobs launched in the previous ProcessChanges cycle;
+    # used to rate-limit launches (see MAX_LAUNCHES_PER_CYCLE_DERIVATIVE).
+    self.last_cycle_launch_count = 0
+
+
+  def _HasTimedOut(self, start, now, timeout_minutes):
+    """Check whether |timeout_minutes| has elapsed between |start| and |now|.
+
+    Args:
+      start: datetime.datetime start time.
+      now: datetime.datetime current time.
+      timeout_minutes: integer number of minutes for timeout.
+
+    Returns:
+      True if (now-start) > timeout_minutes.
+    """
+    diff = datetime.timedelta(minutes=timeout_minutes)
+    return (now - start) > diff
+
+
+  @staticmethod
+  def _PrintPatchStatus(patch, status):
+    """Print a link to |patch| with |status| info.
+
+    Args:
+      patch: GerritPatch to link to.
+      status: Short status string to display alongside the patch.
+    """
+    items = (
+        status,
+        os.path.basename(patch.project),
+        str(patch),
+    )
+    logging.PrintBuildbotLink(' | '.join(items), patch.url)
+
+  def _ConfiguredVerificationsForChange(self, change):
+    """Determine which configs to test |change| with.
+
+    This method returns only the configs that are asked for by the config
+    file. It does not include special-case logic for adding additional bots
+    based on the type of the repository (see VerificationsForChange for that).
+
+    Args:
+      change: GerritPatch instance to get configs-to-test for.
+
+    Returns:
+      A set of configs to test.
+    """
+    configs_to_test = None
+    # If a pre-cq config is specified in the commit message, use that.
+    # Otherwise, look in appropriate COMMIT-QUEUE.ini. Otherwise, default to
+    # constants.PRE_CQ_DEFAULT_CONFIGS
+    lines = cros_patch.GetOptionLinesFromCommitMessage(
+        change.commit_message, constants.PRE_CQ_CONFIGS_OPTION_REGEX)
+    if lines is not None:
+      configs_to_test = self._ParsePreCQOption(' '.join(lines))
+    # NOTE(review): unlike CanSubmitChangeInPreCQ, a malformed
+    # COMMIT-QUEUE.ini here lets ConfigParser.Error propagate — confirm
+    # whether that is intended or should be caught.
+    configs_to_test = configs_to_test or self._ParsePreCQOption(
+        triage_lib.GetOptionForChange(self._build_root, change, 'GENERAL',
+                                      constants.PRE_CQ_CONFIGS_OPTION))
+
+    return set(configs_to_test or constants.PRE_CQ_DEFAULT_CONFIGS)
+
+  def VerificationsForChange(self, change):
+    """Determine which configs to test |change| with.
+
+    Args:
+      change: GerritPatch instance to get configs-to-test for.
+
+    Returns:
+      A set of configs to test.
+    """
+    configs_to_test = set(self._ConfiguredVerificationsForChange(change))
+
+    # Add the BINHOST_PRE_CQ to any changes that affect an overlay.
+    # (Heuristic: match on the project path containing '/overlays/'.)
+    if '/overlays/' in change.project:
+      configs_to_test.add(constants.BINHOST_PRE_CQ)
+
+    return configs_to_test
+
+  def _ParsePreCQOption(self, pre_cq_option):
+    """Gets a valid config list, or None, from |pre_cq_option|."""
+    if pre_cq_option and pre_cq_option.split():
+      configs_to_test = set(pre_cq_option.split())
+
+      # Replace 'default' with the default configs.
+      if 'default' in configs_to_test:
+        configs_to_test.discard('default')
+        configs_to_test.update(constants.PRE_CQ_DEFAULT_CONFIGS)
+
+      # Verify that all of the configs are valid.
+      if all(c in self._run.site_config for c in configs_to_test):
+        return configs_to_test
+
+    return None
+
+  def ScreenChangeForPreCQ(self, change):
+    """Record which pre-cq tryjobs to test |change| with.
+
+    This method determines which configs to test a given |change| with, and
+    writes those as pending tryjobs to the cidb.
+
+    Args:
+      change: GerritPatch instance to screen. This change should not yet have
+              been screened.
+    """
+    actions = []
+    configs_to_test = self.VerificationsForChange(change)
+    # One pending-validation action per config, plus a final "screened"
+    # marker so we never screen the same change twice.
+    for c in configs_to_test:
+      actions.append(clactions.CLAction.FromGerritPatchAndAction(
+          change, constants.CL_ACTION_VALIDATION_PENDING_PRE_CQ,
+          reason=c))
+    actions.append(clactions.CLAction.FromGerritPatchAndAction(
+        change, constants.CL_ACTION_SCREENED_FOR_PRE_CQ))
+
+    build_id, db = self._run.GetCIDBHandle()
+    db.InsertCLActions(build_id, actions)
+
+  def CanSubmitChangeInPreCQ(self, change):
+    """Look up whether |change| is configured to be submitted in the pre-CQ.
+
+    This looks up the "submit-in-pre-cq" setting inside the project in
+    COMMIT-QUEUE.ini and checks whether it is set to "yes".
+
+    [GENERAL]
+      submit-in-pre-cq: yes
+
+    Args:
+      change: Change to examine.
+
+    Returns:
+      Boolean indicating if this change is configured to be submitted
+      in the pre-CQ.
+    """
+    result = None
+    try:
+      result = triage_lib.GetOptionForChange(
+          self._build_root, change, 'GENERAL', 'submit-in-pre-cq')
+    except ConfigParser.Error:
+      # A broken config file means "no": log it but never block the launcher.
+      logging.error('%s has malformed config file', change, exc_info=True)
+    return bool(result and result.lower() == 'yes')
+
+  def LaunchTrybot(self, plan, configs):
+    """Launch a Pre-CQ run with the provided list of CLs.
+
+    Args:
+      plan: The list of patches to test in the pre-cq tryjob.
+      configs: A list of pre-cq config names to launch.
+    """
+    cmd = ['cbuildbot', '--remote',
+           '--timeout', str(self.INFLIGHT_TIMEOUT * 60)] + configs
+    for patch in plan:
+      cmd += ['-g', cros_patch.AddPrefix(patch, patch.gerrit_number)]
+      self._PrintPatchStatus(patch, 'testing')
+    if self._run.options.debug:
+      logging.debug('Would have launched tryjob with %s', cmd)
+    else:
+      cros_build_lib.RunCommand(cmd, cwd=self._build_root)
+
+    # Record a "launching" action for every (patch, config) pair so that
+    # later cycles (and timeout handling) can track the launch.
+    build_id, db = self._run.GetCIDBHandle()
+    actions = [
+        clactions.CLAction.FromGerritPatchAndAction(
+            patch, constants.CL_ACTION_TRYBOT_LAUNCHING, config)
+        for patch, config in itertools.product(plan, configs)]
+    db.InsertCLActions(build_id, actions)
+
+  def GetDisjointTransactionsToTest(self, pool, progress_map):
+    """Get the list of disjoint transactions to test.
+
+    Side effect: reject or retry changes that have timed out.
+
+    Args:
+      pool: The validation pool.
+      progress_map: See return type of clactions.GetPreCQProgressMap.
+
+    Returns:
+      A list of (transaction, config) tuples corresponding to different trybots
+      that should be launched.
+    """
+    # Get the set of busy and passed CLs.
+    busy, _, verified = clactions.GetPreCQCategories(progress_map)
+
+    screened_changes = set(progress_map)
+
+    # Create a list of disjoint transactions to test.
+    manifest = git.ManifestCheckout.Cached(self._build_root)
+    plans = pool.CreateDisjointTransactions(
+        manifest, screened_changes,
+        max_txn_length=self.MAX_PATCHES_PER_TRYBOT_RUN)
+    for plan in plans:
+      # If any of the CLs in the plan is not yet screened, wait for them to
+      # be screened.
+      #
+      # If any of the CLs in the plan are currently "busy" being tested,
+      # wait until they're done before starting to test this plan.
+      #
+      # Similarly, if all of the CLs in the plan have already been validated,
+      # there's no need to launch a trybot run.
+      plan = set(plan)
+      if not plan.issubset(screened_changes):
+        logging.info('CLs waiting to be screened: %s',
+                     cros_patch.GetChangesAsString(
+                         plan.difference(screened_changes)))
+      elif plan.issubset(verified):
+        logging.info('CLs already verified: %s',
+                     cros_patch.GetChangesAsString(plan))
+      elif plan.intersection(busy):
+        logging.info('CLs currently being verified: %s',
+                     cros_patch.GetChangesAsString(plan.intersection(busy)))
+        if plan.difference(busy):
+          logging.info('CLs waiting on verification of dependencies: %r',
+                       cros_patch.GetChangesAsString(plan.difference(busy)))
+      # TODO(akeshet): Consider using a database time rather than gerrit
+      # approval time and local clock for launch delay.
+      elif any(x.approval_timestamp + self.LAUNCH_DELAY * 60 > time.time()
+               for x in plan):
+        # Let a patch series settle for LAUNCH_DELAY minutes before testing.
+        logging.info('CLs waiting on launch delay: %s',
+                     cros_patch.GetChangesAsString(plan))
+      else:
+        pending_configs = clactions.GetPreCQConfigsToTest(plan, progress_map)
+        for config in pending_configs:
+          yield (plan, config)
+
+  def _ProcessRequeuedAndSpeculative(self, change, action_history):
+    """Detect if |change| was requeued by developer, and mark in cidb.
+
+    Args:
+      change: GerritPatch instance to check.
+      action_history: List of CLActions.
+    """
+    # A change without the ready flag is treated as speculative.
+    action_string = clactions.GetRequeuedOrSpeculative(
+        change, action_history, not change.IsMergeable())
+    if action_string:
+      build_id, db = self._run.GetCIDBHandle()
+      action = clactions.CLAction.FromGerritPatchAndAction(
+          change, action_string)
+      db.InsertCLActions(build_id, [action])
+
+  def _ProcessExpiry(self, change, status, timestamp, pool, current_time):
+    """Enforce expiry of a PASSED or FULLY_VERIFIED status.
+
+    Args:
+      change: GerritPatch instance to process.
+      status: |change|'s pre-cq status.
+      timestamp: datetime.datetime for when |status| was achieved.
+      pool: The current validation pool.
+      current_time: datetime.datetime for current database time.
+    """
+    if not timestamp:
+      return
+    timed_out = self._HasTimedOut(timestamp, current_time,
+                                  self.STATUS_EXPIRY_TIMEOUT)
+    verified = status in (constants.CL_STATUS_PASSED,
+                          constants.CL_STATUS_FULLY_VERIFIED)
+    if timed_out and verified:
+      # Reset very stale verified CLs so they get re-tested before the CQ
+      # picks them up; notify the owner why.
+      msg = PRECQ_EXPIRY_MSG % self.STATUS_EXPIRY_TIMEOUT
+      build_id, db = self._run.GetCIDBHandle()
+      if db:
+        pool.SendNotification(change, '%(details)s', details=msg)
+        action = clactions.CLAction.FromGerritPatchAndAction(
+            change, constants.CL_ACTION_PRE_CQ_RESET)
+        db.InsertCLActions(build_id, [action])
+
+  def _ProcessTimeouts(self, change, progress_map, pool, current_time):
+    """Enforce per-config launch and inflight timeouts.
+
+    Args:
+      change: GerritPatch instance to process.
+      progress_map: As returned by clactions.GetCLPreCQProgress a dict mapping
+                    each change in |changes| to a dict mapping config names
+                    to (status, timestamp) tuples for the configs under test.
+      pool: The current validation pool.
+      current_time: datetime.datetime timestamp giving current database time.
+    """
+    # TODO(akeshet) restore trybot launch retries here (there was
+    # no straightforward existing mechanism to include them in the
+    # transition to parallel pre-cq).
+    timeout_statuses = (constants.CL_PRECQ_CONFIG_STATUS_LAUNCHED,
+                        constants.CL_PRECQ_CONFIG_STATUS_INFLIGHT)
+    config_progress = progress_map[change]
+    for config, (config_status, timestamp, _) in config_progress.iteritems():
+      if not config_status in timeout_statuses:
+        continue
+      # Launched jobs get LAUNCH_TIMEOUT to start; started jobs get
+      # INFLIGHT_TIMEOUT to finish.
+      launched = config_status == constants.CL_PRECQ_CONFIG_STATUS_LAUNCHED
+      timeout = self.LAUNCH_TIMEOUT if launched else self.INFLIGHT_TIMEOUT
+      msg = (PRECQ_LAUNCH_TIMEOUT_MSG if launched
+             else PRECQ_INFLIGHT_TIMEOUT_MSG) % (config, timeout)
+
+      if self._HasTimedOut(timestamp, current_time, timeout):
+        pool.SendNotification(change, '%(details)s', details=msg)
+        pool.RemoveReady(change, reason=config)
+        pool.UpdateCLPreCQStatus(change, constants.CL_STATUS_FAILED)
+
+  def _ProcessVerified(self, change, can_submit, will_submit):
+    """Process a change that is fully pre-cq verified.
+
+    Args:
+      change: GerritPatch instance to process.
+      can_submit: set of changes that can be submitted by the pre-cq.
+      will_submit: set of changes that will be submitted by the pre-cq.
+
+    Returns:
+      A tuple of (set of changes that should be submitted by pre-cq,
+                  set of changes that should be passed by pre-cq)
+    """
+    # If this change and all its dependencies are pre-cq submittable,
+    # and none of them have yet been marked as pre-cq passed, then
+    # mark them for submission. Otherwise, mark this change as passed.
+    if change in will_submit:
+      return set(), set()
+
+    if change in can_submit:
+      logging.info('Attempting to determine if %s can be submitted.', change)
+      patch_series = validation_pool.PatchSeries(self._build_root)
+      try:
+        # The whole dependency transaction must be submittable together.
+        plan = patch_series.CreateTransaction(change, limit_to=can_submit)
+        return plan, set()
+      except cros_patch.DependencyError:
+        # Some dependency is not submittable; fall through to "passed".
+        pass
+
+    # Changes that cannot be submitted are marked as passed.
+    return set(), set([change])
+
+  def UpdateChangeStatuses(self, changes, status):
+    """Update |changes| to |status|.
+
+    Args:
+      changes: A set of GerritPatch instances.
+      status: One of constants.CL_STATUS_* statuses.
+    """
+    if changes:
+      build_id, db = self._run.GetCIDBHandle()
+      a = clactions.TranslatePreCQStatusToAction(status)
+      actions = [clactions.CLAction.FromGerritPatchAndAction(c, a)
+                 for c in changes]
+      db.InsertCLActions(build_id, actions)
+
+  def ProcessChanges(self, pool, changes, _non_manifest_changes):
+    """Process a list of changes that were marked as Ready.
+
+    From our list of changes that were marked as Ready, we create a
+    list of disjoint transactions and send each one to a separate Pre-CQ
+    trybot.
+
+    Non-manifest changes are just submitted here because they don't need to be
+    verified by either the Pre-CQ or CQ.
+
+    Args:
+      pool: The current ValidationPool.
+      changes: List of GerritPatch changes marked Ready.
+      _non_manifest_changes: Unused; non-manifest changes are submitted via
+                             pool.SubmitNonManifestChanges below.
+
+    Returns:
+      ([], []) so the ValidationPool keeps waiting for more changes.
+    """
+    _, db = self._run.GetCIDBHandle()
+    action_history = db.GetActionsForChanges(changes)
+    for change in changes:
+      self._ProcessRequeuedAndSpeculative(change, action_history)
+
+    status_and_timestamp_map = {
+        c: clactions.GetCLPreCQStatusAndTime(c, action_history)
+        for c in changes}
+    status_map = {c: v[0] for c, v in status_and_timestamp_map.items()}
+
+    # Filter out failed speculative changes.
+    changes = [c for c in changes if status_map[c] != constants.CL_STATUS_FAILED
+               or c.HasReadyFlag()]
+
+    progress_map = clactions.GetPreCQProgressMap(changes, action_history)
+    _, inflight, verified = clactions.GetPreCQCategories(progress_map)
+    current_db_time = db.GetTime()
+
+    # Changes that have not yet reached a terminal PASSED state.
+    to_process = set(c for c in changes
+                     if status_map[c] != constants.CL_STATUS_PASSED)
+
+    # Mark verified changes verified.
+    to_mark_verified = [c for c in verified.intersection(to_process) if
+                        status_map[c] != constants.CL_STATUS_FULLY_VERIFIED]
+    self.UpdateChangeStatuses(to_mark_verified,
+                              constants.CL_STATUS_FULLY_VERIFIED)
+    # Send notifications to the fully verified changes.
+    if to_mark_verified:
+      pool.HandlePreCQSuccess(to_mark_verified)
+
+    # Changes that can be submitted, if their dependencies can be too. Only
+    # include changes that have not already been marked as passed.
+    can_submit = set(c for c in (verified.intersection(to_process)) if
+                     c.IsMergeable() and self.CanSubmitChangeInPreCQ(c))
+
+    # Changes that will be submitted.
+    will_submit = set()
+    # Changes that will be passed.
+    will_pass = set()
+
+    # Separately count and log the number of mergable and speculative changes in
+    # each of the possible pre-cq statuses (or in status None).
+    POSSIBLE_STATUSES = clactions.PRE_CQ_CL_STATUSES | {None}
+    status_counts = {}
+    for count_bin in itertools.product((True, False), POSSIBLE_STATUSES):
+      status_counts[count_bin] = 0
+    for c, status in status_map.iteritems():
+      count_bin = (c.IsMergeable(), status)
+      status_counts[count_bin] = status_counts[count_bin] + 1
+    for count_bin, count in sorted(status_counts.items()):
+      subtype = 'mergeable' if count_bin[0] else 'speculative'
+      status = count_bin[1]
+      name = '.'.join(['pre-cq-status', status if status else 'None'])
+      logging.info('Sending stat (name, subtype, count): (%s, %s, %s)',
+                   name, subtype, count)
+      graphite.StatsFactory.GetInstance().Gauge(name).send(subtype, count)
+
+    # Transition newly-inflight changes and notify owners with build links.
+    for change in inflight:
+      if status_map[change] != constants.CL_STATUS_INFLIGHT:
+        build_ids = [x for _, _, x in progress_map[change].values()]
+        # Change the status to inflight.
+        self.UpdateChangeStatuses([change], constants.CL_STATUS_INFLIGHT)
+        build_dicts = db.GetBuildStatuses(build_ids)
+        lines = []
+        for b in build_dicts:
+          waterfall_url = constants.WATERFALL_TO_DASHBOARD[b['waterfall']]
+          url = tree_status.ConstructDashboardURL(
+              waterfall_url, b['builder_name'], b['build_number'])
+          lines.append('(%s) : %s' % (b['build_config'], url))
+
+        # Send notifications.
+        pool.HandleApplySuccess(change, build_log=('\n' + '\n'.join(lines)))
+
+    for change in to_process:
+      # Detect if change is ready to be marked as passed, or ready to submit.
+      if change in verified and change.IsMergeable():
+        to_submit, to_pass = self._ProcessVerified(change, can_submit,
+                                                   will_submit)
+        will_submit.update(to_submit)
+        will_pass.update(to_pass)
+        continue
+
+      # Screen unscreened changes to determine which trybots to test them with.
+      if not clactions.IsChangeScreened(change, action_history):
+        self.ScreenChangeForPreCQ(change)
+        continue
+
+      self._ProcessTimeouts(change, progress_map, pool, current_db_time)
+
+    # Filter out changes that have already failed, and aren't marked trybot
+    # ready or commit ready, before launching.
+    launchable_progress_map = {
+        k: v for k, v in progress_map.iteritems()
+        if k.HasReadyFlag() or status_map[k] != constants.CL_STATUS_FAILED}
+
+    is_tree_open = tree_status.IsTreeOpen(throttled_ok=True)
+    launch_count = 0
+    cl_launch_count = 0
+    # Rate-limit launches per cycle (see MAX_LAUNCHES_PER_CYCLE_DERIVATIVE).
+    launch_count_limit = (self.last_cycle_launch_count +
+                          self.MAX_LAUNCHES_PER_CYCLE_DERIVATIVE)
+    launches = {}
+    for plan, config in self.GetDisjointTransactionsToTest(
+        pool, launchable_progress_map):
+      launches.setdefault(frozenset(plan), []).append(config)
+
+    for plan, configs in launches.iteritems():
+      if not is_tree_open:
+        logging.info('Tree is closed, not launching configs %r for plan %s.',
+                     configs, cros_patch.GetChangesAsString(plan))
+      elif launch_count >= launch_count_limit:
+        logging.info('Hit or exceeded maximum launch count of %s this cycle, '
+                     'not launching configs %r for plan %s.',
+                     launch_count_limit, configs,
+                     cros_patch.GetChangesAsString(plan))
+      else:
+        self.LaunchTrybot(plan, configs)
+        launch_count += len(configs)
+        cl_launch_count += len(configs) * len(plan)
+
+    graphite.StatsFactory.GetInstance().Counter('pre-cq').increment(
+        'launch_count', launch_count)
+    graphite.StatsFactory.GetInstance().Counter('pre-cq').increment(
+        'cl_launch_count', cl_launch_count)
+    graphite.StatsFactory.GetInstance().Counter('pre-cq').increment(
+        'tick_count')
+
+    self.last_cycle_launch_count = launch_count
+
+    # Mark passed changes as passed
+    self.UpdateChangeStatuses(will_pass, constants.CL_STATUS_PASSED)
+
+    # Expire any very stale passed or fully verified changes.
+    for c, v in status_and_timestamp_map.items():
+      self._ProcessExpiry(c, v[0], v[1], pool, current_db_time)
+
+    # Submit changes that are ready to submit, if we can.
+    if tree_status.IsTreeOpen(throttled_ok=True):
+      pool.SubmitNonManifestChanges(check_tree_open=False,
+                                    reason=constants.STRATEGY_NONMANIFEST)
+      pool.SubmitChanges(will_submit, check_tree_open=False,
+                         reason=constants.STRATEGY_PRECQ_SUBMIT)
+
+    # Tell ValidationPool to keep waiting for more changes until we hit
+    # its internal timeout.
+    return [], []
+
+  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
+  def PerformStage(self):
+    """Run the launcher loop: poll Gerrit and dispatch Pre-CQ tryjobs."""
+    # Setup and initialize the repo.
+    super(PreCQLauncherStage, self).PerformStage()
+
+    query = constants.PRECQ_READY_QUERY
+    if self._run.options.cq_gerrit_override:
+      query = (self._run.options.cq_gerrit_override, None)
+
+    # Loop through all of the changes until we hit a timeout.
+    # ProcessChanges is installed as the change_filter, so AcquirePool
+    # repeatedly invokes it on each polling cycle.
+    validation_pool.ValidationPool.AcquirePool(
+        self._run.config.overlays, self.repo,
+        self._run.buildnumber,
+        constants.PRE_CQ_LAUNCHER_NAME,
+        query,
+        dryrun=self._run.options.debug,
+        check_tree_open=False, change_filter=self.ProcessChanges,
+        builder_run=self._run)
diff --git a/cbuildbot/stages/sync_stages_unittest b/cbuildbot/stages/sync_stages_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/stages/sync_stages_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/stages/sync_stages_unittest.py b/cbuildbot/stages/sync_stages_unittest.py
new file mode 100644
index 0000000..e2dc829
--- /dev/null
+++ b/cbuildbot/stages/sync_stages_unittest.py
@@ -0,0 +1,1173 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for sync stages."""
+
+from __future__ import print_function
+
+import cPickle
+import datetime
+import itertools
+import mock
+import os
+import time
+import tempfile
+
+from chromite.cbuildbot import chromeos_config
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import lkgm_manager
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot import manifest_version_unittest
+from chromite.cbuildbot import metadata_lib
+from chromite.cbuildbot import repository
+from chromite.cbuildbot import tree_status
+from chromite.cbuildbot import triage_lib
+from chromite.cbuildbot import trybot_patch_pool
+from chromite.cbuildbot import validation_pool
+from chromite.cbuildbot.stages import generic_stages_unittest
+from chromite.cbuildbot.stages import sync_stages
+from chromite.lib import cidb
+from chromite.lib import clactions
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import fake_cidb
+from chromite.lib import gerrit
+from chromite.lib import git
+from chromite.lib import git_unittest
+from chromite.lib import gob_util
+from chromite.lib import osutils
+from chromite.lib import patch as cros_patch
+from chromite.lib import timeout_util
+
+# It's normal for unittests to access protected members.
+# pylint: disable=protected-access
+
+
class BootstrapStageTest(
    generic_stages_unittest.AbstractStageTestCase,
    cros_build_lib_unittest.RunCommandTestCase):
  """Tests the Bootstrap stage."""

  BOT_ID = 'sync-test-cbuildbot'
  RELEASE_TAG = ''

  def setUp(self):
    # Pretend API version is always current so the stage never re-execs
    # merely to fix version skew.
    self.PatchObject(cros_build_lib, 'GetTargetChromiteApiVersion',
                     return_value=(constants.REEXEC_API_MAJOR,
                                   constants.REEXEC_API_MINOR))

    self._Prepare()

  def ConstructStage(self):
    patch_pool = trybot_patch_pool.TrybotPatchPool()
    return sync_stages.BootstrapStage(self._run, patch_pool)

  def _AssertCbuildbotReExec(self):
    """Verify cbuildbot was re-executed with the expected CL options.

    We mostly only want to test the CL options Bootstrap changes:
      '--sourceroot=%s'
      '--test-bootstrap'
      '--nobootstrap'
      '--manifest-repo-url'
    """
    self.assertCommandContains([
        'chromite/cbuildbot/cbuildbot', 'sync-test-cbuildbot',
        '-r', os.path.join(self.tempdir, 'buildroot'),
        '--buildbot', '--noprebuilts', '--buildnumber', '1234321',
        '--branch', 'ooga_booga',
        '--sourceroot', mock.ANY,
        '--nobootstrap',
    ])

  def testSimpleBootstrap(self):
    """Verify Bootstrap behavior in a simple case (with a branch)."""

    self.RunStage()

    # Clone next chromite checkout.
    self.assertCommandContains([
        'git', 'clone', constants.CHROMITE_URL,
        mock.ANY,  # Can't predict new chromite checkout directory.
        '--reference', mock.ANY
    ])

    # Switch to the test branch.
    self.assertCommandContains(['git', 'checkout', 'ooga_booga'])

    # Re-exec cbuildbot.
    self._AssertCbuildbotReExec()

  def testSiteConfigBootstrap(self):
    """Verify Bootstrap behavior, if config_repo is passed in."""

    # Set a new command line option to set the repo.
    self._run.options.config_repo = 'http://happy/config/repo'

    self.RunStage()

    # Clone next chromite. Use constants.CHROMITE_URL (same URL previously
    # hardcoded here) to stay consistent with testSimpleBootstrap.
    self.assertCommandContains([
        'git', 'clone', constants.CHROMITE_URL,
        mock.ANY,  # Can't predict new chromite checkout directory.
        '--reference', mock.ANY
    ])

    # Switch to the test branch.
    self.assertCommandContains(['git', 'checkout', 'ooga_booga'])

    # Clone the site config.
    self.assertCommandContains([
        'git', 'clone', 'http://happy/config/repo',
        mock.ANY,  # Can't predict new site config checkout directory.
        '--reference', mock.ANY
    ])

    # Switch to the test branch.
    self.assertCommandContains(['git', 'checkout', 'ooga_booga'])

    # Re-exec cbuildbot.
    self._AssertCbuildbotReExec()
+
+
class ManifestVersionedSyncStageTest(
    generic_stages_unittest.AbstractStageTestCase):
  """Tests the ManifestVersionedSync stage."""
  # pylint: disable=abstract-method

  def setUp(self):
    # Fake inputs for the BuildSpecsManager; none of these endpoints are
    # contacted because SetInFlight is patched and dry_run=True below.
    self.source_repo = 'ssh://source/repo'
    self.manifest_version_url = 'fake manifest url'
    self.branch = 'master'
    self.build_name = 'x86-generic'
    self.incr_type = 'branch'
    self.next_version = 'next_version'
    self.sync_stage = None
    self.PatchObject(manifest_version.BuildSpecsManager, 'SetInFlight')

    repo = repository.RepoRepository(
        self.source_repo, self.tempdir, self.branch)
    self.manager = manifest_version.BuildSpecsManager(
        repo, self.manifest_version_url, [self.build_name], self.incr_type,
        force=False, branch=self.branch, dry_run=True)

    self._Prepare()

  def _Prepare(self, bot_id=None, **kwargs):
    super(ManifestVersionedSyncStageTest, self)._Prepare(bot_id, **kwargs)

    # Wire the stage and the run up to the manager built in setUp.
    self._run.config['manifest_version'] = self.manifest_version_url
    self.sync_stage = sync_stages.ManifestVersionedSyncStage(self._run)
    self.sync_stage.manifest_manager = self.manager
    self._run.attrs.manifest_manager = self.manager

  def testManifestVersionedSyncOnePartBranch(self):
    """Tests basic ManifestVersionedSyncStage with branch ooga_booga"""
    # Stub out every external interaction so Run() only exercises the
    # stage's own control flow.
    self.PatchObject(sync_stages.ManifestVersionedSyncStage, 'Initialize')
    self.PatchObject(sync_stages.ManifestVersionedSyncStage,
                     '_SetChromeVersionIfApplicable')
    self.PatchObject(manifest_version.BuildSpecsManager, 'GetNextBuildSpec',
                     return_value=self.next_version)
    self.PatchObject(manifest_version.BuildSpecsManager, 'GetLatestPassingSpec')
    self.PatchObject(sync_stages.SyncStage, 'ManifestCheckout',
                     return_value=self.next_version)
    self.PatchObject(sync_stages.ManifestVersionedSyncStage,
                     '_GetMasterVersion', return_value='foo',
                     autospec=True)
    self.PatchObject(sync_stages.ManifestVersionedSyncStage,
                     '_VerifyMasterId', autospec=True)
    self.PatchObject(manifest_version.BuildSpecsManager, 'BootstrapFromVersion',
                     autospec=True)
    self.PatchObject(repository.RepoRepository, 'Sync', autospec=True)

    self.sync_stage.Run()
+
+
class MockPatch(mock.MagicMock):
  """MagicMock standing in for a GerritPatch-like object."""

  gerrit_number = '1234'
  patch_number = '1'
  project = 'chromiumos/chromite'
  status = 'NEW'
  internal = False
  current_patch_set = {
      'number': patch_number,
      'draft': False,
  }
  patch_dict = {
      'currentPatchSet': current_patch_set,
  }
  remote = 'cros'
  mock_diff_status = {}

  def __init__(self, *args, **kwargs):
    super(MockPatch, self).__init__(*args, **kwargs)

    # Flags can vary per-patch, so keep them on the instance rather than
    # sharing a class-level dict.
    self.flags = {
        'CRVW': '2',
        'VRIF': '1',
        'COMR': '1',
    }

  def HasApproval(self, field, allowed):
    """Pretends the patch is good.

    Pretend the patch has all of the values listed in
    constants.DEFAULT_CQ_READY_FIELDS, but not any other fields.

    Args:
      field: The name of the field as a string. 'CRVW', etc.
      allowed: Value, or list of values that are acceptable expressed as
               strings.
    """
    value = self.flags.get(field, 0)
    if not isinstance(allowed, (tuple, list)):
      return value == allowed
    return value in allowed

  def IsDraft(self):
    """Return whether this patch is a draft patchset."""
    return self.current_patch_set['draft']

  def IsBeingMerged(self):
    """Return whether this patch is merged or in the middle of being merged."""
    return self.status in ('SUBMITTED', 'MERGED')

  def IsMergeable(self):
    """Default implementation of IsMergeable, stubbed out by some tests."""
    return True

  def GetDiffStatus(self, _):
    """Return the canned diff status mapping."""
    return self.mock_diff_status
+
+
class SyncStageTest(generic_stages_unittest.AbstractStageTestCase):
  """Tests the SyncStage."""

  def setUp(self):
    self._Prepare()

  def ConstructStage(self):
    return sync_stages.SyncStage(self._run)

  def testWriteChangesToMetadata(self):
    """Test whether WriteChangesToMetadata can handle duplicates properly."""
    change_1 = cros_patch.GerritFetchOnlyPatch(
        'https://host/chromite/tacos',
        'chromite/tacos',
        'refs/changes/11/12345/4',
        'master',
        'cros-internal',
        '7181e4b5e182b6f7d68461b04253de095bad74f9',
        'I47ea30385af60ae4cc2acc5d1a283a46423bc6e1',
        '12345',
        '4',
        'foo@chromium.org',
        1, 1, 3)
    change_2 = cros_patch.GerritFetchOnlyPatch(
        'https://host/chromite/foo',
        'chromite/foo',
        'refs/changes/11/12344/3',
        'master',
        'cros-internal',
        'cf23df2207d99a74fbe169e3eba035e633b65d94',
        'Iab9bf08b9b9bd4f72721cfc36e843ed302aca11a',
        '12344',
        '3',
        'foo@chromium.org',
        0, 0, 1)
    stage = self.ConstructStage()
    # change_1 is passed twice to exercise de-duplication.
    stage.WriteChangesToMetadata([change_1, change_1, change_2])
    # The metadata list should be de-duplicated and sorted (change_2 has the
    # lower change number, so it comes first).
    expected = [change_2.GetAttributeDict(), change_1.GetAttributeDict()]
    self.assertEqual(expected, self._run.attrs.metadata.GetValue('changes'))
+
class BaseCQTestCase(generic_stages_unittest.StageTestCase):
  """Helper class for testing the CommitQueueSync stage"""
  # Minimal manifest served by the mocked RepoRepository.ExportManifest.
  MANIFEST_CONTENTS = '<manifest/>'

  def setUp(self):
    """Setup patchers for specified bot id."""
    # Mock out methods as needed.
    self.PatchObject(lkgm_manager, 'GenerateBlameList')
    self.PatchObject(lkgm_manager.LKGMManager, 'SetInFlight')
    self.PatchObject(repository.RepoRepository, 'ExportManifest',
                     return_value=self.MANIFEST_CONTENTS, autospec=True)
    self.PatchObject(sync_stages.SyncStage, 'WriteChangesToMetadata')
    self.StartPatcher(git_unittest.ManifestMock())
    self.StartPatcher(git_unittest.ManifestCheckoutMock())
    version_file = os.path.join(self.build_root, constants.VERSION_FILE)
    manifest_version_unittest.VersionInfoTest.WriteFakeVersionFile(version_file)
    rc_mock = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
    rc_mock.SetDefaultCmdResult()

    # Block the CQ from contacting GoB.
    self.PatchObject(gerrit.GerritHelper, 'RemoveReady')
    self.PatchObject(validation_pool.PaladinMessage, 'Send')
    self.PatchObject(validation_pool.ValidationPool, 'SubmitChanges')

    # If a test is still contacting GoB, something is busted.
    self.PatchObject(gob_util, 'CreateHttpConn',
                     side_effect=AssertionError('Test should not contact GoB'))
    self.PatchObject(git, 'GitPush',
                     side_effect=AssertionError('Test should not push.'))

    # Create a fake repo / manifest on disk that is used by subclasses.
    for subdir in ('repo', 'manifests'):
      osutils.SafeMakedirs(os.path.join(self.build_root, '.repo', subdir))
    self.manifest_path = os.path.join(self.build_root, '.repo', 'manifest.xml')
    osutils.WriteFile(self.manifest_path, self.MANIFEST_CONTENTS)
    # ReloadChanges just echoes its input; the mock patches are not
    # pickle-able / re-loadable.
    self.PatchObject(validation_pool.ValidationPool, 'ReloadChanges',
                     side_effect=lambda x: x)

    # Create and set up a fake cidb instance.
    self.fake_db = fake_cidb.FakeCIDBConnection()
    cidb.CIDBConnectionFactory.SetupMockCidb(self.fake_db)

    self.sync_stage = None
    self._Prepare()

  def tearDown(self):
    # Undo SetupMockCidb so later tests get a clean factory.
    cidb.CIDBConnectionFactory.ClearMock()

  def _Prepare(self, bot_id=None, **kwargs):
    super(BaseCQTestCase, self)._Prepare(bot_id, **kwargs)
    self._run.config.overlays = constants.PUBLIC_OVERLAYS
    self.sync_stage = sync_stages.CommitQueueSyncStage(self._run)

    # BuildStart stage would have seeded the build.
    build_id = self.fake_db.InsertBuild(
        'test_builder', constants.WATERFALL_TRYBOT, 666, 'test_config',
        'test_hostname',
        timeout_seconds=constants.MASTER_BUILD_TIMEOUT_DEFAULT_SECONDS)
    self._run.attrs.metadata.UpdateWithDict({'build_id': build_id})

  def PerformSync(self, committed=False, num_patches=1, tree_open=True,
                  tree_throttled=False,
                  pre_cq_status=constants.CL_STATUS_PASSED,
                  runs=0, changes=None, patch_objects=True,
                  **kwargs):
    """Helper to perform a basic sync for master commit queue.

    Args:
      committed: Value to be returned by mock patches' IsChangeCommitted.
                 Default: False.
      num_patches: The number of mock patches to create. Default: 1.
      tree_open: If True, behave as if tree is open. Default: True.
      tree_throttled: If True, behave as if tree is throttled
                      (overriding the tree_open arg). Default: False.
      pre_cq_status: PreCQ status for mock patches. Default: passed.
      runs: The maximum number of times to allow validation_pool.AcquirePool
            to wait for additional changes. runs=0 means never wait for
            additional changes. Default: 0.
      changes: Optional list of MockPatch instances that should be available
               in validation pool. If not specified, a set of |num_patches|
               patches will be created.
      patch_objects: If your test will call PerformSync more than once, set
                     this to false on subsequent calls to ensure that we do
                     not re-patch already patched methods with mocks.
      **kwargs: Additional arguments to pass to MockPatch when creating patches.

    Returns:
      A list of MockPatch objects which were created and used in PerformSync.
    """
    # Default the approval time far enough in the past that the pre-cq
    # launcher considers the change ready.
    kwargs.setdefault(
        'approval_timestamp',
        time.time() - sync_stages.PreCQLauncherStage.LAUNCH_DELAY * 60)
    # NOTE(review): list * num_patches aliases ONE MockPatch num_patches
    # times; current tests use 0 or 1 patches so this is harmless, but
    # confirm before relying on distinct patch instances here.
    changes = changes or [MockPatch(**kwargs)] * num_patches
    if tree_throttled:
      for change in changes:
        # COMR='2' marks the change as throttled-tree ready — presumably
        # matching THROTTLED_CQ_READY_QUERY; verify against constants.
        change.flags['COMR'] = '2'
    if pre_cq_status is not None:
      # Record the requested pre-cq status in the fake cidb via a fake
      # pre-cq build's CL actions.
      config = constants.PRE_CQ_DEFAULT_CONFIGS[0]
      new_build_id = self.fake_db.InsertBuild('Pre cq group',
                                              constants.WATERFALL_TRYBOT,
                                              1,
                                              config,
                                              'bot-hostname')
      for change in changes:
        action = clactions.TranslatePreCQStatusToAction(pre_cq_status)
        self.fake_db.InsertCLActions(
            new_build_id,
            [clactions.CLAction.FromGerritPatchAndAction(change, action)])

    if patch_objects:
      self.PatchObject(gerrit.GerritHelper, 'IsChangeCommitted',
                       return_value=committed, autospec=True)
      # Validation pool will mutate the return value it receives from
      # Query, therefore return a copy of the changes list.
      def Query(*_args, **_kwargs):
        return list(changes)
      self.PatchObject(gerrit.GerritHelper, 'Query',
                       side_effect=Query, autospec=True)
      if tree_throttled:
        self.PatchObject(tree_status, 'WaitForTreeStatus',
                         return_value=constants.TREE_THROTTLED, autospec=True)
      elif tree_open:
        self.PatchObject(tree_status, 'WaitForTreeStatus',
                         return_value=constants.TREE_OPEN, autospec=True)
      else:
        self.PatchObject(tree_status, 'WaitForTreeStatus',
                         side_effect=timeout_util.TimeoutError())

      # Allow |runs| polling cycles before ShouldExitEarly returns True.
      exit_it = itertools.chain([False] * runs, itertools.repeat(True))
      self.PatchObject(validation_pool.ValidationPool, 'ShouldExitEarly',
                       side_effect=exit_it)

    self.sync_stage.PerformStage()

    return changes

  def ReloadPool(self):
    """Save the pool to disk and reload it."""
    with tempfile.NamedTemporaryFile() as f:
      cPickle.dump(self.sync_stage.pool, f)
      f.flush()
      # HandleSkip reloads the pool from the validation_pool option path.
      self._run.options.validation_pool = f.name
      self.sync_stage = sync_stages.CommitQueueSyncStage(self._run)
      self.sync_stage.HandleSkip()
+
+
class SlaveCQSyncTest(BaseCQTestCase):
  """Tests the CommitQueueSync stage for the paladin slaves."""
  BOT_ID = 'x86-alex-paladin'

  def setUp(self):
    # Slaves sync against the manifest produced by their master build.
    self._run.options.master_build_id = 1234
    self.PatchObject(sync_stages.ManifestVersionedSyncStage,
                     '_GetMasterVersion', return_value='foo',
                     autospec=True)
    self.PatchObject(sync_stages.MasterSlaveLKGMSyncStage,
                     '_VerifyMasterId', autospec=True)
    # Pretend the master's manifest is already on disk.
    self.PatchObject(lkgm_manager.LKGMManager, 'BootstrapFromVersion',
                     return_value=self.manifest_path, autospec=True)
    self.PatchObject(repository.RepoRepository, 'Sync', autospec=True)

  def testReload(self):
    """Test basic ability to sync and reload the patches from disk."""
    self.sync_stage.PerformStage()
    self.ReloadPool()
+
+
class MasterCQSyncTestCase(BaseCQTestCase):
  """Helper class for testing the CommitQueueSync stage masters."""

  BOT_ID = 'master-paladin'

  def setUp(self):
    """Setup patchers for specified bot id."""
    self.AutoPatch([[validation_pool.ValidationPool, 'ApplyPoolIntoRepo']])
    # Candidate manifests are not really generated; both calls just return
    # the fake manifest written to disk by BaseCQTestCase.setUp.
    self.PatchObject(lkgm_manager.LKGMManager, 'CreateNewCandidate',
                     return_value=self.manifest_path, autospec=True)
    self.PatchObject(lkgm_manager.LKGMManager, 'CreateFromManifest',
                     return_value=self.manifest_path, autospec=True)

  def _testCommitNonManifestChange(self, **kwargs):
    """Test the commit of a non-manifest change.

    Args:
      **kwargs: Forwarded to PerformSync.

    Returns:
      List of MockPatch objects that were used in PerformSync
    """
    # Setting tracking_branch=foo makes this a non-manifest change.
    kwargs.setdefault('committed', True)
    kwargs.setdefault('tracking_branch', 'foo')
    return self.PerformSync(**kwargs)

  def _testFailedCommitOfNonManifestChange(self):
    """Test what happens when the commit of a non-manifest change fails.

    Returns:
      List of MockPatch objects that were used in PerformSync
    """
    return self._testCommitNonManifestChange(committed=False)

  def _testCommitManifestChange(self, changes=None, **kwargs):
    """Test committing a change to a project that's part of the manifest.

    Args:
      changes: Optional list of MockPatch instances to use in PerformSync.
      **kwargs: Forwarded to PerformSync.

    Returns:
      List of MockPatch objects that were used in PerformSync
    """
    # Treat every change as a cros project so nothing is filtered out.
    self.PatchObject(validation_pool.ValidationPool, '_FilterNonCrosProjects',
                     side_effect=lambda x, _: (x, []))
    return self.PerformSync(changes=changes, **kwargs)

  def _testDefaultSync(self):
    """Test basic ability to sync with standard options.

    Returns:
      List of MockPatch objects that were used in PerformSync
    """
    return self.PerformSync()
+
+
class MasterCQSyncTest(MasterCQSyncTestCase):
  """Tests the CommitQueueSync stage for the paladin masters."""

  def _AssertPoolContents(self, changes, non_manifest_changes=()):
    """Assert the pool holds exactly |changes| and |non_manifest_changes|."""
    self.assertItemsEqual(self.sync_stage.pool.changes, changes)
    self.assertItemsEqual(self.sync_stage.pool.non_manifest_changes,
                          list(non_manifest_changes))

  def testCommitNonManifestChange(self):
    """See MasterCQSyncTestCase"""
    changes = self._testCommitNonManifestChange()
    self._AssertPoolContents(changes)

  def testFailedCommitOfNonManifestChange(self):
    """See MasterCQSyncTestCase"""
    self._AssertPoolContents(self._testFailedCommitOfNonManifestChange())

  def testCommitManifestChange(self):
    """See MasterCQSyncTestCase"""
    self._AssertPoolContents(self._testCommitManifestChange())

  def testCommitManifestChangeWithoutPreCQ(self):
    """Changes get ignored if they aren't approved by pre-cq."""
    self._testCommitManifestChange(pre_cq_status=None)
    self._AssertPoolContents([])

  def testCommitManifestChangeWithoutPreCQAndOldPatches(self):
    """Changes get tested without pre-cq if the approval_timestamp is old."""
    changes = self._testCommitManifestChange(pre_cq_status=None,
                                             approval_timestamp=0)
    self._AssertPoolContents(changes)

  def testDefaultSync(self):
    """See MasterCQSyncTestCase"""
    self._AssertPoolContents(self._testDefaultSync())

  def testReload(self):
    """Test basic ability to sync and reload the patches from disk."""
    # Use zero patches because mock patches can't be pickled.
    changes = self.PerformSync(num_patches=0, runs=0)
    self.ReloadPool()
    self._AssertPoolContents(changes)

  def testTreeClosureBlocksCommit(self):
    """Test that tree closures block commits."""
    self.assertRaises(SystemExit, self._testCommitNonManifestChange,
                      tree_open=False)

  def testTreeThrottleUsesAlternateGerritQuery(self):
    """Test that if the tree is throttled, we use an alternate gerrit query."""
    changes = self.PerformSync(tree_throttled=True)
    gerrit.GerritHelper.Query.assert_called_with(
        mock.ANY, constants.THROTTLED_CQ_READY_QUERY[0],
        sort='lastUpdated')
    self._AssertPoolContents(changes)
+
+
+class PreCQLauncherStageTest(MasterCQSyncTestCase):
+  """Tests for the PreCQLauncherStage."""
+
+  BOT_ID = constants.PRE_CQ_LAUNCHER_CONFIG
+  STATUS_LAUNCHING = constants.CL_STATUS_LAUNCHING
+  STATUS_WAITING = constants.CL_STATUS_WAITING
+  STATUS_FAILED = constants.CL_STATUS_FAILED
+  STATUS_READY_TO_SUBMIT = constants.CL_STATUS_READY_TO_SUBMIT
+  STATUS_INFLIGHT = constants.CL_STATUS_INFLIGHT
+
  def setUp(self):
    # Avoid real delays in the launcher's polling loop.
    self.PatchObject(time, 'sleep', autospec=True)
    # Keep success handling from touching Gerrit.
    self.PatchObject(validation_pool.ValidationPool, 'HandlePreCQSuccess',
                     autospec=True)
+
  def _Prepare(self, bot_id=None, **kwargs):
    # Seed the fake cidb with the launcher's own build, as the BuildStart
    # stage would have on a real builder.
    build_id = self.fake_db.InsertBuild(
        constants.PRE_CQ_LAUNCHER_NAME, constants.WATERFALL_INTERNAL, 1,
        constants.PRE_CQ_LAUNCHER_CONFIG, 'bot-hostname')

    super(PreCQLauncherStageTest, self)._Prepare(
        bot_id, build_id=build_id, **kwargs)

    # Replace the CommitQueueSyncStage created by the base class with the
    # launcher stage under test.
    self.sync_stage = sync_stages.PreCQLauncherStage(self._run)
+
+  def testVerificationsForChangeValidConfig(self):
+    change = MockPatch()
+    configs_to_test = chromeos_config.GetConfig().keys()[:5]
+    return_string = ' '.join(configs_to_test)
+    self.PatchObject(triage_lib, 'GetOptionForChange',
+                     return_value=return_string)
+    self.assertItemsEqual(self.sync_stage.VerificationsForChange(change),
+                          configs_to_test)
+
+  def testVerificationsForChangeNoSuchConfig(self):
+    change = MockPatch()
+    self.PatchObject(triage_lib, 'GetOptionForChange',
+                     return_value='this_config_does_not_exist')
+    self.assertItemsEqual(self.sync_stage.VerificationsForChange(change),
+                          constants.PRE_CQ_DEFAULT_CONFIGS)
+
+  def testVerificationsForChangeEmptyField(self):
+    change = MockPatch()
+    self.PatchObject(triage_lib, 'GetOptionForChange',
+                     return_value=' ')
+    self.assertItemsEqual(self.sync_stage.VerificationsForChange(change),
+                          constants.PRE_CQ_DEFAULT_CONFIGS)
+
+  def testVerificationsForChangeNoneField(self):
+    change = MockPatch()
+    self.PatchObject(triage_lib, 'GetOptionForChange',
+                     return_value=None)
+    self.assertItemsEqual(self.sync_stage.VerificationsForChange(change),
+                          constants.PRE_CQ_DEFAULT_CONFIGS)
+
+  def testOverlayVerifications(self):
+    change = MockPatch(project='chromiumos/overlays/chromiumos-overlay')
+    self.PatchObject(triage_lib, 'GetOptionForChange',
+                     return_value=None)
+    configs = constants.PRE_CQ_DEFAULT_CONFIGS + [constants.BINHOST_PRE_CQ]
+    self.assertItemsEqual(self.sync_stage.VerificationsForChange(change),
+                          configs)
+
+  def testRequestedDefaultVerifications(self):
+    change = MockPatch()
+    self.PatchObject(triage_lib, 'GetOptionForChange',
+                     return_value='default x86-zgb-pre-cq')
+    configs = constants.PRE_CQ_DEFAULT_CONFIGS + ['x86-zgb-pre-cq']
+    self.assertItemsEqual(self.sync_stage.VerificationsForChange(change),
+                          configs)
+
+  def testVerificationsForChangeFromInvalidCommitMessage(self):
+    change = MockPatch(commit_message="""First line.
+
+Third line.
+pre-cq-configs: insect-pre-cq
+""")
+    self.PatchObject(triage_lib, 'GetOptionForChange',
+                     return_value='lumpy-pre-cq')
+    self.assertItemsEqual(self.sync_stage.VerificationsForChange(change),
+                          ['lumpy-pre-cq'])
+
+  def testVerificationsForChangeFromCommitMessage(self):
+    change = MockPatch(commit_message="""First line.
+
+Third line.
+pre-cq-configs: stumpy-pre-cq
+""")
+    self.PatchObject(triage_lib, 'GetOptionForChange',
+                     return_value='lumpy-pre-cq')
+    self.assertItemsEqual(self.sync_stage.VerificationsForChange(change),
+                          ['stumpy-pre-cq'])
+
+  def testMultiVerificationsForChangeFromCommitMessage(self):
+    change = MockPatch(commit_message="""First line.
+
+Third line.
+pre-cq-configs: stumpy-pre-cq
+pre-cq-configs: link-pre-cq
+""")
+    self.PatchObject(triage_lib, 'GetOptionForChange',
+                     return_value='lumpy-pre-cq')
+    self.assertItemsEqual(self.sync_stage.VerificationsForChange(change),
+                          ['stumpy-pre-cq', 'link-pre-cq'])
+
+  def _PrepareChangesWithPendingVerifications(self, verifications=None):
+    """Prepare changes and pending verifications for them.
+
+    This helper creates changes in the validation pool, each of which
+    require its own set of verifications.
+
+    Args:
+      verifications: A list of lists of configs. Each element in the
+                     outer list corresponds to a different CL. Defaults
+                     to [constants.PRE_CQ_DEFAULT_CONFIGS]
+
+    Returns:
+      A list of len(verifications) MockPatch instances.
+    """
+    verifications = verifications or [constants.PRE_CQ_DEFAULT_CONFIGS]
+    changes = [MockPatch(gerrit_number=n) for n in range(len(verifications))]
+    changes_to_verifications = {c: v for c, v in zip(changes, verifications)}
+
+    def VerificationsForChange(change):
+      return changes_to_verifications.get(change) or []
+
+    self.PatchObject(sync_stages.PreCQLauncherStage,
+                     'VerificationsForChange',
+                     side_effect=VerificationsForChange)
+    return changes
+
+
+  def _PrepareSubmittableChange(self):
+    # Create a pre-cq submittable change, let it be screened,
+    # and have the trybot mark it as verified.
+    change = self._PrepareChangesWithPendingVerifications()[0]
+    self.PatchObject(sync_stages.PreCQLauncherStage,
+                     'CanSubmitChangeInPreCQ',
+                     return_value=True)
+    change[0].approval_timestamp = 0
+    self.PerformSync(pre_cq_status=None, changes=[change],
+                     runs=2)
+
+    for config in constants.PRE_CQ_DEFAULT_CONFIGS:
+      build_id = self.fake_db.InsertBuild(
+          'builder name', constants.WATERFALL_TRYBOT, 2, config,
+          'bot hostname')
+      self.fake_db.InsertCLActions(
+          build_id,
+          [clactions.CLAction.FromGerritPatchAndAction(
+              change, constants.CL_ACTION_VERIFIED)])
+    return change
+
+  def testSubmitInPreCQ(self):
+    change = self._PrepareSubmittableChange()
+
+    # Change should be submitted by the pre-cq-launcher.
+    m = self.PatchObject(validation_pool.ValidationPool, 'SubmitChanges')
+    self.PerformSync(pre_cq_status=None, changes=[change], patch_objects=False)
+    m.assert_called_with(set([change]), reason=constants.STRATEGY_PRECQ_SUBMIT,
+                         check_tree_open=False)
+
+
+  def testSubmitUnableInPreCQ(self):
+    change = self._PrepareSubmittableChange()
+
+    # Change should throw a DependencyError when trying to create a transaction
+    e = cros_patch.DependencyError(change, cros_patch.PatchException(change))
+    self.PatchObject(validation_pool.PatchSeries, 'CreateTransaction',
+                     side_effect=e)
+    self.PerformSync(pre_cq_status=None, changes=[change], patch_objects=False)
+    # Change should be marked as pre-cq passed, rather than being submitted.
+    self.assertEqual(constants.CL_STATUS_PASSED, self._GetPreCQStatus(change))
+
+  def assertAllStatuses(self, changes, status):
+    """Verify that all configs for |changes| all have status |status|.
+
+    Args:
+      changes: List of changes.
+      status: Desired status value.
+    """
+    action_history = self.fake_db.GetActionsForChanges(changes)
+    progress_map = clactions.GetPreCQProgressMap(changes, action_history)
+    for change in changes:
+      for config in progress_map[change]:
+        self.assertEqual(progress_map[change][config][0], status)
+
+  def testNewPatches(self):
+    # Create a change that is ready to be tested.
+    change = self._PrepareChangesWithPendingVerifications()[0]
+    change.approval_timestamp = 0
+
+    # Change should be launched now.
+    self.PerformSync(pre_cq_status=None, changes=[change], runs=2)
+    self.assertAllStatuses([change], constants.CL_PRECQ_CONFIG_STATUS_LAUNCHED)
+
+  def testLaunchPerCycleLimit(self):
+    # Create 4x as many changes as we can launch in one cycle.
+    change_count = (
+        sync_stages.PreCQLauncherStage.MAX_LAUNCHES_PER_CYCLE_DERIVATIVE * 4)
+    changes = self._PrepareChangesWithPendingVerifications(
+        [['lumpy-pre-cq']] * change_count)
+    for c in changes:
+      c.approval_timestamp = 0
+
+    def count_launches():
+      action_history = self.fake_db.GetActionsForChanges(changes)
+      return len(
+          [a for a in action_history
+           if a.action == constants.CL_ACTION_TRYBOT_LAUNCHING])
+
+    # After one cycle of the launcher, exactly MAX_LAUNCHES_PER_CYCLE_DERIVATIVE
+    # should have launched.
+    self.PerformSync(pre_cq_status=None, changes=changes, runs=1)
+    self.assertEqual(
+        count_launches(),
+        sync_stages.PreCQLauncherStage.MAX_LAUNCHES_PER_CYCLE_DERIVATIVE)
+
+    # After the next cycle, exactly 3 * MAX_LAUNCHES_PER_CYCLE_DERIVATIVE should
+    # have launched in total.
+    self.PerformSync(pre_cq_status=None, changes=changes, runs=1,
+                     patch_objects=False)
+    self.assertEqual(
+        count_launches(),
+        3 * sync_stages.PreCQLauncherStage.MAX_LAUNCHES_PER_CYCLE_DERIVATIVE)
+
+  def testNoLaunchClosedTree(self):
+    self.PatchObject(tree_status, 'IsTreeOpen', return_value=False)
+
+    # Create a change that is ready to be tested.
+    change = self._PrepareChangesWithPendingVerifications()[0]
+    change.approval_timestamp = 0
+
+    # Change should still be pending.
+    self.PerformSync(pre_cq_status=None, changes=[change], runs=2)
+    self.assertAllStatuses([change], constants.CL_PRECQ_CONFIG_STATUS_PENDING)
+
+  def testDontTestSubmittedPatches(self):
+    # Create a change that has been submitted.
+    change = self._PrepareChangesWithPendingVerifications()[0]
+    change.approval_timestamp = 0
+    change.status = 'SUBMITTED'
+
+    # Change should not be touched by the Pre-CQ if it's submitted.
+    self.PerformSync(pre_cq_status=None, changes=[change], runs=1)
+    action_history = self.fake_db.GetActionsForChanges([change])
+    progress_map = clactions.GetPreCQProgressMap([change], action_history)
+    self.assertEqual(progress_map, {})
+
+  def testRetryInPreCQ(self):
+    # Create a change that is ready to be tested.
+    change = self._PrepareChangesWithPendingVerifications([['orange']])[0]
+    change.approval_timestamp = 0
+
+    # Change should be launched now.
+    self.PerformSync(pre_cq_status=None, changes=[change], runs=2)
+    self.assertAllStatuses([change], constants.CL_PRECQ_CONFIG_STATUS_LAUNCHED)
+
+    # Fake all these tryjobs starting
+    build_ids = self._FakeLaunchTryjobs([change])
+
+    # After 1 more Sync all configs should now be inflight.
+    self.PerformSync(pre_cq_status=None, changes=[change], patch_objects=False)
+    self.assertAllStatuses([change], constants.CL_PRECQ_CONFIG_STATUS_INFLIGHT)
+
+    # Pretend that the build failed with an infrastructure failure so the change
+    # should be retried.
+    self.fake_db.InsertCLActions(
+        build_ids['orange'],
+        [clactions.CLAction.FromGerritPatchAndAction(
+            change, constants.CL_ACTION_FORGIVEN)])
+
+    # Change should relaunch again.
+    self.PerformSync(pre_cq_status=None, changes=[change], runs=1,
+                     patch_objects=False)
+    self.assertAllStatuses([change], constants.CL_PRECQ_CONFIG_STATUS_LAUNCHED)
+
+  def testPreCQ(self):
+    changes = self._PrepareChangesWithPendingVerifications(
+        [['orange', 'apple'], ['banana'], ['banana'], ['banana'], ['banana']])
+    # After 2 runs, the changes should be screened but not
+    # yet launched (due to pre-launch timeout).
+    for c in changes:
+      c.approval_timestamp = time.time()
+
+    # Mark a change as trybot ready, but not approved. It should also be tried
+    # by the pre-cq.
+    for change in changes[2:5]:
+      change.flags = {'TRY': '1'}
+      change.IsMergeable = lambda: False
+
+    self.PerformSync(pre_cq_status=None, changes=changes, runs=2)
+
+    self.assertAllStatuses(changes, constants.CL_PRECQ_CONFIG_STATUS_PENDING)
+
+    action_history = self.fake_db.GetActionsForChanges(changes)
+    progress_map = clactions.GetPreCQProgressMap(changes, action_history)
+    self.assertEqual(2, len(progress_map[changes[0]]))
+    for change in changes[1:]:
+      self.assertEqual(1, len(progress_map[change]))
+
+    # Fake that launch delay has expired by changing change approval times.
+    for c in changes:
+      c.approval_timestamp = 0
+
+    # After 1 more Sync all configs for all changes should be launched.
+    self.PerformSync(pre_cq_status=None, changes=changes, patch_objects=False)
+
+    self.assertAllStatuses(changes, constants.CL_PRECQ_CONFIG_STATUS_LAUNCHED)
+
+    # Fake all these tryjobs starting
+    build_ids = self._FakeLaunchTryjobs(changes)
+
+    # After 1 more Sync all configs should now be inflight.
+    self.PerformSync(pre_cq_status=None, changes=changes, patch_objects=False)
+    self.assertAllStatuses(changes, constants.CL_PRECQ_CONFIG_STATUS_INFLIGHT)
+
+    # Fake INFLIGHT_TIMEOUT+1 passing with banana and orange config succeeding,
+    # and apple never launching. The first change should fail the pre-cq due to
+    # inflight timeout; the verified banana-only changes should pass.
+    fake_time = datetime.datetime.now() + datetime.timedelta(
+        minutes=sync_stages.PreCQLauncherStage.INFLIGHT_TIMEOUT + 1)
+    self.fake_db.SetTime(fake_time)
+    self.fake_db.InsertCLActions(
+        build_ids['orange'],
+        [clactions.CLAction.FromGerritPatchAndAction(
+            changes[0], constants.CL_ACTION_VERIFIED)])
+    for change in changes[1:3]:
+      self.fake_db.InsertCLActions(
+          build_ids['banana'],
+          [clactions.CLAction.FromGerritPatchAndAction(
+              change, constants.CL_ACTION_VERIFIED)])
+
+    self.PerformSync(pre_cq_status=None, changes=changes, patch_objects=False)
+
+    self.assertEqual(self._GetPreCQStatus(changes[0]),
+                     constants.CL_STATUS_FAILED)
+    self.assertEqual(self._GetPreCQStatus(changes[1]),
+                     constants.CL_STATUS_PASSED)
+    self.assertEqual(self._GetPreCQStatus(changes[2]),
+                     constants.CL_STATUS_FULLY_VERIFIED)
+    for change in changes[3:5]:
+      self.assertEqual(self._GetPreCQStatus(change),
+                       constants.CL_STATUS_FAILED)
+
+    # Failed CLs that are marked ready should be tried again, and changes that
+    # aren't ready shouldn't be launched.
+    changes[4].flags = {'CRVW': '2'}
+    changes[4].HasReadyFlag = lambda: False
+    self.PerformSync(pre_cq_status=None, changes=changes, patch_objects=False,
+                     runs=3)
+    action_history = self.fake_db.GetActionsForChanges(changes)
+    progress_map = clactions.GetPreCQProgressMap(changes, action_history)
+    self.assertEqual(progress_map[changes[0]]['apple'][0],
+                     constants.CL_PRECQ_CONFIG_STATUS_LAUNCHED)
+    self.assertEqual(progress_map[changes[1]]['banana'][0],
+                     constants.CL_PRECQ_CONFIG_STATUS_VERIFIED)
+    self.assertEqual(progress_map[changes[2]]['banana'][0],
+                     constants.CL_PRECQ_CONFIG_STATUS_VERIFIED)
+    self.assertEqual(progress_map[changes[3]]['banana'][0],
+                     constants.CL_PRECQ_CONFIG_STATUS_LAUNCHED)
+    self.assertEqual(progress_map[changes[4]]['banana'][0],
+                     constants.CL_PRECQ_CONFIG_STATUS_FAILED)
+
+    # These actions should only be recorded at most once for every
+    # patch. We did not upload any new patch for changes, so there
+    # should not be duplicated actions.
+    unique_actions = (constants.CL_ACTION_PRE_CQ_FULLY_VERIFIED,
+                      constants.CL_ACTION_PRE_CQ_READY_TO_SUBMIT,
+                      constants.CL_ACTION_PRE_CQ_PASSED)
+    for change in changes:
+      actions = self.fake_db.GetActionsForChanges([change])
+      for action_type in unique_actions:
+        self.assertTrue(
+            len([x for x in actions if x.action == action_type]) <= 1)
+
+    # Fake a long time elapsing, see that passed or fully verified changes
+    # (changes 1 and 2 in this test) get status expired back to None.
+    fake_time = self.fake_db.GetTime() + datetime.timedelta(
+        minutes=sync_stages.PreCQLauncherStage.STATUS_EXPIRY_TIMEOUT + 1)
+    self.fake_db.SetTime(fake_time)
+    self.PerformSync(pre_cq_status=None, changes=changes, patch_objects=False)
+    for c in changes[1:2]:
+      self.assertEqual(self._GetPreCQStatus(c), None)
+
+  def testSpeculativePreCQ(self):
+    changes = self._PrepareChangesWithPendingVerifications(
+        [constants.PRE_CQ_DEFAULT_CONFIGS] * 2)
+
+    # Turn our changes into speculative PreCQ candidates.
+    for change in changes:
+      change.flags.pop('COMR')
+      change.IsMergeable = lambda: False
+      change.HasReadyFlag = lambda: False
+
+    # Fake that launch delay has expired by changing change approval times.
+    for change in changes:
+      change.approval_timestamp = 0
+
+    # This should cause the changes to be pending.
+    self.PerformSync(pre_cq_status=None, changes=changes)
+
+    self.assertAllStatuses(changes, constants.CL_PRECQ_CONFIG_STATUS_PENDING)
+
+    # This should move the change from pending -> launched.
+    self.PerformSync(pre_cq_status=None, changes=changes, patch_objects=False)
+
+    self.assertAllStatuses(changes, constants.CL_PRECQ_CONFIG_STATUS_LAUNCHED)
+
+    # Make sure every speculative change is marked that way.
+    for change in changes:
+      actions = [a.action for a in self.fake_db.GetActionsForChanges([change])]
+      self.assertIn(constants.CL_ACTION_SPECULATIVE, actions)
+
+    # Fake all these tryjobs starting.
+    build_ids = self._FakeLaunchTryjobs(changes)
+
+    # After 1 more Sync all configs should now be inflight.
+    self.PerformSync(pre_cq_status=None, changes=changes, patch_objects=False)
+    self.assertAllStatuses(changes, constants.CL_PRECQ_CONFIG_STATUS_INFLIGHT)
+
+    # Verify that we mark the change as inflight.
+    self.assertEqual(self._GetPreCQStatus(changes[0]),
+                     constants.CL_STATUS_INFLIGHT)
+
+    # Fake CL 0 being verified by all configs.
+    for config in constants.PRE_CQ_DEFAULT_CONFIGS:
+      self.fake_db.InsertCLActions(
+          build_ids[config],
+          [clactions.CLAction.FromGerritPatchAndAction(
+              changes[0], constants.CL_ACTION_VERIFIED)])
+
+    # Fake CL 1 being rejected and failed by all configs except the first.
+    for config in constants.PRE_CQ_DEFAULT_CONFIGS[1:]:
+      self.fake_db.InsertCLActions(
+          build_ids[config],
+          [clactions.CLAction.FromGerritPatchAndAction(
+              changes[1], constants.CL_ACTION_KICKED_OUT)])
+      self.fake_db.InsertCLActions(
+          build_ids[config],
+          [clactions.CLAction.FromGerritPatchAndAction(
+              changes[1], constants.CL_ACTION_PRE_CQ_FAILED)])
+
+
+    self.PerformSync(pre_cq_status=None, changes=changes, patch_objects=False)
+
+    # Verify that we mark CL 0 as fully verified (not passed).
+    self.assertEqual(self._GetPreCQStatus(changes[0]),
+                     constants.CL_STATUS_FULLY_VERIFIED)
+    # Verify that CL 1 has status failed.
+    self.assertEqual(self._GetPreCQStatus(changes[1]),
+                     constants.CL_STATUS_FAILED)
+
+    # Mark our changes as ready, and see if they are immediately passed.
+    for change in changes:
+      change.flags['COMR'] = '1'
+      change.IsMergeable = lambda: True
+      change.HasReadyFlag = lambda: True
+
+    self.PerformSync(pre_cq_status=None, changes=changes, patch_objects=False)
+
+    self.assertEqual(self._GetPreCQStatus(changes[0]),
+                     constants.CL_STATUS_PASSED)
+
+  def _FakeLaunchTryjobs(self, changes):
+    """Pretend to start all launched tryjobs."""
+    action_history = self.fake_db.GetActionsForChanges(changes)
+    progress_map = clactions.GetPreCQProgressMap(changes, action_history)
+    build_ids_per_config = {}
+    for change, change_status_dict in progress_map.iteritems():
+      for config, (status, _, _) in change_status_dict.iteritems():
+        if status == constants.CL_PRECQ_CONFIG_STATUS_LAUNCHED:
+          if not config in build_ids_per_config:
+            build_ids_per_config[config] = self.fake_db.InsertBuild(
+                config, constants.WATERFALL_TRYBOT, 1, config, config)
+          self.fake_db.InsertCLActions(
+              build_ids_per_config[config],
+              [clactions.CLAction.FromGerritPatchAndAction(
+                  change, constants.CL_ACTION_PICKED_UP)])
+    return build_ids_per_config
+
+  def testCommitNonManifestChange(self):
+    """See MasterCQSyncTestCase"""
+    self._testCommitNonManifestChange()
+
+  def testFailedCommitOfNonManifestChange(self):
+    """See MasterCQSyncTestCase"""
+    self._testFailedCommitOfNonManifestChange()
+
+  def testCommitManifestChange(self):
+    """See MasterCQSyncTestCase"""
+    self._testCommitManifestChange()
+
+  def testDefaultSync(self):
+    """See MasterCQSyncTestCase"""
+    self._testDefaultSync()
+
+  def testTreeClosureIsOK(self):
+    """Test that tree closures block commits."""
+    self._testCommitNonManifestChange(tree_open=False)
+
+  def _GetPreCQStatus(self, change):
+    """Helper method to get pre-cq status of a CL from fake_db."""
+    action_history = self.fake_db.GetActionsForChanges([change])
+    return clactions.GetCLPreCQStatus(change, action_history)
+
+  def testRequeued(self):
+    """Test that a previously rejected patch gets marked as requeued."""
+    p = MockPatch()
+    previous_build_id = self.fake_db.InsertBuild(
+        'some name', constants.WATERFALL_TRYBOT, 1, 'some_config',
+        'some_hostname')
+    action = clactions.CLAction.FromGerritPatchAndAction(
+        p, constants.CL_ACTION_KICKED_OUT)
+    self.fake_db.InsertCLActions(previous_build_id, [action])
+
+    self.PerformSync(changes=[p])
+    actions_for_patch = self.fake_db.GetActionsForChanges([p])
+    requeued_actions = [a for a in actions_for_patch
+                        if a.action == constants.CL_ACTION_REQUEUED]
+    self.assertEqual(1, len(requeued_actions))
+
+
+class MasterSlaveLKGMSyncTest(generic_stages_unittest.StageTestCase):
+  """Unit tests for MasterSlaveLKGMSyncStage"""
+
+  BOT_ID = constants.PFQ_MASTER
+
+  def setUp(self):
+    """Setup"""
+    self.source_repo = 'ssh://source/repo'
+    self.manifest_version_url = 'fake manifest url'
+    self.branch = 'master'
+    self.build_name = 'master-chromium-pfq'
+    self.incr_type = 'branch'
+    self.next_version = 'next_version'
+    self.sync_stage = None
+
+    repo = repository.RepoRepository(
+        self.source_repo, self.tempdir, self.branch)
+    self.manager = lkgm_manager.LKGMManager(
+        source_repo=repo, manifest_repo=self.manifest_version_url,
+        build_names=[self.build_name],
+        build_type=constants.CHROME_PFQ_TYPE,
+        incr_type=self.incr_type,
+        force=False, branch=self.branch, dry_run=True)
+
+    # Create and set up a fake cidb instance.
+    self.fake_db = fake_cidb.FakeCIDBConnection()
+    cidb.CIDBConnectionFactory.SetupMockCidb(self.fake_db)
+
+    self._Prepare()
+
+  def _Prepare(self, bot_id=None, **kwargs):
+    super(MasterSlaveLKGMSyncTest, self)._Prepare(bot_id, **kwargs)
+
+    self._run.config['manifest_version'] = self.manifest_version_url
+    self.sync_stage = sync_stages.MasterSlaveLKGMSyncStage(self._run)
+    self.sync_stage.manifest_manager = self.manager
+    self._run.attrs.manifest_manager = self.manager
+
+  def testGetLastChromeOSVersion(self):
+    """Test GetLastChromeOSVersion"""
+    id1 = self.fake_db.InsertBuild(
+        builder_name='test_builder',
+        waterfall=constants.WATERFALL_TRYBOT,
+        build_number=666,
+        build_config='master-chromium-pfq',
+        bot_hostname='test_hostname')
+    id2 = self.fake_db.InsertBuild(
+        builder_name='test_builder',
+        waterfall=constants.WATERFALL_TRYBOT,
+        build_number=667,
+        build_config='master-chromium-pfq',
+        bot_hostname='test_hostname')
+    metadata_1 = metadata_lib.CBuildbotMetadata()
+    metadata_1.UpdateWithDict(
+        {'version': {'full': 'R42-7140.0.0-rc1'}})
+    metadata_2 = metadata_lib.CBuildbotMetadata()
+    metadata_2.UpdateWithDict(
+        {'version': {'full': 'R43-7141.0.0-rc1'}})
+    self._run.attrs.metadata.UpdateWithDict(
+        {'version': {'full': 'R44-7142.0.0-rc1'}})
+    self.fake_db.UpdateMetadata(id1 + 1, metadata_1)
+    self.fake_db.UpdateMetadata(id2 + 1, metadata_2)
+    v = self.sync_stage.GetLastChromeOSVersion()
+    self.assertEqual(v.milestone, '43')
+    self.assertEqual(v.platform, '7141.0.0-rc1')
diff --git a/cbuildbot/stages/test_stages.py b/cbuildbot/stages/test_stages.py
new file mode 100644
index 0000000..0ee4ad8
--- /dev/null
+++ b/cbuildbot/stages/test_stages.py
@@ -0,0 +1,452 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing the test stages."""
+
+from __future__ import print_function
+
+import collections
+import os
+
+from chromite.cbuildbot import afdo
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import validation_pool
+from chromite.cbuildbot.stages import generic_stages
+from chromite.lib import cgroups
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import image_test_lib
+from chromite.lib import osutils
+from chromite.lib import perf_uploader
+from chromite.lib import portage_util
+from chromite.lib import timeout_util
+
+
+_VM_TEST_ERROR_MSG = """
+!!!VMTests failed!!!
+
+Logs are uploaded in the corresponding %(vm_test_results)s. This can be found
+by clicking on the artifacts link in the "Report" Stage. Specifically look
+for the test_harness/failed for the failing tests. For more
+particulars, please refer to which test failed i.e. above see the
+individual test that failed -- or if an update failed, check the
+corresponding update directory.
+"""
+PRE_CQ = validation_pool.PRE_CQ
+
+
+class UnitTestStage(generic_stages.BoardSpecificBuilderStage):
+  """Run unit tests."""
+
+  option_name = 'tests'
+  config_name = 'unittests'
+
+  # If the unit tests take longer than 70 minutes, abort. They usually take
+  # ten minutes to run.
+  #
+  # If the processes hang, parallel_emerge will print a status report after 60
+  # minutes, so we picked 70 minutes because it gives us a little buffer time.
+  UNIT_TEST_TIMEOUT = 70 * 60
+
+  def PerformStage(self):
+    extra_env = {}
+    if self._run.config.useflags:
+      extra_env['USE'] = ' '.join(self._run.config.useflags)
+    with timeout_util.Timeout(self.UNIT_TEST_TIMEOUT):
+      commands.RunUnitTests(self._build_root,
+                            self._current_board,
+                            blacklist=self._run.config.unittest_blacklist,
+                            extra_env=extra_env)
+
+    if os.path.exists(os.path.join(self.GetImageDirSymlink(),
+                                   'au-generator.zip')):
+      commands.TestAuZip(self._build_root,
+                         self.GetImageDirSymlink())
+
+
+class VMTestStage(generic_stages.BoardSpecificBuilderStage,
+                  generic_stages.ArchivingStageMixin):
+  """Run autotests in a virtual machine."""
+
+  option_name = 'tests'
+  config_name = 'vm_tests'
+
+  VM_TEST_TIMEOUT = 60 * 60
+  # Check if the GCS target is available every 15 seconds.
+  CHECK_GCS_PERIOD = 15
+  CHECK_GCS_TIMEOUT = VM_TEST_TIMEOUT
+
+  def _PrintFailedTests(self, results_path, test_basename):
+    """Print links to failed tests.
+
+    Args:
+      results_path: Path to directory containing the test results.
+      test_basename: The basename that the tests are archived to.
+    """
+    test_list = commands.ListFailedTests(results_path)
+    for test_name, path in test_list:
+      self.PrintDownloadLink(
+          os.path.join(test_basename, path), text_to_display=test_name)
+
+  def _NoTestResults(self, path):
+    """Returns True if |path| is not a directory or is an empty directory."""
+    return not os.path.isdir(path) or not os.listdir(path)
+
+  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
+  def _ArchiveTestResults(self, test_results_dir, test_basename):
+    """Archives test results to Google Storage.
+
+    Args:
+      test_results_dir: Name of the directory containing the test results.
+      test_basename: The basename to archive the tests.
+    """
+    results_path = commands.GetTestResultsDir(
+        self._build_root, test_results_dir)
+
+    # Skip archiving if results_path does not exist or is an empty directory.
+    if self._NoTestResults(results_path):
+      return
+
+    archived_results_dir = os.path.join(self.archive_path, test_basename)
+    # Copy relevant files to archived_results_dir.
+    commands.ArchiveTestResults(results_path, archived_results_dir)
+    upload_paths = [os.path.basename(archived_results_dir)]
+    # Create the compressed tarball to upload.
+    # TODO: We should revisit whether uploading the tarball is necessary.
+    test_tarball = commands.BuildAndArchiveTestResultsTarball(
+        archived_results_dir, self._build_root)
+    upload_paths.append(test_tarball)
+
+    got_symbols = self.GetParallel('breakpad_symbols_generated',
+                                   pretty_name='breakpad symbols')
+    upload_paths += commands.GenerateStackTraces(
+        self._build_root, self._current_board, test_results_dir,
+        self.archive_path, got_symbols)
+
+    self._Upload(upload_paths)
+    self._PrintFailedTests(results_path, test_basename)
+
+    # Remove the test results directory.
+    osutils.RmDir(results_path, ignore_missing=True, sudo=True)
+
+  @failures_lib.SetFailureType(failures_lib.InfrastructureFailure)
+  def _ArchiveVMFiles(self, test_results_dir):
+    vm_files = commands.ArchiveVMFiles(
+        self._build_root, os.path.join(test_results_dir, 'test_harness'),
+        self.archive_path)
+    # We use paths relative to |self.archive_path|, for prettier
+    # formatting on the web page.
+    self._Upload([os.path.basename(image) for image in vm_files])
+
+  def _Upload(self, filenames):
+    logging.info('Uploading artifacts to Google Storage...')
+    with self.ArtifactUploader(archive=False, strict=False) as queue:
+      for filename in filenames:
+        queue.put([filename])
+        if filename.endswith('.dmp.txt'):
+          prefix = 'crash: '
+        elif constants.VM_DISK_PREFIX in os.path.basename(filename):
+          prefix = 'vm_disk: '
+        elif constants.VM_MEM_PREFIX in os.path.basename(filename):
+          prefix = 'vm_memory: '
+        else:
+          prefix = ''
+        self.PrintDownloadLink(filename, prefix)
+
+  def _WaitForGceTarball(self, image_path):
+    """Waits until GCE tarball is available."""
+    gce_tar_generated = self.GetParallel('gce_tarball_generated')
+    if not gce_tar_generated:
+      return
+    # Still need to check its availability as artifacts are uploaded in the
+    # background.
+    gs_ctx = gs.GSContext()
+    logging.info('Waiting for GCE tarball to be uploaded at %s.' % image_path)
+    gs_ctx.WaitForGsPaths([image_path], self.CHECK_GCS_TIMEOUT,
+                          self.CHECK_GCS_PERIOD)
+
+  def _RunTest(self, test_type, test_results_dir):
+    """Run a VM test.
+
+    Args:
+      test_type: Any test in constants.VALID_VM_TEST_TYPES
+      test_results_dir: The base directory to store the results.
+    """
+    if test_type == constants.CROS_VM_TEST_TYPE:
+      commands.RunCrosVMTest(self._current_board, self.GetImageDirSymlink())
+    elif test_type == constants.DEV_MODE_TEST_TYPE:
+      commands.RunDevModeTest(
+          self._build_root, self._current_board, self.GetImageDirSymlink())
+    else:
+      if test_type == constants.GCE_VM_TEST_TYPE:
+        # If tests are to run on GCE, use the uploaded tar ball.
+        image_path = ('%s/%s' % (self.download_url.rstrip('/'),
+                                 constants.TEST_IMAGE_GCE_TAR))
+
+        self._WaitForGceTarball(image_path)
+      else:
+        image_path = os.path.join(self.GetImageDirSymlink(),
+                                  constants.TEST_IMAGE_BIN)
+      ssh_private_key = os.path.join(self.GetImageDirSymlink(),
+                                     constants.TEST_KEY_PRIVATE)
+      if not os.path.exists(ssh_private_key):
+        # TODO: Disallow usage of default test key completely.
+        logging.warning('Test key was not found in the image directory. '
+                        'Default key will be used.')
+        ssh_private_key = None
+
+      commands.RunTestSuite(self._build_root,
+                            self._current_board,
+                            image_path,
+                            os.path.join(test_results_dir,
+                                         'test_harness'),
+                            test_type=test_type,
+                            whitelist_chrome_crashes=self._chrome_rev is None,
+                            archive_dir=self.bot_archive_root,
+                            ssh_private_key=ssh_private_key)
+
+  def PerformStage(self):
+    # These directories are used later to archive test artifacts.
+    test_results_dir = commands.CreateTestRoot(self._build_root)
+    test_basename = constants.VM_TEST_RESULTS % dict(attempt=self._attempt)
+    try:
+      for test_type in self._run.config.vm_tests:
+        logging.info('Running VM test %s.', test_type)
+        with cgroups.SimpleContainChildren('VMTest'):
+          with timeout_util.Timeout(self.VM_TEST_TIMEOUT):
+            self._RunTest(test_type, test_results_dir)
+
+    except Exception:
+      logging.error(_VM_TEST_ERROR_MSG % dict(vm_test_results=test_basename))
+      self._ArchiveVMFiles(test_results_dir)
+      raise
+    finally:
+      self._ArchiveTestResults(test_results_dir, test_basename)
+
+
+class HWTestStage(generic_stages.BoardSpecificBuilderStage,
+                  generic_stages.ArchivingStageMixin):
+  """Stage that runs tests in the Autotest lab."""
+
+  option_name = 'tests'
+  config_name = 'hw_tests'
+
+  PERF_RESULTS_EXTENSION = 'results'
+
+  def __init__(self, builder_run, board, suite_config, suffix=None, **kwargs):
+    suffix = self.UpdateSuffix(suite_config.suite, suffix)
+    super(HWTestStage, self).__init__(builder_run, board,
+                                      suffix=suffix,
+                                      **kwargs)
+    if not self._run.IsToTBuild():
+      suite_config.SetBranchedValues()
+
+    self.suite_config = suite_config
+    self.wait_for_results = True
+
+  # Disable complaint about calling _HandleStageException.
+  # pylint: disable=W0212
+  def _HandleStageException(self, exc_info):
+    """Override and don't set status to FAIL but FORGIVEN instead."""
+    exc_type = exc_info[0]
+
+    # If the suite config says HW Tests can only warn, only warn.
+    if self.suite_config.warn_only:
+      return self._HandleExceptionAsWarning(exc_info)
+
+    if self.suite_config.critical:
+      return super(HWTestStage, self)._HandleStageException(exc_info)
+
+    if issubclass(exc_type, failures_lib.TestWarning):
+      # HWTest passed with warning. All builders should pass.
+      logging.warning('HWTest passed with warning code.')
+      return self._HandleExceptionAsWarning(exc_info)
+    elif issubclass(exc_type, failures_lib.BoardNotAvailable):
+      # Some boards may not have been setup in the lab yet for
+      # non-code-checkin configs.
+      if not config_lib.IsPFQType(self._run.config.build_type):
+        logging.warning('HWTest did not run because the board was not '
+                        'available in the lab yet')
+        return self._HandleExceptionAsWarning(exc_info)
+
+    return super(HWTestStage, self)._HandleStageException(exc_info)
+
+  def PerformStage(self):
+    # Wait for UploadHWTestArtifacts to generate the payloads.
+    if not self.GetParallel('payloads_generated', pretty_name='payloads'):
+      logging.PrintBuildbotStepWarnings('missing payloads')
+      logging.warning('Cannot run HWTest because UploadTestArtifacts failed. '
+                      'See UploadTestArtifacts for details.')
+      return
+
+    if self.suite_config.suite == constants.HWTEST_AFDO_SUITE:
+      arch = self._GetPortageEnvVar('ARCH', self._current_board)
+      cpv = portage_util.BestVisible(constants.CHROME_CP,
+                                     buildroot=self._build_root)
+      if afdo.CheckAFDOPerfData(cpv, arch, gs.GSContext()):
+        logging.info('AFDO profile already generated for arch %s '
+                     'and Chrome %s. Not generating it again',
+                     arch, cpv.version_no_rev.split('_')[0])
+        return
+
+    build = '/'.join([self._bot_id, self.version])
+    if (self._run.options.remote_trybot and (self._run.options.hwtest or
+                                             self._run.config.pre_cq)):
+      debug = self._run.options.debug_forced
+    else:
+      debug = self._run.options.debug
+
+    # Get the subsystems set for the board to test
+    per_board_dict = self._run.attrs.metadata.GetDict()['board-metadata']
+    current_board_dict = per_board_dict.get(self._current_board)
+    if current_board_dict:
+      subsystems = set(current_board_dict.get('subsystems_to_test', []))
+    else:
+      subsystems = None
+
+    commands.RunHWTestSuite(
+        build, self.suite_config.suite, self._current_board,
+        pool=self.suite_config.pool, num=self.suite_config.num,
+        file_bugs=self.suite_config.file_bugs,
+        wait_for_results=self.wait_for_results,
+        priority=self.suite_config.priority,
+        timeout_mins=self.suite_config.timeout_mins,
+        retry=self.suite_config.retry,
+        max_retries=self.suite_config.max_retries,
+        minimum_duts=self.suite_config.minimum_duts,
+        suite_min_duts=self.suite_config.suite_min_duts,
+        offload_failures_only=self.suite_config.offload_failures_only,
+        debug=debug, subsystems=subsystems)
+
+
+class AUTestStage(HWTestStage):
+  """Stage for au hw test suites that requires special pre-processing."""
+
+  def PerformStage(self):
+    """Wait for payloads to be staged and uploads its au control files."""
+    # Wait for UploadHWTestArtifacts to generate the payloads.
+    if not self.GetParallel('delta_payloads_generated',
+                            pretty_name='delta payloads'):
+      logging.PrintBuildbotStepWarnings('missing delta payloads')
+      logging.warning('Cannot run HWTest because UploadTestArtifacts failed. '
+                      'See UploadTestArtifacts for details.')
+      return
+
+    with osutils.TempDir() as tempdir:
+      tarball = commands.BuildAUTestTarball(
+          self._build_root, self._current_board, tempdir,
+          self.version, self.upload_url)
+      self.UploadArtifact(tarball)
+
+    super(AUTestStage, self).PerformStage()
+
+
+class ASyncHWTestStage(HWTestStage, generic_stages.ForgivingBuilderStage):
+  """Stage that fires and forgets hw test suites to the Autotest lab."""
+
+  def __init__(self, *args, **kwargs):
+    super(ASyncHWTestStage, self).__init__(*args, **kwargs)
+    self.wait_for_results = False
+
+
+class ImageTestStage(generic_stages.BoardSpecificBuilderStage,
+                     generic_stages.ArchivingStageMixin):
+  """Stage that launches tests on the produced disk image."""
+
+  option_name = 'image_test'
+  config_name = 'image_test'
+
+  # Give the tests 60 minutes to run. Image tests should be really quick but
+  # the umount/rmdir bug (see osutils.UmountDir) may take a long time.
+  IMAGE_TEST_TIMEOUT = 60 * 60
+
+  def __init__(self, *args, **kwargs):
+    super(ImageTestStage, self).__init__(*args, **kwargs)
+
+  def PerformStage(self):
+    test_results_dir = commands.CreateTestRoot(self._build_root)
+    # CreateTestRoot returns a temp directory inside chroot.
+    # We bring that back out to the build root.
+    test_results_dir = os.path.join(self._build_root, test_results_dir[1:])
+    test_results_dir = os.path.join(test_results_dir, 'image_test_results')
+    osutils.SafeMakedirs(test_results_dir)
+    try:
+      with timeout_util.Timeout(self.IMAGE_TEST_TIMEOUT):
+        commands.RunTestImage(
+            self._build_root,
+            self._current_board,
+            self.GetImageDirSymlink(),
+            test_results_dir,
+        )
+    finally:
+      self.SendPerfValues(test_results_dir)
+
+  def SendPerfValues(self, test_results_dir):
+    """Gather all perf values in |test_results_dir| and send them to chromeperf.
+
+    The uploading will be retried 3 times for each file.
+
+    Args:
+      test_results_dir: A path to the directory with perf files.
+    """
+    # A dict of list of perf values, keyed by test name.
+    perf_entries = collections.defaultdict(list)
+    for root, _, filenames in os.walk(test_results_dir):
+      for relative_name in filenames:
+        if not image_test_lib.IsPerfFile(relative_name):
+          continue
+        full_name = os.path.join(root, relative_name)
+        entries = perf_uploader.LoadPerfValues(full_name)
+        test_name = image_test_lib.ImageTestCase.GetTestName(relative_name)
+        perf_entries[test_name].extend(entries)
+
+    platform_name = self._run.bot_id
+    try:
+      cros_ver = self._run.GetVersionInfo().VersionString()
+    except cbuildbot_run.VersionNotSetError:
+      logging.error('Could not obtain version info. '
+                    'Failed to upload perf results.')
+      return
+
+    chrome_ver = self._run.DetermineChromeVersion()
+    for test_name, perf_values in perf_entries.iteritems():
+      try:
+        perf_uploader.UploadPerfValues(perf_values, platform_name, test_name,
+                                       cros_version=cros_ver,
+                                       chrome_version=chrome_ver)
+      except Exception:
+        logging.exception('Failed to upload perf result for test %s.',
+                          test_name)
+
+
+class BinhostTestStage(generic_stages.BuilderStage):
+  """Stage that verifies Chrome prebuilts."""
+
+  config_name = 'binhost_test'
+
+  def PerformStage(self):
+    # Verify our binhosts.
+    # Don't check for incremental compatibility when we uprev chrome.
+    incremental = not (self._run.config.chrome_rev or
+                       self._run.options.chrome_rev)
+    commands.RunBinhostTest(self._build_root, incremental=incremental)
+
+
+class BranchUtilTestStage(generic_stages.BuilderStage):
+  """Stage that verifies branching works on the latest manifest version."""
+
+  config_name = 'branch_util_test'
+
+  def PerformStage(self):
+    assert (hasattr(self._run.attrs, 'manifest_manager') and
+            self._run.attrs.manifest_manager is not None), \
+        'Must run ManifestVersionedSyncStage before this stage.'
+    manifest_manager = self._run.attrs.manifest_manager
+    commands.RunBranchUtilTest(
+        self._build_root,
+        manifest_manager.GetCurrentVersionInfo().VersionString())
diff --git a/cbuildbot/stages/test_stages_unittest b/cbuildbot/stages/test_stages_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cbuildbot/stages/test_stages_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/stages/test_stages_unittest.py b/cbuildbot/stages/test_stages_unittest.py
new file mode 100644
index 0000000..02fea21
--- /dev/null
+++ b/cbuildbot/stages/test_stages_unittest.py
@@ -0,0 +1,492 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for test stages."""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from chromite.cbuildbot import cbuildbot_unittest
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import swarming_lib
+from chromite.cbuildbot import topology
+from chromite.cbuildbot.stages import artifact_stages
+from chromite.cbuildbot.stages import generic_stages
+from chromite.cbuildbot.stages import generic_stages_unittest
+from chromite.cbuildbot.stages import test_stages
+from chromite.lib import cgroups
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import path_util
+from chromite.lib import timeout_util
+
+
+# pylint: disable=too-many-ancestors
+
+
+class VMTestStageTest(generic_stages_unittest.AbstractStageTestCase,
+                      cbuildbot_unittest.SimpleBuilderTestCase):
+  """Tests for the VMTest stage."""
+
+  BOT_ID = 'x86-generic-full'
+  RELEASE_TAG = ''
+
+  def setUp(self):
+    for cmd in ('RunTestSuite', 'CreateTestRoot', 'GenerateStackTraces',
+                'ArchiveFile', 'ArchiveTestResults', 'ArchiveVMFiles',
+                'UploadArchivedFile', 'RunDevModeTest', 'RunCrosVMTest',
+                'ListFailedTests', 'GetTestResultsDir',
+                'BuildAndArchiveTestResultsTarball'):
+      self.PatchObject(commands, cmd, autospec=True)
+    self.PatchObject(test_stages.VMTestStage, '_NoTestResults',
+                     autospec=True, return_value=False)
+    self.PatchObject(osutils, 'RmDir', autospec=True)
+    self.PatchObject(cgroups, 'SimpleContainChildren', autospec=True)
+    self._Prepare()
+
+    # Simulate breakpad symbols being ready.
+    board_runattrs = self._run.GetBoardRunAttrs(self._current_board)
+    board_runattrs.SetParallel('breakpad_symbols_generated', True)
+
+  def ConstructStage(self):
+    # pylint: disable=W0212
+    self._run.GetArchive().SetupArchivePath()
+    stage = test_stages.VMTestStage(self._run, self._current_board)
+    image_dir = stage.GetImageDirSymlink()
+    osutils.Touch(os.path.join(image_dir, constants.TEST_KEY_PRIVATE),
+                  makedirs=True)
+    return stage
+
+  def testFullTests(self):
+    """Tests if full unit and cros_au_test_harness tests are run correctly."""
+    self._run.config['vm_tests'] = [constants.FULL_AU_TEST_TYPE]
+    self.RunStage()
+
+  def testQuickTests(self):
+    """Tests if quick unit and cros_au_test_harness tests are run correctly."""
+    self._run.config['vm_tests'] = [constants.SIMPLE_AU_TEST_TYPE]
+    self.RunStage()
+
+  def testGceTests(self):
+    """Tests if GCE_VM_TEST_TYPE tests are run on GCE."""
+    self._run.config['vm_tests'] = [constants.GCE_VM_TEST_TYPE]
+    gce_path = constants.TEST_IMAGE_GCE_TAR
+    board_runattrs = self._run.GetBoardRunAttrs(self._current_board)
+
+    # pylint: disable=unused-argument
+    def _MockRunTestSuite(buildroot, board, image_path, results_dir, test_type,
+                          *args, **kwargs):
+      self.assertEndsWith(image_path, gce_path)
+      self.assertEqual(test_type, constants.GCE_VM_TEST_TYPE)
+    # pylint: enable=unused-argument
+
+    def _MockWaitForGsPaths(_, paths, *_args, **_kwargs):
+      self.assertEndsWith(paths[0], gce_path)
+
+    self.PatchObject(generic_stages.BoardSpecificBuilderStage, 'GetParallel',
+                     autospec=True)
+    self.PatchObject(gs.GSContext, 'WaitForGsPaths',
+                     side_effect=_MockWaitForGsPaths, autospec=True)
+    commands.RunTestSuite.side_effect = _MockRunTestSuite
+    board_runattrs.SetParallel('gce_tarball_generated', True)
+
+    self.RunStage()
+
+    generic_stages.BoardSpecificBuilderStage.GetParallel.assert_any_call(
+        mock.ANY, 'gce_tarball_generated')
+    self.assertTrue(gs.GSContext.WaitForGsPaths.called and
+                    gs.GSContext.WaitForGsPaths.call_count == 1)
+    self.assertTrue(commands.RunTestSuite.called and
+                    commands.RunTestSuite.call_count == 1)
+
+  def testFailedTest(self):
+    """Tests that the stage fails when a VM test raises an exception."""
+    self.PatchObject(test_stages.VMTestStage, '_RunTest',
+                     autospec=True, side_effect=Exception())
+    self.assertRaises(failures_lib.StepFailure, self.RunStage)
+
+  def testRaisesInfraFail(self):
+    """Tests that a infra failures has been raised."""
+    commands.BuildAndArchiveTestResultsTarball.side_effect = (
+        OSError('Cannot archive'))
+    stage = self.ConstructStage()
+    self.assertRaises(failures_lib.InfrastructureFailure, stage.PerformStage)
+
+
+class UnitTestStageTest(generic_stages_unittest.AbstractStageTestCase):
+  """Tests for the UnitTest stage."""
+
+  BOT_ID = 'x86-generic-full'
+
+  def setUp(self):
+    self.rununittests_mock = self.PatchObject(commands, 'RunUnitTests')
+    self.testauzip_mock = self.PatchObject(commands, 'TestAuZip')
+
+    self.image_dir = os.path.join(
+        self.build_root, 'src/build/images/x86-generic/latest-cbuildbot')
+
+    self._Prepare()
+
+  def ConstructStage(self):
+    return test_stages.UnitTestStage(self._run, self._current_board)
+
+  def testFullTests(self):
+    """Tests if full unit and cros_au_test_harness tests are run correctly."""
+    exists_mock = self.PatchObject(os.path, 'exists', return_value=True)
+
+    self.RunStage()
+    exists_mock.assert_called_once_with(
+        os.path.join(self.image_dir, 'au-generator.zip'))
+    self.rununittests_mock.assert_called_once_with(
+        self.build_root, self._current_board, blacklist=[], extra_env=mock.ANY)
+    self.testauzip_mock.assert_called_once_with(self.build_root, self.image_dir)
+
+
+class HWTestStageTest(generic_stages_unittest.AbstractStageTestCase,
+                      cbuildbot_unittest.SimpleBuilderTestCase):
+  """Tests for the HWTest stage."""
+
+  BOT_ID = 'x86-mario-release'
+  VERSION = 'R36-5760.0.0'
+  RELEASE_TAG = ''
+
+  def setUp(self):
+    self.run_suite_mock = self.PatchObject(commands, 'RunHWTestSuite')
+    self.warning_mock = self.PatchObject(
+        logging, 'PrintBuildbotStepWarnings')
+    self.failure_mock = self.PatchObject(
+        logging, 'PrintBuildbotStepFailure')
+
+    self.suite_config = None
+    self.suite = None
+    self.version = None
+
+    self._Prepare()
+
+  def _Prepare(self, bot_id=None, version=None, warn_only=False, **kwargs):
+    super(HWTestStageTest, self)._Prepare(bot_id, **kwargs)
+
+    self.version = version or self.VERSION
+    self._run.options.log_dir = '/b/cbuild/mylogdir'
+    self.suite_config = self.GetHWTestSuite()
+    self.suite_config.warn_only = warn_only
+    self.suite = self.suite_config.suite
+
+  def ConstructStage(self):
+    self._run.GetArchive().SetupArchivePath()
+    board_runattrs = self._run.GetBoardRunAttrs(self._current_board)
+    board_runattrs.SetParallelDefault('payloads_generated', True)
+    return test_stages.HWTestStage(
+        self._run, self._current_board, self.suite_config)
+
+  def _RunHWTestSuite(self, debug=False, fails=False, warns=False,
+                      cmd_fail_mode=None):
+    """Verify the stage behavior in various circumstances.
+
+    Args:
+      debug: Whether the HWTest suite should be run in debug mode.
+      fails: Whether the stage should fail.
+      warns: Whether the stage should warn.
+      cmd_fail_mode: How commands.RunHWTestSuite() should fail.
+        If None, don't fail.
+    """
+    # We choose to define these mocks in setUp() because they are
+    # useful for tests that do not call this method. However, this
+    # means we have to reset the mocks before each run.
+    self.run_suite_mock.reset_mock()
+    self.warning_mock.reset_mock()
+    self.failure_mock.reset_mock()
+
+    to_raise = None
+
+    if cmd_fail_mode == 'timeout':
+      to_raise = timeout_util.TimeoutError('Timed out')
+    elif cmd_fail_mode == 'suite_timeout':
+      to_raise = failures_lib.SuiteTimedOut('Suite timed out')
+    elif cmd_fail_mode == 'board_not_available':
+      to_raise = failures_lib.BoardNotAvailable('Board not available')
+    elif cmd_fail_mode == 'lab_fail':
+      to_raise = failures_lib.TestLabFailure('Test lab failure')
+    elif cmd_fail_mode == 'test_warn':
+      to_raise = failures_lib.TestWarning('Suite passed with warnings')
+    elif cmd_fail_mode == 'test_fail':
+      to_raise = failures_lib.TestFailure('HWTest failed.')
+    elif cmd_fail_mode is not None:
+      raise ValueError('cmd_fail_mode %s not supported' % cmd_fail_mode)
+
+    self.run_suite_mock.side_effect = to_raise
+
+    if fails:
+      self.assertRaises(failures_lib.StepFailure, self.RunStage)
+    else:
+      self.RunStage()
+
+    self.run_suite_mock.assert_called_once()
+    self.assertEqual(self.run_suite_mock.call_args[1].get('debug'), debug)
+
+    # Make sure we print the buildbot failure/warning messages correctly.
+    if fails:
+      self.failure_mock.assert_called_once()
+    else:
+      self.assertFalse(self.failure_mock.called)
+
+    if warns:
+      self.warning_mock.assert_called_once()
+    else:
+      self.assertFalse(self.warning_mock.called)
+
+  def testRemoteTrybotWithHWTest(self):
+    """Test remote trybot with hw test enabled"""
+    cmd_args = ['--remote-trybot', '-r', self.build_root, '--hwtest']
+    self._Prepare(cmd_args=cmd_args)
+    self._RunHWTestSuite()
+
+  def testRemoteTrybotNoHWTest(self):
+    """Test remote trybot with no hw test"""
+    cmd_args = ['--remote-trybot', '-r', self.build_root]
+    self._Prepare(cmd_args=cmd_args)
+    self._RunHWTestSuite(debug=True)
+
+  def testWithSuite(self):
+    """Test if run correctly with a test suite."""
+    self._RunHWTestSuite()
+
+  def testHandleTestWarning(self):
+    """Tests that we pass the build on test warning."""
+    # CQ passes.
+    self._Prepare('x86-alex-paladin')
+    self._RunHWTestSuite(warns=True, cmd_fail_mode='test_warn')
+
+    # PFQ passes.
+    self._Prepare('falco-chrome-pfq')
+    self._RunHWTestSuite(warns=True, cmd_fail_mode='test_warn')
+
+    # Canary passes.
+    self._Prepare('x86-alex-release')
+    self._RunHWTestSuite(warns=True, cmd_fail_mode='test_warn')
+
+  def testHandleLabFail(self):
+    """Tests that we handle lab failures correctly."""
+    # CQ fails.
+    self._Prepare('x86-alex-paladin')
+    self._RunHWTestSuite(fails=True, cmd_fail_mode='lab_fail')
+
+    # PFQ fails.
+    self._Prepare('falco-chrome-pfq')
+    self._RunHWTestSuite(fails=True, cmd_fail_mode='lab_fail')
+
+    # Canary fails.
+    self._Prepare('x86-alex-release')
+    self._RunHWTestSuite(fails=True, cmd_fail_mode='lab_fail')
+
+  def testWithSuiteWithFatalFailure(self):
+    """Tests that we fail on test failure."""
+    self._RunHWTestSuite(fails=True, cmd_fail_mode='test_fail')
+
+  def testWithSuiteWithFatalFailureWarnFlag(self):
+    """Tests that we don't fail if HWTestConfig warn_only is True."""
+    self._Prepare('x86-alex-release', warn_only=True)
+    self._RunHWTestSuite(warns=True, cmd_fail_mode='test_fail')
+
+  def testHandleSuiteTimeout(self):
+    """Tests that we handle suite timeout correctly."""
+    # Canary fails.
+    self._Prepare('x86-alex-release')
+    self._RunHWTestSuite(fails=True, cmd_fail_mode='suite_timeout')
+
+    # CQ fails.
+    self._Prepare('x86-alex-paladin')
+    self._RunHWTestSuite(fails=True, cmd_fail_mode='suite_timeout')
+
+    # PFQ fails.
+    self._Prepare('falco-chrome-pfq')
+    self._RunHWTestSuite(fails=True, cmd_fail_mode='suite_timeout')
+
+  def testHandleBoardNotAvailable(self):
+    """Tests that we handle board not available correctly."""
+    # Canary passes.
+    self._Prepare('x86-alex-release')
+    self._RunHWTestSuite(warns=True, cmd_fail_mode='board_not_available')
+
+    # CQ fails.
+    self._Prepare('x86-alex-paladin')
+    self._RunHWTestSuite(fails=True, cmd_fail_mode='board_not_available')
+
+    # PFQ fails.
+    self._Prepare('falco-chrome-pfq')
+    self._RunHWTestSuite(fails=True, cmd_fail_mode='board_not_available')
+
+  def testHandleTimeout(self):
+    """Tests that we handle timeout exceptions correctly."""
+    # Canary fails.
+    self._Prepare('x86-alex-release')
+    self._RunHWTestSuite(fails=True, cmd_fail_mode='timeout')
+
+    # CQ fails.
+    self._Prepare('x86-alex-paladin')
+    self._RunHWTestSuite(fails=True, cmd_fail_mode='timeout')
+
+    # PFQ fails.
+    self._Prepare('falco-chrome-pfq')
+    self._RunHWTestSuite(fails=True, cmd_fail_mode='timeout')
+
+  def testPayloadsNotGenerated(self):
+    """Test that we exit early if payloads are not generated."""
+    board_runattrs = self._run.GetBoardRunAttrs(self._current_board)
+    board_runattrs.SetParallel('payloads_generated', False)
+
+    self.RunStage()
+
+    # Make sure we make the stage orange.
+    self.warning_mock.assert_called_once()
+    # We exit early, so commands.RunHWTestSuite should not have been
+    # called.
+    self.assertFalse(self.run_suite_mock.called)
+
+  def testBranchedBuildExtendsTimeouts(self):
+    """Tests that we run with an extended timeout on a branched build."""
+    cmd_args = ['--branch', 'notTot', '-r', self.build_root,
+                '--remote-trybot', '--hwtest']
+    self._Prepare('x86-alex-release', cmd_args=cmd_args)
+    self._RunHWTestSuite()
+    self.assertEqual(self.suite_config.timeout,
+                     config_lib.HWTestConfig.BRANCHED_HW_TEST_TIMEOUT)
+    self.assertEqual(self.suite_config.priority,
+                     constants.HWTEST_DEFAULT_PRIORITY)
+
+
+class AUTestStageTest(generic_stages_unittest.AbstractStageTestCase,
+                      cros_build_lib_unittest.RunCommandTestCase,
+                      cbuildbot_unittest.SimpleBuilderTestCase,
+                      cros_test_lib.MockTempDirTestCase):
+  """Test only custom methods in AUTestStageTest."""
+
+  BOT_ID = 'x86-mario-release'
+  RELEASE_TAG = '0.0.1'
+
+  # pylint: disable=W0201
+  def setUp(self):
+    self.PatchObject(commands, 'ArchiveFile', autospec=True,
+                     return_value='foo.txt')
+
+    self.archive_stage = None
+    self.suite_config = None
+    self.suite = None
+
+    self._Prepare()
+
+  def _Prepare(self, bot_id=None, **kwargs):
+    super(AUTestStageTest, self)._Prepare(bot_id, **kwargs)
+
+    self._run.GetArchive().SetupArchivePath()
+    self.archive_stage = artifact_stages.ArchiveStage(self._run,
+                                                      self._current_board)
+    self.suite_config = self.GetHWTestSuite()
+    self.suite = self.suite_config.suite
+
+  def ConstructStage(self):
+    board_runattrs = self._run.GetBoardRunAttrs(self._current_board)
+    board_runattrs.SetParallelDefault('payloads_generated', True)
+    board_runattrs.SetParallelDefault('delta_payloads_generated', True)
+    return test_stages.AUTestStage(
+        self._run, self._current_board, self.suite_config)
+
+  def _PatchJson(self):
+    """Mock out the code that loads from swarming task summary."""
+    # pylint: disable=protected-access
+    temp_json_path = os.path.join(self.tempdir, 'temp_summary.json')
+    orig_func = commands._CreateSwarmingArgs
+
+    def replacement(*args, **kargs):
+      swarming_args = orig_func(*args, **kargs)
+      swarming_args['temp_json_path'] = temp_json_path
+      return swarming_args
+
+    self.PatchObject(commands, '_CreateSwarmingArgs', side_effect=replacement)
+
+    j = {'shards':[{'name': 'fake_name', 'bot_id': 'chromeos-server990',
+                    'created_ts': '2015-06-12 12:00:00',
+                    'internal_failure': False,
+                    'outputs': ['some fake output']}]}
+    self.PatchObject(swarming_lib.SwarmingCommandResult, 'LoadJsonSummary',
+                     return_value=j)
+
+  def testPerformStage(self):
+    """Tests that we correctly generate a tarball and archive it."""
+    # pylint: disable=protected-access
+
+    topology.FetchTopologyFromCIDB(None)
+    self._PatchJson()
+    stage = self.ConstructStage()
+    stage.PerformStage()
+    cmd = ['site_utils/autoupdate/full_release_test.py', '--npo', '--dump',
+           '--archive_url', self.archive_stage.upload_url,
+           self.archive_stage.release_tag, self._current_board]
+    self.assertCommandContains(cmd)
+    # pylint: disable=W0212
+    self.assertCommandContains([swarming_lib._SWARMING_PROXY_CLIENT,
+                                commands._RUN_SUITE_PATH, self.suite])
+
+  def testPayloadsNotGenerated(self):
+    """Test that we exit early if payloads are not generated."""
+    board_runattrs = self._run.GetBoardRunAttrs(self._current_board)
+    board_runattrs.SetParallel('delta_payloads_generated', False)
+    self.warning_mock = self.PatchObject(
+        logging, 'PrintBuildbotStepWarnings')
+    self.run_suite_mock = self.PatchObject(commands, 'RunHWTestSuite')
+
+    self.RunStage()
+
+    # Make sure we make the stage orange.
+    self.warning_mock.assert_called_once()
+    # We exit early, so commands.RunHWTestSuite should not have been
+    # called.
+    self.assertFalse(self.run_suite_mock.called)
+
+
+class ImageTestStageTest(generic_stages_unittest.AbstractStageTestCase,
+                         cros_build_lib_unittest.RunCommandTestCase,
+                         cbuildbot_unittest.SimpleBuilderTestCase):
+  """Test image test stage."""
+
+  BOT_ID = 'x86-mario-release'
+  RELEASE_TAG = 'ToT.0.0'
+
+  def setUp(self):
+    self._test_root = os.path.join(self.build_root, 'tmp/results_dir')
+    self.PatchObject(commands, 'CreateTestRoot', autospec=True,
+                     return_value='/tmp/results_dir')
+    self.PatchObject(path_util, 'ToChrootPath',
+                     side_effect=lambda x: x)
+    self._Prepare()
+
+  def _Prepare(self, bot_id=None, **kwargs):
+    super(ImageTestStageTest, self)._Prepare(bot_id, **kwargs)
+    self._run.GetArchive().SetupArchivePath()
+
+  def ConstructStage(self):
+    return test_stages.ImageTestStage(self._run, self._current_board)
+
+  def testPerformStage(self):
+    """Tests that we correctly run test-image script."""
+    stage = self.ConstructStage()
+    stage.PerformStage()
+    cmd = [
+        'sudo', '--',
+        os.path.join(self.build_root, 'chromite', 'bin', 'test_image'),
+        '--board', self._current_board,
+        '--test_results_root',
+        path_util.ToChrootPath(os.path.join(self._test_root,
+                                            'image_test_results')),
+        path_util.ToChrootPath(stage.GetImageDirSymlink()),
+    ]
+    self.assertCommandContains(cmd)
diff --git a/cbuildbot/swarming_lib.py b/cbuildbot/swarming_lib.py
new file mode 100644
index 0000000..435a6a9
--- /dev/null
+++ b/cbuildbot/swarming_lib.py
@@ -0,0 +1,135 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for running commands via swarming instance."""
+
+from __future__ import print_function
+
+
+import json
+import os
+
+from chromite.lib import cros_build_lib
+from chromite.lib import osutils
+
+# Location of swarming_client.py that is used to send swarming requests
+_DIR_NAME = os.path.dirname(os.path.abspath(__file__))
+_SWARMING_PROXY_CLIENT = os.path.abspath(os.path.join(
+    _DIR_NAME, '..', 'third_party', 'swarming.client', 'swarming.py'))
+CONNECTION_TYPE_COMMON = 'common'
+CONNECTION_TYPE_MOCK = 'mock'
+
+
+def RunSwarmingCommand(cmd, swarming_server, task_name=None,
+                       dimension=None,
+                       print_status_updates=False,
+                       timeout_secs=None, io_timeout_secs=None,
+                       hard_timeout_secs=None, expiration_secs=None,
+                       temp_json_path=None,
+                       *args, **kwargs):
+  """Run command via swarming proxy.
+
+  Args:
+    cmd: Commands to run, represented as a list.
+    swarming_server: The swarming server to send request to.
+    task_name: String, represent a task.
+    dimension: A tuple with two elements, representing dimension for
+               selecting a swarming bots. E.g. ('os', 'Linux')
+    print_status_updates: Boolean, whether to output status updates,
+                          can be used to prevent from hitting
+                          buildbot silent timeout.
+    timeout_secs: Timeout to wait for result used by swarming client.
+    hard_timeout_secs: Seconds to allow the task to complete.
+    io_timeout_secs: Seconds to allow the task to be silent.
+    expiration_secs: Seconds to allow the task to be pending for a bot to
+                     run before this task request expires.
+    temp_json_path: Where swarming client should dump the result.
+  """
+  with osutils.TempDir() as tempdir:
+    if temp_json_path is None:
+      temp_json_path = os.path.join(tempdir, 'temp_summary.json')
+    swarming_cmd = [_SWARMING_PROXY_CLIENT, 'run',
+                    '--swarming', swarming_server,
+                    '--task-summary-json', temp_json_path,
+                    '--raw-cmd']
+    if task_name:
+      swarming_cmd += ['--task-name', task_name]
+
+    if dimension:
+      swarming_cmd += ['--dimension', dimension[0], dimension[1]]
+
+    if print_status_updates:
+      swarming_cmd.append('--print-status-updates')
+
+    if timeout_secs is not None:
+      swarming_cmd += ['--timeout', str(timeout_secs)]
+
+    if io_timeout_secs is not None:
+      swarming_cmd += ['--io-timeout', str(io_timeout_secs)]
+
+    if hard_timeout_secs is not None:
+      swarming_cmd += ['--hard-timeout', str(hard_timeout_secs)]
+
+    if expiration_secs is not None:
+      swarming_cmd += ['--expiration', str(expiration_secs)]
+
+    swarming_cmd += ['--']
+    swarming_cmd += cmd
+
+    try:
+      result = cros_build_lib.RunCommand(swarming_cmd, *args, **kwargs)
+      return SwarmingCommandResult.CreateSwarmingCommandResult(
+          task_summary_json_path=temp_json_path, command_result=result)
+    except cros_build_lib.RunCommandError as e:
+      result = SwarmingCommandResult.CreateSwarmingCommandResult(
+          task_summary_json_path=temp_json_path, command_result=e.result)
+      raise cros_build_lib.RunCommandError(e.msg, result, e.exception)
+
+
+class SwarmingCommandResult(cros_build_lib.CommandResult):
+  """An object to store result of a command that is run via swarming.
+
+  Args:
+    task_summary_json: A dictionary, loaded from the json file
+                       output by swarming client. It contains all
+                       details about the swarming task.
+  """
+
+  def __init__(self, task_summary_json, *args, **kwargs):
+    super(SwarmingCommandResult, self).__init__(*args, **kwargs)
+    self.task_summary_json = task_summary_json
+
+  @staticmethod
+  def LoadJsonSummary(task_summary_json_path):
+    """Load json file into a dict.
+
+    Args:
+      task_summary_json_path: A json that contains output of a swarming task.
+
+    Returns:
+      A dictionary or None if task_summary_json_path doesn't exist.
+    """
+    if os.path.exists(task_summary_json_path):
+      with open(task_summary_json_path) as f:
+        return json.load(f)
+
+  @staticmethod
+  def CreateSwarmingCommandResult(task_summary_json_path, command_result):
+    """Create a SwarmingCommandResult object from a CommandResult object.
+
+    Args:
+      task_summary_json_path: The path to a json file that contains
+                              output of a swarming task.
+      command_result: A CommandResult object.
+
+    Returns:
+      A SwarmingCommandResult object.
+    """
+    task_summary_json = SwarmingCommandResult.LoadJsonSummary(
+        task_summary_json_path)
+    return  SwarmingCommandResult(task_summary_json=task_summary_json,
+                                  cmd=command_result.cmd,
+                                  error=command_result.error,
+                                  output=command_result.output,
+                                  returncode=command_result.returncode)
diff --git a/cbuildbot/tee.py b/cbuildbot/tee.py
new file mode 100644
index 0000000..878f209
--- /dev/null
+++ b/cbuildbot/tee.py
@@ -0,0 +1,238 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module that handles tee-ing output to a file."""
+
+from __future__ import print_function
+
+import errno
+import fcntl
+import os
+import multiprocessing
+import select
+import signal
+import subprocess
+import sys
+import traceback
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+
+
+# Max amount of data we'll hold in the buffer at a given time.
+_BUFSIZE = 1024
+
+
+# Custom signal handlers so we can catch the exception and handle it.
+class ToldToDie(Exception):
+  """Exception thrown via signal handlers."""
+
+  def __init__(self, signum):
+    Exception.__init__(self, "We received signal %i" % (signum,))
+
+
+# pylint: disable=W0613
+def _TeeProcessSignalHandler(signum, frame):
+  """TeeProcess custom signal handler.
+
+  This is used to decide whether or not to kill our parent.
+  """
+  raise ToldToDie(signum)
+
+
+def _output(line, output_files, complain):
+  """Print line to output_files.
+
+  Args:
+    line: Line to print.
+    output_files: List of files to print to.
+    complain: Print a warning if we get EAGAIN errors. Only one error
+              is printed per line.
+  """
+  for f in output_files:
+    offset = 0
+    while offset < len(line):
+      select.select([], [f], [])
+      try:
+        offset += os.write(f.fileno(), line[offset:])
+      except OSError as ex:
+        if ex.errno == errno.EINTR:
+          continue
+        elif ex.errno != errno.EAGAIN:
+          raise
+
+      if offset < len(line) and complain:
+        flags = fcntl.fcntl(f.fileno(), fcntl.F_GETFL, 0)
+        if flags & os.O_NONBLOCK:
+          warning = '\nWarning: %s/%d is non-blocking.\n' % (f.name,
+                                                             f.fileno())
+          _output(warning, output_files, False)
+
+        warning = '\nWarning: Short write for %s/%d.\n' % (f.name, f.fileno())
+        _output(warning, output_files, False)
+
+
+def _tee(input_fd, output_files, complain):
+  """Read data from |input_fd| and write to |output_files|."""
+  while True:
+    # We need to use os.read() directly because it will return to us when the
+    # other side has flushed its output (and is shorter than _BUFSIZE).  If we
+    # use python's file object helpers (like read() and readline()), it will
+    # not return until either the full buffer is filled or a newline is hit.
+    data = os.read(input_fd, _BUFSIZE)
+    if not data:
+      return
+    _output(data, output_files, complain)
+
+
+class _TeeProcess(multiprocessing.Process):
+  """Replicate output to multiple file handles."""
+
+  def __init__(self, output_filenames, complain, error_fd,
+               master_pid):
+    """Write to stdout and supplied filenames.
+
+    Args:
+      output_filenames: List of filenames to print to.
+      complain: Print a warning if we get EAGAIN errors.
+      error_fd: The fd to write exceptions/errors to during
+        shutdown.
+      master_pid: Pid to SIGTERM if we shutdown uncleanly.
+    """
+
+    self._reader_pipe, self.writer_pipe = os.pipe()
+    self._output_filenames = output_filenames
+    self._complain = complain
+    # Dupe the fd on the off chance it's stdout/stderr,
+    # which we screw with.
+    self._error_handle = os.fdopen(os.dup(error_fd), 'w', 0)
+    self.master_pid = master_pid
+    multiprocessing.Process.__init__(self)
+
+  def _CloseUnnecessaryFds(self):
+    preserve = set([1, 2, self._error_handle.fileno(), self._reader_pipe,
+                    subprocess.MAXFD])
+    preserve = iter(sorted(preserve))
+    fd = 0
+    while fd < subprocess.MAXFD:
+      current_low = preserve.next()
+      if fd != current_low:
+        os.closerange(fd, current_low)
+        fd = current_low
+      fd += 1
+
+  def run(self):
+    """Main function for tee subprocess."""
+    failed = True
+    try:
+      signal.signal(signal.SIGINT, _TeeProcessSignalHandler)
+      signal.signal(signal.SIGTERM, _TeeProcessSignalHandler)
+
+      # Cleanup every fd except for what we use.
+      self._CloseUnnecessaryFds()
+
+      # Read from the pipe.
+      input_fd = self._reader_pipe
+
+      # Create list of files to write to.
+      output_files = [os.fdopen(sys.stdout.fileno(), 'w', 0)]
+      for filename in self._output_filenames:
+        output_files.append(open(filename, 'w', 0))
+
+      # Send all data from the one input to all the outputs.
+      _tee(input_fd, output_files, self._complain)
+      failed = False
+    except ToldToDie:
+      failed = False
+    except Exception as e:
+      tb = traceback.format_exc()
+      logging.PrintBuildbotStepFailure(self._error_handle)
+      self._error_handle.write(
+          'Unhandled exception occured in tee:\n%s\n' % (tb,))
+      # Try to signal the parent telling them of our
+      # imminent demise.
+
+    finally:
+      # Close input.
+      os.close(input_fd)
+
+      if failed:
+        try:
+          os.kill(self.master_pid, signal.SIGTERM)
+        except Exception as e:
+          self._error_handle.write("\nTee failed signaling %s\n" % e)
+
+      # Finally, kill ourself.
+      # Specifically do it in a fashion that ensures no inherited
+      # cleanup code from our parent process is run - leave that to
+      # the parent.
+      # pylint: disable=W0212
+      os._exit(0)
+
+
+class Tee(cros_build_lib.MasterPidContextManager):
+  """Class that handles tee-ing output to a file."""
+
+  def __init__(self, output_file):
+    """Initializes object with path to log file."""
+    cros_build_lib.MasterPidContextManager.__init__(self)
+    self._file = output_file
+    self._old_stdout = None
+    self._old_stderr = None
+    self._old_stdout_fd = None
+    self._old_stderr_fd = None
+    self._tee = None
+
+  def start(self):
+    """Start tee-ing all stdout and stderr output to the file."""
+    # Flush and save old file descriptors.
+    sys.stdout.flush()
+    sys.stderr.flush()
+    self._old_stdout_fd = os.dup(sys.stdout.fileno())
+    self._old_stderr_fd = os.dup(sys.stderr.fileno())
+    # Save file objects
+    self._old_stdout = sys.stdout
+    self._old_stderr = sys.stderr
+
+    # Replace std[out|err] with unbuffered file objects
+    sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
+    sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 0)
+
+    # Create a tee subprocess.
+    self._tee = _TeeProcess([self._file], True, self._old_stderr_fd,
+                            os.getpid())
+    self._tee.start()
+
+    # Redirect stdout and stderr to the tee subprocess.
+    writer_pipe = self._tee.writer_pipe
+    os.dup2(writer_pipe, sys.stdout.fileno())
+    os.dup2(writer_pipe, sys.stderr.fileno())
+    os.close(writer_pipe)
+
+  def stop(self):
+    """Restores old stdout and stderr handles and waits for tee proc to exit."""
+    # Close unbuffered std[out|err] file objects, as well as the tee's stdin.
+    sys.stdout.close()
+    sys.stderr.close()
+
+    # Restore file objects
+    sys.stdout = self._old_stdout
+    sys.stderr = self._old_stderr
+
+    # Restore old file descriptors.
+    os.dup2(self._old_stdout_fd, sys.stdout.fileno())
+    os.dup2(self._old_stderr_fd, sys.stderr.fileno())
+    os.close(self._old_stdout_fd)
+    os.close(self._old_stderr_fd)
+    self._tee.join()
+
+  def _enter(self):
+    self.start()
+
+  def _exit(self, exc_type, exc, exc_traceback):
+    try:
+      self.stop()
+    finally:
+      if self._tee is not None:
+        self._tee.terminate()
diff --git a/cbuildbot/topology.py b/cbuildbot/topology.py
new file mode 100644
index 0000000..a5c0479
--- /dev/null
+++ b/cbuildbot/topology.py
@@ -0,0 +1,68 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module used for run-time determination of topology information.
+
+By topology, we mean a specification of which external server dependencies are
+located where. At the moment, this module provides a default key-value api via
+the |topology| member, and a cidb-backed store to provide environment-specific
+overrides of the default values.
+"""
+
+from __future__ import print_function
+
+import collections
+
+# Well-known topology keys.
+STATSD_HOST_KEY = '/statsd/host'
+STATSD_PORT_KEY = '/statsd/port'
+ELASTIC_SEARCH_HOST_KEY = '/statsd/es_host'
+ELASTIC_SEARCH_PORT_KEY = '/statsd/es_port'
+ELASTIC_SEARCH_UDP_PORT_KEY = '/statsd/es_udp_port'
+SWARMING_PROXY_HOST_KEY = '/swarming_proxy/host'
+
+# Default values, used unless overridden via FetchTopologyFromCIDB.
+TOPOLOGY_DEFAULTS = {
+    STATSD_HOST_KEY : '146.148.70.158',
+    STATSD_PORT_KEY : '8125',
+    ELASTIC_SEARCH_HOST_KEY : '146.148.70.158',
+    ELASTIC_SEARCH_PORT_KEY : '9200',
+    ELASTIC_SEARCH_UDP_PORT_KEY : '9700',
+    SWARMING_PROXY_HOST_KEY: 'fake_swarming_server',
+    }
+
+
+class LockedDictAccessException(Exception):
+  """Raised when reading from a LockedDefaultDict before unlock()."""
+
+
+class LockedDefaultDict(collections.defaultdict):
+  """collections.defaultdict which cannot be read from until unlocked."""
+
+  def __init__(self):
+    super(LockedDefaultDict, self).__init__()
+    # Reads via get() raise until unlock() is called.
+    self._locked = True
+
+  def get(self, key):
+    """Return the value for |key| (None if absent).
+
+    Raises:
+      LockedDictAccessException: if the dict has not been unlock()-ed.
+    """
+    if self._locked:
+      raise LockedDictAccessException()
+    return super(LockedDefaultDict, self).get(key)
+
+  def unlock(self):
+    """Permit subsequent reads via get()."""
+    self._locked = False
+
+
+# Module-wide topology store.  Seeded with the defaults, but locked for
+# reading until FetchTopologyFromCIDB() unlocks it.
+topology = LockedDefaultDict()
+topology.update(TOPOLOGY_DEFAULTS)
+
+
+def FetchTopologyFromCIDB(db):
+  """Update and unlock topology based on cidb-backed keyval store.
+
+  Args:
+    db: cidb.CIDBConnection instance for database to fetch keyvals from,
+        or None.
+  """
+  if db:
+    topology.update(db.GetKeyVals())
+
+  # Unlock even when db is None so callers can read the default values.
+  topology.unlock()
diff --git a/cbuildbot/topology_unittest b/cbuildbot/topology_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/topology_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/topology_unittest.py b/cbuildbot/topology_unittest.py
new file mode 100644
index 0000000..245ff14
--- /dev/null
+++ b/cbuildbot/topology_unittest.py
@@ -0,0 +1,33 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for topology module."""
+
+from __future__ import print_function
+
+from chromite.cbuildbot import topology
+from chromite.lib import fake_cidb
+from chromite.lib import cros_test_lib
+
+
+# NOTE(review): class name misspells 'Topology'; left unchanged here since
+# renaming a public identifier is out of scope for a comment-only pass.
+class ToplogyTest(cros_test_lib.TestCase):
+  """Unit test of topology module."""
+
+  def setUp(self):
+    # Mutually isolate these tests and make them independent of
+    # TOPOLOGY_DEFAULTS
+    topology.topology = topology.LockedDefaultDict()
+
+  def testWithDB(self):
+    """Keyvals from the db should be readable after fetching."""
+    fake_db = fake_cidb.FakeCIDBConnection(fake_keyvals={'/foo': 'bar'})
+    topology.FetchTopologyFromCIDB(fake_db)
+    self.assertEqual(topology.topology.get('/foo'), 'bar')
+
+  def testWithoutDB(self):
+    """Fetching with no db should still unlock the dict (empty values)."""
+    topology.FetchTopologyFromCIDB(None)
+    self.assertEqual(topology.topology.get('/foo'), None)
+
+  def testNotFetched(self):
+    """Reading before any fetch must raise LockedDictAccessException."""
+    with self.assertRaises(topology.LockedDictAccessException):
+      topology.topology.get('/foo')
diff --git a/cbuildbot/tree_status.py b/cbuildbot/tree_status.py
new file mode 100644
index 0000000..fd09de0
--- /dev/null
+++ b/cbuildbot/tree_status.py
@@ -0,0 +1,408 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Manage tree status."""
+
+from __future__ import print_function
+
+import httplib
+import json
+import os
+import re
+import socket
+import urllib
+import urllib2
+
+from chromite.cbuildbot import constants
+from chromite.lib import alerts
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import timeout_util
+
+
+# Base URL of the Chromium OS tree status app, plus derived endpoints for
+# reading the current status (JSON) and posting updates.
+CROS_TREE_STATUS_URL = 'https://chromiumos-status.appspot.com'
+CROS_TREE_STATUS_JSON_URL = '%s/current?format=json' % CROS_TREE_STATUS_URL
+CROS_TREE_STATUS_UPDATE_URL = '%s/status' % CROS_TREE_STATUS_URL
+
+# Credentials used when posting status updates.
+_USER_NAME = 'buildbot@chromium.org'
+_PASSWORD_PATH = '/home/chrome-bot/.status_password_chromiumos'
+
+# The tree status json file contains the following keywords.
+TREE_STATUS_STATE = 'general_state'
+TREE_STATUS_USERNAME = 'username'
+TREE_STATUS_MESSAGE = 'message'
+TREE_STATUS_DATE = 'date'
+TREE_STATUS_CAN_COMMIT = 'can_commit_freely'
+
+# These keywords in a status message are detected automatically to
+# update the tree status.
+MESSAGE_KEYWORDS = ('open', 'throt', 'close', 'maint')
+
+# This is the delimiter to separate messages from different updates.
+MESSAGE_DELIMITER = '|'
+
+
+class PasswordFileDoesNotExist(Exception):
+  """Raised when the tree status password file (_PASSWORD_PATH) is missing."""
+
+
+class InvalidTreeStatus(Exception):
+  """Raised when setting a status not in constants.VALID_TREE_STATUSES."""
+
+
+def _GetStatusDict(status_url, raw_message=False):
+  """Polls |status_url| and returns the retrieved tree status dictionary.
+
+  This function gets a JSON response from |status_url|, and returns
+  the dictionary of the tree status, if one exists and the http
+  request was successful.
+
+  The tree status dictionary contains:
+    TREE_STATUS_USERNAME: User who posted the message (foo@chromium.org).
+    TREE_STATUS_MESSAGE: The status message ("Tree is Open (CQ is good)").
+    TREE_STATUS_CAN_COMMIT: Whether tree is commit ready ('true' or 'false').
+    TREE_STATUS_STATE: one of constants.VALID_TREE_STATUSES.
+
+  Args:
+    status_url: The URL of the tree status to check.
+    raw_message: Whether to return the raw message without stripping the
+      "Tree is open/throttled/closed" string. Defaults to always strip.
+
+  Returns:
+    The tree status as a dictionary, if it was successfully retrieved.
+    Otherwise None.
+  """
+  try:
+    # Check for successful response code.  urllib (unlike urllib2) does not
+    # raise on HTTP error statuses, so the code must be checked explicitly.
+    response = urllib.urlopen(status_url)
+    if response.getcode() == 200:
+      data = json.load(response)
+      if not raw_message:
+        # Tree status message is usually in the form:
+        #   "Tree is open/closed/throttled (reason for the tree closure)"
+        # We want only the reason enclosed in the parentheses.
+        # This is a best-effort parsing because user may post the message
+        # in a form that we don't recognize.
+        match = re.match(r'Tree is [\w\s\.]+\((.*)\)',
+                         data.get(TREE_STATUS_MESSAGE, ''))
+        data[TREE_STATUS_MESSAGE] = '' if not match else match.group(1)
+      return data
+  # We remain robust against IOError's.
+  except IOError as e:
+    logging.error('Could not reach %s: %r', status_url, e)
+
+
+def _GetStatus(status_url):
+  """Polls |status_url| and returns the retrieved tree status.
+
+  This function gets a JSON response from |status_url|, and returns the
+  value associated with the TREE_STATUS_STATE, if one exists and the
+  http request was successful.
+
+  Args:
+    status_url: The URL of the tree status to check.
+
+  Returns:
+    The tree status, as a string, if it was successfully retrieved. Otherwise
+    None.
+  """
+  status_dict = _GetStatusDict(status_url)
+  if status_dict:
+    return status_dict.get(TREE_STATUS_STATE)
+
+
+def WaitForTreeStatus(status_url=None, period=1, timeout=1, throttled_ok=False):
+  """Wait for tree status to be open (or throttled, if |throttled_ok|).
+
+  Args:
+    status_url: The status url to check i.e.
+      'https://status.appspot.com/current?format=json'
+    period: How often to poll for status updates.
+    timeout: How long to wait until a tree status is discovered.
+    throttled_ok: is TREE_THROTTLED an acceptable status?
+
+  Returns:
+    The most recent tree status, either constants.TREE_OPEN or
+    constants.TREE_THROTTLED (if |throttled_ok|)
+
+  Raises:
+    timeout_util.TimeoutError if timeout expired before tree reached
+    acceptable status.
+  """
+  if not status_url:
+    status_url = CROS_TREE_STATUS_JSON_URL
+
+  acceptable_states = set([constants.TREE_OPEN])
+  verb = 'open'
+  if throttled_ok:
+    acceptable_states.add(constants.TREE_THROTTLED)
+    verb = 'not be closed'
+
+  # Clamp to a positive timeout; presumably WaitForReturnValue rejects
+  # non-positive values — TODO confirm against timeout_util.
+  timeout = max(timeout, 1)
+
+  def _LogMessage(remaining):
+    logging.info('Waiting for the tree to %s (%s left)...', verb, remaining)
+
+  def _get_status():
+    return _GetStatus(status_url)
+
+  return timeout_util.WaitForReturnValue(
+      acceptable_states, _get_status, timeout=timeout,
+      period=period, side_effect_func=_LogMessage)
+
+
+def IsTreeOpen(status_url=None, period=1, timeout=1, throttled_ok=False):
+  """Wait for tree status to be open (or throttled, if |throttled_ok|).
+
+  Args:
+    status_url: The status url to check i.e.
+      'https://status.appspot.com/current?format=json'
+    period: How often to poll for status updates.
+    timeout: How long to wait until a tree status is discovered.
+    throttled_ok: Does TREE_THROTTLED count as open?
+
+  Returns:
+    True if the tree is open (or throttled, if |throttled_ok|). False if
+    timeout expired before tree reached acceptable status.
+  """
+  if not status_url:
+    status_url = CROS_TREE_STATUS_JSON_URL
+
+  try:
+    WaitForTreeStatus(status_url=status_url, period=period, timeout=timeout,
+                      throttled_ok=throttled_ok)
+  except timeout_util.TimeoutError:
+    # Timing out simply means the tree never reached an acceptable state.
+    return False
+  return True
+
+
+def _GetPassword():
+  """Returns the password for updating tree status."""
+  if not os.path.exists(_PASSWORD_PATH):
+    raise PasswordFileDoesNotExist(
+        'Unable to retrieve password. %s does not exist',
+        _PASSWORD_PATH)
+
+  return osutils.ReadFile(_PASSWORD_PATH).strip()
+
+
+def _UpdateTreeStatus(status_url, message):
+  """Updates the tree status to |message|.
+
+  Args:
+    status_url: The tree status URL.
+    message: The tree status text to post .
+  """
+  password = _GetPassword()
+  params = urllib.urlencode({
+      'message': message,
+      'username': _USER_NAME,
+      'password': password,
+  })
+  headers = {'Content-Type': 'application/x-www-form-urlencoded'}
+  req = urllib2.Request(status_url, data=params, headers=headers)
+  try:
+    urllib2.urlopen(req)
+  except (urllib2.URLError, httplib.HTTPException, socket.error) as e:
+    logging.error('Unable to update tree status: %s', e)
+    raise e
+  else:
+    logging.info('Updated tree status with message: %s', message)
+
+
+def UpdateTreeStatus(status, message, announcer='cbuildbot', epilogue='',
+                     status_url=None, dryrun=False):
+  """Updates the tree status to |status| with additional |message|.
+
+  Args:
+    status: A status in constants.VALID_TREE_STATUSES.
+    message: A string to display as part of the tree status.
+    announcer: The announcer of the message.
+    epilogue: The string to append to |message|.
+    status_url: The URL of the tree status to update.
+    dryrun: If set, don't update the tree status.
+
+  Raises:
+    InvalidTreeStatus: if |status| is not a valid tree status.
+  """
+  if status_url is None:
+    status_url = CROS_TREE_STATUS_UPDATE_URL
+
+  if status not in constants.VALID_TREE_STATUSES:
+    raise InvalidTreeStatus('%s is not a valid tree status.' % status)
+
+  if status == 'maintenance':
+    # This is a special case because "Tree is maintenance" is
+    # grammatically incorrect.
+    status = 'under maintenance'
+
+  text_dict = {
+      'status': status,
+      'epilogue': epilogue,
+      'announcer': announcer,
+      'message': message,
+      'delimiter': MESSAGE_DELIMITER
+  }
+  if epilogue:
+    text = ('Tree is %(status)s (%(announcer)s: %(message)s %(delimiter)s '
+            '%(epilogue)s)' % text_dict)
+  else:
+    text = 'Tree is %(status)s (%(announcer)s: %(message)s)' % text_dict
+
+  if dryrun:
+    logging.info('Would have updated the tree status with message: %s', text)
+  else:
+    _UpdateTreeStatus(status_url, text)
+
+
+def ThrottleOrCloseTheTree(announcer, message, internal=None, buildnumber=None,
+                           dryrun=False):
+  """Throttle or close the tree with |message|.
+
+  By default, this function throttles the tree with an updated
+  message. If the tree is already not open, it will keep the original
+  status (closed, maintenance) and only update the message. This
+  ensures that we do not lower the severity of tree closure.
+
+  In the case where the tree is not open, the previous tree status
+  message is kept by prepending it to |message|, if possible. This
+  ensures that the cause of the previous tree closure remains visible.
+
+  Args:
+    announcer: The announcer of the message.
+    message: A string to display as part of the tree status.
+    internal: Whether the build is internal or not. Append the build type
+      if this is set. Defaults to None.
+    buildnumber: The build number to append.
+    dryrun: If set, generate the message but don't update the tree status.
+  """
+  # Get current tree status.
+  # NOTE(review): _GetStatusDict returns None if the status app is
+  # unreachable, which would make the .get() calls below raise
+  # AttributeError — confirm whether callers rely on that failure mode.
+  status_dict = _GetStatusDict(CROS_TREE_STATUS_JSON_URL)
+  current_status = status_dict.get(TREE_STATUS_STATE)
+  current_msg = status_dict.get(TREE_STATUS_MESSAGE)
+
+  status = constants.TREE_THROTTLED
+  if (constants.VALID_TREE_STATUSES.index(current_status) >
+      constants.VALID_TREE_STATUSES.index(status)):
+    # Maintain the current status if it is more severe than throttled.
+    status = current_status
+
+  epilogue = ''
+  # Don't prepend the current status message if the tree is open.
+  if current_status != constants.TREE_OPEN and current_msg:
+    # Scan the current message and discard the text by the same
+    # announcer.
+    chunks = [x.strip() for x in current_msg.split(MESSAGE_DELIMITER)
+              if '%s' % announcer not in x.strip()]
+    current_msg = MESSAGE_DELIMITER.join(chunks)
+
+    if any(x for x in MESSAGE_KEYWORDS if x.lower() in
+           current_msg.lower().split()):
+      # The waterfall scans the message for keywords to change the
+      # tree status. Don't prepend the current status message if it
+      # contains such keywords.
+      logging.warning('Cannot prepend the previous tree status message because '
+                      'there are keywords that may affect the tree state.')
+    else:
+      epilogue = current_msg
+
+  if internal is not None:
+    # '-i' marks an internal build, '-p' a public build.
+    announcer += '-i' if internal else '-p'
+
+  if buildnumber:
+    announcer = '%s-%d' % (announcer, buildnumber)
+
+  UpdateTreeStatus(status, message, announcer=announcer, epilogue=epilogue,
+                   dryrun=dryrun)
+
+
+def _OpenSheriffURL(sheriff_url):
+  """Returns the content of |sheriff_url| or None if failed to open it."""
+  try:
+    # urllib does not raise on HTTP error statuses; check the code manually.
+    response = urllib.urlopen(sheriff_url)
+    if response.getcode() == 200:
+      return response.read()
+  except IOError as e:
+    logging.error('Could not reach %s: %r', sheriff_url, e)
+
+
+def GetSheriffEmailAddresses(sheriff_type):
+  """Get the email addresses of the sheriffs or deputy.
+
+  Args:
+    sheriff_type: Type of the sheriff to look for. See the keys in
+    constants.SHERIFF_TYPE_TO_URL.
+      - 'tree': tree sheriffs
+      - 'chrome': chrome gardener
+
+  Returns:
+    A list of email addresses.
+
+  Raises:
+    ValueError: if |sheriff_type| is not in constants.SHERIFF_TYPE_TO_URL.
+  """
+  if sheriff_type not in constants.SHERIFF_TYPE_TO_URL:
+    raise ValueError('Unknown sheriff type: %s' % sheriff_type)
+
+  urls = constants.SHERIFF_TYPE_TO_URL.get(sheriff_type)
+  sheriffs = []
+  for url in urls:
+    # The URL displays a line: document.write('taco, burrito')
+    raw_line = _OpenSheriffURL(url)
+    if raw_line is not None:
+      # Pull out the single-quoted payload; the literal string
+      # 'None (channel is sheriff)' means nobody is on duty.
+      match = re.search(r'\'(.*)\'', raw_line)
+      if match and match.group(1) != 'None (channel is sheriff)':
+        sheriffs.extend(x.strip() for x in match.group(1).split(','))
+
+  return ['%s%s' % (x, constants.GOOGLE_EMAIL) for x in sheriffs]
+
+
+def GetHealthAlertRecipients(builder_run):
+  """Returns a list of email addresses of the health alert recipients."""
+  recipients = []
+  for entry in builder_run.config.health_alert_recipients:
+    if '@' in entry:
+      # If the entry is an email address, add it to the list.
+      recipients.append(entry)
+    else:
+      # Perform address lookup for a non-email entry (a sheriff type such
+      # as 'tree' or 'chrome').
+      recipients.extend(GetSheriffEmailAddresses(entry))
+
+  return recipients
+
+
+def SendHealthAlert(builder_run, subject, body, extra_fields=None):
+  """Send a health alert.
+
+  Health alerts are only sent for regular buildbots and Pre-CQ buildbots.
+
+  Args:
+    builder_run: BuilderRun for the main cbuildbot run.
+    subject: The subject of the health alert email.
+    body: The body of the health alert email.
+    extra_fields: (optional) A dictionary of additional message header fields
+                  to be added to the message. Custom field names should begin
+                  with the prefix 'X-'.
+  """
+  # Only send mail for builds that are running in production.
+  if builder_run.InProduction():
+    server = alerts.GmailServer(
+        token_cache_file=constants.GMAIL_TOKEN_CACHE_FILE,
+        token_json_file=constants.GMAIL_TOKEN_JSON_FILE)
+    alerts.SendEmail(subject,
+                     GetHealthAlertRecipients(builder_run),
+                     server=server,
+                     message=body,
+                     extra_fields=extra_fields)
+
+
+def ConstructDashboardURL(buildbot_url, builder_name, build_number,
+                          stage=None):
+  """Return the dashboard (buildbot) URL for this run.
+
+  Args:
+    buildbot_url: Base URL for the waterfall.
+    builder_name: Builder name on buildbot dashboard.
+    build_number: Build number for this validation attempt.
+    stage: Link directly to a stage log, else use the general landing page.
+
+  Returns:
+    The fully formed URL.
+  """
+  url_suffix = 'builders/%s/builds/%s' % (builder_name, str(build_number))
+  if stage:
+    url_suffix += '/steps/%s/logs/stdio' % (stage,)
+  # Escape the suffix so builder/stage names are safe inside a URL path.
+  url_suffix = urllib.quote(url_suffix)
+  return os.path.join(buildbot_url, url_suffix)
diff --git a/cbuildbot/tree_status_unittest b/cbuildbot/tree_status_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/tree_status_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/tree_status_unittest.py b/cbuildbot/tree_status_unittest.py
new file mode 100644
index 0000000..979b34b
--- /dev/null
+++ b/cbuildbot/tree_status_unittest.py
@@ -0,0 +1,267 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test suite for tree_status.py"""
+
+from __future__ import print_function
+
+import mock
+import urllib
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import tree_status
+from chromite.lib import cros_test_lib
+from chromite.lib import timeout_util
+
+
+# pylint: disable=protected-access
+
+
+class TestTreeStatus(cros_test_lib.MockTestCase):
+  """Tests TreeStatus method in cros_build_lib."""
+
+  status_url = 'https://chromiumos-status.appspot.com/current?format=json'
+
+  def _TreeStatusFile(self, message, general_state):
+    """Returns a file-like object with the status message written in it."""
+    json = '{"message": "%s", "general_state": "%s"}' % (message, general_state)
+    return mock.MagicMock(json=json, getcode=lambda: 200, read=lambda: json)
+
+  def _SetupMockTreeStatusResponses(self,
+                                    final_tree_status='Tree is open.',
+                                    final_general_state=constants.TREE_OPEN,
+                                    rejected_tree_status='Tree is closed.',
+                                    rejected_general_state=
+                                    constants.TREE_CLOSED,
+                                    rejected_status_count=0,
+                                    retries_500=0,
+                                    output_final_status=True):
+    """Mocks out urllib.urlopen commands to simulate a given tree status.
+
+    Args:
+      final_tree_status: The final value of tree status that will be returned
+        by urlopen.
+      final_general_state: The final value of 'general_state' that will be
+        returned by urlopen.
+      rejected_tree_status: An intermediate value of tree status that will be
+        returned by urlopen and retried upon.
+      rejected_general_state: An intermediate value of 'general_state' that
+        will be returned by urlopen and retried upon.
+      rejected_status_count: The number of times urlopen will return the
+        rejected state.
+      retries_500: The number of times urlopen will fail with a 500 code.
+      output_final_status: If True, the status given by final_tree_status and
+        final_general_state will be the last status returned by urlopen. If
+        False, final_tree_status will never be returned, and instead an
+        unlimited number of times rejected_response will be returned.
+    """
+    final_response = self._TreeStatusFile(final_tree_status,
+                                          final_general_state)
+    rejected_response = self._TreeStatusFile(rejected_tree_status,
+                                             rejected_general_state)
+
+    error_500_response = mock.MagicMock(getcode=lambda: 500)
+    return_value = [error_500_response] * retries_500
+
+    if output_final_status:
+      return_value += [rejected_response] * rejected_status_count
+      return_value += [final_response]
+    else:
+      return_value += [rejected_response] * 10
+
+    self.PatchObject(urllib, 'urlopen', autospec=True,
+                     side_effect=return_value)
+
+  def testTreeIsOpen(self):
+    """Tests that we return True if the tree is open."""
+    self._SetupMockTreeStatusResponses(rejected_status_count=5,
+                                       retries_500=5)
+    self.assertTrue(tree_status.IsTreeOpen(status_url=self.status_url,
+                                           period=0))
+
+  def testTreeIsClosed(self):
+    """Tests that we return False if the tree is closed."""
+    self._SetupMockTreeStatusResponses(output_final_status=False)
+    self.assertFalse(tree_status.IsTreeOpen(status_url=self.status_url,
+                                            period=0.1))
+
+  def testTreeIsThrottled(self):
+    """Tests that we return True if the tree is throttled."""
+    self._SetupMockTreeStatusResponses(
+        final_tree_status='Tree is throttled (flaky bug on flaky builder)',
+        final_general_state=constants.TREE_THROTTLED)
+    self.assertTrue(tree_status.IsTreeOpen(status_url=self.status_url,
+                                           throttled_ok=True))
+
+  def testTreeIsThrottledNotOk(self):
+    """Tests that we respect throttled_ok"""
+    self._SetupMockTreeStatusResponses(
+        rejected_tree_status='Tree is throttled (flaky bug on flaky builder)',
+        rejected_general_state=constants.TREE_THROTTLED,
+        output_final_status=False)
+    self.assertFalse(tree_status.IsTreeOpen(status_url=self.status_url,
+                                            period=0.1))
+
+  def testWaitForStatusOpen(self):
+    """Tests that we can wait for a tree open response."""
+    self._SetupMockTreeStatusResponses()
+    self.assertEqual(tree_status.WaitForTreeStatus(status_url=self.status_url),
+                     constants.TREE_OPEN)
+
+
+  def testWaitForStatusThrottled(self):
+    """Tests that we can wait for a tree throttled response."""
+    self._SetupMockTreeStatusResponses(
+        final_general_state=constants.TREE_THROTTLED)
+    self.assertEqual(tree_status.WaitForTreeStatus(status_url=self.status_url,
+                                                   throttled_ok=True),
+                     constants.TREE_THROTTLED)
+
+  def testWaitForStatusFailure(self):
+    """Tests that waiting times out if the tree never reaches open."""
+    self._SetupMockTreeStatusResponses(output_final_status=False)
+    self.assertRaises(timeout_util.TimeoutError,
+                      tree_status.WaitForTreeStatus,
+                      status_url=self.status_url,
+                      period=0.1)
+
+  def testGetStatusDictParsesMessage(self):
+    """Tests that _GetStatusDict parses message correctly."""
+    self._SetupMockTreeStatusResponses(
+        final_tree_status='Tree is throttled (foo canary: taco investigating)',
+        final_general_state=constants.TREE_OPEN)
+    data = tree_status._GetStatusDict(self.status_url)
+    self.assertEqual(data[tree_status.TREE_STATUS_MESSAGE],
+                     'foo canary: taco investigating')
+
+  def testGetStatusDictEmptyMessage(self):
+    """Tests that _GetStatusDict stores an empty string for unknown format."""
+    self._SetupMockTreeStatusResponses(
+        final_tree_status='Tree is throttled. foo canary -> crbug.com/bar',
+        final_general_state=constants.TREE_OPEN)
+    data = tree_status._GetStatusDict(self.status_url)
+    self.assertEqual(data[tree_status.TREE_STATUS_MESSAGE], '')
+
+  def testGetStatusDictRawMessage(self):
+    """Tests that _GetStatusDict stores raw message if requested."""
+    self._SetupMockTreeStatusResponses(final_tree_status='Tree is open (taco).',
+                                       final_general_state=constants.TREE_OPEN)
+    data = tree_status._GetStatusDict(self.status_url, raw_message=True)
+    self.assertEqual(data[tree_status.TREE_STATUS_MESSAGE],
+                     'Tree is open (taco).')
+
+  def testUpdateTreeStatusWithEpilogue(self):
+    """Tests that epilogue is appended to the message."""
+    with mock.patch.object(tree_status, '_UpdateTreeStatus') as m:
+      tree_status.UpdateTreeStatus(
+          constants.TREE_CLOSED, 'failure', announcer='foo',
+          epilogue='bar')
+      m.assert_called_once_with(mock.ANY, 'Tree is closed (foo: failure | bar)')
+
+  def testUpdateTreeStatusWithoutEpilogue(self):
+    """Tests that the tree status message is created as expected."""
+    with mock.patch.object(tree_status, '_UpdateTreeStatus') as m:
+      tree_status.UpdateTreeStatus(
+          constants.TREE_CLOSED, 'failure', announcer='foo')
+      m.assert_called_once_with(mock.ANY, 'Tree is closed (foo: failure)')
+
+  def testUpdateTreeStatusUnknownStatus(self):
+    """Tests that the exception is raised on unknown tree status."""
+    with mock.patch.object(tree_status, '_UpdateTreeStatus'):
+      self.assertRaises(tree_status.InvalidTreeStatus,
+                        tree_status.UpdateTreeStatus, 'foostatus', 'failure')
+
+  def testThrottlesTreeOnWithBuildNumberAndType(self):
+    """Tests that tree is throttled with the build number in the message."""
+    self._SetupMockTreeStatusResponses(final_tree_status='Tree is open (taco)',
+                                       final_general_state=constants.TREE_OPEN)
+    with mock.patch.object(tree_status, '_UpdateTreeStatus') as m:
+      tree_status.ThrottleOrCloseTheTree('foo', 'failure', buildnumber=1234,
+                                         internal=True)
+      m.assert_called_once_with(mock.ANY,
+                                'Tree is throttled (foo-i-1234: failure)')
+
+  def testThrottlesTreeOnWithBuildNumberAndPublicType(self):
+    """Tests that tree is throttled with the build number in the message."""
+    self._SetupMockTreeStatusResponses(final_tree_status='Tree is open (taco)',
+                                       final_general_state=constants.TREE_OPEN)
+    with mock.patch.object(tree_status, '_UpdateTreeStatus') as m:
+      tree_status.ThrottleOrCloseTheTree('foo', 'failure', buildnumber=1234,
+                                         internal=False)
+      m.assert_called_once_with(mock.ANY,
+                                'Tree is throttled (foo-p-1234: failure)')
+
+  def testThrottlesTreeOnOpen(self):
+    """Tests that ThrottleOrCloseTheTree throttles the tree if tree is open."""
+    self._SetupMockTreeStatusResponses(final_tree_status='Tree is open (taco)',
+                                       final_general_state=constants.TREE_OPEN)
+    with mock.patch.object(tree_status, '_UpdateTreeStatus') as m:
+      tree_status.ThrottleOrCloseTheTree('foo', 'failure')
+      m.assert_called_once_with(mock.ANY, 'Tree is throttled (foo: failure)')
+
+  def testThrottlesTreeOnThrottled(self):
+    """Tests ThrottleOrCloseTheTree throttles the tree if tree is throttled."""
+    self._SetupMockTreeStatusResponses(
+        final_tree_status='Tree is throttled (taco)',
+        final_general_state=constants.TREE_THROTTLED)
+    with mock.patch.object(tree_status, '_UpdateTreeStatus') as m:
+      tree_status.ThrottleOrCloseTheTree('foo', 'failure')
+      # Also make sure that previous status message is included.
+      m.assert_called_once_with(mock.ANY,
+                                'Tree is throttled (foo: failure | taco)')
+
+  def testClosesTheTreeOnClosed(self):
+    """Tests ThrottleOrCloseTheTree closes the tree if tree is closed."""
+    self._SetupMockTreeStatusResponses(
+        final_tree_status='Tree is closed (taco)',
+        final_general_state=constants.TREE_CLOSED)
+    with mock.patch.object(tree_status, '_UpdateTreeStatus') as m:
+      tree_status.ThrottleOrCloseTheTree('foo', 'failure')
+      m.assert_called_once_with(mock.ANY,
+                                'Tree is closed (foo: failure | taco)')
+
+  def testClosesTheTreeOnMaintenance(self):
+    """Tests ThrottleOrCloseTheTree keeps the tree under maintenance."""
+    self._SetupMockTreeStatusResponses(
+        final_tree_status='Tree is under maintenance (taco)',
+        final_general_state=constants.TREE_MAINTENANCE)
+    with mock.patch.object(tree_status, '_UpdateTreeStatus') as m:
+      tree_status.ThrottleOrCloseTheTree('foo', 'failure')
+      m.assert_called_once_with(
+          mock.ANY,
+          'Tree is under maintenance (foo: failure | taco)')
+
+  def testDiscardUpdateFromTheSameAnnouncer(self):
+    """Tests we don't include messages from the same announcer."""
+    self._SetupMockTreeStatusResponses(
+        final_tree_status='Tree is throttled (foo: failure | bar: taco)',
+        final_general_state=constants.TREE_THROTTLED)
+    with mock.patch.object(tree_status, '_UpdateTreeStatus') as m:
+      tree_status.ThrottleOrCloseTheTree('foo', 'failure')
+      # Also make sure that previous status message is included.
+      m.assert_called_once_with(mock.ANY,
+                                'Tree is throttled (foo: failure | bar: taco)')
+
+
+class TestGettingSheriffEmails(cros_test_lib.MockTestCase):
+  """Tests functions related to retrieving the sheriff's email address."""
+
+  def testParsingSheriffEmails(self):
+    """Tests parsing the raw data to get sheriff emails."""
+    # The sheriff URLs serve JS like: document.write('name1, name2').
+    # Test parsing when there is only one sheriff.
+    raw_line = "document.write('taco')"
+    self.PatchObject(tree_status, '_OpenSheriffURL', return_value=raw_line)
+    self.assertEqual(tree_status.GetSheriffEmailAddresses('chrome'),
+                     ['taco@google.com'])
+
+    # Test parsing when there are multiple sheriffs.
+    raw_line = "document.write('taco, burrito')"
+    self.PatchObject(tree_status, '_OpenSheriffURL', return_value=raw_line)
+    self.assertEqual(tree_status.GetSheriffEmailAddresses('chrome'),
+                     ['taco@google.com', 'burrito@google.com'])
+
+    # Test parsing when sheriff is None.
+    raw_line = "document.write('None (channel is sheriff)')"
+    self.PatchObject(tree_status, '_OpenSheriffURL', return_value=raw_line)
+    self.assertEqual(tree_status.GetSheriffEmailAddresses('chrome'), [])
diff --git a/cbuildbot/triage_lib.py b/cbuildbot/triage_lib.py
new file mode 100644
index 0000000..099dc4a
--- /dev/null
+++ b/cbuildbot/triage_lib.py
@@ -0,0 +1,702 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module that helps to triage Commit Queue failures."""
+
+from __future__ import print_function
+
+import ConfigParser
+import glob
+import os
+import pprint
+
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gerrit
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import patch as cros_patch
+from chromite.lib import portage_util
+
+
def GetRelevantOverlaysForConfig(config, build_root):
  """Returns the set of overlays relevant to |config|.

  Args:
    config: A cbuildbot config object; its |boards| attribute is consulted.
    build_root: Path to the build root.

  Returns:
    A set of overlay paths.
  """
  relevant_overlays = set()
  for board in config.boards:
    # Both public and private overlays may be relevant to this board.
    relevant_overlays.update(
        portage_util.FindOverlays(constants.BOTH_OVERLAYS, board, build_root))
  return relevant_overlays
+
+
def _GetAffectedImmediateSubdirs(change, git_repo):
  """Gets the set of immediate subdirs affected by |change|.

  Args:
    change: GitRepoPatch to examine.
    git_repo: Path to checkout of git repository.

  Returns:
    A set of absolute paths to modified subdirectories of |git_repo|.
  """
  # Only the first component of each touched path matters here.
  return set(os.path.join(git_repo, touched.split(os.path.sep)[0])
             for touched in change.GetDiffStatus(git_repo))
+
+
def _GetCommonAffectedSubdir(change, git_repo):
  """Gets the longest common path of changes in |change|.

  Args:
    change: GitRepoPatch to examine.
    git_repo: Path to checkout of git repository.

  Returns:
    An absolute path in |git_repo|.
  """
  touched_paths = [os.path.join(git_repo, relpath)
                   for relpath in change.GetDiffStatus(git_repo)]
  return cros_build_lib.GetCommonPathPrefix(touched_paths)
+
+
def GetAffectedOverlays(change, manifest, all_overlays):
  """Get the set of overlays affected by a given change.

  Args:
    change: The GerritPatch instance to look at.
    manifest: A ManifestCheckout instance representing our build directory.
    all_overlays: The set of all valid overlays.

  Returns:
    The set of overlays affected by the specified |change|. If the change
    affected something other than an overlay, return None.
  """
  checkout = change.GetCheckout(manifest, strict=False)
  if not checkout:
    # Without a local checkout we cannot map the change to overlays.
    return None

  git_repo = checkout.GetPath(absolute=True)

  # The whole git repo is an overlay. Return it.
  # Example: src/private-overlays/overlay-x86-zgb-private
  if git_repo in all_overlays:
    return set([git_repo])

  # Get the set of immediate subdirs affected by the change.
  # Example: src/overlays/overlay-x86-zgb
  subdirs = _GetAffectedImmediateSubdirs(change, git_repo)

  # If every affected subdir is itself an overlay, return them; otherwise
  # the change touched something that is not an overlay.
  if subdirs.issubset(all_overlays):
    return subdirs
  return None
+
+
def GetAffectedPackagesForOverlayChange(change, manifest, overlays):
  """Get the set of packages affected by the overlay |change|.

  Args:
    change: The GerritPatch instance that modifies an overlay.
    manifest: A ManifestCheckout instance representing our build directory.
    overlays: List of overlay paths.

  Returns:
    The set of packages affected by the specified |change|. E.g.
    {'chromeos-base/chromite-0.0.1-r1258'}. If the change affects
    something other than packages, return None.
  """
  checkout = change.GetCheckout(manifest, strict=False)
  if not checkout:
    # Without a local checkout we cannot map the change's paths to
    # packages. Previously this fell through and raised UnboundLocalError
    # on |git_repo|; treat it as "not a package change" instead, matching
    # the behavior of GetAffectedOverlays.
    return None

  git_repo = checkout.GetPath(absolute=True)

  packages = set()
  for path in change.GetDiffStatus(git_repo):
    # Determine if path is in a package directory by walking up
    # directories and seeing if there is an ebuild in the directory.
    start_path = os.path.join(git_repo, path)
    ebuild_path = osutils.FindInPathParents(
        '*.ebuild', start_path, test_func=glob.glob, end_path=git_repo)
    if ebuild_path:
      # Convert git_repo/../*.ebuild to the real ebuild path.
      ebuild_path = glob.glob(ebuild_path)[0]
      # Double check that the ebuild is two-levels deep in an overlay
      # directory (overlay/category/package/*.ebuild).
      if os.path.sep.join(ebuild_path.split(os.path.sep)[:-3]) in overlays:
        category, pkg_name, _ = portage_util.SplitEbuildPath(ebuild_path)
        packages.add('%s/%s' % (category, pkg_name))
        continue

    # If |change| affects anything other than packages, return None.
    return None

  return packages
+
+
def _GetOptionFromConfigFile(config_path, section, option):
  """Get |option| from |section| in |config_path|.

  Args:
    config_path: Filename to look at.
    section: Section header name.
    option: Option name.

  Returns:
    The value of the option, or None if it is not present.
  """
  parser = ConfigParser.SafeConfigParser()
  # read() silently ignores a missing file, which is the behavior we want.
  parser.read(config_path)
  if not parser.has_option(section, option):
    return None
  return parser.get(section, option)
+
+
def _GetConfigFileForChange(change, checkout_path):
  """Gets the path of the config file for |change|.

  This function takes into account the files that are modified by |change| to
  determine the commit queue config file within |checkout_path| that should be
  used for this change. The config file used is the one in the common ancestor
  directory to all changed files, or the nearest parent directory. See
  http://chromium.org/chromium-os/build/bypassing-tests-on-a-per-project-basis

  Args:
    change: Change to examine, as a GitRepoPatch object.
    checkout_path: Full absolute path to a checkout of the repository that
                   |change| applies to.

  Returns:
    Path to the config file to be read for |change|. The returned path will
    be within |checkout_path|. If no config files in common subdirectories
    were found, a config file path in the root of the checkout will be
    returned, in which case the file is not guaranteed to exist.
  """
  directory = _GetCommonAffectedSubdir(change, checkout_path)
  while True:
    candidate = os.path.join(directory, constants.CQ_CONFIG_FILENAME)
    # Stop at the first existing config file, or once we have walked up
    # to (or past) the root of the checkout.
    if os.path.isfile(candidate):
      return candidate
    if checkout_path.startswith(directory):
      return candidate
    # We must still be strictly inside the checkout at this point.
    assert directory not in ('/', '')
    directory = os.path.dirname(directory)
+
+
def GetOptionForChange(build_root, change, section, option):
  """Get |option| from |section| in the config file for |change|.

  Args:
    build_root: The root of the checkout.
    change: Change to examine, as a GitRepoPatch object.
    section: Section header name.
    option: Option name.

  Returns:
    The value of the option, or None if it could not be determined.
  """
  manifest = git.ManifestCheckout.Cached(build_root)
  checkout = change.GetCheckout(manifest)
  if not checkout:
    return None

  checkout_path = checkout.GetPath(absolute=True)
  config_path = _GetConfigFileForChange(change, checkout_path)
  try:
    return _GetOptionFromConfigFile(config_path, section, option)
  except ConfigParser.Error:
    # A broken config file should not kill the build; behave as if the
    # option were simply unset.
    logging.error('%s has malformed config file', change, exc_info=True)
    return None
+
+
def GetStagesToIgnoreForChange(build_root, change):
  """Get a list of stages that the CQ should ignore for a given |change|.

  The list of stage name prefixes to ignore for each project is specified in a
  config file inside the project, named COMMIT-QUEUE.ini. The file would look
  like this:

  [GENERAL]
    ignored-stages: HWTest VMTest

  The CQ will submit changes to the given project even if the listed stages
  failed. These strings are stage name prefixes, meaning that "HWTest" would
  match any HWTest stage (e.g. "HWTest [bvt]" or "HWTest [foo]")

  Args:
    build_root: The root of the checkout.
    change: Change to examine, as a PatchQuery object.

  Returns:
    A list of stages to ignore for the given |change|.
  """
  value = GetOptionForChange(build_root, change, 'GENERAL', 'ignored-stages')
  if not value:
    return []
  return value.split()
+
+
def GetTestSubsystemForChange(build_root, change):
  """Get a list of subsystems that a given |change| affects.

  The list of the subsystems that a change affects is specified in a config
  file inside the project, named COMMIT-QUEUE.ini. The file would look like
  this:

  [GENERAL]
    subsystem: power graphics

  Based on the subsystems a given |change| affects, the CQ could tell whether a
  failure is potentially caused by this |change|. The CQ could then submit some
  changes in the face of unrelated failures.

  Args:
    build_root: The root of the checkout.
    change: Change to examine, as a PatchQuery object.

  Returns:
    A list of subsystems for the given |change|.
  """
  value = GetOptionForChange(build_root, change, 'GENERAL', 'subsystem')
  if not value:
    return []
  return value.split()
+
class CategorizeChanges(object):
  """A collection of methods to help categorize GerritPatch changes.

  This class is mainly used on a build slave to categorize changes
  applied in the build.
  """

  @classmethod
  def ClassifyOverlayChanges(cls, changes, config, build_root, manifest,
                             packages_under_test):
    """Classifies overlay changes in |changes|.

    Args:
      changes: The list or set of GerritPatch instances.
      config: The cbuildbot config.
      build_root: Path to the build root.
      manifest: A ManifestCheckout instance representing our build directory.
      packages_under_test: A list of packages names included in the build
        without version/revision (e.g. ['chromeos-base/chromite']). If None,
        don't try to map overlay changes to packages.

    Returns:
      A (overlay_changes, irrelevant_overlay_changes) tuple; overlay_changes
      is a subset of |changes| that have modified one or more overlays, and
      irrelevant_overlay_changes is a subset of overlay_changes which are
      irrelevant to |config|.
    """
    # The overlays this builder can see at all (e.g. an external builder
    # cannot see private overlays).
    visible_overlays = set(portage_util.FindOverlays(config.overlays, None,
                                                     build_root))
    # The overlays relevant to this build.
    relevant_overlays = GetRelevantOverlaysForConfig(config, build_root)

    overlay_changes = set()
    irrelevant_overlay_changes = set()
    for change in changes:
      affected_overlays = GetAffectedOverlays(change, manifest,
                                              visible_overlays)
      if affected_overlays is not None:
        # The change modifies an overlay.
        overlay_changes.add(change)
        if not any(x in relevant_overlays for x in affected_overlays):
          # The change touched an irrelevant overlay.
          irrelevant_overlay_changes.add(change)
          continue

        if packages_under_test:
          # If the change modifies packages that are not part of this
          # build, they are considered irrelevant too.
          packages = GetAffectedPackagesForOverlayChange(
              change, manifest, visible_overlays)
          if packages:
            logging.info('%s affects packages %s',
                         cros_patch.GetChangesAsString([change]),
                         ', '.join(packages))
            if not any(x in packages_under_test for x in packages):
              irrelevant_overlay_changes.add(change)

    return overlay_changes, irrelevant_overlay_changes

  @classmethod
  def ClassifyWorkOnChanges(cls, changes, config, build_root,
                            manifest, packages_under_test):
    """Classifies WorkOn package changes in |changes|.

    Args:
      changes: The list or set of GerritPatch instances.
      config: The cbuildbot config.
      build_root: Path to the build root.
      manifest: A ManifestCheckout instance representing our build directory.
      packages_under_test: A list of packages names included in the build.
        (e.g. ['chromeos-base/chromite-0.0.1-r1258']).

    Returns:
      A (workon_changes, irrelevant_workon_changes) tuple; workon_changes
      is a subset of |changes| that have modified workon packages, and
      irrelevant_workon_changes is a subset of workon_changes which are
      irrelevant to |config|.
    """
    workon_changes = set()
    irrelevant_workon_changes = set()

    # Maps (project, branch) -> list of workon packages built from it.
    workon_dict = portage_util.BuildFullWorkonPackageDictionary(
        build_root, config.overlays, manifest)

    pp = pprint.PrettyPrinter(indent=2)
    logging.info('(project, branch) to workon package mapping:\n %s',
                 pp.pformat(workon_dict))
    logging.info('packages under test\n: %s', pp.pformat(packages_under_test))

    for change in changes:
      packages = workon_dict.get((change.project, change.tracking_branch))
      if packages:
        # The CL modifies a workon package.
        workon_changes.add(change)
        if all(x not in packages_under_test for x in packages):
          irrelevant_workon_changes.add(change)

    return workon_changes, irrelevant_workon_changes

  @classmethod
  def _FilterProjectsInManifestByGroup(cls, manifest, groups):
    """Filters projects in |manifest| by |groups|.

    Args:
      manifest: A git.Manifest instance.
      groups: A list of groups to filter.

    Returns:
      A set of (project, branch) tuples where each tuple is associated
      with at least one group in |groups|.
    """
    results = set()
    for project, checkout_list in manifest.checkouts_by_name.iteritems():
      # A project may have several checkouts (one per branch/path).
      for checkout in checkout_list:
        if any(x in checkout['groups'] for x in groups):
          branch = git.StripRefs(checkout['tracking_branch'])
          results.add((project, branch))

    return results

  @classmethod
  def GetChangesToBuildTools(cls, changes, manifest):
    """Returns the changes associated with buildtools projects.

    Args:
      changes: The list or set of GerritPatch instances.
      manifest: A git.Manifest instance.

    Returns:
      A subset of |changes| to projects of "buildtools" group.
    """
    buildtool_set = cls._FilterProjectsInManifestByGroup(
        manifest, ['buildtools'])
    return set([x for x in changes if (x.project, x.tracking_branch)
                in buildtool_set])

  @classmethod
  def GetIrrelevantChanges(cls, changes, config, build_root, manifest,
                           packages_under_test):
    """Determines changes irrelevant to build |config|.

    This method determines a set of changes that are irrelevant to the
    build |config|. The general rule of thumb is that if we are unsure
    whether a change is relevant, consider it relevant.

    Args:
      changes: The list or set of GerritPatch instances.
      config: The cbuildbot config.
      build_root: Path to the build root.
      manifest: A ManifestCheckout instance representing our build directory.
      packages_under_test: A list of packages that were tested in this build.

    Returns:
      A subset of |changes| which are irrelevant to |config|.
    """
    untriaged_changes = set(changes)
    irrelevant_changes = set()

    # Changes that modify projects used in building are always relevant.
    untriaged_changes -= cls.GetChangesToBuildTools(changes, manifest)

    if packages_under_test is not None:
      # Strip the version of the package in packages_under_test.
      cpv_list = [portage_util.SplitCPV(x) for x in packages_under_test]
      packages_under_test = ['%s/%s' % (x.category, x.package) for x in
                             cpv_list]

    # Handles overlay changes.
    # ClassifyOverlayChanges only handles overlays visible to this
    # build. For example, an external build may not be able to view
    # the internal overlays. However, in that case, the internal changes
    # have already been filtered out in CommitQueueSyncStage, and are
    # not included in |changes|.
    overlay_changes, irrelevant_overlay_changes = cls.ClassifyOverlayChanges(
        untriaged_changes, config, build_root, manifest, packages_under_test)
    untriaged_changes -= overlay_changes
    irrelevant_changes |= irrelevant_overlay_changes

    # Handles workon package changes.
    if packages_under_test is not None:
      try:
        workon_changes, irrelevant_workon_changes = cls.ClassifyWorkOnChanges(
            untriaged_changes, config, build_root, manifest,
            packages_under_test)
      except Exception as e:
        # Ignore the exception if we cannot categorize workon
        # changes. We will conservatively assume the changes are
        # relevant.
        logging.warning('Unable to categorize cros workon changes: %s', e)
      else:
        untriaged_changes -= workon_changes
        irrelevant_changes |= irrelevant_workon_changes

    return irrelevant_changes
+
+
class CalculateSuspects(object):
  """Diagnose the cause for a given set of failures."""

  @classmethod
  def GetBlamedChanges(cls, changes):
    """Returns the changes that have been manually blamed.

    Args:
      changes: List of GerritPatch changes.

    Returns:
      A list of |changes| that were marked verified: -1 or
      code-review: -2.
    """
    # Load the latest info about whether the changes were vetoed, in case they
    # were vetoed in the middle of a cbuildbot run. That said, be careful not to
    # return info about newer patchsets.
    reloaded_changes = gerrit.GetGerritPatchInfoWithPatchQueries(changes)
    return [x for x, y in zip(changes, reloaded_changes) if y.WasVetoed()]

  @classmethod
  def _FindPackageBuildFailureSuspects(cls, changes, messages, sanity):
    """Figure out what CLs are at fault for a set of build failures.

    Args:
        changes: A list of cros_patch.GerritPatch instances to consider.
        messages: A list of failure messages. We will only look at the ones of
                  type BuildFailureMessage.
        sanity: The sanity checker builder passed and the tree was open when
                the build started.

    Returns:
        A set of |changes| suspected of causing the failures.
    """
    suspects = set()
    for message in messages:
      if message:
        suspects.update(
            message.FindPackageBuildFailureSuspects(changes, sanity))
      elif sanity:
        # A None message means a builder did not report its status properly;
        # conservatively blame every change.
        suspects.update(changes)
    return suspects

  @classmethod
  def FilterChangesForInfraFail(cls, changes):
    """Returns a list of changes responsible for infra failures."""
    # Chromite changes could cause infra failures.
    return [x for x in changes if x.project in constants.INFRA_PROJECTS]

  @classmethod
  def _MatchesFailureType(cls, messages, fail_type, strict=True):
    """Returns True if all failures are instances of |fail_type|.

    Args:
      messages: A list of BuildFailureMessage or NoneType objects
        from the failed slaves.
      fail_type: The exception class to look for.
      strict: If False, treat NoneType message as a match.

    Returns:
      True if all objects in |messages| are non-None and all failures are
      instances of |fail_type|.
    """
    return ((not strict or all(messages)) and
            all(x.MatchesFailureType(fail_type) for x in messages if x))

  @classmethod
  def OnlyLabFailures(cls, messages, no_stat):
    """Determine if the cause of build failure was lab failure.

    Args:
      messages: A list of BuildFailureMessage or NoneType objects
        from the failed slaves.
      no_stat: A list of builders which failed prematurely without reporting
        status.

    Returns:
      True if the build failed purely due to lab failures.
    """
    # If any builder failed prematurely, lab failure was not the only cause.
    return (not no_stat and
            cls._MatchesFailureType(messages, failures_lib.TestLabFailure))

  @classmethod
  def OnlyInfraFailures(cls, messages, no_stat):
    """Determine if the cause of build failure was infrastructure failure.

    Args:
      messages: A list of BuildFailureMessage or NoneType objects
        from the failed slaves.
      no_stat: A list of builders which failed prematurely without reporting
        status.

    Returns:
      True if the build failed purely due to infrastructure failures.
    """
    # "Failed to report status" and "NoneType" messages are considered
    # infra failures.
    return ((not messages and no_stat) or
            cls._MatchesFailureType(
                messages, failures_lib.InfrastructureFailure, strict=False))

  @classmethod
  def FindSuspects(cls, changes, messages, infra_fail=False, lab_fail=False,
                   sanity=True):
    """Find out what changes probably caused our failure.

    In cases where there were no internal failures, we can assume that the
    external failures are at fault. Otherwise, this function just defers to
    _FindPackageBuildFailureSuspects and GetBlamedChanges as needed.
    If the failures don't match either case, just fail everything.

    Args:
      changes: A list of cros_patch.GerritPatch instances to consider.
      messages: A list of build failure messages, of type
        BuildFailureMessage or of type NoneType.
      infra_fail: The build failed purely due to infrastructure failures.
      lab_fail: The build failed purely due to test lab infrastructure
        failures.
      sanity: The sanity checker builder passed and the tree was open when
              the build started.

    Returns:
       A set of changes as suspects.
    """
    bad_changes = cls.GetBlamedChanges(changes)
    if bad_changes:
      # If there are changes that have been set verified=-1 or
      # code-review=-2, these changes are the ONLY suspects of the
      # failed build.
      logging.warning('Detected that some changes have been blamed for '
                      'the build failure. Only these CLs will be rejected: %s',
                      cros_patch.GetChangesAsString(bad_changes))
      return set(bad_changes)
    elif lab_fail:
      logging.warning('Detected that the build failed purely due to HW '
                      'Test Lab failure(s). Will not reject any changes')
      return set()
    elif infra_fail:
      # The non-lab infrastructure errors might have been caused
      # by chromite changes.
      logging.warning(
          'Detected that the build failed due to non-lab infrastructure '
          'issue(s). Will only reject chromite changes')
      return set(cls.FilterChangesForInfraFail(changes))

    return cls._FindPackageBuildFailureSuspects(changes, messages, sanity)

  @classmethod
  def _CanIgnoreFailures(cls, messages, change, build_root):
    """Examine whether we can ignore the failures for |change|.

    Examine the |messages| to see if we are allowed to ignore
    the failures based on the per-repository settings in
    COMMIT-QUEUE.ini.

    Args:
      messages: A list of BuildFailureMessage from the failed slaves.
      change: A GerritPatch instance to examine.
      build_root: Build root directory.

    Returns:
      True if we can ignore the failures; False otherwise.
    """
    # Some repositories may opt to ignore certain stage failures.
    failing_stages = set()
    if any(x.GetFailingStages() is None for x in messages):
      # If there are no tracebacks, that means that the builder
      # did not report its status properly. We don't know what
      # stages failed and cannot safely ignore any stage.
      return False

    for message in messages:
      failing_stages.update(message.GetFailingStages())
    # Only ignorable if EVERY failing stage is in the change's ignore list.
    ignored_stages = GetStagesToIgnoreForChange(build_root, change)
    if ignored_stages and failing_stages.issubset(ignored_stages):
      return True

    return False

  @classmethod
  def GetFullyVerifiedChanges(cls, changes, changes_by_config, failing,
                              inflight, no_stat, messages, build_root):
    """Examines build failures and returns a set of fully verified changes.

    A change is fully verified if all the build configs relevant to
    this change have either passed or failed in a manner that can be
    safely ignored by the change.

    Args:
      changes: A list of GerritPatch instances to examine.
      changes_by_config: A dictionary of relevant changes indexed by the
        config names.
      failing: Names of the builders that failed.
      inflight: Names of the builders that timed out.
      no_stat: Set of builder names of slave builders that had status None.
      messages: A list of BuildFailureMessage or NoneType objects from
        the failed slaves.
      build_root: Build root directory.

    Returns:
      A set of fully verified changes.
    """
    changes = set(changes)
    no_stat = set(no_stat)
    failing = set(failing)
    inflight = set(inflight)

    fully_verified = set()

    all_tested_changes = set()
    for tested_changes in changes_by_config.itervalues():
      all_tested_changes.update(tested_changes)

    untested_changes = changes - all_tested_changes
    if untested_changes:
      # Some board overlay changes were not tested by CQ at all.
      logging.info('These changes were not tested by any slaves, '
                   'so they will be submitted: %s',
                   cros_patch.GetChangesAsString(untested_changes))
      fully_verified.update(untested_changes)

    for change in all_tested_changes:
      # If all relevant configs associated with a change passed, the
      # change is fully verified.
      relevant_configs = [k for k, v in changes_by_config.iteritems() if
                          change in v]
      # A relevant builder that timed out or never reported means we cannot
      # verify this change.
      if any(x in set.union(no_stat, inflight) for x in relevant_configs):
        continue

      failed_configs = [x for x in relevant_configs if x in failing]
      if not failed_configs:
        logging.info('All the %s relevant config(s) for change %s passed, so '
                     'it will be submitted.', len(relevant_configs),
                     cros_patch.GetChangesAsString([change]))
        fully_verified.add(change)
      else:
        # Examine the failures and see if we can safely ignore them
        # for the change.
        failed_messages = [x for x in messages if x.builder in failed_configs]
        if cls._CanIgnoreFailures(failed_messages, change, build_root):
          logging.info('All failures of relevant configs for change %s are '
                       'ignorable by this change, so it will be submitted.',
                       cros_patch.GetChangesAsString([change]))
          fully_verified.add(change)

    return fully_verified
diff --git a/cbuildbot/triage_lib_unittest b/cbuildbot/triage_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/triage_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/triage_lib_unittest.py b/cbuildbot/triage_lib_unittest.py
new file mode 100644
index 0000000..ab8716c
--- /dev/null
+++ b/cbuildbot/triage_lib_unittest.py
@@ -0,0 +1,435 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module that contains unittests for triage_lib module."""
+
+from __future__ import print_function
+
+import ConfigParser
+import os
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import results_lib
+from chromite.cbuildbot import triage_lib
+from chromite.cbuildbot.stages import sync_stages_unittest
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import gerrit
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import patch as cros_patch
+from chromite.lib import patch_unittest
+from chromite.lib import portage_util
+
+
# Load the site configuration once at import time; the tests below read
# site_config.params (e.g. INTERNAL_REMOTE).
site_config = config_lib.GetConfig()
+
+
def GetFailedMessage(exceptions, stage='Build', internal=False,
                     bot='daisy_spring-paladin'):
  """Returns a BuildFailureMessage object."""
  # Wrap each exception in a RecordedTraceback attributed to |stage|.
  tracebacks = [
      results_lib.RecordedTraceback(stage, stage, ex, str(ex))
      for ex in exceptions
  ]
  reason = 'failure reason string'
  return failures_lib.BuildFailureMessage(
      'Stage %s failed' % stage, tracebacks, internal, reason, bot)
+
+
+class TestFindSuspects(patch_unittest.MockPatchBase):
+  """Tests CalculateSuspects."""
+
  def setUp(self):
    """Creates mock patches and stubs out checkout/diff/Gerrit lookups."""
    # One patch per project type exercised by the tests below.
    overlay = 'chromiumos/overlays/chromiumos-overlay'
    self.overlay_patch = self.GetPatches(project=overlay)
    chromite = 'chromiumos/chromite'
    self.chromite_patch = self.GetPatches(project=chromite)
    self.power_manager = 'chromiumos/platform2/power_manager'
    self.power_manager_pkg = 'chromeos-base/power_manager'
    self.power_manager_patch = self.GetPatches(project=self.power_manager)
    self.kernel = 'chromiumos/third_party/kernel/foo'
    self.kernel_pkg = 'sys-kernel/chromeos-kernel-foo'
    self.kernel_patch = self.GetPatches(project=self.kernel)
    self.secret = 'chromeos/secret'
    # An internal-remote patch, to cover internal-vs-external handling.
    self.secret_patch = self.GetPatches(
        project=self.secret, remote=site_config.params.INTERNAL_REMOTE)
    # Avoid touching real git checkouts during the tests.
    self.PatchObject(cros_patch.GitRepoPatch, 'GetCheckout')
    self.PatchObject(cros_patch.GitRepoPatch, 'GetDiffStatus')
    # GetBlamedChanges reloads changes from Gerrit; echo them back unchanged.
    self.PatchObject(gerrit, 'GetGerritPatchInfoWithPatchQueries',
                     side_effect=lambda x: x)
+
+  @staticmethod
+  def _GetBuildFailure(pkg):
+    """Create a PackageBuildFailure for the specified |pkg|.
+
+    Args:
+      pkg: Package that failed to build.
+    """
+    ex = cros_build_lib.RunCommandError('foo', cros_build_lib.CommandResult())
+    return failures_lib.PackageBuildFailure(ex, 'bar', [pkg])
+
+  def _AssertSuspects(self, patches, suspects, pkgs=(), exceptions=(),
+                      internal=False, infra_fail=False, lab_fail=False,
+                      sanity=True):
+    """Run _FindSuspects and verify its output.
+
+    Args:
+      patches: List of patches to look at.
+      suspects: Expected list of suspects returned by _FindSuspects.
+      pkgs: List of packages that failed with exceptions in the build.
+      exceptions: List of other exceptions that occurred during the build.
+      internal: Whether the failures occurred on an internal bot.
+      infra_fail: Whether the build failed due to infrastructure issues.
+      lab_fail: Whether the build failed due to lab infrastructure issues.
+      sanity: The sanity checker builder passed and the tree was open when
+              the build started.
+    """
+    all_exceptions = list(exceptions) + [self._GetBuildFailure(x) for x in pkgs]
+    message = GetFailedMessage(all_exceptions, internal=internal)
+    results = triage_lib.CalculateSuspects.FindSuspects(
+        patches, [message], lab_fail=lab_fail, infra_fail=infra_fail,
+        sanity=sanity)
+    self.assertEquals(set(suspects), results)
+
+  def testFailSameProject(self):
+    """Patches to the package that failed should be marked as failing."""
+    suspects = [self.kernel_patch]
+    patches = suspects + [self.power_manager_patch, self.secret_patch]
+    with self.PatchObject(portage_util, 'FindWorkonProjects',
+                          return_value=self.kernel):
+      self._AssertSuspects(patches, suspects, [self.kernel_pkg])
+      self._AssertSuspects(patches, suspects, [self.kernel_pkg], sanity=False)
+
+  def testFailSameProjectPlusOverlay(self):
+    """Patches to the overlay should be marked as failing."""
+    suspects = [self.overlay_patch, self.kernel_patch]
+    patches = suspects + [self.power_manager_patch, self.secret_patch]
+    with self.PatchObject(portage_util, 'FindWorkonProjects',
+                          return_value=self.kernel):
+      self._AssertSuspects(patches, suspects, [self.kernel_pkg])
+      self._AssertSuspects(patches, [self.kernel_patch], [self.kernel_pkg],
+                           sanity=False)
+
+  def testFailUnknownPackage(self):
+    """If no patches changed the package, all patches should fail."""
+    changes = [self.overlay_patch, self.power_manager_patch, self.secret_patch]
+    self._AssertSuspects(changes, changes, [self.kernel_pkg])
+    self._AssertSuspects(changes, [], [self.kernel_pkg], sanity=False)
+
+  def testFailUnknownException(self):
+    """An unknown exception should cause all patches to fail."""
+    changes = [self.kernel_patch, self.power_manager_patch, self.secret_patch]
+    self._AssertSuspects(changes, changes, exceptions=[Exception('foo bar')])
+    self._AssertSuspects(changes, [], exceptions=[Exception('foo bar')],
+                         sanity=False)
+
+  def testFailUnknownInternalException(self):
+    """An unknown exception should cause all patches to fail."""
+    suspects = [self.kernel_patch, self.power_manager_patch, self.secret_patch]
+    self._AssertSuspects(suspects, suspects, exceptions=[Exception('foo bar')],
+                         internal=True)
+    self._AssertSuspects(suspects, [], exceptions=[Exception('foo bar')],
+                         internal=True, sanity=False)
+
+  def testFailUnknownCombo(self):
+    """Unknown exceptions should cause all patches to fail.
+
+    Even if there are also build failures that we can explain.
+    """
+    suspects = [self.kernel_patch, self.power_manager_patch, self.secret_patch]
+    with self.PatchObject(portage_util, 'FindWorkonProjects',
+                          return_value=self.kernel):
+      self._AssertSuspects(suspects, suspects, [self.kernel_pkg],
+                           [Exception('foo bar')])
+      self._AssertSuspects(suspects, [self.kernel_patch], [self.kernel_pkg],
+                           [Exception('foo bar')], sanity=False)
+
+  def testFailNone(self):
+    """If a message is just 'None', it should cause all patches to fail."""
+    patches = [self.kernel_patch, self.power_manager_patch, self.secret_patch]
+    results = triage_lib.CalculateSuspects.FindSuspects(patches, [None])
+    self.assertItemsEqual(results, patches)
+
+    results = triage_lib.CalculateSuspects.FindSuspects(
+        patches, [None], sanity=False)
+    self.assertItemsEqual(results, [])
+
+  def testFailNoExceptions(self):
+    """If there are no exceptions, all patches should be failed."""
+    suspects = [self.kernel_patch, self.power_manager_patch, self.secret_patch]
+    self._AssertSuspects(suspects, suspects)
+    self._AssertSuspects(suspects, [], sanity=False)
+
+  def testLabFail(self):
+    """If there are only lab failures, no suspect is chosen."""
+    suspects = []
+    changes = [self.kernel_patch, self.power_manager_patch]
+    self._AssertSuspects(changes, suspects, lab_fail=True, infra_fail=True)
+    self._AssertSuspects(changes, suspects, lab_fail=True, infra_fail=True,
+                         sanity=False)
+
+  def testInfraFail(self):
+    """If there are only non-lab infra failures, pick chromite changes."""
+    suspects = [self.chromite_patch]
+    changes = [self.kernel_patch, self.power_manager_patch] + suspects
+    self._AssertSuspects(changes, suspects, lab_fail=False, infra_fail=True)
+    self._AssertSuspects(changes, suspects, lab_fail=False, infra_fail=True,
+                         sanity=False)
+
+  def testManualBlame(self):
+    """If there are changes that were manually blamed, pick those changes."""
+    approvals1 = [{'type': 'VRIF', 'value': '-1', 'grantedOn': 1391733002},
+                  {'type': 'CRVW', 'value': '2', 'grantedOn': 1391733002},
+                  {'type': 'COMR', 'value': '1', 'grantedOn': 1391733002},]
+    approvals2 = [{'type': 'VRIF', 'value': '1', 'grantedOn': 1391733002},
+                  {'type': 'CRVW', 'value': '-2', 'grantedOn': 1391733002},
+                  {'type': 'COMR', 'value': '1', 'grantedOn': 1391733002},]
+    suspects = [self.MockPatch(approvals=approvals1),
+                self.MockPatch(approvals=approvals2)]
+    changes = [self.kernel_patch, self.chromite_patch] + suspects
+    self._AssertSuspects(changes, suspects, lab_fail=False, infra_fail=False)
+    self._AssertSuspects(changes, suspects, lab_fail=True, infra_fail=False)
+    self._AssertSuspects(changes, suspects, lab_fail=True, infra_fail=True)
+    self._AssertSuspects(changes, suspects, lab_fail=False, infra_fail=True)
+    self._AssertSuspects(changes, suspects, lab_fail=False, infra_fail=False,
+                         sanity=False)
+    self._AssertSuspects(changes, suspects, lab_fail=True, infra_fail=False,
+                         sanity=False)
+    self._AssertSuspects(changes, suspects, lab_fail=True, infra_fail=True,
+                         sanity=False)
+    self._AssertSuspects(changes, suspects, lab_fail=False, infra_fail=True,
+                         sanity=False)
+
+  def _GetMessages(self, lab_fail=0, infra_fail=0, other_fail=0):
+    """Returns a list of BuildFailureMessage objects."""
+    messages = []
+    messages.extend(
+        [GetFailedMessage([failures_lib.TestLabFailure()])
+         for _ in range(lab_fail)])
+    messages.extend(
+        [GetFailedMessage([failures_lib.InfrastructureFailure()])
+         for _ in range(infra_fail)])
+    messages.extend(
+        [GetFailedMessage(Exception()) for _ in range(other_fail)])
+    return messages
+
+  def testOnlyLabFailures(self):
+    """Tests the OnlyLabFailures function."""
+    messages = self._GetMessages(lab_fail=2)
+    no_stat = []
+    self.assertTrue(
+        triage_lib.CalculateSuspects.OnlyLabFailures(messages, no_stat))
+
+    no_stat = ['foo', 'bar']
+    # Some builders did not start. This is not a lab failure.
+    self.assertFalse(
+        triage_lib.CalculateSuspects.OnlyLabFailures(messages, no_stat))
+
+    messages = self._GetMessages(lab_fail=1, infra_fail=1)
+    no_stat = []
+    # Non-lab infrastructure failures are present.
+    self.assertFalse(
+        triage_lib.CalculateSuspects.OnlyLabFailures(messages, no_stat))
+
+  def testOnlyInfraFailures(self):
+    """Tests the OnlyInfraFailures function."""
+    messages = self._GetMessages(infra_fail=2)
+    no_stat = []
+    self.assertTrue(
+        triage_lib.CalculateSuspects.OnlyInfraFailures(messages, no_stat))
+
+    messages = self._GetMessages(lab_fail=2)
+    no_stat = []
+    # Lab failures are infrastructure failures.
+    self.assertTrue(
+        triage_lib.CalculateSuspects.OnlyInfraFailures(messages, no_stat))
+
+    no_stat = ['orange']
+    messages = []
+    # 'Builders failed to report statuses' belong to infrastructure failures.
+    self.assertTrue(
+        triage_lib.CalculateSuspects.OnlyInfraFailures(messages, no_stat))
+
+
class TestGetFullyVerifiedChanges(patch_unittest.MockPatchBase):
  """Tests GetFullyVerifiedChanges() and related functions."""

  def setUp(self):
    # Fake build root path; these tests never touch it on disk.
    self.build_root = '/foo/build/root'
    self.changes = self.GetPatches(how_many=5)

  def testChangesNoAllTested(self):
    """Tests that those changes are fully verified."""
    # NOTE: this binds all three names to the SAME empty list.  Harmless
    # here because none of the lists is mutated.
    no_stat = failing = messages = []
    inflight = ['foo-paladin']
    changes_by_config = {'foo-paladin': []}

    verified = triage_lib.CalculateSuspects.GetFullyVerifiedChanges(
        self.changes, changes_by_config, failing, inflight, no_stat,
        messages, self.build_root)

    # The inflight builder tested no changes, so everything is verified.
    self.assertEquals(verified, set(self.changes))

  def testChangesNotVerified(self):
    """Tests that changes are not verified if builds failed prematurely."""
    failing = messages = []
    inflight = ['foo-paladin']
    no_stat = ['puppy-paladin']
    # 'bar-paladin' (which completed) covers all changes; the first two
    # changes are still inflight and the last two never got a status.
    changes_by_config = {'foo-paladin': set(self.changes[:2]),
                         'bar-paladin': set(self.changes),
                         'puppy-paladin': set(self.changes[-2:])}

    verified = triage_lib.CalculateSuspects.GetFullyVerifiedChanges(
        self.changes, changes_by_config, failing, inflight, no_stat,
        messages, self.build_root)
    # Only the middle change avoided both the inflight and no-stat builders.
    self.assertEquals(verified, set(self.changes[2:-2]))

  def testChangesNotVerifiedOnFailures(self):
    """Tests that changes are not verified if failures cannot be ignored."""
    messages = no_stat = inflight = []
    failing = ['cub-paladin']
    changes_by_config = {'bar-paladin': set(self.changes),
                         'cub-paladin': set(self.changes[:2])}

    # Force the "failures cannot be ignored" path.
    self.PatchObject(
        triage_lib.CalculateSuspects, '_CanIgnoreFailures', return_value=False)
    verified = triage_lib.CalculateSuspects.GetFullyVerifiedChanges(
        self.changes, changes_by_config, failing, inflight, no_stat,
        messages, self.build_root)
    # Changes tested by the failing builder are excluded.
    self.assertEquals(verified, set(self.changes[2:]))

  def testChangesVerifiedWhenFailuresCanBeIgnored(self):
    """Tests that changes are verified if failures can be ignored."""
    messages = no_stat = inflight = []
    failing = ['cub-paladin']
    changes_by_config = {'bar-paladin': set(self.changes),
                         'cub-paladin': set(self.changes[:2])}

    # Force the "failures can be ignored" path.
    self.PatchObject(
        triage_lib.CalculateSuspects, '_CanIgnoreFailures', return_value=True)
    verified = triage_lib.CalculateSuspects.GetFullyVerifiedChanges(
        self.changes, changes_by_config, failing, inflight, no_stat,
        messages, self.build_root)
    self.assertEquals(verified, set(self.changes))

  def testCanIgnoreFailures(self):
    """Tests _CanIgnoreFailures()."""
    # pylint: disable=protected-access
    change = self.changes[0]
    messages = [GetFailedMessage([Exception()], stage='HWTest'),
                GetFailedMessage([Exception()], stage='VMTest'),]
    m = self.PatchObject(triage_lib, 'GetStagesToIgnoreForChange')

    # VMTest failed but is not ignorable -> cannot ignore.
    m.return_value = ('HWTest',)
    self.assertFalse(triage_lib.CalculateSuspects._CanIgnoreFailures(
        messages, change, self.build_root))

    # Every failed stage is in the ignore list -> can ignore.
    m.return_value = ('HWTest', 'VMTest', 'Foo')
    self.assertTrue(triage_lib.CalculateSuspects._CanIgnoreFailures(
        messages, change, self.build_root))

    # No ignore list configured -> cannot ignore.
    m.return_value = None
    self.assertFalse(triage_lib.CalculateSuspects._CanIgnoreFailures(
        messages, change, self.build_root))
+
+
class GetOptionsTest(patch_unittest.MockPatchBase):
  """Tests for functions that get options from config file."""

  def GetOption(self, path, section='a', option='b'):
    """Read |option| from |section| of the config file at |path|."""
    # pylint: disable=protected-access
    return triage_lib._GetOptionFromConfigFile(path, section, option)

  def testBadConfigFile(self):
    """Test if we can handle an incorrectly formatted config file."""
    with osutils.TempDir(set_global=True) as tempdir:
      ini_path = os.path.join(tempdir, 'foo.ini')
      osutils.WriteFile(ini_path, 'foobar')
      self.assertRaises(ConfigParser.Error, self.GetOption, ini_path)

  def testMissingConfigFile(self):
    """Test if we can handle a missing config file."""
    with osutils.TempDir(set_global=True) as tempdir:
      ini_path = os.path.join(tempdir, 'foo.ini')
      self.assertEqual(None, self.GetOption(ini_path))

  def testGoodConfigFile(self):
    """Test if we can handle a good config file."""
    with osutils.TempDir(set_global=True) as tempdir:
      ini_path = os.path.join(tempdir, 'foo.ini')
      osutils.WriteFile(ini_path, '[a]\nb: bar baz\n')
      self.assertEqual('bar baz', self.GetOption(ini_path))

  def testGetIgnoredStages(self):
    """Test if we can get the ignored stages from a good config file."""
    with osutils.TempDir(set_global=True) as tempdir:
      ini_path = os.path.join(tempdir, 'foo.ini')
      osutils.WriteFile(ini_path, '[GENERAL]\nignored-stages: bar baz\n')
      value = self.GetOption(ini_path, section='GENERAL',
                             option='ignored-stages')
      self.assertEqual('bar baz', value)

  def testGetSubsystem(self):
    """Test if we can get the subsystem label from a good config file."""
    with osutils.TempDir(set_global=True) as tempdir:
      ini_path = os.path.join(tempdir, 'foo.ini')
      osutils.WriteFile(ini_path, '[GENERAL]\nsubsystem: power light\n')
      value = self.GetOption(ini_path, section='GENERAL', option='subsystem')
      self.assertEqual('power light', value)

  def testResultForBadConfigFile(self):
    """Test whether the return is None when handle a malformat config file."""
    build_root = 'foo/build/root'
    change = self.GetPatches(how_many=1)
    # Stub out the git plumbing so only the config parsing is exercised.
    self.PatchObject(git.ManifestCheckout, 'Cached')
    self.PatchObject(cros_patch.GitRepoPatch, 'GetCheckout',
                     return_value=git.ProjectCheckout(attrs={}))
    self.PatchObject(git.ProjectCheckout, 'GetPath')

    with osutils.TempDir(set_global=True) as tempdir:
      cq_ini = os.path.join(tempdir, 'COMMIT-QUEUE.ini')
      osutils.WriteFile(cq_ini, 'foo\n')
      self.PatchObject(triage_lib, '_GetConfigFileForChange',
                       return_value=cq_ini)

      result = triage_lib.GetOptionForChange(build_root, change, 'a', 'b')
      self.assertEqual(None, result)
+
+
class ConfigFileTest(cros_test_lib.MockTestCase):
  """Tests for functions that read config information for a patch."""
  # pylint: disable=protected-access

  def _GetPatch(self, affected_files):
    """Return a MockPatch that reports |affected_files| as modified."""
    return sync_stages_unittest.MockPatch(
        mock_diff_status={path: 'M' for path in affected_files})

  def testAffectedSubdir(self):
    """_GetCommonAffectedSubdir returns the deepest shared directory."""
    # Files at the checkout root: the common subdir is the checkout itself.
    p = self._GetPatch(['a', 'b', 'c'])
    self.assertEqual(triage_lib._GetCommonAffectedSubdir(p, '/a/b'),
                     '/a/b')

    # All files under a/: the common subdir is that directory.
    # (The original test repeated this case twice, byte for byte; the
    # duplicate has been dropped.)
    p = self._GetPatch(['a/a', 'a/b', 'a/c'])
    self.assertEqual(triage_lib._GetCommonAffectedSubdir(p, '/a/b'),
                     '/a/b/a')

  def testGetConfigFile(self):
    """_GetConfigFileForChange locates COMMIT-QUEUE.ini near the change."""
    p = self._GetPatch(['a/a', 'a/b', 'a/c'])
    # When a per-subdir config exists, it is used (trailing slash or not)...
    self.PatchObject(os.path, 'isfile', return_value=True)
    self.assertEqual(triage_lib._GetConfigFileForChange(p, '/a/b'),
                     '/a/b/a/COMMIT-QUEUE.ini')
    self.assertEqual(triage_lib._GetConfigFileForChange(p, '/a/b/'),
                     '/a/b/a/COMMIT-QUEUE.ini')

    # ...otherwise we fall back to the checkout root's config.
    self.PatchObject(os.path, 'isfile', return_value=False)
    self.assertEqual(triage_lib._GetConfigFileForChange(p, '/a/b'),
                     '/a/b/COMMIT-QUEUE.ini')
    self.assertEqual(triage_lib._GetConfigFileForChange(p, '/a/b/'),
                     '/a/b/COMMIT-QUEUE.ini')
diff --git a/cbuildbot/trybot_patch_pool.py b/cbuildbot/trybot_patch_pool.py
new file mode 100644
index 0000000..b16d2da
--- /dev/null
+++ b/cbuildbot/trybot_patch_pool.py
@@ -0,0 +1,159 @@
+# Copyright (c) 2011-2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module that contains trybot patch pool code."""
+
+from __future__ import print_function
+
+import functools
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.lib import cros_logging as logging
+from chromite.lib import gerrit
+from chromite.lib import git
+from chromite.lib import patch as cros_patch
+
+
# Module-level site configuration; the manifest filters below read the
# manifest project names from site_config.params.
site_config = config_lib.GetConfig()
+
+
def ChromiteFilter(patch):
  """Used with FilterFn to isolate patches to chromite."""
  is_chromite = (constants.CHROMITE_PROJECT == patch.project)
  return is_chromite
+
+
def ExtManifestFilter(patch):
  """Used with FilterFn to isolate patches to the external manifest."""
  ext_manifest = site_config.params.MANIFEST_PROJECT
  return patch.project == ext_manifest
+
+
def IntManifestFilter(patch):
  """Used with FilterFn to isolate patches to the internal manifest."""
  int_manifest = site_config.params.MANIFEST_INT_PROJECT
  return patch.project == int_manifest
+
+
def ManifestFilter(patch):
  """Used with FilterFn to isolate patches to the manifest."""
  # A manifest patch targets either the external or the internal manifest.
  return any(f(patch) for f in (ExtManifestFilter, IntManifestFilter))
+
+
def BranchFilter(branch, patch):
  """Used with FilterFn to isolate patches based on a specific upstream."""
  tracked = patch.tracking_branch
  return tracked == branch
+
+
def GitRemoteUrlFilter(url, patch):
  """Used with FilterFn to isolate a patch based on the url of its remote."""
  remote_url = patch.git_remote_url
  return remote_url == url
+
+
+class TrybotPatchPool(object):
+  """Represents patches specified by the user to test."""
+  def __init__(self, gerrit_patches=(), local_patches=(), remote_patches=()):
+    self.gerrit_patches = tuple(gerrit_patches)
+    self.local_patches = tuple(local_patches)
+    self.remote_patches = tuple(remote_patches)
+
+  def __nonzero__(self):
+    """Returns True if the pool has any patches."""
+    return any([self.gerrit_patches, self.local_patches, self.remote_patches])
+
+  def Filter(self, **kwargs):
+    """Returns a new pool with only patches that match constraints.
+
+    Args:
+      **kwargs: constraints in the form of attr=value.  I.e.,
+                project='chromiumos/chromite', tracking_branch='master'.
+    """
+    def AttributeFilter(patch):
+      for key in kwargs:
+        if getattr(patch, key, object()) != kwargs[key]:
+          return False
+      return True
+
+    return self.FilterFn(AttributeFilter)
+
+  def FilterFn(self, filter_fn, negate=False):
+    """Returns a new pool with only patches that match constraints.
+
+    Args:
+      filter_fn: Functor that accepts a 'patch' argument, and returns whether to
+                 include the patch in the results.
+      negate: Return patches that don't pass the filter_fn.
+    """
+    f = filter_fn
+    if negate:
+      f = lambda p: not filter_fn(p)
+
+    return self.__class__(
+        gerrit_patches=filter(f, self.gerrit_patches),
+        local_patches=filter(f, self.local_patches),
+        remote_patches=filter(f, self.remote_patches))
+
+  def FilterManifest(self, negate=False):
+    """Return a patch pool with only patches to the manifest."""
+    return self.FilterFn(ManifestFilter, negate=negate)
+
+  def FilterIntManifest(self, negate=False):
+    """Return a patch pool with only patches to the internal manifest."""
+    return self.FilterFn(IntManifestFilter, negate=negate)
+
+  def FilterExtManifest(self, negate=False):
+    """Return a patch pool with only patches to the external manifest."""
+    return self.FilterFn(ExtManifestFilter, negate=negate)
+
+  def FilterBranch(self, branch, negate=False):
+    """Return a patch pool with only patches based on a particular branch."""
+    return self.FilterFn(functools.partial(BranchFilter, branch), negate=negate)
+
+  def FilterGitRemoteUrl(self, url, negate=False):
+    """Return a patch pool where patches have a particular remote url."""
+    return self.FilterFn(functools.partial(GitRemoteUrlFilter, url),
+                         negate=negate)
+
+  def __iter__(self):
+    for source in [self.local_patches, self.remote_patches,
+                   self.gerrit_patches]:
+      for patch in source:
+        yield patch
+
+  @classmethod
+  def FromOptions(cls, gerrit_patches=None, local_patches=None, sourceroot=None,
+                  remote_patches=None):
+    """Generate patch objects from passed in options.
+
+    Args:
+      gerrit_patches: Gerrit ids that gerrit.GetGerritPatchInfo accepts.
+      local_patches: Local ids that cros_patch.PrepareLocalPatches accepts.
+      sourceroot: The source repository to look up |local_patches|.
+      remote_patches: Remote ids that cros_patch.PrepareRemotePatches accepts.
+
+    Returns:
+      A TrybotPatchPool object.
+
+    Raises:
+      gerrit.GerritException, cros_patch.PatchException
+    """
+    if gerrit_patches:
+      gerrit_patches = gerrit.GetGerritPatchInfo(gerrit_patches)
+      for patch in gerrit_patches:
+        if patch.IsAlreadyMerged():
+          logging.warning('Patch %s has already been merged.', patch)
+    else:
+      gerrit_patches = ()
+
+    if local_patches:
+      manifest = git.ManifestCheckout.Cached(sourceroot)
+      local_patches = cros_patch.PrepareLocalPatches(manifest, local_patches)
+    else:
+      local_patches = ()
+
+    if remote_patches:
+      remote_patches = cros_patch.PrepareRemotePatches(remote_patches)
+    else:
+      remote_patches = ()
+
+    return cls(gerrit_patches=gerrit_patches, local_patches=local_patches,
+               remote_patches=remote_patches)
diff --git a/cbuildbot/trybot_patch_pool_unittest b/cbuildbot/trybot_patch_pool_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/trybot_patch_pool_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/trybot_patch_pool_unittest.py b/cbuildbot/trybot_patch_pool_unittest.py
new file mode 100644
index 0000000..49eeaee
--- /dev/null
+++ b/cbuildbot/trybot_patch_pool_unittest.py
@@ -0,0 +1,57 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for trybot_patch_pool."""
+
+from __future__ import print_function
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import trybot_patch_pool
+from chromite.lib import patch as cros_patch
+from chromite.lib import patch_unittest
+
+
# Module-level site configuration; the tests below read the manifest
# project names from site_config.params.
site_config = config_lib.GetConfig()
+
+
class FilterTests(patch_unittest.GitRepoPatchTestCase):
  """Tests for all the various filters."""

  patch_kls = cros_patch.LocalPatch

  def testChromiteFilter(self):
    """Make sure the chromite filter works"""
    _, _, patch = self._CommonGitSetup()
    patch.project = constants.CHROMITE_PROJECT
    self.assertTrue(trybot_patch_pool.ChromiteFilter(patch))
    patch.project = 'foooo'
    self.assertFalse(trybot_patch_pool.ChromiteFilter(patch))

  def testManifestFilters(self):
    """Make sure the manifest filters work"""
    _, _, patch = self._CommonGitSetup()

    # (project, is_external_manifest, is_internal_manifest) cases;
    # ManifestFilter is the union of the two.
    cases = (
        (constants.CHROMITE_PROJECT, False, False),
        (site_config.params.MANIFEST_PROJECT, True, False),
        (site_config.params.MANIFEST_INT_PROJECT, False, True),
    )
    for project, expect_ext, expect_int in cases:
      patch.project = project
      self.assertEqual(trybot_patch_pool.ExtManifestFilter(patch), expect_ext)
      self.assertEqual(trybot_patch_pool.IntManifestFilter(patch), expect_int)
      self.assertEqual(trybot_patch_pool.ManifestFilter(patch),
                       expect_ext or expect_int)

  def testBranchFilter(self):
    """Make sure the branch filter works"""
    _, _, patch = self._CommonGitSetup()

    self.assertFalse(trybot_patch_pool.BranchFilter('/,/asdf', patch))
    self.assertTrue(trybot_patch_pool.BranchFilter(
        patch.tracking_branch, patch))
diff --git a/cbuildbot/update_binhost_json b/cbuildbot/update_binhost_json
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/update_binhost_json
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/update_binhost_json.py b/cbuildbot/update_binhost_json.py
new file mode 100644
index 0000000..9d1625a
--- /dev/null
+++ b/cbuildbot/update_binhost_json.py
@@ -0,0 +1,63 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Update the binhost json. Used by buildbots only."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import binhost
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+
+
def _ParseArguments(argv):
  """Parse and freeze the command line options.

  Args:
    argv: Raw command line arguments, excluding the program name.

  Returns:
    The frozen parsed options.
  """
  parser = commandline.ArgumentParser(description=__doc__)
  parser.add_argument('--buildroot', default=constants.SOURCE_ROOT,
                      help='Root directory where source is checked out to.')
  parser.add_argument('--skip-regen', default=True, dest='regen',
                      action='store_false',
                      # NOTE: the old concatenation lacked a trailing space and
                      # rendered the help text as "already beengenerated".
                      help='Don\'t regenerate configs that have already been '
                           'generated.')
  opts = parser.parse_args(argv)
  opts.Freeze()
  return opts
+
+
def main(argv):
  """Regenerate the PFQ binhost config dumps and commit them.

  Must run inside the chroot.

  Args:
    argv: Command line arguments, excluding the program name.
  """
  cros_build_lib.AssertInsideChroot()
  opts = _ParseArguments(argv)

  site_config = config_lib.LoadConfigFromFile()

  # Compute the prebuilt config mapping once; the previous code called
  # binhost.GetChromePrebuiltConfigs() twice for the same result.
  prebuilt_configs = binhost.GetChromePrebuiltConfigs(site_config)

  logging.info('Generating board configs. This takes about 2m...')
  for key in sorted(prebuilt_configs):
    binhost.GenConfigsForBoard(key.board, regen=opts.regen, error_code_ok=True)

  # Fetch all compat IDs.
  fetcher = binhost.CompatIdFetcher()
  keys = prebuilt_configs.keys()
  compat_ids = fetcher.FetchCompatIds(keys)

  # Save the PFQ configs: first the internal ('chrome') dump, then the
  # external ('chromium') one, each committed in its own git commit.
  pfq_configs = binhost.PrebuiltMapping.Get(keys, compat_ids)
  filename_internal = binhost.PrebuiltMapping.GetFilename(opts.buildroot,
                                                          'chrome')
  pfq_configs.Dump(filename_internal)
  git.AddPath(filename_internal)
  git.Commit(os.path.dirname(filename_internal), 'Update PFQ config dump',
             allow_empty=True)

  filename_external = binhost.PrebuiltMapping.GetFilename(opts.buildroot,
                                                          'chromium',
                                                          internal=False)
  pfq_configs.Dump(filename_external, internal=False)
  git.AddPath(filename_external)
  git.Commit(os.path.dirname(filename_external), 'Update PFQ config dump',
             allow_empty=True)
diff --git a/cbuildbot/validation_pool.py b/cbuildbot/validation_pool.py
new file mode 100644
index 0000000..b6e96e8
--- /dev/null
+++ b/cbuildbot/validation_pool.py
@@ -0,0 +1,2803 @@
+# Copyright (c) 2011-2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module that handles interactions with a Validation Pool.
+
+The validation pool is the set of commits that are ready to be validated i.e.
+ready for the commit queue to try.
+"""
+
+from __future__ import print_function
+
+import contextlib
+import cPickle
+import functools
+import httplib
+import os
+import random
+import sys
+import time
+from xml.dom import minidom
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import lkgm_manager
+from chromite.cbuildbot import tree_status
+from chromite.cbuildbot import triage_lib
+from chromite.lib import clactions
+from chromite.lib import cros_logging as logging
+from chromite.lib import cros_build_lib
+from chromite.lib import gerrit
+from chromite.lib import git
+from chromite.lib import gob_util
+from chromite.lib import parallel
+from chromite.lib import patch as cros_patch
+from chromite.lib import timeout_util
+
+
# Module-level site configuration; used below for gerrit remote names
# (e.g. EXTERNAL_REMOTE / INTERNAL_REMOTE).
site_config = config_lib.GetConfig()
+
+
+# Third-party libraries bundled with chromite need to be listed after the
+# first chromite import.
+import digraph
+
+# We import mox so that w/in ApplyPoolIntoRepo, if a mox exception is
+# thrown, we don't cover it up.
+try:
+  import mox
+except ImportError:
+  mox = None
+
+
# Convenience aliases for the two validation pipelines.
PRE_CQ = constants.PRE_CQ
CQ = constants.CQ

# Build config names for the CQ master and the pre-CQ launcher.
CQ_CONFIG = constants.CQ_MASTER
PRE_CQ_LAUNCHER_CONFIG = constants.PRE_CQ_LAUNCHER_CONFIG

# Set of configs that can reject a CL from the pre-CQ / CQ pipeline.
# TODO(davidjames): Any Pre-CQ config can reject CLs now, so this is wrong.
# This is only used for fail counts. Maybe it makes sense to just get rid of
# the fail count?
CQ_PIPELINE_CONFIGS = {CQ_CONFIG, PRE_CQ_LAUNCHER_CONFIG}

# The gerrit-on-borg team tells us that delays up to 2 minutes can be
# normal.  Setting timeout to 3 minutes to be safe-ish.
SUBMITTED_WAIT_TIMEOUT = 3 * 60 # Time in seconds.
+
+
class TreeIsClosedException(Exception):
  """Raised when the tree is closed and we wanted to submit changes."""

  def __init__(self, closed_or_throttled=False):
    """Build the message for the observed tree state.

    Args:
      closed_or_throttled: True if the exception is being thrown on a
                           possibly 'throttled' tree. False if only
                           thrown on a 'closed' tree. Default: False
    """
    if closed_or_throttled:
      current, wanted = 'closed or throttled', 'open'
    else:
      current, wanted = 'closed', 'throttled or open'

    message = ('Tree is %s.  Please set tree status to %s to '
               'proceed.' % (current, wanted))
    super(TreeIsClosedException, self).__init__(message)
+
+
class FailedToSubmitAllChangesException(failures_lib.StepFailure):
  """Raised if we fail to submit any change."""

  def __init__(self, changes, num_submitted):
    """Args: |changes| that may not have landed; |num_submitted| that did."""
    changes_str = ' '.join(str(change) for change in changes)
    message = ('FAILED TO SUBMIT ALL CHANGES:  Could not verify that changes '
               '%s were submitted.'
               '\nSubmitted %d changes successfully.'
               % (changes_str, num_submitted))
    super(FailedToSubmitAllChangesException, self).__init__(message)
+
+
class InternalCQError(cros_patch.PatchException):
  """Exception thrown when CQ has an unexpected/unhandled error."""

  def __init__(self, patch, message):
    super(InternalCQError, self).__init__(patch, message=message)

  def ShortExplanation(self):
    """One-line reason, embedded into the user-facing failure report."""
    return 'failed to apply due to a CQ issue: %s' % (self.message,)
+
+
class InconsistentReloadException(Exception):
  """Raised when patches the CQ previously applied can no longer be found."""
+
+
class PatchModified(cros_patch.PatchException):
  """Raised if a patch is modified while the CQ is running."""

  def __init__(self, patch, patch_number):
    super(PatchModified, self).__init__(patch)
    # Remember the patch set that superseded the one under test; also stash
    # both values in args so the exception pickles/reconstructs cleanly.
    self.new_patch_number = patch_number
    self.args = (patch, patch_number)

  def ShortExplanation(self):
    return ('was modified while the CQ was in the middle of testing it. '
            'Patch set %s was uploaded.' % self.new_patch_number)
+
+
class PatchRejected(cros_patch.PatchException):
  """Raised if a patch was rejected by the CQ because the CQ failed."""

  def ShortExplanation(self):
    # One-line reason, embedded into the user-facing failure report.
    return 'was rejected by the CQ.'
+
+
class PatchFailedToSubmit(cros_patch.PatchException):
  """Raised if we fail to submit a change."""

  def ShortExplanation(self):
    """One-line reason; appends the Gerrit error detail when we have one."""
    base = 'could not be submitted by the CQ.'
    if self.message:
      detail = ' The error message from Gerrit was: %s' % (self.message,)
    else:
      detail = ' The Gerrit server might be having trouble.'
    return base + detail
+
+
class PatchConflict(cros_patch.PatchException):
  """Raised if a patch needs to be rebased."""

  def ShortExplanation(self):
    # One-line reason, embedded into the user-facing failure report.
    return ('could not be submitted because Gerrit reported a conflict. Did '
            'you modify your patch during the CQ run? Or do you just need to '
            'rebase?')
+
+
class PatchSubmittedWithoutDeps(cros_patch.DependencyError):
  """Exception thrown when a patch was submitted incorrectly."""

  def ShortExplanation(self):
    """Explain the premature submission, embedding the dependency failure."""
    dep_error = cros_patch.DependencyError.ShortExplanation(self)
    # NOTE: the previous concatenation was missing a space and rendered the
    # message as "investigate why itsdependencies failed".
    return ('was submitted, even though it %s\n'
            '\n'
            'You may want to revert your patch, and investigate why its '
            'dependencies failed to submit.\n'
            '\n'
            'This error only occurs when we have a dependency cycle, and we '
            'submit one change before realizing that a later change cannot '
            'be submitted.' % (dep_error,))
+
+
+class PatchSeriesTooLong(cros_patch.PatchException):
+  """Raised when a patch series exceeds the Pre-CQ's maximum length."""
+
+  def __init__(self, patch, max_length):
+    cros_patch.PatchException.__init__(self, patch)
+    # Maximum number of patches allowed in one series (see ShortExplanation).
+    self.max_length = max_length
+
+  def ShortExplanation(self):
+    return ("The Pre-CQ cannot handle a patch series longer than %s patches. "
+            "Please wait for some patches to be submitted before marking more "
+            "patches as ready. "  % (self.max_length,))
+
+  def __str__(self):
+    # Unlike the other patch exceptions here, the explanation is a complete
+    # sentence, so use it directly.
+    return self.ShortExplanation()
+
+
+class GerritHelperNotAvailable(gerrit.GerritException):
+  """Exception thrown when a specific helper is requested but unavailable."""
+
+  def __init__(self, remote=site_config.params.EXTERNAL_REMOTE):
+    gerrit.GerritException.__init__(self)
+    # Store only the remote name (a string) so that serializing this
+    # exception doesn't try to serialize an actual HelperPool instance.
+    self.remote = remote
+    self.args = (remote,)
+
+  def __str__(self):
+    return (
+        "Needed a remote=%s gerrit_helper, but one isn't allowed by this "
+        "HelperPool instance.") % (self.remote,)
+
+
+class HelperPool(object):
+  """Pool of allowed GerritHelpers to be used by CQ/PatchSeries."""
+
+  def __init__(self, cros_internal=None, cros=None):
+    """Initialize this instance with the given handlers.
+
+    Most likely you want the classmethod SimpleCreate which takes boolean
+    options.
+
+    If a given handler is None, then it's disabled; else the passed in
+    object is used.
+
+    Args:
+      cros_internal: GerritHelper for the internal remote, or None to disable.
+      cros: GerritHelper for the external remote, or None to disable.
+    """
+    # Maps remote name -> GerritHelper (None means that remote is disabled).
+    self.pool = {
+        site_config.params.EXTERNAL_REMOTE : cros,
+        site_config.params.INTERNAL_REMOTE : cros_internal
+    }
+
+  @classmethod
+  def SimpleCreate(cls, cros_internal=True, cros=True):
+    """Classmethod helper for creating a HelperPool from boolean options.
+
+    Args:
+      cros_internal: If True, allow access to a GerritHelper for internal.
+      cros: If True, allow access to a GerritHelper for external.
+
+    Returns:
+      An appropriately configured HelperPool instance.
+    """
+    if cros:
+      cros = gerrit.GetGerritHelper(site_config.params.EXTERNAL_REMOTE)
+    else:
+      cros = None
+
+    if cros_internal:
+      cros_internal = gerrit.GetGerritHelper(site_config.params.INTERNAL_REMOTE)
+    else:
+      cros_internal = None
+
+    return cls(cros_internal=cros_internal, cros=cros)
+
+  def ForChange(self, change):
+    """Return the helper to use for a particular change.
+
+    Raises:
+      GerritHelperNotAvailable: If no helper is configured for change.remote.
+    """
+    return self.GetHelper(change.remote)
+
+  def GetHelper(self, remote):
+    """Return the helper to use for a given remote.
+
+    Raises:
+      GerritHelperNotAvailable: If no helper is configured for |remote|.
+    """
+    helper = self.pool.get(remote)
+    if not helper:
+      raise GerritHelperNotAvailable(remote)
+
+    return helper
+
+  def __iter__(self):
+    # Yield only the enabled helpers; disabled remotes map to None.
+    for helper in self.pool.itervalues():
+      if helper:
+        yield helper
+
+
+def _PatchWrapException(functor):
+  """Decorator to intercept patch exceptions and wrap them.
+
+  Specifically, for known/handled Exceptions, it intercepts and
+  converts it into a DependencyError- via that, preserving the
+  cause, while casting it into an easier to use form (one that can
+  be chained in addition).
+  """
+  def f(self, parent, *args, **kwargs):
+    try:
+      return functor(self, parent, *args, **kwargs)
+    except gerrit.GerritException as e:
+      if isinstance(e, gerrit.QueryNotSpecific):
+        e = ("%s\nSuggest you use gerrit numbers instead (prefixed with a * "
+             "if it's an internal change)." % e)
+      new_exc = cros_patch.PatchException(parent, e)
+      # Python 2 three-argument raise: re-raise the wrapped exception while
+      # preserving the original traceback.
+      raise new_exc.__class__, new_exc, sys.exc_info()[2]
+    except cros_patch.PatchException as e:
+      # If the exception already blames |parent| itself, don't re-wrap it.
+      if e.patch.id == parent.id:
+        raise
+      new_exc = cros_patch.DependencyError(parent, e)
+      raise new_exc.__class__, new_exc, sys.exc_info()[2]
+
+  # Make the wrapper impersonate the wrapped method for debugging/logging.
+  f.__name__ = functor.__name__
+  return f
+
+
+class PatchSeries(object):
+  """Class representing a set of patches applied to a repo checkout."""
+
+  def __init__(self, path, helper_pool=None, forced_manifest=None,
+               deps_filter_fn=None, is_submitting=False):
+    """Constructor.
+
+    Args:
+      path: Path to the buildroot.
+      helper_pool: Pool of allowed GerritHelpers to be used for fetching
+        patches. Defaults to allowing both internal and external fetches.
+      forced_manifest: A manifest object to use for mapping projects to
+        repositories. Defaults to the buildroot.
+      deps_filter_fn: A function which specifies what patches you would
+        like to accept. It is passed a patch and is expected to return
+        True or False.
+      is_submitting: Whether we are currently submitting patchsets. This is
+        used to print better error messages.
+    """
+    self.manifest = forced_manifest
+
+    if helper_pool is None:
+      helper_pool = HelperPool.SimpleCreate(cros_internal=True, cros=True)
+    self._helper_pool = helper_pool
+    self._path = path
+    if deps_filter_fn is None:
+      # Accept every patch by default.
+      deps_filter_fn = lambda x: True
+    self.deps_filter_fn = deps_filter_fn
+    self._is_submitting = is_submitting
+
+    # A mapping of ChangeId to exceptions if the patch failed against
+    # ToT.  Primarily used to keep the resolution/applying from going
+    # down known bad paths.
+    self.failed_tot = {}
+
+    # Caches: patches already committed, patches previously looked up on
+    # gerrit, and each change's resolved (gerrit_deps, cq_deps) tuple.
+    self._committed_cache = cros_patch.PatchCache()
+    self._lookup_cache = cros_patch.PatchCache()
+    self._change_deps_cache = {}
+
+  def _ManifestDecorator(functor):
+    """Method decorator that sets self.manifest automatically.
+
+    This function automatically initializes the manifest, and allows callers to
+    override the manifest if needed.
+
+    Note: this runs at class-definition time (it receives the undecorated
+    method, not self), hence the pylint suppressions below.
+    """
+    # pylint: disable=E0213,W0212,E1101,E1102
+    def f(self, *args, **kwargs):
+      manifest = kwargs.pop('manifest', None)
+      # Wipe is used to track if we need to reset manifest to None, and
+      # to identify if we already had a forced_manifest via __init__.
+      wipe = self.manifest is None
+      if manifest:
+        if not wipe:
+          raise ValueError("manifest can't be specified when one is forced "
+                           "via __init__")
+      elif wipe:
+        manifest = git.ManifestCheckout.Cached(self._path)
+      else:
+        manifest = self.manifest
+
+      try:
+        self.manifest = manifest
+        return functor(self, *args, **kwargs)
+      finally:
+        # Only clear self.manifest if we set it ourselves above.
+        if wipe:
+          self.manifest = None
+
+    f.__name__ = functor.__name__
+    f.__doc__ = functor.__doc__
+    return f
+
+  @_ManifestDecorator
+  def GetGitRepoForChange(self, change, strict=False):
+    """Get the project path associated with the specified change.
+
+    Args:
+      change: The change to operate on.
+      strict: If True, throw ChangeNotInManifest rather than returning
+        None. Default: False.
+
+    Returns:
+      The project path if found in the manifest. Otherwise returns
+      None (if strict=False).
+    """
+    project_dir = None
+    if self.manifest:
+      # With strict=True, GetCheckout raises instead of returning None.
+      checkout = change.GetCheckout(self.manifest, strict=strict)
+      if checkout is not None:
+        project_dir = checkout.GetPath(absolute=True)
+
+    return project_dir
+
+  @_ManifestDecorator
+  def ApplyChange(self, change):
+    """Apply |change| against the current manifest's checkout.
+
+    Args:
+      change: A cros_patch.GitRepoPatch instance to apply.
+
+    Returns:
+      Whatever change.ApplyAgainstManifest returns.
+    """
+    # Always enable content merging.
+    return change.ApplyAgainstManifest(self.manifest, trivial=False)
+
+  def _LookupHelper(self, patch):
+    """Returns the helper for the given cros_patch.PatchQuery object.
+
+    Raises:
+      GerritHelperNotAvailable: If no helper is allowed for patch.remote.
+    """
+    return self._helper_pool.GetHelper(patch.remote)
+
+  def _GetGerritPatch(self, query):
+    """Query the configured helpers looking for a given change.
+
+    Args:
+      query: A cros_patch.PatchQuery object.
+
+    Returns:
+      A GerritPatch object, or None if the query matched nothing.
+    """
+    helper = self._LookupHelper(query)
+    query_text = query.ToGerritQueryText()
+    # Only require a match when the query isn't a raw SHA1.
+    change = helper.QuerySingleRecord(
+        query_text, must_match=not git.IsSHA1(query_text))
+
+    if not change:
+      return
+
+    # If the query was a gerrit number based query, check the projects/change-id
+    # to see if we already have it locally, but couldn't map it since we didn't
+    # know the gerrit number at the time of the initial injection.
+    existing = self._lookup_cache[change]
+    if cros_patch.ParseGerritNumber(query_text) and existing is not None:
+      keys = change.LookupAliases()
+      self._lookup_cache.InjectCustomKeys(keys, existing)
+      return existing
+
+    self.InjectLookupCache([change])
+    if change.IsAlreadyMerged():
+      self.InjectCommittedPatches([change])
+    return change
+
+  def _LookupUncommittedChanges(self, deps, limit_to=None):
+    """Given a set of deps (changes), return unsatisfied dependencies.
+
+    Args:
+      deps: A list of cros_patch.PatchQuery objects representing
+        sequence of dependencies for the leaf that we need to identify
+        as either merged, or needing resolving.
+      limit_to: If non-None, then this must be a mapping (preferably a
+        cros_patch.PatchCache for translation reasons) of which non-committed
+        changes are allowed to be used for a transaction.
+
+    Returns:
+      A sequence of cros_patch.GitRepoPatch instances (or derivatives) that
+      need to be resolved for this change to be mergable.
+
+    Raises:
+      PatchRejected: If a dep falls outside |limit_to| while submitting.
+      cros_patch.PatchException: A dep's own merge exception, if it has one.
+    """
+    unsatisfied = []
+    for dep in deps:
+      # Deps that have already landed need no further resolution.
+      if dep in self._committed_cache:
+        continue
+
+      try:
+        self._LookupHelper(dep)
+      except GerritHelperNotAvailable:
+        # Internal dependencies are irrelevant to external builders.
+        logging.info("Skipping internal dependency: %s", dep)
+        continue
+
+      # Resolve via the lookup cache first, falling back to querying gerrit.
+      dep_change = self._lookup_cache[dep]
+
+      if dep_change is None:
+        dep_change = self._GetGerritPatch(dep)
+      if dep_change is None:
+        continue
+      if getattr(dep_change, 'IsAlreadyMerged', lambda: False)():
+        continue
+      elif limit_to is not None and dep_change not in limit_to:
+        if self._is_submitting:
+          raise PatchRejected(dep_change)
+        else:
+          raise dep_change.GetMergeException() or PatchRejected(dep_change)
+
+      unsatisfied.append(dep_change)
+
+    # Perform last minute custom filtering.
+    return [x for x in unsatisfied if self.deps_filter_fn(x)]
+
+  def CreateTransaction(self, change, limit_to=None):
+    """Given a change, resolve it into a transaction.
+
+    In this case, a transaction is defined as a group of commits that
+    must land for the given change to be merged- specifically its
+    parent deps, and its CQ-DEPEND.
+
+    Args:
+      change: A cros_patch.GitRepoPatch instance to generate a transaction
+        for.
+      limit_to: If non-None, limit the allowed uncommitted patches to
+        what's in that container/mapping.
+
+    Returns:
+      A sequence of the necessary cros_patch.GitRepoPatch objects for
+      this transaction.
+
+    Raises:
+      DependencyError: If we could not resolve a dependency.
+      GerritException or GOBError: If there is a failure in querying gerrit.
+    """
+    plan = []
+    # The seen-caches record which changes' deps have already been expanded,
+    # so cyclic dependencies don't recurse forever.
+    gerrit_deps_seen = cros_patch.PatchCache()
+    cq_deps_seen = cros_patch.PatchCache()
+    self._AddChangeToPlanWithDeps(change, plan, gerrit_deps_seen,
+                                  cq_deps_seen, limit_to=limit_to)
+    return plan
+
+  def CreateTransactions(self, changes, limit_to=None):
+    """Create a list of transactions from a list of changes.
+
+    Args:
+      changes: A list of cros_patch.GitRepoPatch instances to generate
+        transactions for.
+      limit_to: See CreateTransaction docs.
+
+    Yields:
+      (change, plan, e) tuples for the given list of changes. The
+      plan represents the necessary GitRepoPatch objects for a given change. If
+      an exception occurs while creating the transaction, e will contain the
+      exception. (Otherwise, e will be None.)
+    """
+    for change in changes:
+      try:
+        plan = self.CreateTransaction(change, limit_to=limit_to)
+      except cros_patch.PatchException as e:
+        # Report the failure for this change but keep processing the rest.
+        yield (change, (), e)
+      else:
+        yield (change, plan, None)
+
+  def CreateDisjointTransactions(self, changes, max_txn_length=None,
+                                 merge_projects=False):
+    """Create a list of disjoint transactions from a list of changes.
+
+    Args:
+      changes: A list of cros_patch.GitRepoPatch instances to generate
+        transactions for.
+      max_txn_length: The maximum length of any given transaction.  By default,
+        do not limit the length of transactions.
+      merge_projects: If set, put all changes to a given project in the same
+        transaction.
+
+    Returns:
+      A list of disjoint transactions and a list of exceptions. Each transaction
+      can be tried independently, without involving patches from other
+      transactions. Each change in the pool will be included in exactly one of
+      the transactions, unless the patch does not apply for some reason.
+    """
+    # Gather the dependency graph for the specified changes.
+    deps, edges, failed = {}, {}, []
+    for change, plan, ex in self.CreateTransactions(changes, limit_to=changes):
+      if ex is not None:
+        logging.info('Failed creating transaction for %s: %s', change, ex)
+        failed.append(ex)
+      else:
+        # Save off the ordered dependencies of this change.
+        deps[change] = plan
+
+        # Mark every change in the transaction as bidirectionally connected.
+        for change_dep in plan:
+          edges.setdefault(change_dep, set()).update(plan)
+
+    if merge_projects:
+      # Additionally link all changes within the same project together.
+      projects = {}
+      for change in deps:
+        projects.setdefault(change.project, []).append(change)
+      for project in projects:
+        for change in projects[project]:
+          edges.setdefault(change, set()).update(projects[project])
+
+    # Calculate an unordered group of strongly connected components.
+    unordered_plans = digraph.StronglyConnectedComponents(list(edges), edges)
+
+    # Sort the groups according to our ordered dependency graph.
+    ordered_plans = []
+    for unordered_plan in unordered_plans:
+      ordered_plan, seen = [], set()
+      for change in unordered_plan:
+        # Iterate over the required CLs, adding them to our plan in order.
+        new_changes = list(dep_change for dep_change in deps[change]
+                           if dep_change not in seen)
+        new_plan_size = len(ordered_plan) + len(new_changes)
+        if not max_txn_length or new_plan_size <= max_txn_length:
+          seen.update(new_changes)
+          ordered_plan.extend(new_changes)
+
+      if ordered_plan:
+        # We found a transaction that is <= max_txn_length. Process the
+        # transaction. Ignore the remaining patches for now; they will be
+        # processed later (once the current transaction has been pushed).
+        ordered_plans.append(ordered_plan)
+      else:
+        # We couldn't find any transactions that were <= max_txn_length.
+        # This should only happen if circular dependencies prevent us from
+        # truncating a long list of patches. Reject the whole set of patches
+        # and complain.
+        for change in unordered_plan:
+          failed.append(PatchSeriesTooLong(change, max_txn_length))
+
+    return ordered_plans, failed
+
+  @_PatchWrapException
+  def _AddChangeToPlanWithDeps(self, change, plan, gerrit_deps_seen,
+                               cq_deps_seen, limit_to=None,
+                               include_cq_deps=True):
+    """Add a change and its dependencies into a |plan|.
+
+    Args:
+      change: The change to add to the plan.
+      plan: The list of changes to apply, in order. This function will append
+        |change| and any necessary dependencies to |plan|.
+      gerrit_deps_seen: The changes whose Gerrit dependencies have already been
+        processed.
+      cq_deps_seen: The changes whose CQ-DEPEND and Gerrit dependencies have
+        already been processed.
+      limit_to: If non-None, limit the allowed uncommitted patches to
+        what's in that container/mapping.
+      include_cq_deps: If True, include CQ dependencies in the list
+        of dependencies. Defaults to True.
+
+    Raises:
+      DependencyError: If we could not resolve a dependency.
+      GerritException or GOBError: If there is a failure in querying gerrit.
+    """
+    # Already-committed changes need nothing added to the plan.
+    if change in self._committed_cache:
+      return
+
+    # Get a list of the changes that haven't been committed.
+    # These are returned as cros_patch.PatchQuery objects.
+    gerrit_deps, cq_deps = self.GetDepsForChange(change)
+
+    # Only process the Gerrit dependencies for each change once. We prioritize
+    # Gerrit dependencies over CQ dependencies, since Gerrit dependencies might
+    # be required in order for the change to apply.
+    old_plan_len = len(plan)
+    if change not in gerrit_deps_seen:
+      gerrit_deps = self._LookupUncommittedChanges(
+          gerrit_deps, limit_to=limit_to)
+      gerrit_deps_seen.Inject(change)
+      for dep in gerrit_deps:
+        self._AddChangeToPlanWithDeps(dep, plan, gerrit_deps_seen, cq_deps_seen,
+                                      limit_to=limit_to, include_cq_deps=False)
+
+    if include_cq_deps and change not in cq_deps_seen:
+      cq_deps = self._LookupUncommittedChanges(
+          cq_deps, limit_to=limit_to)
+      cq_deps_seen.Inject(change)
+      for dep in plan[old_plan_len:] + cq_deps:
+        # Add the requested change (plus deps) to our plan, if we aren't
+        # already in the process of doing that.
+        if dep not in cq_deps_seen:
+          self._AddChangeToPlanWithDeps(dep, plan, gerrit_deps_seen,
+                                        cq_deps_seen, limit_to=limit_to)
+
+    # If there are cyclic dependencies, we might have already applied this
+    # patch as part of dependency resolution. If not, apply this patch.
+    if change not in plan:
+      plan.append(change)
+
+  @_PatchWrapException
+  def GetDepChangesForChange(self, change):
+    """Look up the Gerrit/CQ dependency changes for |change|.
+
+    Returns:
+      (gerrit_deps, cq_deps): The change's Gerrit dependencies and CQ
+      dependencies, as lists of GerritPatch objects.
+
+    Raises:
+      DependencyError: If we could not resolve a dependency.
+      GerritException or GOBError: If there is a failure in querying gerrit.
+    """
+    gerrit_deps, cq_deps = self.GetDepsForChange(change)
+
+    def _DepsToChanges(deps):
+      dep_changes = []
+      unprocessed_deps = []
+      for dep in deps:
+        dep_change = self._committed_cache[dep]
+        if dep_change:
+          dep_changes.append(dep_change)
+        else:
+          unprocessed_deps.append(dep)
+
+      for dep in unprocessed_deps:
+        dep_changes.extend(self._LookupUncommittedChanges(deps))
+
+      return dep_changes
+
+    return _DepsToChanges(gerrit_deps), _DepsToChanges(cq_deps)
+
+  @_PatchWrapException
+  def GetDepsForChange(self, change):
+    """Look up the Gerrit/CQ deps for |change|.
+
+    Returns:
+      A tuple of PatchQuery objects representing change's Gerrit
+      dependencies, and CQ dependencies.
+
+    Raises:
+      DependencyError: If we could not resolve a dependency.
+      GerritException or GOBError: If there is a failure in querying gerrit.
+    """
+    # Cache per-change deps; they are queried repeatedly during planning.
+    val = self._change_deps_cache.get(change)
+    if val is None:
+      git_repo = self.GetGitRepoForChange(change)
+      val = self._change_deps_cache[change] = (
+          change.GerritDependencies(),
+          change.PaladinDependencies(git_repo))
+
+    return val
+
+  def InjectCommittedPatches(self, changes):
+    """Record that the given patches are already committed.
+
+    This is primarily useful for external code to notify this object
+    that changes were applied to the tree outside its purview- specifically
+    useful for dependency resolution.
+
+    Args:
+      changes: A sequence of patch objects to mark as committed.
+    """
+    self._committed_cache.Inject(*changes)
+
+  def InjectLookupCache(self, changes):
+    """Inject into the internal lookup cache the given changes.
+
+    Uses |changes| rather than asking gerrit for them for dependencies.
+
+    Args:
+      changes: A sequence of patch objects to seed the cache with.
+    """
+    self._lookup_cache.Inject(*changes)
+
+  def FetchChanges(self, changes):
+    """Fetch the specified changes, if needed.
+
+    If we're an external builder, internal changes are filtered out.
+
+    Args:
+      changes: A sequence of changes to fetch.
+
+    Returns:
+      A list of the filtered changes.
+    """
+    # Group the fetchable changes by the repository they live in.
+    by_repo = {}
+    changes_to_fetch = []
+    for change in changes:
+      try:
+        self._helper_pool.ForChange(change)
+      except GerritHelperNotAvailable:
+        # Internal patches are irrelevant to external builders.
+        logging.info("Skipping internal patch: %s", change)
+        continue
+      repo = self.GetGitRepoForChange(change, strict=True)
+      by_repo.setdefault(repo, []).append(change)
+      changes_to_fetch.append(change)
+
+    # Fetch changes in parallel. The change.Fetch() method modifies the
+    # 'change' object, so make sure we grab all of that information.
+    with parallel.Manager() as manager:
+      fetched_changes = manager.dict()
+
+      fetch_repo = functools.partial(
+          _FetchChangesForRepo, fetched_changes, by_repo)
+      parallel.RunTasksInProcessPool(fetch_repo, [[repo] for repo in by_repo])
+
+      # Preserve the caller's ordering of the surviving changes.
+      return [fetched_changes[c.id] for c in changes_to_fetch]
+
+  def ReapplyChanges(self, by_repo):
+    """Make sure that all of the local changes still apply.
+
+    Syncs all of the repositories in the manifest and reapplies the changes on
+    top of the tracking branch for each repository.
+
+    Args:
+      by_repo: A mapping from repo paths to changes in that repo.
+
+    Returns:
+      a new by_repo dict containing only the patches that apply correctly, and
+      errors, a dict of patches to exceptions encountered while applying them.
+    """
+    self.ResetCheckouts(constants.PATCH_BRANCH, fetch=True)
+    # Flatten the per-repo sets of changes into a single set (Py2 builtin
+    # reduce).
+    local_changes = reduce(set.union, by_repo.values(), set())
+    applied_changes, failed_tot, failed_inflight = self.Apply(local_changes)
+    errors = {}
+    for exception in failed_tot + failed_inflight:
+      errors[exception.patch] = exception
+
+    # Filter out only the changes that applied.
+    by_repo = dict(by_repo)
+    for repo in by_repo:
+      by_repo[repo] &= set(applied_changes)
+
+    return by_repo, errors
+
+  @_ManifestDecorator
+  def ResetCheckouts(self, branch, fetch=False):
+    """Updates |branch| in all Git checkouts in the manifest to their remotes.
+
+    Args:
+      branch: The branch to update.
+      fetch: Indicates whether to sync the remotes before resetting.
+    """
+    if not self.manifest:
+      logging.info("No manifest, skipping reset.")
+      return
+
+    def _Reset(checkout):
+      """Hard-reset one checkout's |branch| to its tracking branch."""
+      path = checkout.GetPath()
+
+      # There is no need to reset the branch if it doesn't exist.
+      if not git.DoesCommitExistInRepo(path, branch):
+        logging.info('Skipping reset for %s because %s is not in the repo.',
+                     path, branch)
+        return
+
+      if fetch:
+        git.RunGit(path, ['fetch', '--all'], capture_output=False)
+
+      def _LogBranch():
+        # Record the branch's state before and after the reset for debugging.
+        branches = git.RunGit(path, ['branch', '-vv']).output.splitlines()
+        branch_line = [b for b in branches if branch in b]
+        logging.info(branch_line)
+
+      _LogBranch()
+      git.RunGit(path, ['checkout', '-f', branch])
+      logging.info('Resetting to %s', checkout['tracking_branch'])
+      git.RunGit(path, ['reset', checkout['tracking_branch'], '--hard'])
+      _LogBranch()
+
+    # Reset every checkout in the manifest, in parallel.
+    parallel.RunTasksInProcessPool(
+        _Reset,
+        [[c] for c in self.manifest.ListCheckouts()])
+
+  @_ManifestDecorator
+  def Apply(self, changes, frozen=True, honor_ordering=False,
+            changes_filter=None, max_change_count=None):
+    """Applies changes from pool into the build root specified by the manifest.
+
+    This method resolves each given change down into a set of transactions-
+    the change and its dependencies- that must go in, then tries to apply
+    the largest transaction first, working its way down.
+
+    If a transaction cannot be applied, then it is rolled back
+    in full- note that if a change is involved in multiple transactions,
+    if an earlier attempt fails, that change can be retried in a new
+    transaction if the failure wasn't caused by the patch being incompatible
+    to ToT.
+
+    Args:
+      changes: A sequence of cros_patch.GitRepoPatch instances to resolve
+        and apply.
+      frozen: If True, then resolving of the given changes is explicitly
+        limited to just the passed in changes, or known committed changes.
+        This is basically CQ/Paladin mode, used to limit the changes being
+        pulled in/committed to just what we allow.
+      honor_ordering: Apply normally will reorder the transactions it
+        computes, trying the largest first, then degrading through smaller
+        transactions if the larger of the two fails.  If honor_ordering
+        is False, then the ordering given via changes is preserved-
+        this is mainly of use for cbuildbot induced patching, and shouldn't
+        be used for CQ patching.
+      changes_filter: If not None, must be a functor taking two arguments:
+        series, changes; it must return the changes to work on.
+        This is invoked after the initial changes have been fetched,
+        thus this is a way for consumers to do last minute checking of the
+        changes being inspected, and expand the changes if necessary.
+        Primarily this is of use for cbuildbot patching when dealing w/
+        uploaded/remote patches.
+      max_change_count: If not None, this is a soft integer limit on the number
+        of patches to pull in. We stop pulling in patches as soon as we grab
+        at least this many patches. Note that this limit may be exceeded by N-1,
+        where N is the length of the longest transaction.
+
+    Returns:
+      A tuple of changes-applied, Exceptions for the changes that failed
+      against ToT, and Exceptions that failed inflight;  These exceptions
+      are cros_patch.PatchException instances.
+    """
+    # Prefetch the changes; we need accurate change_id/id's, which is
+    # guaranteed via Fetch.
+    changes = self.FetchChanges(changes)
+    if changes_filter:
+      changes = changes_filter(self, changes)
+
+    self.InjectLookupCache(changes)
+    # In frozen (CQ) mode, deps may only be satisfied from |changes|.
+    limit_to = cros_patch.PatchCache(changes) if frozen else None
+    resolved, applied, failed = [], [], []
+    planned = set()
+    for change, plan, ex in self.CreateTransactions(changes, limit_to=limit_to):
+      if ex is not None:
+        logging.info("Failed creating transaction for %s: %s", change, ex)
+        failed.append(ex)
+      else:
+        resolved.append((change, plan))
+        logging.info("Transaction for %s is %s.",
+                     change, ', '.join(map(str, resolved[-1][-1])))
+        planned.update(plan)
+
+      # Soft limit: stop planning once we've gathered enough patches.
+      if max_change_count is not None and len(planned) >= max_change_count:
+        break
+
+    if not resolved:
+      # No work to do; either no changes were given to us, or all failed
+      # to be resolved.
+      return [], failed, []
+
+    if not honor_ordering:
+      # Sort by length, falling back to the order the changes were given to us.
+      # This is done to prefer longer transactions (more painful to rebase)
+      # over shorter transactions.
+      position = dict((change, idx) for idx, change in enumerate(changes))
+      def mk_key(data):
+        change, plan = data
+        ids = [x.id for x in plan]
+        return -len(ids), position[change]
+      resolved.sort(key=mk_key)
+
+    for inducing_change, transaction_changes in resolved:
+      try:
+        # _Transaction rolls the repos back if application fails midway.
+        with self._Transaction(transaction_changes):
+          logging.debug("Attempting transaction for %s: changes: %s",
+                        inducing_change,
+                        ', '.join(map(str, transaction_changes)))
+          self._ApplyChanges(inducing_change, transaction_changes)
+      except cros_patch.PatchException as e:
+        logging.info("Failed applying transaction for %s: %s",
+                     inducing_change, e)
+        failed.append(e)
+      else:
+        applied.extend(transaction_changes)
+        self.InjectCommittedPatches(transaction_changes)
+
+    # Uniquify while maintaining order.
+    def _uniq(l):
+      s = set()
+      for x in l:
+        if x not in s:
+          yield x
+          s.add(x)
+
+    applied = list(_uniq(applied))
+    # NOTE(review): this unconditionally switches the instance into
+    # submission-mode error messages for all later lookups — confirm this is
+    # intended to persist beyond Apply().
+    self._is_submitting = True
+
+    # A change that applied in any transaction is not reported as failed.
+    failed = [x for x in failed if x.patch not in applied]
+    failed_tot = [x for x in failed if not x.inflight]
+    failed_inflight = [x for x in failed if x.inflight]
+    return applied, failed_tot, failed_inflight
+
+  @contextlib.contextmanager
+  def _Transaction(self, commits):
+    """ContextManager used to rollback changes to a build root if necessary.
+
+    Specifically, if an unhandled non system exception occurs, this context
+    manager will roll back all relevant modifications to the git repos
+    involved.
+
+    Args:
+      commits: A sequence of cros_patch.GitRepoPatch instances that comprise
+        this transaction- this is used to identify exactly what may be changed,
+        thus what needs to be tracked and rolled back if the transaction fails.
+    """
+    # First, the book keeping code; gather required data so we know what
+    # to rollback to should this transaction fail.  Specifically, we track
+    # what was checked out for each involved repo, and if it was a branch,
+    # the sha1 of the branch; that information is enough to rewind us back
+    # to the original repo state.
+    project_state = set(
+        map(functools.partial(self.GetGitRepoForChange, strict=True), commits))
+    resets = []
+    for project_dir in project_state:
+      current_sha1 = git.RunGit(
+          project_dir, ['rev-list', '-n1', 'HEAD']).output.strip()
+      resets.append((project_dir, current_sha1))
+      assert current_sha1
+
+    # Snapshot the committed-cache too, so it can be restored on rollback.
+    committed_cache = self._committed_cache.copy()
+
+    try:
+      yield
+    except Exception:
+      logging.info("Rewinding transaction: failed changes: %s .",
+                   ', '.join(map(str, commits)), exc_info=True)
+
+      # Rewind each involved repo to its pre-transaction sha1.
+      for project_dir, sha1 in resets:
+        git.RunGit(project_dir, ['reset', '--hard', sha1])
+
+      self._committed_cache = committed_cache
+      raise
+
+  @_PatchWrapException
+  def _ApplyChanges(self, _inducing_change, changes):
+    """Apply a given ordered sequence of changes.
+
+    Args:
+      _inducing_change: The core GitRepoPatch instance that lead to this
+        sequence of changes; basically what this transaction was computed from.
+        Needs to be passed in so that the exception wrapping machinery can
+        convert any failures, assigning blame appropriately.
+      changes: An ordered sequence of GitRepoPatch instances to apply.
+    """
+    # Bail immediately if we know one of the requisite patches won't apply.
+    for change in changes:
+      failure = self.failed_tot.get(change.id)
+      if failure is not None:
+        raise failure
+
+    applied = []
+    for change in changes:
+      # Skip changes that have already landed.
+      if change in self._committed_cache:
+        continue
+
+      try:
+        self.ApplyChange(change)
+      except cros_patch.PatchException as e:
+        # Remember ToT (non-inflight) failures so later transactions that
+        # include this change can fail fast.
+        if not e.inflight:
+          self.failed_tot[change.id] = e
+        raise
+      applied.append(change)
+
+    logging.debug('Done investigating changes.  Applied %s',
+                  ' '.join([c.id for c in applied]))
+
+  @classmethod
+  def WorkOnSingleRepo(cls, git_repo, tracking_branch, **kwargs):
+    """Classmethod to generate a PatchSeries that targets a single git repo.
+
+    It does this via forcing a fake manifest, which in turn points
+    tracking branch/paths/content-merging at what is passed through here.
+
+    Args:
+      git_repo: Absolute path to the git repository to operate upon.
+      tracking_branch: Which tracking branch patches should apply against.
+      kwargs: See PatchSeries.__init__ for the various optional args;
+        note forced_manifest cannot be used here.
+
+    Returns:
+      A PatchSeries instance w/ a forced manifest.
+    """
+
+    if 'forced_manifest' in kwargs:
+      raise ValueError("RawPatchSeries doesn't allow a forced_manifest "
+                       "argument.")
+    # The shim stands in for a full manifest checkout, covering just this repo.
+    kwargs['forced_manifest'] = _ManifestShim(git_repo, tracking_branch)
+
+    return cls(git_repo, **kwargs)
+
+
def _FetchChangesForRepo(fetched_changes, by_repo, repo):
  """Fetch the changes for a given `repo`.

  Args:
    fetched_changes: A dict from change ids to changes which is updated by
      this method.
    by_repo: A mapping from repositories to changes.
    repo: The repository we should fetch the changes for.
  """
  repo_changes = by_repo[repo]
  # Only ask git for refs we have not already fetched into |repo|.
  missing_refs = {change.ref for change in repo_changes
                  if not change.HasBeenFetched(repo)}
  fetch_cmd = (['fetch', '-f', repo_changes[0].project_url] +
               list(missing_refs))
  git.RunGit(repo, fetch_cmd, print_cmd=True)

  for change in repo_changes:
    # Prefer the previously-fetched sha1 when available.
    sha1 = change.HasBeenFetched(repo) or change.sha1
    change.UpdateMetadataFromRepo(repo, sha1=sha1)
    fetched_changes[change.id] = change
+
+
class _ManifestShim(object):
  """A duck-typed stand-in for a manifest holding exactly one repository.

  PatchSeries only needs a small slice of the git.ManifestCheckout API to
  filter patches, so this shim implements just that slice and always
  answers with the single checkout it was constructed with.

  All of the methods below accept the same arguments as the corresponding
  methods in git.ManifestCheckout.*, but ignore them entirely.
  """

  def __init__(self, path, tracking_branch, remote='origin'):
    """Build the single ProjectCheckout this shim reports.

    Args:
      path: Local path of the git repository.
      tracking_branch: Branch (with or without a refs/ prefix) that patches
        should track.
      remote: Git remote name; defaults to 'origin'.
    """
    full_tracking_branch = 'refs/remotes/%s/%s' % (
        remote, git.StripRefs(tracking_branch),
    )
    self.checkout = git.ProjectCheckout(dict(
        local_path=path,
        path=path,
        tracking_branch=full_tracking_branch,
    ))

  def FindCheckouts(self, *_args, **_kwargs):
    """Return our lone checkout regardless of the arguments.

    Mirrors git.ManifestCheckout.FindCheckouts, but since there is only one
    repository, the filtering arguments are irrelevant.

    Returns:
      A single-element list holding the ProjectCheckout.
    """
    return [self.checkout]
+
+
class ValidationPool(object):
  """Class that handles interactions with a validation pool.

  This class can be used to acquire a set of commits that form a pool of
  commits ready to be validated and committed.

  Usage:  Use ValidationPool.AcquirePool -- a static
  method that grabs the commits that are ready for validation.
  """

  # Global variable to control whether or not we should allow CL's to get tried
  # and/or committed when the tree is throttled.
  # TODO(sosa): Remove this global once metrics show that this is the direction
  # we want to go (and remove all additional throttled_ok logic from this
  # module).
  THROTTLED_OK = True
  # When True, forces dryrun behavior regardless of the |dryrun| ctor arg.
  GLOBAL_DRYRUN = False
  # Overall wait for changes, in seconds (4 hours).
  DEFAULT_TIMEOUT = 60 * 60 * 4
  # How long to wait when the tree is throttled before checking for CR+1 CL's.
  CQ_THROTTLED_TIMEOUT = 60 * 10
  # Polling interval (seconds) used while waiting for changes/tree status.
  SLEEP_TIMEOUT = 30
  # Buffer time to leave when using the global build deadline as the sync stage
  # timeout. We need some time to possibly extend the global build deadline
  # after the sync timeout is hit.
  EXTENSION_TIMEOUT_BUFFER = 10 * 60
  # Message used when Gerrit disagrees that a change we submitted went in.
  INCONSISTENT_SUBMIT_MSG = ('Gerrit thinks that the change was not submitted, '
                             'even though we hit the submit button.')

  # The grace period (in seconds) before we reject a patch due to dependency
  # errors.
  REJECTION_GRACE_PERIOD = 30 * 60

  # How many CQ runs to go back when making a decision about the CQ health.
  # Note this impacts the max exponential fallback (2^10=1024 max exponential
  # divisor).
  CQ_SEARCH_HISTORY = 10
+
+
  def __init__(self, overlays, build_root, build_number, builder_name,
               is_master, dryrun, changes=None, non_os_changes=None,
               conflicting_changes=None, pre_cq_trybot=False,
               tree_was_open=True, builder_run=None):
    """Initializes an instance by setting default variables to instance vars.

    Generally use AcquirePool as an entry pool to a pool rather than this
    method.

    Args:
      overlays: One of constants.VALID_OVERLAYS.
      build_root: Build root directory.
      build_number: Build number for this validation attempt.
      builder_name: Builder name on buildbot dashboard.
      is_master: True if this is the master builder for the Commit Queue.
      dryrun: If set to True, do not submit anything to Gerrit.
    Optional Args:
      changes: List of changes for this validation pool.
      non_os_changes: List of changes that are part of this validation
        pool but aren't part of the cros checkout.
      conflicting_changes: Changes that failed to apply but we're keeping around
        because they conflict with other changes in flight.
      pre_cq_trybot: If set to True, this is a Pre-CQ trybot. (Note: The Pre-CQ
        launcher is NOT considered a Pre-CQ trybot.)
      tree_was_open: Whether the tree was open when the pool was created.
      builder_run: BuilderRun instance used to fetch cidb handle and metadata
        instance. Please note due to the pickling logic, this MUST be the last
        kwarg listed.

    Raises:
      ValueError: If any argument fails the validation checks below.
    """

    self.build_root = build_root

    # These instances can be instantiated via both older, or newer pickle
    # dumps.  Thus we need to assert the given args since we may be getting
    # a value we no longer like (nor work with).
    if overlays not in constants.VALID_OVERLAYS:
      raise ValueError("Unknown/unsupported overlay: %r" % (overlays,))

    self._helper_pool = self.GetGerritHelpersForOverlays(overlays)

    if not isinstance(build_number, int):
      raise ValueError("Invalid build_number: %r" % (build_number,))

    if not isinstance(builder_name, basestring):
      raise ValueError("Invalid builder_name: %r" % (builder_name,))

    # Both change lists must hold only GitRepoPatch derivatives.
    for changes_name, changes_value in (
        ('changes', changes), ('non_os_changes', non_os_changes)):
      if not changes_value:
        continue
      if not all(isinstance(x, cros_patch.GitRepoPatch) for x in changes_value):
        raise ValueError(
            'Invalid %s: all elements must be a GitRepoPatch derivative, got %r'
            % (changes_name, changes_value))

    if conflicting_changes and not all(
        isinstance(x, cros_patch.PatchException)
        for x in conflicting_changes):
      raise ValueError(
          'Invalid conflicting_changes: all elements must be a '
          'cros_patch.PatchException derivative, got %r'
          % (conflicting_changes,))

    self.is_master = bool(is_master)
    self.pre_cq_trybot = pre_cq_trybot
    self._run = builder_run
    # GLOBAL_DRYRUN overrides the per-instance flag when set.
    self.dryrun = bool(dryrun) or self.GLOBAL_DRYRUN
    # Human-readable queue name used in messages to CL owners.
    if pre_cq_trybot:
      self.queue = 'A trybot'
    elif builder_name == constants.PRE_CQ_LAUNCHER_NAME:
      self.queue = 'The Pre-Commit Queue'
    else:
      self.queue = 'The Commit Queue'

    # See optional args for types of changes.
    self.changes = changes or []
    self.non_manifest_changes = non_os_changes or []
    # Note, we hold onto these CLs since they conflict against our current CLs
    # being tested; if our current ones succeed, we notify the user to deal
    # w/ the conflict.  If the CLs we're testing fail, then there is no
    # reason we can't try these again in the next run.
    self.changes_that_failed_to_apply_earlier = conflicting_changes or []

    # Private vars only used for pickling and self._build_log.
    self._overlays = overlays
    self._build_number = build_number
    self._builder_name = builder_name

    # Set to False if the tree was not open when we acquired changes.
    self.tree_was_open = tree_was_open
+
+  @property
+  def build_log(self):
+    if self._run:
+      return tree_status.ConstructDashboardURL(
+          self._run.GetBuildbotUrl(), self._builder_name, self._build_number)
+
+  @staticmethod
+  def GetGerritHelpersForOverlays(overlays):
+    """Discern the allowed GerritHelpers to use based on the given overlay."""
+    cros_internal = cros = False
+    if overlays in [constants.PUBLIC_OVERLAYS, constants.BOTH_OVERLAYS, False]:
+      cros = True
+
+    if overlays in [constants.PRIVATE_OVERLAYS, constants.BOTH_OVERLAYS]:
+      cros_internal = True
+
+    return HelperPool.SimpleCreate(cros_internal=cros_internal, cros=cros)
+
  def __reduce__(self):
    """Used for pickling to re-create validation pool.

    Returns:
      A (callable, args) tuple per the pickle protocol. The args tuple must
      stay in positional sync with ValidationPool.__init__.
    """
    # NOTE: self._run is specifically excluded from the validation pool
    # pickle. We do not want the un-pickled validation pool to have a reference
    # to its own un-pickled BuilderRun instance. Instead, we want it to refer
    # to the new builder run's metadata instance. This is accomplished by
    # setting the BuilderRun at un-pickle time, in ValidationPool.Load(...).
    return (
        self.__class__,
        (
            self._overlays,
            self.build_root, self._build_number, self._builder_name,
            self.is_master, self.dryrun, self.changes,
            self.non_manifest_changes,
            self.changes_that_failed_to_apply_earlier,
            self.pre_cq_trybot,
            self.tree_was_open))
+
+  @classmethod
+  @failures_lib.SetFailureType(failures_lib.BuilderFailure)
+  def AcquirePreCQPool(cls, *args, **kwargs):
+    """See ValidationPool.__init__ for arguments."""
+    kwargs.setdefault('tree_was_open', True)
+    kwargs.setdefault('pre_cq_trybot', True)
+    kwargs.setdefault('is_master', True)
+    pool = cls(*args, **kwargs)
+    return pool
+
+  @staticmethod
+  def _WaitForQuery(query):
+    """Helper method to return msg to print out when waiting for a |query|."""
+    # Dictionary that maps CQ Queries to msg's to display.
+    if query == constants.CQ_READY_QUERY:
+      return 'new CLs'
+    elif query == constants.THROTTLED_CQ_READY_QUERY:
+      return 'new CQ+2 CLs or the tree to open'
+    else:
+      return 'waiting for tree to open'
+
+  def AcquireChanges(self, gerrit_query, ready_fn, change_filter):
+    """Helper method for AcquirePool. Adds changes to pool based on args.
+
+    Queries gerrit using the given flags, filters out any unwanted changes, and
+    handles draft changes.
+
+    Args:
+      gerrit_query: gerrit query to use.
+      ready_fn: CR function (see constants).
+      change_filter: If set, filters with change_filter(pool, changes,
+        non_manifest_changes) to remove unwanted patches.
+    """
+    # Iterate through changes from all gerrit instances we care about.
+    for helper in self._helper_pool:
+      changes = helper.Query(gerrit_query, sort='lastUpdated')
+      changes.reverse()
+
+      if ready_fn:
+        # The query passed in may include a dictionary of flags to use for
+        # revalidating the query results. We need to do this because Gerrit
+        # caches are sometimes stale and need sanity checking.
+        changes = [x for x in changes if ready_fn(x)]
+
+      # Tell users to publish drafts before marking them commit ready.
+      for change in changes:
+        if change.HasApproval('COMR', ('1', '2')) and change.IsDraft():
+          self.HandleDraftChange(change)
+
+      changes, non_manifest_changes = ValidationPool._FilterNonCrosProjects(
+          changes, git.ManifestCheckout.Cached(self.build_root))
+      self.changes.extend(changes)
+      self.non_manifest_changes.extend(non_manifest_changes)
+
+    # Filter out unwanted changes.
+    self.changes, self.non_manifest_changes = change_filter(
+        self, self.changes, self.non_manifest_changes)
+
+    return self.changes or self.non_manifest_changes
+
  @classmethod
  def AcquirePool(cls, overlays, repo, build_number, builder_name, query,
                  dryrun=False, check_tree_open=True,
                  change_filter=None, builder_run=None):
    """Acquires the current pool from Gerrit.

    Polls Gerrit and checks for which changes are ready to be committed.
    Should only be called from master builders.

    Args:
      overlays: One of constants.VALID_OVERLAYS.
      repo: The repo used to sync, to filter projects, and to apply patches
        against.
      build_number: Corresponding build number for the build.
      builder_name: Builder name on buildbot dashboard.
      query: constants.CQ_READY_QUERY, PRECQ_READY_QUERY, or a custom
        query description of the form (<query_str>, None).
      dryrun: Don't submit anything to gerrit.
      check_tree_open: If True, only return when the tree is open.
      change_filter: If set, use change_filter(pool, changes,
        non_manifest_changes) to filter out unwanted patches.
      builder_run: instance used to record CL actions to metadata and cidb.

    Returns:
      ValidationPool object.

    Raises:
      TreeIsClosedException: if the tree is closed (or throttled, if not
                             |THROTTLED_OK|).
    """
    if change_filter is None:
      # Default filter: pass all changes through unchanged.
      change_filter = lambda _, x, y: (x, y)

    # We choose a longer wait here as we haven't committed to anything yet. By
    # doing this here we can reduce the number of builder cycles.
    timeout = cls.DEFAULT_TIMEOUT
    if builder_run is not None:
      build_id, db = builder_run.GetCIDBHandle()
      if db:
        time_to_deadline = db.GetTimeToDeadline(build_id)
        if time_to_deadline is not None:
          # We must leave enough time before the deadline to allow us to extend
          # the deadline in case we hit this timeout.
          timeout = time_to_deadline - cls.EXTENSION_TIMEOUT_BUFFER

    end_time = time.time() + timeout
    # How long to wait until if the tree is throttled and we want to be more
    # accepting of changes. We leave it as end_time whenever the tree is open.
    tree_throttled_time = end_time
    status = constants.TREE_OPEN

    while True:
      current_time = time.time()
      time_left = end_time - current_time
      # Wait until the tree becomes open.
      if check_tree_open:
        try:
          status = tree_status.WaitForTreeStatus(
              period=cls.SLEEP_TIMEOUT, timeout=time_left,
              throttled_ok=cls.THROTTLED_OK)
          # Manages the timer for accepting CL's >= CR+1 based on tree status.
          # If the tree is not open.
          if status == constants.TREE_OPEN:
            # Reset the timer in case it was changed.
            tree_throttled_time = end_time
          elif tree_throttled_time == end_time:
            # Tree not open and tree_throttled_time not set.
            tree_throttled_time = current_time + cls.CQ_THROTTLED_TIMEOUT
        except timeout_util.TimeoutError:
          raise TreeIsClosedException(
              closed_or_throttled=not cls.THROTTLED_OK)

      # Sync so that we are up-to-date on what is committed.
      repo.Sync()

      # Determine the query to use.
      gerrit_query, ready_fn = query
      tree_was_open = True
      if (status == constants.TREE_THROTTLED and
          query == constants.CQ_READY_QUERY):
        if current_time < tree_throttled_time:
          # Within the throttled grace window: only look for CQ+2 changes.
          gerrit_query, ready_fn = constants.THROTTLED_CQ_READY_QUERY
        else:
          # Note we only apply the tree not open logic after a given
          # window.
          tree_was_open = False
          gerrit_query, ready_fn = constants.CQ_READY_QUERY

      pool = ValidationPool(overlays, repo.directory, build_number,
                            builder_name, True, dryrun, builder_run=builder_run,
                            tree_was_open=tree_was_open)

      if pool.AcquireChanges(gerrit_query, ready_fn, change_filter):
        break

      if dryrun or time_left < 0 or cls.ShouldExitEarly():
        break

      # Iterated through all queries with no changes.
      logging.info('Waiting for %s (%d minutes left)...',
                   cls._WaitForQuery(query), time_left / 60)
      time.sleep(cls.SLEEP_TIMEOUT)

    return pool
+
+  def _GetFailStreak(self):
+    """Returns the fail streak for the validation pool.
+
+    Queries CIDB for the last CQ_SEARCH_HISTORY builds from the current build_id
+    and returns how many of them haven't passed in a row. This is used for
+    tree throttled validation pool logic.
+    """
+    # TODO(sosa): Remove Google Storage Fail Streak Counter.
+    build_id, db = self._run.GetCIDBHandle()
+    if not db:
+      return 0
+
+    builds = db.GetBuildHistory(self._run.config.name,
+                                ValidationPool.CQ_SEARCH_HISTORY,
+                                ignore_build_id=build_id)
+    number_of_failures = 0
+    # Iterate through the ordered list of builds until you get one that is
+    # passed.
+    for build in builds:
+      if build['status'] != constants.BUILDER_STATUS_PASSED:
+        number_of_failures += 1
+      else:
+        break
+
+    return number_of_failures
+
+  def AddPendingCommitsIntoPool(self, manifest):
+    """Add the pending commits from |manifest| into pool.
+
+    Args:
+      manifest: path to the manifest.
+    """
+    manifest_dom = minidom.parse(manifest)
+    pending_commits = manifest_dom.getElementsByTagName(
+        lkgm_manager.PALADIN_COMMIT_ELEMENT)
+    for pc in pending_commits:
+      attr_names = cros_patch.ALL_ATTRS
+      attr_dict = {}
+      for name in attr_names:
+        attr_dict[name] = pc.getAttribute(name)
+      patch = cros_patch.GerritFetchOnlyPatch.FromAttrDict(attr_dict)
+
+      self.changes.append(patch)
+
+  @classmethod
+  def AcquirePoolFromManifest(cls, manifest, overlays, repo, build_number,
+                              builder_name, is_master, dryrun,
+                              builder_run=None):
+    """Acquires the current pool from a given manifest.
+
+    This function assumes that you have already synced to the given manifest.
+
+    Args:
+      manifest: path to the manifest where the pool resides.
+      overlays: One of constants.VALID_OVERLAYS.
+      repo: The repo used to filter projects and to apply patches against.
+      build_number: Corresponding build number for the build.
+      builder_name: Builder name on buildbot dashboard.
+      is_master: Boolean that indicates whether this is a pool for a master.
+        config or not.
+      dryrun: Don't submit anything to gerrit.
+      builder_run: BuilderRun instance used to record CL actions to metadata and
+        cidb.
+
+    Returns:
+      ValidationPool object.
+    """
+    pool = ValidationPool(overlays, repo.directory, build_number, builder_name,
+                          is_master, dryrun, builder_run=builder_run)
+    pool.AddPendingCommitsIntoPool(manifest)
+    return pool
+
+  @classmethod
+  def ShouldExitEarly(cls):
+    """Return whether we should exit early.
+
+    This function is intended to be overridden by tests or by subclasses.
+    """
+    return False
+
+  @staticmethod
+  def _FilterNonCrosProjects(changes, manifest):
+    """Filters changes to a tuple of relevant changes.
+
+    There are many code reviews that are not part of Chromium OS and/or
+    only relevant on a different branch. This method returns a tuple of (
+    relevant reviews in a manifest, relevant reviews not in the manifest). Note
+    that this function must be run while chromite is checked out in a
+    repo-managed checkout.
+
+    Args:
+      changes: List of GerritPatch objects.
+      manifest: The manifest to check projects/branches against.
+
+    Returns:
+      Tuple of (relevant reviews in a manifest,
+                relevant reviews not in the manifest).
+    """
+
+    def IsCrosReview(change):
+      return (change.project.startswith('chromiumos') or
+              change.project.startswith('chromeos'))
+
+    # First we filter to only Chromium OS repositories.
+    changes = [c for c in changes if IsCrosReview(c)]
+
+    changes_in_manifest = []
+    changes_not_in_manifest = []
+    for change in changes:
+      if change.GetCheckout(manifest, strict=False):
+        changes_in_manifest.append(change)
+      elif change.IsMergeable():
+        logging.info('Found non-manifest change %s', change)
+        changes_not_in_manifest.append(change)
+      else:
+        logging.info('Non-manifest change %s is not commit ready yet', change)
+
+    return changes_in_manifest, changes_not_in_manifest
+
  @classmethod
  def _FilterDependencyErrors(cls, errors):
    """Filter out ignorable DependencyError exceptions.

    If a dependency isn't marked as ready, or a dependency fails to apply,
    we only complain after REJECTION_GRACE_PERIOD has passed since the patch
    was uploaded.

    This helps in two situations:
      1) If the developer is in the middle of marking a stack of changes as
         ready, we won't reject their work until the grace period has passed.
      2) If the developer marks a big circular stack of changes as ready, and
         some change in the middle of the stack doesn't apply, the user will
         get a chance to rebase their change before we mark all the changes as
         'not ready'.

    This function filters out dependency errors that can be ignored due to
    the grace period.

    Args:
      errors: List of exceptions to filter.

    Returns:
      List of unfiltered exceptions.
    """
    reject_timestamp = time.time() - cls.REJECTION_GRACE_PERIOD
    results = []
    for error in errors:
      # Tentatively keep the error; it is popped back off below if it turns
      # out to be an ignorable DependencyError.
      results.append(error)
      is_ready = error.patch.HasReadyFlag()
      # Errors are only candidates for filtering while the patch is not yet
      # ready or still within its grace period.
      if not is_ready or reject_timestamp < error.patch.approval_timestamp:
        # Walk the chain of wrapped errors (via each error's .error
        # attribute) looking for a DependencyError anywhere in the chain.
        while error is not None:
          if isinstance(error, cros_patch.DependencyError):
            if is_ready:
              logging.info('Ignoring dependency errors for %s due to grace '
                           'period', error.patch)
            else:
              logging.info('Ignoring dependency errors for %s until it is '
                           'marked trybot ready or commit ready', error.patch)
            # Drop the error we appended at the top of the loop.
            results.pop()
            break
          error = getattr(error, 'error', None)
    return results
+
+  @classmethod
+  def PrintLinksToChanges(cls, changes):
+    """Print links to the specified |changes|.
+
+    This method prints a link to list of |changes| by using the
+    information stored in |changes|. It should not attempt to query
+    Google Storage or Gerrit.
+
+    Args:
+      changes: A list of cros_patch.GerritPatch instances to generate
+        transactions for.
+    """
+    def SortKeyForChanges(change):
+      return (-change.total_fail_count, -change.fail_count,
+              os.path.basename(change.project), change.gerrit_number)
+
+    # Now, sort and print the changes.
+    for change in sorted(changes, key=SortKeyForChanges):
+      project = os.path.basename(change.project)
+      gerrit_number = cros_patch.AddPrefix(change, change.gerrit_number)
+      # We cannot print '@' in the link because it is used to separate
+      # the display text and the URL by the buildbot annotator.
+      author = change.owner_email.replace('@', '-AT-')
+      if (change.owner_email.endswith(constants.GOOGLE_EMAIL) or
+          change.owner_email.endswith(constants.CHROMIUM_EMAIL)):
+        author = change.owner
+
+      s = '%s | %s | %s' % (project, author, gerrit_number)
+
+      # Print a count of how many times a given CL has failed the CQ.
+      if change.total_fail_count:
+        s += ' | fails:%d' % (change.fail_count,)
+        if change.total_fail_count > change.fail_count:
+          s += '(%d)' % (change.total_fail_count,)
+
+      # Add a note if the latest patchset has already passed the CQ.
+      if change.pass_count > 0:
+        s += ' | passed:%d' % change.pass_count
+
+      logging.PrintBuildbotLink(s, change.url)
+
  def ApplyPoolIntoRepo(self, manifest=None):
    """Applies changes from pool into the directory specified by the buildroot.

    This method applies changes in the order specified. If the build
    is running as the master, it also respects the dependency
    order. Otherwise, the changes should already be listed in an order
    that will not break the dependency order.

    Args:
      manifest: Optional manifest passed through to the patch series when
        applying changes.

    Returns:
      True if we managed to apply any changes.
    """
    # applied is a list of applied GerritPatch instances.
    # failed_tot and failed_inflight are lists of PatchException instances.
    applied = []
    failed_tot = []
    failed_inflight = []
    patch_series = PatchSeries(self.build_root, helper_pool=self._helper_pool)

    # Only try a subset of the changes if the tree was throttled.
    max_change_count = len(self.changes)
    if not self.tree_was_open:
      random.shuffle(self.changes)
      fail_streak = self._GetFailStreak()
      # Halve the candidate count for every consecutive CQ failure.
      max_change_count = max(1, len(self.changes) / (2**fail_streak))

    if self.is_master:
      try:
        # pylint: disable=E1123
        applied, failed_tot, failed_inflight = patch_series.Apply(
            self.changes, manifest=manifest, max_change_count=max_change_count)
      except (KeyboardInterrupt, RuntimeError, SystemExit):
        raise
      except Exception as e:
        if mox is not None and isinstance(e, mox.Error):
          raise

        msg = (
            'Unhandled exception occurred while applying changes: %s\n\n'
            'To be safe, we have kicked out all of the CLs, so that the '
            'commit queue does not go into an infinite loop retrying '
            'patches.' % (e,)
        )
        links = cros_patch.GetChangesAsString(self.changes)
        logging.error('%s\nAffected Patches are: %s', msg, links)
        errors = [InternalCQError(patch, msg) for patch in self.changes]
        self._HandleApplyFailure(errors)
        raise

      # Annotate applied changes with their historical CQ pass/fail counts
      # so PrintLinksToChanges can display them.
      _, db = self._run.GetCIDBHandle()
      if db:
        action_history = db.GetActionsForChanges(applied)
        for change in applied:
          change.total_fail_count = clactions.GetCLActionCount(
              change, CQ_PIPELINE_CONFIGS, constants.CL_ACTION_KICKED_OUT,
              action_history, latest_patchset_only=False)
          change.fail_count = clactions.GetCLActionCount(
              change, CQ_PIPELINE_CONFIGS, constants.CL_ACTION_KICKED_OUT,
              action_history)
          # NOTE(review): this is assigned to pass_count but counts
          # CL_ACTION_SUBMIT_FAILED actions -- confirm the intended action
          # constant.
          change.pass_count = clactions.GetCLActionCount(
              change, CQ_PIPELINE_CONFIGS, constants.CL_ACTION_SUBMIT_FAILED,
              action_history)

    else:
      # Slaves do not need to create transactions and should simply
      # apply the changes serially, based on the order that the
      # changes were listed on the manifest.
      for change in self.changes:
        try:
          # pylint: disable=E1123
          patch_series.ApplyChange(change, manifest=manifest)
        except cros_patch.PatchException as e:
          # Fail if any patch cannot be applied.
          self._HandleApplyFailure([InternalCQError(change, e)])
          raise
        else:
          applied.append(change)

    self.RecordPatchesInMetadataAndDatabase(applied)
    self.PrintLinksToChanges(applied)

    if self.is_master and not self.pre_cq_trybot:
      inputs = [[change, self.build_log] for change in applied]
      parallel.RunTasksInProcessPool(self.HandleApplySuccess, inputs)

    # We only filter out dependency errors in the CQ and Pre-CQ masters.
    # On Pre-CQ trybots, we want to reject patches immediately, because
    # otherwise the pre-cq master will think we just dropped the patch
    # on the floor and never tested it.
    if not self.pre_cq_trybot:
      failed_tot = self._FilterDependencyErrors(failed_tot)
      failed_inflight = self._FilterDependencyErrors(failed_inflight)

    if failed_tot:
      logging.info(
          'The following changes could not cleanly be applied to ToT: %s',
          ' '.join([c.patch.id for c in failed_tot]))
      self._HandleApplyFailure(failed_tot)

    if failed_inflight:
      logging.info(
          'The following changes could not cleanly be applied against the '
          'current stack of patches; if this stack fails, they will be tried '
          'in the next run.  Inflight failed changes: %s',
          ' '.join([c.patch.id for c in failed_inflight]))
      for x in failed_inflight:
        self._HandleFailedToApplyDueToInflightConflict(x.patch)

    self.changes_that_failed_to_apply_earlier.extend(failed_inflight)
    self.changes = applied

    return bool(self.changes)
+
+  @staticmethod
+  def Load(filename, builder_run=None):
+    """Loads the validation pool from the file.
+
+    Args:
+      filename: path of file to load from.
+      builder_run: BuilderRun instance to use in unpickled validation pool, used
+        for fetching cidb handle for access to metadata.
+    """
+    with open(filename, 'rb') as p_file:
+      pool = cPickle.load(p_file)
+      # pylint: disable=protected-access
+      pool._run = builder_run
+      return pool
+
+  def Save(self, filename):
+    """Serializes the validation pool."""
+    with open(filename, 'wb') as p_file:
+      cPickle.dump(self, p_file, protocol=cPickle.HIGHEST_PROTOCOL)
+
+  # Note: All submit code, all gerrit code, and basically everything other
+  # than patch resolution/applying needs to use .change_id from patch objects.
+  # Basically all code from this point forward.
  def _SubmitChangeWithDeps(self, patch_series, change, errors, limit_to,
                            reason=None):
    """Submit |change| and its dependencies.

    If you call this function multiple times with the same PatchSeries, each
    CL will only be submitted once.

    Args:
      patch_series: A PatchSeries() object.
      change: The change (a GerritPatch object) to submit.
      errors: A dictionary. This dictionary should contain all patches that have
        encountered errors, and map them to the associated exception object.
      limit_to: The list of patches that were approved by this CQ run. We will
        only consider submitting patches that are in this list.
      reason: string reason for submission to be recorded in cidb. (Should be
        None or constant with name STRATEGY_* from constants.py)

    Returns:
      A copy of the errors object. If new errors have occurred while submitting
      this change, and those errors have prevented a change from being
      submitted, they will be added to the errors object.
    """
    # Find out what patches we need to submit.
    errors = errors.copy()
    try:
      plan = patch_series.CreateTransaction(change, limit_to=limit_to)
    except cros_patch.PatchException as e:
      errors[change] = e
      return errors

    submitted = []
    dep_error = None
    # Skip submission entirely if anything in the plan already failed.
    for dep_change in plan:
      # Has this change failed to submit before?
      dep_error = errors.get(dep_change)
      if dep_error is not None:
        break

    if dep_error is None:
      for dep_change in plan:
        try:
          success = self._SubmitChangeUsingGerrit(dep_change, reason=reason)
          if success or self.dryrun:
            submitted.append(dep_change)
        except (gob_util.GOBError, gerrit.GerritException) as e:
          if getattr(e, 'http_status', None) == httplib.CONFLICT:
            # A conflict saying the change is already merged is benign:
            # treat it as submitted.
            if e.message.rstrip().endswith('change is merged'):
              submitted.append(dep_change)
            else:
              dep_error = PatchConflict(dep_change)
          else:
            dep_error = PatchFailedToSubmit(dep_change, str(e))

        if dep_change not in submitted:
          if dep_error is None:
            # Gerrit reported success yet the change did not land.
            msg = self.INCONSISTENT_SUBMIT_MSG
            dep_error = PatchFailedToSubmit(dep_change, msg)

          # Log any errors we saw.
          logging.error('%s', dep_error)
          errors[dep_change] = dep_error
          break

    if (dep_error is not None and change not in errors and
        change not in submitted):
      # One of the dependencies failed to submit. Report an error.
      errors[change] = cros_patch.DependencyError(change, dep_error)

    # Track submitted patches so that we don't submit them again.
    patch_series.InjectCommittedPatches(submitted)

    # Look for incorrectly submitted patches. We only have this problem
    # when we have a dependency cycle, and we submit one change before
    # realizing that a later change cannot be submitted. For now, just
    # print an error message and notify the developers.
    #
    # If you see this error a lot, consider implementing a best-effort
    # attempt at reverting changes.
    for submitted_change in submitted:
      gdeps, pdeps = patch_series.GetDepChangesForChange(submitted_change)
      for dep in gdeps + pdeps:
        dep_error = errors.get(dep)
        if dep_error is not None:
          error = PatchSubmittedWithoutDeps(submitted_change, dep_error)
          self._HandleIncorrectSubmission(error)
          logging.error('%s was erroneously submitted.', submitted_change)
          break

    return errors
+
  def SubmitChanges(self, changes, check_tree_open=True, throttled_ok=True,
                    reason=None):
    """Submits the given changes to Gerrit.

    Args:
      changes: GerritPatch's to submit.
      check_tree_open: Whether to check that the tree is open before submitting
        changes. If this is False, TreeIsClosedException will never be raised.
      throttled_ok: if |check_tree_open|, treat a throttled tree as open
      reason: string reason for submission to be recorded in cidb. (Should be
        None or constant with name STRATEGY_* from constants.py)

    Returns:
      (submitted, errors) where submitted is a set of changes that were
      submitted, and errors is a map {change: error} containing changes that
      failed to submit.

    Raises:
      TreeIsClosedException: if the tree is closed.
    """
    assert self.is_master, 'Non-master builder calling SubmitPool'
    assert not self.pre_cq_trybot, 'Trybot calling SubmitPool'

    # TODO(pprabhu) It is bad form for master-paladin to do work after its
    # deadline has passed. Extend the deadline after waiting for slave
    # completion and ensure that none of the follow up stages go beyond the
    # deadline.
    if (check_tree_open and not self.dryrun and not
        tree_status.IsTreeOpen(period=self.SLEEP_TIMEOUT,
                               timeout=self.DEFAULT_TIMEOUT,
                               throttled_ok=throttled_ok)):
      raise TreeIsClosedException(
          closed_or_throttled=not throttled_ok)

    # Filter out changes that were modified during the CQ run.
    filtered_changes, errors = self.FilterModifiedChanges(changes)

    patch_series = PatchSeries(self.build_root, helper_pool=self._helper_pool,
                               is_submitting=True)

    patch_series.InjectLookupCache(filtered_changes)

    # Partition the changes into local changes and remote changes.  Local
    # changes have a local repository associated with them, so we can do a
    # batched git push for them.  Remote changes must be submitted via Gerrit.
    by_repo = {}
    for change in filtered_changes:
      by_repo.setdefault(
          patch_series.GetGitRepoForChange(change, strict=False), set()
          ).add(change)
    # Changes with no local repo (keyed under None) go through Gerrit.
    remote_changes = by_repo.pop(None, set())

    by_repo, reapply_errors = patch_series.ReapplyChanges(by_repo)

    submitted_locals, local_submission_errors = self.SubmitLocalChanges(
        by_repo, reason)
    submitted_remotes, remote_errors = self.SubmitRemoteChanges(
        patch_series, remote_changes, reason)

    # Merge errors from every phase and notify the affected developers.
    errors.update(reapply_errors)
    errors.update(local_submission_errors)
    errors.update(remote_errors)
    for patch, error in errors.iteritems():
      logging.error('Could not submit %s', patch)
      self._HandleCouldNotSubmit(patch, error)

    return submitted_locals | submitted_remotes, errors
+
  def SubmitRemoteChanges(self, patch_series, changes, reason):
    """Submits non-manifest changes via Gerrit.

    This function first splits the patches into disjoint transactions so that we
    can submit in parallel. We merge together changes to the same project into
    the same transaction because it helps avoid Gerrit performance problems
    (Gerrit chokes when two people hit submit at the same time in the same
    project).

    Args:
      patch_series: The PatchSeries instance associated with the changes.
      changes: A collection of changes.
      reason: string reason for submission to be recorded in cidb. (Should be
        None or constant with name STRATEGY_* from constants.py)

    Returns:
      (submitted, errors) where submitted is a set of changes that were
      submitted, and errors is a map {change: error} containing changes that
      failed to submit.
    """
    plans, failed = patch_series.CreateDisjointTransactions(
        changes, merge_projects=True)
    errors = {}
    for error in failed:
      errors[error.patch] = error

    # Submit each disjoint transaction in parallel.
    # The manager dict lets worker processes share the error map.
    with parallel.Manager() as manager:
      p_errors = manager.dict(errors)
      def _SubmitPlan(*plan):
        for change in plan:
          p_errors.update(self._SubmitChangeWithDeps(
              patch_series, change, dict(p_errors), plan, reason=reason))
      parallel.RunTasksInProcessPool(_SubmitPlan, plans, processes=4)

      # Anything not recorded as an error is considered submitted.
      submitted_changes = set(changes) - set(p_errors.keys())
      return (submitted_changes, dict(p_errors))
+
+  def SubmitLocalChanges(self, by_repo, reason):
+    """Submit a set of local changes, i.e. changes which are in the manifest.
+
+    Precondition: we must have already checked that all the changes are
+    submittable, such as having a +2 in Gerrit.
+
+    Args:
+      by_repo: A mapping from repo paths to changes in that repo
+      reason: string reason for submission to be recorded in cidb. (Should be
+        None or constant with name STRATEGY_* from constants.py)
+
+    Returns:
+      (submitted, errors) where submitted is a set of changes that were
+      submitted, and errors is a map {change: error} containing changes that
+      failed to submit.
+    """
+    merged_errors = {}
+    submitted = set()
+    for repo, changes in by_repo.iteritems():
+      changes, errors = self._SubmitRepo(repo, changes, reason=reason)
+      submitted |= set(changes)
+      merged_errors.update(errors)
+    return submitted, merged_errors
+
  def _SubmitRepo(self, repo, changes, reason=None):
    """Submit a sequence of changes from the same repository.

    The changes must be from a repository that is checked out locally, we can do
    a single git push, and then verify that Gerrit updated its metadata for each
    patch.

    Args:
      repo: the path to the repository containing the changes
      changes: a sequence of changes from a single repository.
      reason: string reason for submission to be recorded in cidb. (Should be
        None or constant with name STRATEGY_* from constants.py)

    Returns:
      (submitted, errors) where submitted is a set of changes that were
      submitted, and errors is a map {change: error} containing changes that
      failed to submit.
    """
    # Each branch name is wrapped in a 1-tuple because RunTasksInProcessPool
    # takes a per-task argument list.
    branches = set((change.tracking_branch,) for change in changes)
    push_branch = functools.partial(self.PushRepoBranch, repo, changes)
    push_results = parallel.RunTasksInProcessPool(push_branch, branches)

    sha1s = {}
    errors = {}
    # Falsy results are skipped; only truthy (sha1s, errors) pairs from
    # PushRepoBranch are merged here.
    for sha1s_for_branch, branch_errors in filter(bool, push_results):
      sha1s.update(sha1s_for_branch)
      errors.update(branch_errors)

    # Poll Gerrit for each change, passing the local sha1 (if known) as a
    # hint, and record the outcome in cidb.
    for change in changes:
      push_success = change not in errors
      self._CheckChangeWasSubmitted(change, push_success, reason=reason,
                                    sha1=sha1s.get(change))

    return set(changes) - set(errors), errors
+
+  def PushRepoBranch(self, repo, changes, branch):
+    """Pushes a branch of a repo to the remote.
+
+    Args:
+      repo: the path to the repository containing the changes
+      changes: a sequence of changes from a single branch of a repository.
+      branch: the tracking branch name.
+
+    Returns:
+      (sha1, errors) where sha1s is a mapping from changes to their sha1s, and
+      errors is a map {change: error} containing changes that failed to submit.
+    """
+
+    project_url = next(iter(changes)).project_url
+    remote_ref = git.GetTrackingBranch(repo)
+    push_to = git.RemoteRef(project_url, branch)
+    for _ in range(3):
+      # try to resync and push.
+      try:
+        git.SyncPushBranch(repo, remote_ref.remote, remote_ref.ref)
+      except cros_build_lib.RunCommandError:
+        # TODO(phobbs) parse the sync failure output and find which change was
+        # at fault.
+        logging.error('git rebase failed for %s:%s; it is likely that a change '
+                      'was chumped in the middle of the CQ run.',
+                      repo, branch, exc_info=True)
+        break
+
+      try:
+        git.GitPush(repo, 'HEAD', push_to, skip=self.dryrun)
+        return {}
+      except cros_build_lib.RunCommandError:
+        logging.warn('git push failed for %s:%s; was a change chumped in the '
+                     'middle of the CQ run?',
+                     repo, branch, exc_info=True)
+
+    errors = dict(
+        (change, PatchFailedToSubmit(change, 'Failed to push to %s'))
+        for change in changes)
+
+    sha1s = dict(
+        (change, change.GetLocalSHA1(repo, branch))
+        for change in changes)
+
+    return sha1s, errors
+
+  def RecordPatchesInMetadataAndDatabase(self, changes):
+    """Mark all patches as having been picked up in metadata.json and cidb.
+
+    If self._run is None, then this function does nothing.
+    """
+    if not self._run:
+      return
+
+    metadata = self._run.attrs.metadata
+    _, db = self._run.GetCIDBHandle()
+    timestamp = int(time.time())
+
+    for change in changes:
+      metadata.RecordCLAction(change, constants.CL_ACTION_PICKED_UP,
+                              timestamp)
+      # TODO(akeshet): If a separate query for each insert here becomes
+      # a performance issue, consider batch inserting all the cl actions
+      # with a single query.
+      if db:
+        self._InsertCLActionToDatabase(change, constants.CL_ACTION_PICKED_UP)
+
  @classmethod
  def FilterModifiedChanges(cls, changes):
    """Filter out changes that were modified while the CQ was in-flight.

    Args:
      changes: A list of changes (as PatchQuery objects).

    Returns:
      This returns a tuple (unmodified_changes, errors).

      unmodified_changes: A reloaded list of changes, only including mergeable,
                          unmodified and unsubmitted changes.
      errors: A dictionary. This dictionary will contain all patches that have
        encountered errors, and map them to the associated exception object.
    """
    # Reload all of the changes from the Gerrit server so that we have a
    # fresh view of their approval status. This is needed so that our filtering
    # that occurs below will be mostly up-to-date.
    unmodified_changes, errors = [], {}
    reloaded_changes = list(cls.ReloadChanges(changes))
    # Cache keyed by patch identity so reloaded results map back to the
    # original objects.
    old_changes = cros_patch.PatchCache(changes)

    # Sanity check: reloading must yield the same set of changes, in the same
    # order, or something is badly wrong with the query.
    if list(changes) != list(reloaded_changes):
      logging.error('Changes: %s', map(str, changes))
      logging.error('Reloaded changes: %s', map(str, reloaded_changes))
      for change in set(changes) - set(reloaded_changes):
        logging.error('%s disappeared after reloading', change)
      for change in set(reloaded_changes) - set(changes):
        logging.error('%s appeared after reloading', change)
      raise InconsistentReloadException()

    for reloaded_change in reloaded_changes:
      old_change = old_changes[reloaded_change]
      if reloaded_change.IsAlreadyMerged():
        # Already-merged changes are dropped silently (neither returned as
        # unmodified nor recorded as errors).
        logging.warning('%s is already merged. It was most likely chumped '
                        'during the current CQ run.', reloaded_change)
      elif reloaded_change.patch_number != old_change.patch_number:
        # If users upload new versions of a CL while the CQ is in-flight, then
        # their CLs are no longer tested. These CLs should be rejected.
        errors[old_change] = PatchModified(reloaded_change,
                                           reloaded_change.patch_number)
      elif not reloaded_change.IsMergeable():
        # Get the reason why this change is not mergeable anymore.
        errors[old_change] = reloaded_change.GetMergeException()
        errors[old_change].patch = old_change
      else:
        unmodified_changes.append(old_change)

    return unmodified_changes, errors
+
  @classmethod
  def ReloadChanges(cls, changes):
    """Reload the specified |changes| from the server.

    This issues fresh Gerrit queries, so the returned objects reflect current
    server-side state (approvals, patch numbers, merge status).

    Args:
      changes: A list of PatchQuery objects.

    Returns:
      A list of GerritPatch objects.
    """
    return gerrit.GetGerritPatchInfoWithPatchQueries(changes)
+
+  def _SubmitChangeUsingGerrit(self, change, reason=None):
+    """Submits patch using Gerrit Review.
+
+    This uses the Gerrit "submit" API, then waits for the patch to move out of
+    "NEW" state, ideally into "MERGED" status.  It records in CIDB whether the
+    Gerrit's status != "NEW".
+
+    Args:
+      change: GerritPatch to submit.
+      reason: string reason for submission to be recorded in cidb. (Should be
+        None or constant with name STRATEGY_* from constants.py)
+
+    Returns:
+      Whether the push succeeded, indicated by Gerrit review status not being
+      "NEW".
+    """
+    logging.info('Change %s will be submitted', change)
+    self._helper_pool.ForChange(change).SubmitChange(
+        change, dryrun=self.dryrun)
+    return self._CheckChangeWasSubmitted(change, True, reason)
+
+  def _CheckChangeWasSubmitted(self, change, push_success, reason, sha1=None):
+    """Confirms that a change is in "submitted" state in Gerrit.
+
+    First, we force Gerrit to double-check whether the change has been merged,
+    then we poll Gerrit until either the change is merged or we timeout. Then,
+    we update cidb with information about whether the change was pushed
+    successfully.
+
+    Args:
+      change: The change to check
+      push_success: Whether we were successful in pushing the change.
+      reason: string reason for submission to be recorded in cidb. (Should be
+        None or constant with name STRATEGY_* from constants.py)
+      sha1: Optional hint to Gerrit about what sha1 the pushed commit has.
+
+    Returns:
+      Whether the push succeeded and the Gerrit review is not in "NEW" state.
+      Ideally it would be in "MERGED" state, but it is safe to proceed with it
+      only in "SUBMITTED" state.
+    """
+    # TODO(phobbs): Use a helper process to check that Gerrit marked the change
+    # as merged asynchronously.
+    helper = self._helper_pool.ForChange(change)
+
+    # Force Gerrit to check whether the change is merged.
+    gob_util.CheckChange(helper.host, change.gerrit_number, sha1=sha1)
+
+    updated_change = helper.QuerySingleRecord(change.gerrit_number)
+    if push_success and updated_change.status == 'SUBMITTED':
+      def _Query():
+        return helper.QuerySingleRecord(change.gerrit_number)
+      def _Retry(value):
+        return value and value.status == 'SUBMITTED'
+
+      # If we succeeded in pushing but the change is 'NEW' give gerrit some time
+      # to resolve that to 'MERGED' or fail outright.
+      try:
+        updated_change = timeout_util.WaitForSuccess(
+            _Retry, _Query, timeout=SUBMITTED_WAIT_TIMEOUT, period=1)
+      except timeout_util.TimeoutError:
+        # The change really is stuck on submitted, not merged, then.
+        logging.warning('Timed out waiting for gerrit to notice that we'
+                        ' submitted change %s, but status is still "%s".',
+                        change.gerrit_number_str, updated_change.status)
+        helper.SetReview(change, msg='This change was pushed, but we timed out'
+                         'waiting for Gerrit to notice that it was submitted.')
+
+    if push_success and not updated_change.status == 'MERGED':
+      logging.warning(
+          'Change %s was pushed without errors, but gerrit is'
+          ' reporting it with status "%s" (expected "MERGED").',
+          change.gerrit_number_str, updated_change.status)
+      if updated_change.status == 'SUBMITTED':
+        # So far we have never seen a SUBMITTED CL that did not eventually
+        # transition to MERGED.  If it is stuck on SUBMITTED treat as MERGED.
+        logging.info('Proceeding now with the assumption that change %s'
+                     ' will eventually transition to "MERGED".',
+                     change.gerrit_number_str)
+      else:
+        logging.error('Gerrit likely was unable to merge change %s.',
+                      change.gerrit_number_str)
+
+    succeeded = push_success and (updated_change.status != 'NEW')
+    if self._run:
+      self._RecordSubmitInCIDB(change, succeeded, reason)
+    return succeeded
+
+  def _RecordSubmitInCIDB(self, change, succeeded, reason):
+    """Records in CIDB whether the submit succeeded."""
+    action = (constants.CL_ACTION_SUBMITTED if succeeded
+              else constants.CL_ACTION_SUBMIT_FAILED)
+
+    metadata = self._run.attrs.metadata
+    timestamp = int(time.time())
+    metadata.RecordCLAction(change, action, timestamp)
+    _, db = self._run.GetCIDBHandle()
+    # NOTE(akeshet): The same |reason| will be recorded, regardless of whether
+    # the change was submitted successfully or unsuccessfully. This is
+    # probably what we want, because it gives us a way to determine why we
+    # tried to submit changes that failed to submit.
+    if db:
+      self._InsertCLActionToDatabase(change, action, reason)
+
+  def RemoveReady(self, change, reason=None):
+    """Remove the commit ready and trybot ready bits for |change|."""
+    self._helper_pool.ForChange(change).RemoveReady(change, dryrun=self.dryrun)
+    if self._run:
+      metadata = self._run.attrs.metadata
+      timestamp = int(time.time())
+      metadata.RecordCLAction(change, constants.CL_ACTION_KICKED_OUT,
+                              timestamp)
+
+    self._InsertCLActionToDatabase(change, constants.CL_ACTION_KICKED_OUT,
+                                   reason)
+    if self.pre_cq_trybot:
+      self.UpdateCLPreCQStatus(change, constants.CL_STATUS_FAILED)
+
+  def MarkForgiven(self, change, reason=None):
+    """Mark |change| as forgiven with |reason|.
+
+    Args:
+      change: A GerritPatch or GerritPatchTuple object.
+      reason: string reason for submission to be recorded in cidb. (Should be
+        None or constant with name STRATEGY_* from constants.py)
+    """
+    self._InsertCLActionToDatabase(change, constants.CL_ACTION_FORGIVEN, reason)
+
+  def _InsertCLActionToDatabase(self, change, action, reason=None):
+    """If cidb is set up and not None, insert given cl action to cidb.
+
+    Args:
+      change: A GerritPatch or GerritPatchTuple object.
+      action: The action taken, should be one of constants.CL_ACTIONS
+      reason: string reason for submission to be recorded in cidb. (Should be
+        None or constant with name STRATEGY_* from constants.py)
+    """
+    build_id, db = self._run.GetCIDBHandle()
+    if db:
+      db.InsertCLActions(
+          build_id,
+          [clactions.CLAction.FromGerritPatchAndAction(change, action, reason)])
+
+  def SubmitNonManifestChanges(self, check_tree_open=True, reason=None):
+    """Commits changes to Gerrit from Pool that aren't part of the checkout.
+
+    Args:
+      check_tree_open: Whether to check that the tree is open before submitting
+        changes. If this is False, TreeIsClosedException will never be raised.
+      reason: string reason for submission to be recorded in cidb. (Should be
+        None or constant with name STRATEGY_* from constants.py)
+
+    Raises:
+      TreeIsClosedException: if the tree is closed.
+    """
+    self.SubmitChanges(self.non_manifest_changes,
+                       check_tree_open=check_tree_open,
+                       reason=reason)
+
  def SubmitPool(self, check_tree_open=True, throttled_ok=True, reason=None):
    """Commits changes to Gerrit from Pool.  This is only called by a master.

    Args:
      check_tree_open: Whether to check that the tree is open before submitting
        changes. If this is False, TreeIsClosedException will never be raised.
      throttled_ok: if |check_tree_open|, treat a throttled tree as open
      reason: string reason for submission to be recorded in cidb. (Should be
        None or constant with name STRATEGY_* from constants.py)

    Raises:
      TreeIsClosedException: if the tree is closed.
      FailedToSubmitAllChangesException: if we can't submit a change.
    """
    # Note that SubmitChanges can throw an exception if it can't
    # submit all changes; in that particular case, don't mark the inflight
    # failures patches as failed in gerrit- some may apply next time we do
    # a CQ run (since the submit state has changed, we have no way of
    # knowing).  They *likely* will still fail, but this approach tries
    # to minimize wasting the developers time.
    submitted, errors = self.SubmitChanges(self.changes,
                                           check_tree_open=check_tree_open,
                                           throttled_ok=throttled_ok,
                                           reason=reason)
    # Any submit error aborts the build after surfacing the partial count.
    if errors:
      raise FailedToSubmitAllChangesException(errors, len(submitted))

    if self.changes_that_failed_to_apply_earlier:
      self._HandleApplyFailure(self.changes_that_failed_to_apply_earlier)
+
  def SubmitPartialPool(self, changes, messages, changes_by_config, failing,
                        inflight, no_stat, reason=None):
    """If the build failed, push any CLs that don't care about the failure.

    In this function we calculate what CLs are definitely innocent and submit
    those CLs.

    Each project can specify a list of stages it does not care about in its
    COMMIT-QUEUE.ini file. Changes to that project will be submitted even if
    those stages fail.

    Args:
      changes: A list of GerritPatch instances to examine.
      messages: A list of BuildFailureMessage or NoneType objects from
        the failed slaves.
      changes_by_config: A dictionary of relevant changes indexed by the
        config names.
      failing: Names of the builders that failed.
      inflight: Names of the builders that timed out.
      no_stat: Set of builder names of slave builders that had status None.
      reason: string reason for submission to be recorded in cidb. (Should be
        None or constant with name STRATEGY_* from constants.py)

    Returns:
      A set of the non-submittable changes.
    """
    fully_verified = triage_lib.CalculateSuspects.GetFullyVerifiedChanges(
        changes, changes_by_config, failing, inflight, no_stat,
        messages, self.build_root)
    if fully_verified:
      logging.info('The following changes will be submitted using '
                   'board-aware submission logic: %s',
                   cros_patch.GetChangesAsString(fully_verified))
    # TODO(akeshet): We have no way to record different submission Reasons for
    # different CLs, if we had multiple different BAS strategies at work for
    # them. If we add new strategies to GetFullyVerifiedChanges
    # strategy above, we should move responsibility to determining |reason| from
    # the caller of SubmitPartialPool to either SubmitPartialPool or
    # GetFullyVerifiedChanges.
    self.SubmitChanges(fully_verified, reason=reason)

    # Return the list of non-submittable changes.
    return set(changes) - set(fully_verified)
+
+  def _HandleApplyFailure(self, failures):
+    """Handles changes that were not able to be applied cleanly.
+
+    Args:
+      failures: List of cros_patch.PatchException instances to handle.
+    """
+    for failure in failures:
+      logging.info('Change %s did not apply cleanly.', failure.patch)
+      if self.is_master:
+        self._HandleCouldNotApply(failure)
+
+  def _HandleCouldNotApply(self, failure):
+    """Handler for when Paladin fails to apply a change.
+
+    This handler notifies set CodeReview-2 to the review forcing the developer
+    to re-upload a rebased change.
+
+    Args:
+      failure: cros_patch.PatchException instance to operate upon.
+    """
+    msg = ('%(queue)s failed to apply your change in %(build_log)s .'
+           ' %(failure)s')
+    self.SendNotification(failure.patch, msg, failure=failure)
+    self.RemoveReady(failure.patch)
+
+  def _HandleIncorrectSubmission(self, failure):
+    """Handler for when Paladin incorrectly submits a change."""
+    msg = ('%(queue)s incorrectly submitted your change in %(build_log)s .'
+           '  %(failure)s')
+    self.SendNotification(failure.patch, msg, failure=failure)
+    self.RemoveReady(failure.patch)
+
+  def HandleDraftChange(self, change):
+    """Handler for when the latest patch set of |change| is not published.
+
+    This handler removes the commit ready bit from the specified changes and
+    sends the developer a message explaining why.
+
+    Args:
+      change: GerritPatch instance to operate upon.
+    """
+    msg = ('%(queue)s could not apply your change because the latest patch '
+           'set is not published. Please publish your draft patch set before '
+           'marking your commit as ready.')
+    self.SendNotification(change, msg)
+    self.RemoveReady(change)
+
+  def _HandleFailedToApplyDueToInflightConflict(self, change):
+    """Handler for when a patch conflicts with another patch in the CQ run.
+
+    This handler simply comments on the affected change, explaining why it
+    is being skipped in the current CQ run.
+
+    Args:
+      change: GerritPatch instance to operate upon.
+    """
+    msg = ('%(queue)s could not apply your change because it conflicts with '
+           'other change(s) that it is testing. If those changes do not pass '
+           'your change will be retried. Otherwise it will be rejected at '
+           'the end of this CQ run.')
+    self.SendNotification(change, msg)
+
  def HandleValidationTimeout(self, changes=None, sanity=True):
    """Handles changes that timed out.

    If sanity is set, then this handler removes the commit ready bit
    from infrastructure changes and sends the developer a message explaining
    why.

    Args:
      changes: A list of cros_patch.GerritPatch instances to mark as failed.
        By default, mark all of the changes as failed.
      sanity: A boolean indicating whether the build was considered sane. If
        not sane, none of the changes will have their CommitReady bit modified.
    """
    if changes is None:
      changes = self.changes

    logging.info('Validation timed out for all changes.')
    base_msg = ('%(queue)s timed out while verifying your change in '
                '%(build_log)s . This means that a supporting builder did not '
                'finish building your change within the specified timeout.')

    # Changes in |blamed| can plausibly be at fault for the infra failure;
    # only those lose their ready bit (and only when the build was sane).
    blamed = triage_lib.CalculateSuspects.FilterChangesForInfraFail(changes)

    for change in changes:
      logging.info('Validation timed out for change %s.', change)
      if sanity and change in blamed:
        msg = ('%s If you believe this happened in error, just re-mark your '
               'commit as ready. Your change will then get automatically '
               'retried.' % base_msg)
        self.SendNotification(change, msg)
        self.RemoveReady(change)
      else:
        # Everything else is forgiven: the CQ retries automatically and no
        # blame is assigned.
        msg = ('NOTE: The Commit Queue will retry your change automatically.'
               '\n\n'
               '%s The build failure may have been caused by infrastructure '
               'issues, so your change will not be blamed for the failure.'
               % base_msg)
        self.SendNotification(change, msg)
        self.MarkForgiven(change)
+
  def SendNotification(self, change, msg, **kwargs):
    """Interpolate |msg| with |kwargs| and post it to |change| as a message.

    Args:
      change: The change to comment on.
      msg: A %-style template string; '%(build_log)s' and '%(queue)s' are
        filled in by default if not supplied in |kwargs|.
      **kwargs: Values substituted into |msg|.

    Raises:
      TypeError/ValueError: re-raised (with added context) if |msg| cannot be
        interpolated with the supplied values.
    """
    if not kwargs.get('build_log'):
      kwargs['build_log'] = self.build_log
    kwargs.setdefault('queue', self.queue)
    d = dict(**kwargs)
    try:
      msg %= d
    except (TypeError, ValueError) as e:
      logging.error(
          "Generation of message %s for change %s failed: dict was %r, "
          "exception %s", msg, change, d, e)
      # Re-raise with the same exception type but a message that includes
      # the template and substitution dict for easier debugging.
      raise e.__class__(
          "Generation of message %s for change %s failed: dict was %r, "
          "exception %s" % (msg, change, d, e))
    PaladinMessage(msg, change, self._helper_pool.ForChange(change)).Send(
        self.dryrun)
+
+  def HandlePreCQSuccess(self, changes):
+    """Handler that is called when |changes| passed all pre-cq configs."""
+    msg = ('%(queue)s has successfully verified your change.')
+    def ProcessChange(change):
+      self.SendNotification(change, msg)
+
+    inputs = [[change] for change in changes]
+    parallel.RunTasksInProcessPool(ProcessChange, inputs)
+
+  def HandlePreCQPerConfigSuccess(self):
+    """Handler that is called when a pre-cq tryjob verifies a change."""
+    def ProcessChange(change):
+      # Note: This function has no unit test coverage. Be careful when
+      # modifying.
+      if self._run:
+        metadata = self._run.attrs.metadata
+        timestamp = int(time.time())
+        metadata.RecordCLAction(change, constants.CL_ACTION_VERIFIED,
+                                timestamp)
+        self._InsertCLActionToDatabase(change, constants.CL_ACTION_VERIFIED)
+
+    # Process the changes in parallel.
+    inputs = [[change] for change in self.changes]
+    parallel.RunTasksInProcessPool(ProcessChange, inputs)
+
+  def _HandleCouldNotSubmit(self, change, error=''):
+    """Handler that is called when Paladin can't submit a change.
+
+    This should be rare, but if an admin overrides the commit queue and commits
+    a change that conflicts with this change, it'll apply, build/validate but
+    receive an error when submitting.
+
+    Args:
+      change: GerritPatch instance to operate upon.
+      error: The reason why the change could not be submitted.
+    """
+    self.SendNotification(
+        change,
+        '%(queue)s failed to submit your change in %(build_log)s . '
+        '%(error)s', error=error)
+    self.RemoveReady(change)
+
+  @staticmethod
+  def _CreateValidationFailureMessage(pre_cq_trybot, change, suspects, messages,
+                                      sanity=True, infra_fail=False,
+                                      lab_fail=False, no_stat=None,
+                                      retry=False):
+    """Create a message explaining why a validation failure occurred.
+
+    Args:
+      pre_cq_trybot: Whether the builder is a Pre-CQ trybot. (Note: The Pre-CQ
+        launcher is NOT considered a Pre-CQ trybot.)
+      change: The change we want to create a message for.
+      suspects: The set of suspect changes that we think broke the build.
+      messages: A list of build failure messages from supporting builders.
+        These must be BuildFailureMessage objects or NoneType objects.
+      sanity: A boolean indicating whether the build was considered sane. If
+        not sane, none of the changes will have their CommitReady bit modified.
+      infra_fail: The build failed purely due to infrastructure failures.
+      lab_fail: The build failed purely due to test lab infrastructure failures.
+      no_stat: A list of builders which failed prematurely without reporting
+        status.
+      retry: Whether we should retry automatically.
+
+    Returns:
+      A string that communicates what happened.
+    """
+    msg = []
+    if no_stat:
+      msg.append('The following build(s) did not start or failed prematurely:')
+      msg.append(', '.join(no_stat))
+
+    if messages:
+      # Build a list of error messages. We don't want to build a ridiculously
+      # long comment, as Gerrit will reject it. See http://crbug.com/236831
+      max_error_len = 20000 / max(1, len(messages))
+      msg.append('The following build(s) failed:')
+      for message in map(str, messages):
+        if len(message) > max_error_len:
+          message = message[:max_error_len] + '... (truncated)'
+        msg.append(message)
+
+    # Create a list of changes other than this one that might be guilty.
+    # Limit the number of suspects to 20 so that the list of suspects isn't
+    # ridiculously long.
+    max_suspects = 20
+    other_suspects = set(suspects) - set([change])
+    if len(other_suspects) < max_suspects:
+      other_suspects_str = cros_patch.GetChangesAsString(other_suspects)
+    else:
+      other_suspects_str = ('%d other changes. See the blamelist for more '
+                            'details.' % (len(other_suspects),))
+
+    if not sanity:
+      msg.append('The build was consider not sane because the sanity check '
+                 'builder(s) failed. Your change will not be blamed for the '
+                 'failure.')
+      assert retry
+    elif lab_fail:
+      msg.append('The build encountered Chrome OS Lab infrastructure issues. '
+                 ' Your change will not be blamed for the failure.')
+      assert retry
+    else:
+      if infra_fail:
+        msg.append('The build failure may have been caused by infrastructure '
+                   'issues and/or bad %s changes.' % constants.INFRA_PROJECTS)
+
+      if change in suspects:
+        if other_suspects_str:
+          msg.append('Your change may have caused this failure. There are '
+                     'also other changes that may be at fault: %s'
+                     % other_suspects_str)
+        else:
+          msg.append('This failure was probably caused by your change.')
+
+          msg.append('Please check whether the failure is your fault. If your '
+                     'change is not at fault, you may mark it as ready again.')
+      else:
+        if len(suspects) == 1:
+          msg.append('This failure was probably caused by %s'
+                     % other_suspects_str)
+        elif len(suspects) > 0:
+          msg.append('One of the following changes is probably at fault: %s'
+                     % other_suspects_str)
+
+        assert retry
+
+    if retry:
+      bot = 'The Pre-Commit Queue' if pre_cq_trybot else 'The Commit Queue'
+      msg.insert(0, 'NOTE: %s will retry your change automatically.' % bot)
+
+    return '\n\n'.join(msg)
+
+  def _ChangeFailedValidation(self, change, messages, suspects, sanity,
+                              infra_fail, lab_fail, no_stat):
+    """Handles a validation failure for an individual change.
+
+    Args:
+      change: The change to mark as failed.
+      messages: A list of build failure messages from supporting builders.
+          These must be BuildFailureMessage objects.
+      suspects: The list of changes that are suspected of breaking the build.
+      sanity: A boolean indicating whether the build was considered sane. If
+        not sane, none of the changes will have their CommitReady bit modified.
+      infra_fail: The build failed purely due to infrastructure failures.
+      lab_fail: The build failed purely due to test lab infrastructure failures.
+      no_stat: A list of builders which failed prematurely without reporting
+        status.
+    """
+    retry = not sanity or lab_fail or change not in suspects
+    msg = self._CreateValidationFailureMessage(
+        self.pre_cq_trybot, change, suspects, messages,
+        sanity, infra_fail, lab_fail, no_stat, retry)
+    self.SendNotification(change, '%(details)s', details=msg)
+    if retry:
+      self.MarkForgiven(change)
+    else:
+      self.RemoveReady(change)
+
+  def HandleValidationFailure(self, messages, changes=None, sanity=True,
+                              no_stat=None):
+    """Handles a list of validation failure messages from slave builders.
+
+    This handler parses a list of failure messages from our list of builders
+    and calculates which changes were likely responsible for the failure. The
+    changes that were responsible for the failure have their Commit Ready bit
+    stripped and the other changes are left marked as Commit Ready.
+
+    Args:
+      messages: A list of build failure messages from supporting builders.
+          These must be BuildFailureMessage objects or NoneType objects.
+      changes: A list of cros_patch.GerritPatch instances to mark as failed.
+        By default, mark all of the changes as failed.
+      sanity: A boolean indicating whether the build was considered sane. If
+        not sane, none of the changes will have their CommitReady bit modified.
+      no_stat: A list of builders which failed prematurely without reporting
+        status. If not None, this implies there were infrastructure issues.
+    """
+    if changes is None:
+      changes = self.changes
+
+    candidates = []
+
+    if self.pre_cq_trybot:
+      _, db = self._run.GetCIDBHandle()
+      action_history = []
+      if db:
+        action_history = db.GetActionsForChanges(changes)
+
+      for change in changes:
+        # Don't reject changes that have already passed the pre-cq.
+        pre_cq_status = clactions.GetCLPreCQStatus(
+            change, action_history)
+        if pre_cq_status == constants.CL_STATUS_PASSED:
+          continue
+        candidates.append(change)
+    else:
+      candidates.extend(changes)
+
+    # Determine the cause of the failures and the changes that are likely at
+    # fault for the failure.
+    lab_fail = triage_lib.CalculateSuspects.OnlyLabFailures(messages, no_stat)
+    infra_fail = triage_lib.CalculateSuspects.OnlyInfraFailures(
+        messages, no_stat)
+    suspects = triage_lib.CalculateSuspects.FindSuspects(
+        candidates, messages, infra_fail=infra_fail, lab_fail=lab_fail,
+        sanity=sanity)
+
+    # Send out failure notifications for each change.
+    inputs = [[change, messages, suspects, sanity, infra_fail,
+               lab_fail, no_stat] for change in candidates]
+    parallel.RunTasksInProcessPool(self._ChangeFailedValidation, inputs)
+
+  def HandleApplySuccess(self, change, build_log=None):
+    """Handler for when Paladin successfully applies (picks up) a change.
+
+    This handler notifies a developer that their change is being tried as
+    part of a Paladin run defined by a build_log.
+
+    Args:
+      change: GerritPatch instance to operate upon.
+      build_log: The URL to the build log for the run that picked up
+        the change.
+    """
+    msg = ('%(queue)s has picked up your change. '
+           'You can follow along at %(build_log)s .')
+    self.SendNotification(change, msg, build_log=build_log)
+
+  def UpdateCLPreCQStatus(self, change, status):
+    """Update the pre-CQ |status| of |change|."""
+    action = clactions.TranslatePreCQStatusToAction(status)
+    self._InsertCLActionToDatabase(change, action)
+
+  def CreateDisjointTransactions(self, manifest, changes, max_txn_length=None):
+    """Create a list of disjoint transactions from the changes in the pool.
+
+    Args:
+      manifest: Manifest to use.
+      changes: List of changes to use.
+      max_txn_length: The maximum length of any given transaction.  By default,
+        do not limit the length of transactions.
+
+    Returns:
+      A list of disjoint transactions. Each transaction can be tried
+      independently, without involving patches from other transactions.
+      Each change in the pool will be included in exactly one of the
+      unless the patch does not apply for some reason.
+    """
+    patches = PatchSeries(self.build_root, forced_manifest=manifest)
+    plans, failed = patches.CreateDisjointTransactions(
+        changes, max_txn_length=max_txn_length)
+    failed = self._FilterDependencyErrors(failed)
+    if failed:
+      self._HandleApplyFailure(failed)
+    return plans
+
+  def RecordIrrelevantChanges(self, changes):
+    """Records |changes| irrelevant to the slave build into cidb.
+
+    Args:
+      changes: A set of irrelevant changes to record.
+    """
+    if changes:
+      logging.info('The following changes are irrelevant to this build: %s',
+                   cros_patch.GetChangesAsString(changes))
+    else:
+      logging.info('All changes are considered relevant to this build.')
+
+    for change in changes:
+      self._InsertCLActionToDatabase(change,
+                                     constants.CL_ACTION_IRRELEVANT_TO_SLAVE)
+
+
+class PaladinMessage(object):
+  """Object used to send messages to developers about their changes."""
+
+  # URL where Paladin documentation is stored.
+  _PALADIN_DOCUMENTATION_URL = ('http://www.chromium.org/developers/'
+                                'tree-sheriffs/sheriff-details-chromium-os/'
+                                'commit-queue-overview')
+
+  # Gerrit can't handle commands over 32768 bytes. See http://crbug.com/236831
+  MAX_MESSAGE_LEN = 32000
+
+  def __init__(self, message, patch, helper):
+    if len(message) > self.MAX_MESSAGE_LEN:
+      message = message[:self.MAX_MESSAGE_LEN] + '... (truncated)'
+    self.message = message
+    self.patch = patch
+    self.helper = helper
+
+  def _ConstructPaladinMessage(self):
+    """Adds any standard Paladin messaging to an existing message."""
+    return self.message + ('\n\nCommit queue documentation: %s' %
+                           self._PALADIN_DOCUMENTATION_URL)
+
+  def Send(self, dryrun):
+    """Posts a comment to a gerrit review."""
+    body = {
+        'message': self._ConstructPaladinMessage(),
+        'notify': 'OWNER',
+    }
+    path = 'changes/%s/revisions/%s/review' % (
+        self.patch.gerrit_number, self.patch.revision)
+    if dryrun:
+      logging.info('Would have sent %r to %s', body, path)
+      return
+    gob_util.FetchUrl(self.helper.host, path, reqtype='POST', body=body)
diff --git a/cbuildbot/validation_pool_unittest b/cbuildbot/validation_pool_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cbuildbot/validation_pool_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cbuildbot/validation_pool_unittest.py b/cbuildbot/validation_pool_unittest.py
new file mode 100644
index 0000000..228c5e4
--- /dev/null
+++ b/cbuildbot/validation_pool_unittest.py
@@ -0,0 +1,2024 @@
+# Copyright (c) 2011-2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module that contains unittests for validation_pool module."""
+
+from __future__ import print_function
+
+import collections
+import contextlib
+import copy
+import functools
+import httplib
+import itertools
+import mock
+import mox
+import os
+import pickle
+import random
+import tempfile
+import time
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import metadata_lib
+from chromite.cbuildbot import repository
+from chromite.cbuildbot import tree_status
+from chromite.cbuildbot import validation_pool
+from chromite.cbuildbot import triage_lib
+from chromite.lib import cidb
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import fake_cidb
+from chromite.lib import gerrit
+from chromite.lib import git
+from chromite.lib import gob_util
+from chromite.lib import gs_unittest
+from chromite.lib import parallel
+from chromite.lib import parallel_unittest
+from chromite.lib import partial_mock
+from chromite.lib import patch as cros_patch
+from chromite.lib import patch_unittest
+
+
+site_config = config_lib.GetConfig()
+
+
+_GetNumber = iter(itertools.count()).next
+# Without this some lambdas defined in constants will not be the same as
+# constants defined in this module. For comparisons, lambdas must be the same
+# function.
+validation_pool.constants = constants
+
+def GetTestJson(change_id=None):
+  """Get usable fake Gerrit patch JSON data.
+
+  Args:
+    change_id: If given, force this ChangeId
+  """
+  data = copy.deepcopy(patch_unittest.FAKE_PATCH_JSON)
+  if change_id is not None:
+    data['id'] = str(change_id)
+  return data
+
+
+class MockManifest(object):
+  """Helper class for Mocking Manifest objects."""
+
+  def __init__(self, path, **kwargs):
+    self.root = path
+    for key, attr in kwargs.iteritems():
+      setattr(self, key, attr)
+
+
+class FakeBuilderRun(object):
+  """A lightweight partial implementation of BuilderRun.
+
+  validation_pool.ValidationPool makes use of a BuilderRun to access
+  cidb and metadata, but does not need to make use of the extensive
+  other BuilderRun features. This lightweight partial reimplementation
+  allows unit tests to be much faster.
+  """
+  def __init__(self, fake_db=None):
+    self.fake_db = fake_db
+    metadata_dict = {'buildbot-master-name': constants.WATERFALL_INTERNAL}
+    FakeAttrs = collections.namedtuple('FakeAttrs', ['metadata'])
+    self.attrs = FakeAttrs(metadata=metadata_lib.CBuildbotMetadata(
+        metadata_dict=metadata_dict))
+    FakeConfig = collections.namedtuple('FakeConfig', ['name'])
+    self.config = FakeConfig(name='master-paladin')
+    self.GetBuildbotUrl = lambda: constants.WATERFALL_INTERNAL
+
+  def GetCIDBHandle(self):
+    """Get the build_id and cidb handle, if available.
+
+    Returns:
+      A (build_id, CIDBConnection) tuple if fake_db is set up and a build_id is
+      known in metadata. Otherwise, (None, None).
+    """
+    try:
+      build_id = self.attrs.metadata.GetValue('build_id')
+    except KeyError:
+      return (None, None)
+
+    if build_id is not None and self.fake_db:
+      return (build_id, self.fake_db)
+
+    return (None, None)
+
+
+# pylint: disable=protected-access
+class MoxBase(patch_unittest.MockPatchBase, cros_test_lib.MoxTestCase):
+  """Base class for other test suites with numbers mocks patched in."""
+
+  def setUp(self):
+    self.build_root = 'fakebuildroot'
+    self.manager = parallel.Manager()
+    self.PatchObject(gob_util, 'CreateHttpConn',
+                     side_effect=AssertionError('Test should not contact GoB'))
+    self.PatchObject(gob_util, 'CheckChange')
+    self.PatchObject(tree_status, 'IsTreeOpen', return_value=True)
+    self.PatchObject(tree_status, 'WaitForTreeStatus',
+                     return_value=constants.TREE_OPEN)
+    self.fake_db = fake_cidb.FakeCIDBConnection()
+    cidb.CIDBConnectionFactory.SetupMockCidb(self.fake_db)
+    # Suppress all gerrit access; having this occur is generally a sign
+    # the code is either misbehaving, or that the tests are bad.
+    self.mox.StubOutWithMock(gerrit.GerritHelper, 'Query')
+    self.gs_mock = self.StartPatcher(gs_unittest.GSContextMock())
+
+  def tearDown(self):
+    cidb.CIDBConnectionFactory.ClearMock()
+
+  def MakeHelper(self, cros_internal=None, cros=None):
+    # pylint: disable=W0201
+    if cros_internal:
+      cros_internal = self.mox.CreateMock(gerrit.GerritHelper)
+      cros_internal.version = '2.2'
+      cros_internal.remote = site_config.params.INTERNAL_REMOTE
+    if cros:
+      cros = self.mox.CreateMock(gerrit.GerritHelper)
+      cros.remote = site_config.params.EXTERNAL_REMOTE
+      cros.version = '2.2'
+    return validation_pool.HelperPool(cros_internal=cros_internal,
+                                      cros=cros)
+
+
+class FakePatch(partial_mock.PartialMock):
+  """Mocks out dependency and fetch methods of GitRepoPatch.
+
+  Usage: set FakePatch.parents, .cq and .build_root per patch, and set
+  FakePatch.assertEqual to your TestCase's assertEqual method.  The behavior of
+  `GerritDependencies`, `PaladinDependencies` and `Fetch` depends on the patch
+  id.
+
+  """
+  TARGET = 'chromite.lib.patch.GitRepoPatch'
+  ATTRS = ('GerritDependencies', 'PaladinDependencies', 'Fetch')
+
+  parents = {}
+  cq = {}
+  build_root = None
+  assertEqual = None
+
+  def PreStart(self):
+    FakePatch.parents = {}
+    FakePatch.cq = {}
+
+  def PreStop(self):
+    FakePatch.build_root = None
+    FakePatch.assertEqual = None
+
+  def GerritDependencies(self, patch):
+    return map(cros_patch.ParsePatchDep, self.parents[patch.id])
+
+  def PaladinDependencies(self, patch, path):
+    self._assertPath(patch, path)
+    return map(cros_patch.ParsePatchDep, self.cq[patch.id])
+
+  def Fetch(self, patch, path):
+    self._assertPath(patch, path)
+    return patch.sha1
+
+  def _assertPath(self, patch, path):
+    self.assertEqual(path,
+                     os.path.join(self.build_root, patch.project))
+
+
+class FakeGerritPatch(FakePatch):
+  """Mocks out the "GerritDependencies" method of GerritPatch.
+
+  This is necessary because GerritPatch overrides the GerritDependencies method.
+  """
+  TARGET = 'chromite.lib.patch.GerritPatch'
+  ATTRS = ('GerritDependencies',)
+
+
+# pylint:disable=too-many-ancestors
+class PatchSeriesTestCase(MoxBase, patch_unittest.UploadedLocalPatchTestCase):
+  """Base class for tests that need to test PatchSeries."""
+
+  @contextlib.contextmanager
+  def _ValidateTransactionCall(self, _changes):
+    yield
+
+  def setUp(self):
+    self.StartPatcher(parallel_unittest.ParallelMock())
+
+  def GetPatchSeries(self, helper_pool=None):
+    if helper_pool is None:
+      helper_pool = self.MakeHelper(cros_internal=True, cros=True)
+    series = validation_pool.PatchSeries(self.build_root, helper_pool)
+
+    # Suppress transactions.
+    series._Transaction = self._ValidateTransactionCall
+    series.GetGitRepoForChange = \
+        lambda change, **kwargs: os.path.join(self.build_root, change.project)
+
+    return series
+
+  def _ValidatePatchApplyManifest(self, value):
+    self.assertTrue(isinstance(value, MockManifest))
+    self.assertEqual(value.root, self.build_root)
+    return True
+
+  def SetPatchApply(self, patch, trivial=False):
+    self.mox.StubOutWithMock(patch, 'ApplyAgainstManifest')
+    return patch.ApplyAgainstManifest(
+        mox.Func(self._ValidatePatchApplyManifest),
+        trivial=trivial)
+
+  def assertResults(self, series, changes, applied=(), failed_tot=(),
+                    failed_inflight=(), frozen=True):
+    manifest = MockManifest(self.build_root)
+    result = series.Apply(changes, frozen=frozen, manifest=manifest)
+
+    _GetIds = lambda seq: [x.id for x in seq]
+    _GetFailedIds = lambda seq: _GetIds(x.patch for x in seq)
+
+    applied_result = _GetIds(result[0])
+    failed_tot_result, failed_inflight_result = map(_GetFailedIds, result[1:])
+
+    applied = _GetIds(applied)
+    failed_tot = _GetIds(failed_tot)
+    failed_inflight = _GetIds(failed_inflight)
+
+    self.maxDiff = None
+    self.assertEqual(applied, applied_result)
+    self.assertItemsEqual(failed_inflight, failed_inflight_result)
+    self.assertItemsEqual(failed_tot, failed_tot_result)
+    return result
+
+
+class TestUploadedLocalPatch(PatchSeriesTestCase):
+  """Test the interaction between uploaded local git patches and PatchSeries."""
+
+  def testFetchChanges(self):
+    """Test fetching uploaded local patches."""
+    git1, git2, patch1 = self._CommonGitSetup()
+    patch2 = self.CommitFile(git1, 'monkeys2', 'foon2')
+    patch3 = self._MkPatch(git1, None, original_sha1=patch1.sha1)
+    patch4 = self._MkPatch(git1, None, original_sha1=patch2.sha1)
+    self.assertEqual(patch3.id, patch1.id)
+    self.assertEqual(patch4.id, patch2.id)
+    self.assertNotEqual(patch3.id, patch4.id)
+    series = self.GetPatchSeries()
+    series.GetGitRepoForChange = lambda change, **kwargs: git2
+    patches = series.FetchChanges([patch3, patch4])
+    self.assertEqual(len(patches), 2)
+    self.assertEqual(patches[0].id, patch3.id)
+    self.assertEqual(patches[1].id, patch4.id)
+
+
+def FakeFetchChangesForRepo(fetched_changes, by_repo, repo):
+  """Fake version of the "PatchSeries._FetchChangesForRepo" method.
+
+  This does nothing to the changes and simply copies them into the output
+  dict.
+  """
+  for c in by_repo[repo]:
+    fetched_changes[c.id] = c
+
+
+class TestPatchSeries(PatchSeriesTestCase):
+  """Tests resolution and applying logic of validation_pool.ValidationPool."""
+
+  def setUp(self):
+    self.StartPatcher(FakePatch())
+    self.PatchObject(FakePatch, 'assertEqual', new=self.assertEqual)
+    self.PatchObject(FakePatch, 'build_root', new=self.build_root)
+    self.PatchObject(validation_pool, '_FetchChangesForRepo',
+                     new=FakeFetchChangesForRepo)
+    self.StartPatcher(FakeGerritPatch())
+
+  def SetPatchDeps(self, patch, parents=(), cq=()):
+    """Set the dependencies of |patch|.
+
+    Args:
+      patch: The patch to process.
+      parents: A set of strings to set as parents of |patch|.
+      cq: A set of strings to set as paladin dependencies of |patch|.
+    """
+    FakePatch.parents[patch.id] = parents
+    FakePatch.cq[patch.id] = cq
+
+  def testApplyWithDeps(self):
+    """Test that we can apply changes correctly and respect deps.
+
+    This tests a simple out-of-order change where change1 depends on change2
+    but tries to get applied before change2.  What should happen is that
+    we should notice change2 is a dep of change1 and apply it first.
+    """
+    series = self.GetPatchSeries()
+
+    patch1, patch2 = patches = self.GetPatches(2)
+
+    self.SetPatchDeps(patch2)
+    self.SetPatchDeps(patch1, [patch2.id])
+
+    self.SetPatchApply(patch2)
+    self.SetPatchApply(patch1)
+
+    self.mox.ReplayAll()
+    self.assertResults(series, patches, [patch2, patch1])
+    self.mox.VerifyAll()
+
+  def testSha1Deps(self):
+    """Test that we can apply changes correctly and respect sha1 deps.
+
+    This tests a simple out-of-order change where change1 depends on change2
+    but tries to get applied before change2.  What should happen is that
+    we should notice change2 is a dep of change1 and apply it first.
+    """
+    series = self.GetPatchSeries()
+
+    patch1, patch2, patch3 = patches = self.GetPatches(3)
+    patch3.remote = site_config.params.INTERNAL_REMOTE
+
+    self.SetPatchDeps(patch1, [patch2.sha1])
+    self.SetPatchDeps(patch2, ['*%s' % patch3.sha1])
+    self.SetPatchDeps(patch3)
+
+    self.SetPatchApply(patch2)
+    self.SetPatchApply(patch3)
+    self.SetPatchApply(patch1)
+
+    self.mox.ReplayAll()
+    self.assertResults(series, patches, [patch3, patch2, patch1])
+    self.mox.VerifyAll()
+
+  def testGerritNumberDeps(self):
+    """Test that we can apply CQ-DEPEND changes in the right order."""
+    series = self.GetPatchSeries()
+
+    patch1, patch2, patch3 = patches = self.GetPatches(3)
+
+    self.SetPatchDeps(patch1, cq=[patch2.id])
+    self.SetPatchDeps(patch2, cq=[patch3.gerrit_number])
+    self.SetPatchDeps(patch3, cq=[patch1.gerrit_number])
+
+    self.SetPatchApply(patch1)
+    self.SetPatchApply(patch2)
+    self.SetPatchApply(patch3)
+
+    self.mox.ReplayAll()
+    self.assertResults(series, patches, patches[::-1])
+    self.mox.VerifyAll()
+
+  def testGerritLazyMapping(self):
+    """Given a patch lacking a gerrit number, via gerrit, map it to that change.
+
+    Literally, this ensures that local patches pushed up- lacking a gerrit
+    number- are mapped back to a changeid via asking gerrit for that number,
+    then the local matching patch is used if available.
+    """
+    series = self.GetPatchSeries()
+
+    patch1 = self.MockPatch()
+    self.PatchObject(patch1, 'LookupAliases', return_value=[patch1.id])
+    patch2 = self.MockPatch(change_id=int(patch1.change_id[1:]))
+    patch3 = self.MockPatch()
+
+    self.SetPatchDeps(patch3, cq=[patch2.gerrit_number])
+    self.SetPatchDeps(patch2)
+    self.SetPatchDeps(patch1)
+
+    self.SetPatchApply(patch1)
+    self.SetPatchApply(patch3)
+
+    self._SetQuery(series, patch2, query=patch2.gerrit_number).AndReturn(patch2)
+
+    self.mox.ReplayAll()
+    applied = self.assertResults(series, [patch1, patch3], [patch1, patch3])[0]
+    self.assertTrue(applied[0] is patch1)
+    self.assertTrue(applied[1] is patch3)
+    self.mox.VerifyAll()
+
+  def testCrosGerritDeps(self, cros_internal=True):
+    """Test that we can apply changes correctly and respect deps.
+
+    This tests a simple out-of-order change where change1 depends on change3
+    but tries to get applied before it.  What should happen is that
+    we should notice the dependency and apply change3 first.
+    """
+    helper_pool = self.MakeHelper(cros_internal=cros_internal, cros=True)
+    series = self.GetPatchSeries(helper_pool=helper_pool)
+
+    patch1 = self.MockPatch(remote=site_config.params.EXTERNAL_REMOTE)
+    patch2 = self.MockPatch(remote=site_config.params.INTERNAL_REMOTE)
+    patch3 = self.MockPatch(remote=site_config.params.EXTERNAL_REMOTE)
+    patches = [patch1, patch2, patch3]
+    if cros_internal:
+      applied_patches = [patch3, patch2, patch1]
+    else:
+      applied_patches = [patch3, patch1]
+
+    self.SetPatchDeps(patch1, [patch3.id])
+    self.SetPatchDeps(patch2)
+    self.SetPatchDeps(patch3, cq=[patch2.id])
+
+    if cros_internal:
+      self.SetPatchApply(patch2)
+    self.SetPatchApply(patch1)
+    self.SetPatchApply(patch3)
+
+    self.mox.ReplayAll()
+    self.assertResults(series, patches, applied_patches)
+    self.mox.VerifyAll()
+
+  def testExternalCrosGerritDeps(self):
+    """Test that we exclude internal deps on external trybot."""
+    self.testCrosGerritDeps(cros_internal=False)
+
+  @staticmethod
+  def _SetQuery(series, change, query=None):
+    helper = series._helper_pool.GetHelper(change.remote)
+    query = change.id if query is None else query
+    return helper.QuerySingleRecord(query, must_match=True)
+
+  def testApplyMissingDep(self):
+    """Test that we don't try to apply a change without met dependencies.
+
+    Patch2 is in the validation pool that depends on Patch1 (which is not)
+    Nothing should get applied.
+    """
+    series = self.GetPatchSeries()
+
+    patch1, patch2 = self.GetPatches(2)
+
+    self.SetPatchDeps(patch2, [patch1.id])
+    self._SetQuery(series, patch1).AndReturn(patch1)
+
+    self.mox.ReplayAll()
+    self.assertResults(series, [patch2],
+                       [], [patch2])
+    self.mox.VerifyAll()
+
+  def testApplyWithCommittedDeps(self):
+    """Test that we apply a change with dependency already committed."""
+    series = self.GetPatchSeries()
+
+    # Use for basic commit check.
+    patch1 = self.GetPatches(1, is_merged=True)
+    patch2 = self.GetPatches(1)
+
+    self.SetPatchDeps(patch2, [patch1.id])
+    self._SetQuery(series, patch1).AndReturn(patch1)
+    self.SetPatchApply(patch2)
+
+    # Used to ensure that an uncommitted change put in the lookup cache
+    # isn't invalidly pulled into the graph...
+    patch3, patch4, patch5 = self.GetPatches(3)
+
+    self._SetQuery(series, patch3).AndReturn(patch3)
+    self.SetPatchDeps(patch4, [patch3.id])
+    self.SetPatchDeps(patch5, [patch3.id])
+
+    self.mox.ReplayAll()
+    self.assertResults(series, [patch2, patch4, patch5], [patch2],
+                       [patch4, patch5])
+    self.mox.VerifyAll()
+
+  def testCyclicalDeps(self):
+    """Verify that the machinery handles cycles correctly."""
+    series = self.GetPatchSeries()
+
+    patch1, patch2, patch3 = patches = self.GetPatches(3)
+
+    self.SetPatchDeps(patch1, [patch2.id])
+    self.SetPatchDeps(patch2, cq=[patch3.id])
+    self.SetPatchDeps(patch3, [patch1.id])
+
+    self.SetPatchApply(patch1)
+    self.SetPatchApply(patch2)
+    self.SetPatchApply(patch3)
+
+    self.mox.ReplayAll()
+    self.assertResults(series, patches, [patch2, patch1, patch3])
+    self.mox.VerifyAll()
+
+  def testComplexCyclicalDeps(self, fail=False):
+    """Verify handling of two interdependent cycles."""
+    series = self.GetPatchSeries()
+
+    # Create two cyclically interdependent patch chains.
+    # Example: Two patch series A1<-A2<-A3<-A4 and B1<-B2<-B3<-B4. A1 has a
+    # CQ-DEPEND on B4 and B1 has a CQ-DEPEND on A4, so all of the patches must
+    # be committed together.
+    chain1, chain2 = chains = self.GetPatches(4), self.GetPatches(4)
+    for chain in chains:
+      (other_chain,) = [x for x in chains if x != chain]
+      self.SetPatchDeps(chain[0], [], cq=[other_chain[-1].id])
+      for i in range(1, len(chain)):
+        self.SetPatchDeps(chain[i], [chain[i-1].id])
+
+    # Apply the second-last patch first, so that the last patch in the series
+    # will be pulled in via the CQ-DEPEND on the other patch chain.
+    to_apply = [chain1[-2]] + [x for x in (chain1 + chain2) if x != chain1[-2]]
+
+    # Mark all the patches but the last ones as applied successfully.
+    for patch in chain1 + chain2[:-1]:
+      self.SetPatchApply(patch)
+
+    if fail:
+      # Pretend that chain2[-1] failed to apply.
+      res = self.SetPatchApply(chain2[-1])
+      res.AndRaise(cros_patch.ApplyPatchException(chain1[-1]))
+      applied = []
+      failed_tot = to_apply
+    else:
+      # We apply the patches in this order since the last patch in chain1
+      # is pulled in via CQ-DEPEND.
+      self.SetPatchApply(chain2[-1])
+      applied = chain1[:2] + chain2[:-1] + chain1[2:] + chain2[-1:]
+      failed_tot = []
+
+    self.mox.ReplayAll()
+    self.assertResults(series, to_apply, applied=applied, failed_tot=failed_tot)
+    self.mox.VerifyAll()
+
+  def testFailingComplexCyclicalDeps(self):
+    """Verify handling of failing interlocked cycles."""
+    self.testComplexCyclicalDeps(fail=True)
+
+  def testApplyPartialFailures(self):
+    """Test that can apply changes correctly when one change fails to apply.
+
+    This tests a simple change order where 1 depends on 2 and 1 fails to apply.
+    Only 1 should get tried as 2 will abort once it sees that 1 can't be
+    applied.  3 with no dependencies should go through fine.
+
+    Since patch1 fails to apply, we should also get a call to handle the
+    failure.
+    """
+    series = self.GetPatchSeries()
+
+    patch1, patch2, patch3, patch4 = patches = self.GetPatches(4)
+
+    self.SetPatchDeps(patch1)
+    self.SetPatchDeps(patch2, [patch1.id])
+    self.SetPatchDeps(patch3)
+    self.SetPatchDeps(patch4)
+
+    self.SetPatchApply(patch1).AndRaise(
+        cros_patch.ApplyPatchException(patch1))
+
+    self.SetPatchApply(patch3)
+    self.SetPatchApply(patch4).AndRaise(
+        cros_patch.ApplyPatchException(patch1, inflight=True))
+
+    self.mox.ReplayAll()
+    self.assertResults(series, patches,
+                       [patch3], [patch2, patch1], [patch4])
+    self.mox.VerifyAll()
+
  def testComplexApply(self):
    """More complex deps test.

    This tests a total of 2 change chains where the first change we see
    only has a partial chain with the 3rd change having the whole chain i.e.
    1->2, 3->1->2.  Since we get these in the order 1,2,3,4,5 the order we
    should apply is 2,1,3,4,5.

    This test also checks the patch order to verify that Apply re-orders
    correctly based on the chain.
    """
    series = self.GetPatchSeries()

    patch1, patch2, patch3, patch4, patch5 = patches = self.GetPatches(5)

    # Chain one: 3 -> 1 -> 2 via Gerrit deps; chain two: 4 -> 5 via CQ-DEPEND.
    self.SetPatchDeps(patch1, [patch2.id])
    self.SetPatchDeps(patch2)
    self.SetPatchDeps(patch3, [patch1.id, patch2.id])
    self.SetPatchDeps(patch4, cq=[patch5.id])
    self.SetPatchDeps(patch5)

    # All five patches are expected to apply successfully.
    for patch in (patch2, patch1, patch3, patch4, patch5):
      self.SetPatchApply(patch)

    self.mox.ReplayAll()
    self.assertResults(
        series, patches, [patch2, patch1, patch3, patch5, patch4])
    self.mox.VerifyAll()
+
+  def testApplyStandalonePatches(self):
+    """Simple apply of two changes with no dependent CL's."""
+    series = self.GetPatchSeries()
+
+    patches = self.GetPatches(3)
+
+    for patch in patches:
+      self.SetPatchDeps(patch)
+
+    for patch in patches:
+      self.SetPatchApply(patch)
+
+    self.mox.ReplayAll()
+    self.assertResults(series, patches, patches)
+    self.mox.VerifyAll()
+
  def testResetCheckouts(self):
    """Tests resetting git repositories to origin."""
    series = self.GetPatchSeries()

    repo_path, _, _ = self._CommonGitSetup()
    # Create a local commit so HEAD diverges from the remote branch.
    self.CommitFile(repo_path, 'aoeu', 'asdf')

    def _GetHeadAndRemote():
      # Latest commit log entry for local HEAD and for the 'cros' remote.
      head = git.RunGit(repo_path, ['log', 'HEAD', '-n1']).output
      remote = git.RunGit(repo_path, ['log', 'cros', '-n1']).output
      return head, remote

    head, remote = _GetHeadAndRemote()
    self.assertNotEqual(head, remote)

    # Fake a manifest with a single checkout pointing at the test repo;
    # the checkout object is indexed like a dict for 'tracking_branch'.
    series.manifest = mock.Mock()
    series.manifest.ListCheckouts.return_value = [mock.Mock(
        GetPath=mock.Mock(return_value=repo_path),
        __getitem__=lambda _self, k: {'tracking_branch': 'cros/master'}[k]
    )]

    def _MapStar(f, argss):
      # Serial, in-process stand-in for parallel.RunTasksInProcessPool.
      return [f(*args) for args in argss]

    with mock.patch.object(parallel, 'RunTasksInProcessPool', new=_MapStar):
      series.ResetCheckouts('master')

    # verify that the checkout is reset.
    head, remote = _GetHeadAndRemote()
    self.assertEqual(head, remote)
+
def MakePool(overlays=constants.PUBLIC_OVERLAYS, build_number=1,
             builder_name='foon', is_master=True, dryrun=True,
             fake_db=None, **kwargs):
  """Helper for creating ValidationPool objects for tests.

  Args:
    overlays: Overlay setting forwarded to the pool.
    build_number: Build number forwarded to the pool.
    builder_name: Builder name forwarded to the pool.
    is_master: Whether the pool acts as the CQ master.
    dryrun: Whether the pool runs in dryrun mode.
    fake_db: Optional fake cidb connection; when given, a build row is
      inserted and its id recorded in the run metadata.
    **kwargs: Extra arguments forwarded to the ValidationPool constructor;
      'changes' defaults to [] and 'build_root' to '/fake_root'.

  Returns:
    A configured validation_pool.ValidationPool instance.
  """
  kwargs.setdefault('changes', [])
  build_root = kwargs.pop('build_root', '/fake_root')

  builder_run = FakeBuilderRun(fake_db)
  if fake_db:
    build_id = fake_db.InsertBuild(
        builder_name, constants.WATERFALL_INTERNAL, build_number,
        'build-config', 'bot hostname')
    builder_run.attrs.metadata.UpdateWithDict({'build_id': build_id})

  pool = validation_pool.ValidationPool(
      overlays, build_root, build_number, builder_name, is_master,
      dryrun, builder_run=builder_run, **kwargs)
  return pool
+
+
class MockPatchSeries(partial_mock.PartialMock):
  """Mock the PatchSeries functions."""
  TARGET = 'chromite.cbuildbot.validation_pool.PatchSeries'
  ATTRS = ('GetDepsForChange', '_GetGerritPatch', '_LookupHelper')

  def __init__(self):
    partial_mock.PartialMock.__init__(self)
    # Per-patch stubbed dependency lists: Gerrit deps and CQ-DEPEND deps.
    self.deps = {}
    self.cq_deps = {}

  def SetGerritDependencies(self, patch, deps):
    """Add |deps| to the Gerrit dependencies of |patch|."""
    self.deps[patch] = deps

  def SetCQDependencies(self, patch, deps):
    """Add |deps| to the CQ dependencies of |patch|."""
    self.cq_deps[patch] = deps

  def GetDepsForChange(self, _inst, patch):
    """Return the stubbed (gerrit_deps, cq_deps) pair for |patch|."""
    gerrit_deps = self.deps.get(patch, [])
    cq_deps = self.cq_deps.get(patch, [])
    return gerrit_deps, cq_deps

  def _GetGerritPatch(self, _inst, dep, **_kwargs):
    """Deps are already patch objects here; hand them straight back."""
    return dep

  _LookupHelper = mock.MagicMock()
+
+
class FakeValidationPool(partial_mock.PartialMock):
  """Mocks out ValidationPool's interaction with cidb."""
  TARGET = 'chromite.cbuildbot.validation_pool.ValidationPool'
  ATTRS = ['_InsertCLActionToDatabase']

  def _InsertCLActionToDatabase(self, *args, **kwargs):
    """No-op stand-in: tests don't need CL actions recorded in cidb."""
+
+
class TestSubmitChange(MoxBase):
  """Test suite related to submitting changes."""

  def setUp(self):
    self.orig_timeout = validation_pool.SUBMITTED_WAIT_TIMEOUT
    self.pool_mock = self.StartPatcher(FakeValidationPool())
    # Shrink the wait timeout so the SUBMITTED retry loop finishes quickly.
    validation_pool.SUBMITTED_WAIT_TIMEOUT = 4

  def tearDown(self):
    # Restore the module-level constant patched in setUp.
    validation_pool.SUBMITTED_WAIT_TIMEOUT = self.orig_timeout

  def _TestSubmitChange(self, results, build_id=31337):
    """Test submitting a change with the given results.

    Args:
      results: Sequence of gerrit status strings the mocked query returns,
        one entry per poll (e.g. 'SUBMITTED', 'MERGED').
      build_id: Build id stored in the run metadata.

    Returns:
      Whatever _SubmitChangeUsingGerrit returns for the scripted scenario.
    """
    results = [cros_test_lib.EasyAttr(status=r) for r in results]
    change = self.MockPatch(change_id=12345, patch_number=1)
    pool = validation_pool.ValidationPool(
        constants.VALID_OVERLAYS[0],
        build_root=None,
        build_number=0,
        builder_name='',
        is_master=False,
        dryrun=False)
    pool._run = FakeBuilderRun(self.fake_db)
    pool._run.attrs.metadata.UpdateWithDict({'build_id': build_id})
    pool._helper_pool = self.mox.CreateMock(validation_pool.HelperPool)
    helper = self.mox.CreateMock(validation_pool.gerrit.GerritHelper)
    pool._helper_pool.host = ''
    helper.host = ''

    # Prepare replay script.
    pool._helper_pool.ForChange(change).AndReturn(helper)
    pool._helper_pool.ForChange(change).AndReturn(helper)
    helper.SubmitChange(change, dryrun=False)
    pool._InsertCLActionToDatabase(change, mox.IgnoreArg(), mox.IgnoreArg())
    for result in results:
      helper.QuerySingleRecord(change.gerrit_number).AndReturn(result)
    # A change that never leaves SUBMITTED gets a review comment posted.
    if results[-1]['status'] == 'SUBMITTED':
      helper.SetReview(change, msg=mox.IgnoreArg())
    self.mox.ReplayAll()

    # Verify results.
    retval = validation_pool.ValidationPool._SubmitChangeUsingGerrit(
        pool, change, reason=mox.IgnoreArg())
    self.mox.VerifyAll()
    return retval

  def testSubmitChangeMerged(self):
    """Submit one change to gerrit, status MERGED."""
    self.assertTrue(self._TestSubmitChange(['MERGED']))

  def testSubmitChangeSubmitted(self):
    """Submit one change to gerrit, stuck on SUBMITTED."""
    # The query will be retried 1 more time than query timeout.
    results = ['SUBMITTED' for _i in
               xrange(validation_pool.SUBMITTED_WAIT_TIMEOUT + 1)]
    self.assertTrue(self._TestSubmitChange(results))

  def testSubmitChangeSubmittedToMerged(self):
    """Submit one change to gerrit, status SUBMITTED then MERGED."""
    results = ['SUBMITTED', 'SUBMITTED', 'MERGED']
    self.assertTrue(self._TestSubmitChange(results))

  def testSubmitChangeFailed(self):
    """Submit one change to gerrit, reported back as NEW."""
    self.assertFalse(self._TestSubmitChange(['NEW']))
+
+
class ValidationFailureOrTimeout(MoxBase):
  """Tests that HandleValidationFailure and HandleValidationTimeout functions.

  These tests check that HandleValidationTimeout and HandleValidationFailure
  reject (i.e. zero out the CQ field) of the correct number of patches, under
  various circumstances.
  """

  _PATCH_MESSAGE = 'Your patch failed.'
  _BUILD_MESSAGE = 'Your build failed.'

  def setUp(self):
    self._patches = self.GetPatches(3)
    self._pool = MakePool(changes=self._patches, fake_db=self.fake_db)

    # By default every patch is a suspect; individual tests override this.
    self.PatchObject(
        triage_lib.CalculateSuspects, 'FindSuspects',
        return_value=self._patches)
    self.PatchObject(validation_pool.ValidationPool, 'SendNotification')
    self.remove = self.PatchObject(gerrit.GerritHelper, 'RemoveReady')
    self.PatchObject(gerrit, 'GetGerritPatchInfoWithPatchQueries',
                     lambda x: x)
    self.PatchObject(triage_lib.CalculateSuspects, 'OnlyLabFailures',
                     return_value=False)
    self.PatchObject(triage_lib.CalculateSuspects, 'OnlyInfraFailures',
                     return_value=False)
    self.StartPatcher(parallel_unittest.ParallelMock())
    # Sanity check: no CL actions have been recorded yet.
    self._AssertActions(self._patches, [])

  def _AssertActions(self, changes, actions):
    """Assert that each change in |changes| has |actions|."""
    for change in changes:
      action_history = self.fake_db.GetActionsForChanges([change])
      self.assertEqual([x.action for x in action_history], actions)

  def testPatchesWereRejectedByFailure(self):
    """Tests that all patches are rejected by failure."""
    self._pool.HandleValidationFailure([self._BUILD_MESSAGE])
    self.assertEqual(len(self._patches), self.remove.call_count)
    self._AssertActions(self._patches, [constants.CL_ACTION_KICKED_OUT])

  def testPatchesWereRejectedByTimeout(self):
    """Tests that all patches are rejected when validation times out."""
    self._pool.HandleValidationTimeout()
    self.assertEqual(len(self._patches), self.remove.call_count)
    self._AssertActions(self._patches, [constants.CL_ACTION_KICKED_OUT])

  def testOnlyChromitePatchesWereRejectedByTimeout(self):
    """Tests that a non-chromite patch is forgiven on timeout, not rejected."""
    self._patches[-1].project = 'chromiumos/tacos'
    self._pool.HandleValidationTimeout()
    self.assertEqual(len(self._patches) - 1, self.remove.call_count)
    self._AssertActions(self._patches[:-1], [constants.CL_ACTION_KICKED_OUT])
    self._AssertActions(self._patches[-1:], [constants.CL_ACTION_FORGIVEN])

  def testNoSuspectsWithFailure(self):
    """Tests no change is blamed when there is no suspect."""
    self.PatchObject(triage_lib.CalculateSuspects, 'FindSuspects',
                     return_value=[])
    self._pool.HandleValidationFailure([self._BUILD_MESSAGE])
    self.assertEqual(0, self.remove.call_count)
    self._AssertActions(self._patches, [constants.CL_ACTION_FORGIVEN])

  def testPreCQ(self):
    """Tests that a pre-CQ trybot doesn't reject patches that already passed."""
    for change in self._patches:
      self._pool.UpdateCLPreCQStatus(change, constants.CL_STATUS_PASSED)
    self._pool.pre_cq_trybot = True
    self._pool.HandleValidationFailure([self._BUILD_MESSAGE])
    self.assertEqual(0, self.remove.call_count)
    self._AssertActions(self._patches, [constants.CL_ACTION_PRE_CQ_PASSED])

  def testPatchesWereNotRejectedByInsaneFailure(self):
    """Tests that patches are forgiven when the build was not sane."""
    self._pool.HandleValidationFailure([self._BUILD_MESSAGE], sanity=False)
    self.assertEqual(0, self.remove.call_count)
    self._AssertActions(self._patches, [constants.CL_ACTION_FORGIVEN])
+
+
class TestCoreLogic(MoxBase):
  """Tests resolution and applying logic of validation_pool.ValidationPool."""

  def setUp(self):
    # Stub out the PatchSeries apply entry points; individual tests record
    # the expectations they need against them.
    self.mox.StubOutWithMock(validation_pool.PatchSeries, 'Apply')
    self.mox.StubOutWithMock(validation_pool.PatchSeries, 'ApplyChange')
    self.patch_mock = self.StartPatcher(MockPatchSeries())
    funcs = ['SendNotification', '_SubmitChangeUsingGerrit']
    for func in funcs:
      self.mox.StubOutWithMock(validation_pool.ValidationPool, func)
    self.PatchObject(gerrit, 'GetGerritPatchInfoWithPatchQueries',
                     side_effect=lambda x: x)
    self.StartPatcher(parallel_unittest.ParallelMock())

  def MakePool(self, *args, **kwargs):
    """Helper for creating ValidationPool objects for Mox tests."""
    handlers = kwargs.pop('handlers', False)
    kwargs['build_root'] = self.build_root
    pool = MakePool(*args, **kwargs)
    funcs = ['HandleApplySuccess', '_HandleApplyFailure',
             '_HandleCouldNotApply', '_HandleCouldNotSubmit',
             '_HandleFailedToApplyDueToInflightConflict']
    if handlers:
      for func in funcs:
        self.mox.StubOutWithMock(pool, func)
    return pool

  def MakeFailure(self, patch, inflight=True):
    """Return an ApplyPatchException wrapping |patch|."""
    return cros_patch.ApplyPatchException(patch, inflight=inflight)

  def GetPool(self, changes, applied=(), tot=(), inflight=(),
              max_change_count=None, **kwargs):
    """Return a pool with PatchSeries.Apply scripted to the given outcome.

    Records mox expectations so that Apply returns |applied| plus failures
    built from |tot| (ToT) and |inflight|, and the matching _Handle* hooks
    are expected to fire.
    """
    if not max_change_count:
      max_change_count = len(changes)

    pool = self.MakePool(changes=changes, fake_db=self.fake_db, **kwargs)
    applied = list(applied)
    tot = [self.MakeFailure(x, inflight=False) for x in tot]
    inflight = [self.MakeFailure(x, inflight=True) for x in inflight]
    # pylint: disable=E1120,E1123
    validation_pool.PatchSeries.Apply(
        changes, manifest=mox.IgnoreArg(), max_change_count=max_change_count
        ).AndReturn((applied, tot, inflight))

    for patch in applied:
      pool.HandleApplySuccess(patch, mox.IgnoreArg()).AndReturn(None)

    if tot:
      pool._HandleApplyFailure(tot).AndReturn(None)

    for failure in inflight:
      pool._HandleFailedToApplyDueToInflightConflict(
          failure.patch).AndReturn(None)

    # We stash this on the pool object so we can reuse it during validation.
    # We could stash this in the test instances, but that would break
    # for any tests that do multiple pool instances.

    pool._test_data = (changes, applied, tot, inflight)

    return pool

  def testApplySlavePool(self):
    """Verifies that slave calls ApplyChange() directly for each patch."""
    slave_pool = self.MakePool(is_master=False)
    patches = self.GetPatches(3)
    slave_pool.changes = patches
    for patch in patches:
      # pylint: disable=E1120, E1123
      validation_pool.PatchSeries.ApplyChange(patch, manifest=mox.IgnoreArg())

    self.mox.ReplayAll()
    self.assertEqual(True, slave_pool.ApplyPoolIntoRepo())
    self.mox.VerifyAll()

  def runApply(self, pool, result):
    """Run ApplyPoolIntoRepo on |pool| and check it against the stashed data."""
    self.assertEqual(result, pool.ApplyPoolIntoRepo())
    self.assertEqual(pool.changes, pool._test_data[1])
    failed_inflight = pool.changes_that_failed_to_apply_earlier
    expected_inflight = set(pool._test_data[3])
    # Intersect the results, since it's possible there were failures recorded
    # that weren't related to the ApplyPoolIntoRepo call.
    self.assertEqual(set(failed_inflight).intersection(expected_inflight),
                     expected_inflight)

    self.assertEqual(pool.changes, pool._test_data[1])

  def testPatchSeriesInteraction(self):
    """Verify the interaction between PatchSeries and ValidationPool.

    Effectively, this validates data going into PatchSeries, and coming back
    out; verifies the hand off to _Handle* functions, but no deeper.
    """
    patches = self.GetPatches(3)

    apply_pool = self.GetPool(patches, applied=patches, handlers=True)
    all_inflight = self.GetPool(patches, inflight=patches, handlers=True)
    all_tot = self.GetPool(patches, tot=patches, handlers=True)
    mixed = self.GetPool(patches, tot=patches[0:1], inflight=patches[1:2],
                         applied=patches[2:3], handlers=True)

    self.mox.ReplayAll()
    self.runApply(apply_pool, True)
    self.runApply(all_inflight, False)
    self.runApply(all_tot, False)
    self.runApply(mixed, True)
    self.mox.VerifyAll()

  def testHandleApplySuccess(self):
    """Validate steps taken for successful application."""
    patch = self.GetPatches(1)
    pool = self.MakePool(fake_db=self.fake_db)
    pool.SendNotification(patch, mox.StrContains('has picked up your change'),
                          build_log=mox.IgnoreArg())
    self.mox.ReplayAll()
    pool.HandleApplySuccess(patch, build_log=mox.IgnoreArg())
    self.mox.VerifyAll()

  def testHandleApplyFailure(self):
    """Verify master pools notify apply failures while slave pools don't."""
    failures = [cros_patch.ApplyPatchException(x) for x in self.GetPatches(4)]

    notified_patches = failures[:2]
    unnotified_patches = failures[2:]
    master_pool = self.MakePool(dryrun=False)
    slave_pool = self.MakePool(is_master=False)

    self.mox.StubOutWithMock(gerrit.GerritHelper, 'RemoveReady')

    for failure in notified_patches:
      master_pool.SendNotification(
          failure.patch,
          mox.StrContains('failed to apply your change'),
          failure=mox.IgnoreArg())
      # This pylint suppression shouldn't be necessary, but pylint invalidly
      # thinks that the first arg isn't passed in; we suppress it to work
      # around the pylint bug.
      # pylint: disable=E1120
      gerrit.GerritHelper.RemoveReady(failure.patch, dryrun=False)

    self.mox.ReplayAll()
    master_pool._HandleApplyFailure(notified_patches)
    slave_pool._HandleApplyFailure(unnotified_patches)
    self.mox.VerifyAll()

  def _setUpSubmit(self):
    """Create a pool primed with applied patches and prior apply failures."""
    pool = self.MakePool(dryrun=False, handlers=True)
    patches = self.GetPatches(3)
    failed = self.GetPatches(3)
    pool.changes = patches[:]
    # While we don't do anything w/ these patches, that's
    # intentional; we're verifying that it isn't submitted
    # if there is a failure.
    pool.changes_that_failed_to_apply_earlier = failed[:]

    return (pool, patches, failed)

  def testSubmitPoolFailures(self):
    """Tests that a fatal exception is raised."""
    pool, patches, _failed = self._setUpSubmit()
    patch1, patch2, patch3 = patches

    # patch2's submit fails; patch3 gets no submit expectation at all, only
    # the could-not-submit handler.
    pool._SubmitChangeUsingGerrit(patch1, reason=None).AndReturn(True)
    pool._SubmitChangeUsingGerrit(patch2, reason=None).AndReturn(False)

    pool._HandleCouldNotSubmit(patch2, mox.IgnoreArg()).InAnyOrder()
    pool._HandleCouldNotSubmit(patch3, mox.IgnoreArg()).InAnyOrder()

    # pylint: disable=E1120,E1123
    validation_pool.PatchSeries.Apply(set()).AndReturn(([], [], []))
    self.mox.ReplayAll()

    mock_manifest = mock.MagicMock()
    with mock.patch.object(git.ManifestCheckout, 'Cached', new=mock_manifest):
      self.assertRaises(validation_pool.FailedToSubmitAllChangesException,
                        pool.SubmitPool)
    self.mox.VerifyAll()

  def testSubmitPool(self):
    """Tests that we can submit a pool of patches."""
    pool, patches, failed = self._setUpSubmit()
    reason = 'fake reason'

    for patch in patches:
      pool._SubmitChangeUsingGerrit(patch, reason=reason).AndReturn(True)

    pool._HandleApplyFailure(failed)

    # pylint: disable=E1120,E1123
    validation_pool.PatchSeries.Apply(set()).AndReturn(([], [], []))
    self.mox.ReplayAll()
    mock_manifest = mock.MagicMock()
    with mock.patch.object(git.ManifestCheckout, 'Cached', new=mock_manifest):
      pool.SubmitPool(reason=reason)
    self.mox.VerifyAll()

  def testSubmitNonManifestChanges(self):
    """Simple test to make sure we can submit non-manifest changes."""
    pool, patches, _failed = self._setUpSubmit()
    pool.non_manifest_changes = patches[:]
    reason = 'fake reason'

    for patch in patches:
      pool._SubmitChangeUsingGerrit(patch, reason=reason).AndReturn(True)

    # pylint: disable=E1120,E1123
    validation_pool.PatchSeries.Apply(set()).AndReturn(([], [], []))

    mock_manifest = mock.MagicMock()
    self.mox.ReplayAll()
    with mock.patch.object(git.ManifestCheckout, 'Cached', new=mock_manifest):
      pool.SubmitNonManifestChanges(reason=reason)
    self.mox.VerifyAll()

  def testSubmitAccumulation(self):
    """Tests ValidationPool.SubmitChanges.

    Tests that it accumulates a mix of local and remote changes that were
    submitted and rejected.
    """
    pool, patches, _failed = self._setUpSubmit()
    pool.non_manifest_changes = patches[:1]
    reason = 'fake reason'

    # patches[1] fails to apply; patches[2] applies and is submitted locally.
    # pylint: disable=E1120,E1123
    error = mock.Mock(patch=patches[1])
    validation_pool.PatchSeries.Apply(
        set(patches[1:])).AndReturn(
            ([patches[2]],
             [error],
             []))

    self.mox.StubOutWithMock(validation_pool.PatchSeries, 'GetGitRepoForChange')
    for i, patch in enumerate(patches):
      # pylint: disable=E1120,E1123
      validation_pool.PatchSeries.GetGitRepoForChange(
          mox.IgnoreArg(), strict=False
          ).AndReturn('foo_repo' if i > 0 else None)

    self.mox.StubOutWithMock(validation_pool.ValidationPool,
                             'SubmitLocalChanges')
    pool.SubmitLocalChanges(
        {'foo_repo': set((patches[2],))}, reason
        ).AndReturn((set((patches[2],)), {}))

    for patch in pool.non_manifest_changes:
      pool._SubmitChangeUsingGerrit(patch, reason=reason).AndReturn(True)

    pool._HandleCouldNotSubmit(patches[1], error)

    mock_manifest = mock.MagicMock()
    self.mox.ReplayAll()
    with mock.patch.object(git.ManifestCheckout, 'Cached', new=mock_manifest):
      submitted, errors = pool.SubmitChanges(patches, reason=reason)

    self.assertEqual(submitted, set((patches[0], patches[2])))
    self.assertEqual(errors, {patches[1]: error})
    self.mox.VerifyAll()

  def testPushRepoBranchPushesOnce(self):
    """Tests that PushRepoBranch pushes once if there is no error."""
    pool, patches, _failed = self._setUpSubmit()

    repo = '/fake/path/aoeuidhtns'
    tracking_branch = git.RemoteRef('cros', 'to_branch')

    context = contextlib.nested(
        mock.patch.object(git, 'SyncPushBranch'),
        mock.patch.object(git, 'GitPush'),
        mock.patch.object(git, 'GetTrackingBranch',
                          new=lambda _: tracking_branch))

    with context as (sync_func, push_func, _):
      errors = pool.PushRepoBranch(repo, set(patches), 'from_branch')
      self.assertEqual({}, errors)
      self.assertEqual(1, sync_func.call_count)
      self.assertEqual(1, push_func.call_count)

  def testUnhandledExceptions(self):
    """Test that CQ doesn't loop due to unhandled Exceptions."""
    pool, patches, _failed = self._setUpSubmit()

    class MyException(Exception):
      """Unique Exception used for testing."""

    def VerifyCQError(patch, error):
      cq_error = validation_pool.InternalCQError(patch, error.message)
      return str(error) == str(cq_error)

    # pylint: disable=E1120,E1123
    validation_pool.PatchSeries.Apply(
        patches, manifest=mox.IgnoreArg(),
        max_change_count=len(patches)).AndRaise(MyException)
    errors = [mox.Func(functools.partial(VerifyCQError, x)) for x in patches]
    pool._HandleApplyFailure(errors).AndReturn(None)

    self.mox.ReplayAll()
    self.assertRaises(MyException, pool.ApplyPoolIntoRepo)
    self.mox.VerifyAll()

  def testFilterDependencyErrors(self):
    """Verify that dependency errors are correctly filtered out."""
    failures = [cros_patch.ApplyPatchException(x) for x in self.GetPatches(2)]
    failures += [cros_patch.DependencyError(x, y) for x, y in
                 zip(self.GetPatches(2), failures)]
    failures[0].patch.approval_timestamp = time.time()
    failures[-1].patch.approval_timestamp = time.time()
    self.mox.ReplayAll()
    result = validation_pool.ValidationPool._FilterDependencyErrors(failures)
    self.assertEquals(set(failures[:-1]), set(result))
    self.mox.VerifyAll()

  def testFilterSpeculativeErrors(self):
    """Filter out dependency errors for speculative patches."""
    failures = [cros_patch.ApplyPatchException(x) for x in self.GetPatches(2)]
    failures += [cros_patch.DependencyError(x, y) for x, y in
                 zip(self.GetPatches(2), failures)]
    self.PatchObject(failures[-1].patch, 'HasReadyFlag', return_value=False)
    self.mox.ReplayAll()
    result = validation_pool.ValidationPool._FilterDependencyErrors(failures)
    self.assertEquals(set(failures[:-1]), set(result))
    self.mox.VerifyAll()

  def testFilterNonCrosProjects(self):
    """Runs through a filter of own manifest and fake changes.

    This test should filter out the tacos/chromite project as its not real.
    """
    base_func = itertools.cycle(['chromiumos', 'chromeos']).next
    patches = self.GetPatches(10)
    for patch in patches:
      patch.project = '%s/%i' % (base_func(), _GetNumber())
      patch.tracking_branch = str(_GetNumber())

    non_cros_patches = self.GetPatches(2)
    for patch in non_cros_patches:
      patch.project = str(_GetNumber())

    filtered_patches = patches[:4]
    allowed_patches = []
    projects = {}
    for idx, patch in enumerate(patches[4:]):
      fails = bool(idx % 2)
      # Vary the revision so we can validate that it checks the branch.
      revision = ('monkeys' if fails
                  else 'refs/heads/%s' % patch.tracking_branch)
      if fails:
        filtered_patches.append(patch)
      else:
        allowed_patches.append(patch)
      projects.setdefault(patch.project, {})['revision'] = revision

    manifest = MockManifest(self.build_root, projects=projects)
    for patch in allowed_patches:
      patch.GetCheckout = lambda *_args, **_kwargs: True
    for patch in filtered_patches:
      patch.GetCheckout = lambda *_args, **_kwargs: False

    # Mark the last two patches as not commit ready.
    for p in patches[-2:]:
      p.IsMergeable = lambda *_args, **_kwargs: False

    # Non-manifest patches that aren't commit ready should be skipped.
    filtered_patches = filtered_patches[:-1]

    self.mox.ReplayAll()
    results = validation_pool.ValidationPool._FilterNonCrosProjects(
        patches + non_cros_patches, manifest)

    def compare(list1, list2):
      mangle = lambda c: (c.id, c.project, c.tracking_branch)
      self.assertEqual(
          list1, list2,
          msg=('Comparison failed:\n list1: %r\n list2: %r'
               % (map(mangle, list1), map(mangle, list2))))

    compare(results[0], allowed_patches)
    compare(results[1], filtered_patches)

  def testAcquirePool(self):
    """Various tests for the AcquirePool method."""
    directory = '/tmp/dontmattah'
    repo = repository.RepoRepository(directory, directory, 'master', depth=1)
    self.mox.StubOutWithMock(repo, 'Sync')
    self.mox.StubOutWithMock(validation_pool.ValidationPool, 'AcquireChanges')
    self.mox.StubOutWithMock(time, 'sleep')
    self.mox.StubOutWithMock(tree_status, 'WaitForTreeStatus')

    # 1) Test, tree open -> get changes and finish.
    tree_status.WaitForTreeStatus(
        period=mox.IgnoreArg(),
        throttled_ok=mox.IgnoreArg(),
        timeout=mox.IgnoreArg()).AndReturn(constants.TREE_OPEN)
    repo.Sync()
    # pylint: disable=no-value-for-parameter
    validation_pool.ValidationPool.AcquireChanges(
        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(True)

    self.mox.ReplayAll()

    query = constants.CQ_READY_QUERY
    pool = validation_pool.ValidationPool.AcquirePool(
        constants.PUBLIC_OVERLAYS, repo, 1, 'buildname', query, dryrun=False,
        check_tree_open=True)

    self.assertTrue(pool.tree_was_open)
    self.mox.VerifyAll()
    self.mox.ResetAll()

    # 2) Test, tree open -> need to loop at least once to get changes.
    tree_status.WaitForTreeStatus(
        period=mox.IgnoreArg(),
        throttled_ok=mox.IgnoreArg(),
        timeout=mox.IgnoreArg()).AndReturn(constants.TREE_OPEN)
    repo.Sync()
    validation_pool.ValidationPool.AcquireChanges(
        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(False)
    time.sleep(validation_pool.ValidationPool.SLEEP_TIMEOUT)
    tree_status.WaitForTreeStatus(
        period=mox.IgnoreArg(),
        throttled_ok=mox.IgnoreArg(),
        timeout=mox.IgnoreArg()).AndReturn(constants.TREE_OPEN)
    repo.Sync()
    validation_pool.ValidationPool.AcquireChanges(
        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(True)
    self.mox.ReplayAll()

    query = constants.CQ_READY_QUERY
    pool = validation_pool.ValidationPool.AcquirePool(
        constants.PUBLIC_OVERLAYS, repo, 1, 'buildname', query, dryrun=False,
        check_tree_open=True)

    self.assertTrue(pool.tree_was_open)
    self.mox.VerifyAll()
    self.mox.ResetAll()

    # 3) Test, tree throttled -> get changes and finish.
    tree_status.WaitForTreeStatus(
        period=mox.IgnoreArg(),
        throttled_ok=mox.IgnoreArg(),
        timeout=mox.IgnoreArg()).AndReturn(constants.TREE_THROTTLED)
    repo.Sync()
    validation_pool.ValidationPool.AcquireChanges(
        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(True)

    self.mox.ReplayAll()
    query = constants.CQ_READY_QUERY
    pool = validation_pool.ValidationPool.AcquirePool(
        constants.PUBLIC_OVERLAYS, repo, 1, 'buildname', query, dryrun=False,
        check_tree_open=True)

    self.assertTrue(pool.tree_was_open)
    self.mox.VerifyAll()
    self.mox.ResetAll()

    # 4) Test, tree throttled -> use exponential fallback logic.
    # We force this case to be different than 3 by setting the exponential
    # fallback timeout from 10 minutes to 0 seconds.
    tree_status.WaitForTreeStatus(
        period=mox.IgnoreArg(),
        throttled_ok=mox.IgnoreArg(),
        timeout=mox.IgnoreArg()).AndReturn(constants.TREE_THROTTLED)
    repo.Sync()
    validation_pool.ValidationPool.AcquireChanges(
        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(True)

    self.mox.ReplayAll()

    validation_pool.ValidationPool.CQ_THROTTLED_TIMEOUT = 0
    query = constants.CQ_READY_QUERY
    pool = validation_pool.ValidationPool.AcquirePool(
        constants.PUBLIC_OVERLAYS, repo, 1, 'buildname', query, dryrun=False,
        check_tree_open=True)

    self.assertFalse(pool.tree_was_open)


  def testGetFailStreak(self):
    """Tests that we're correctly able to calculate a fail streak."""
    # Leave first build as inflight.
    builder_name = 'master-paladin'
    slave_pool = self.MakePool(builder_name=builder_name, fake_db=self.fake_db)
    self.fake_db.buildTable[0]['status'] = constants.BUILDER_STATUS_INFLIGHT
    self.fake_db.buildTable[0]['build_config'] = builder_name
    self.assertEqual(slave_pool._GetFailStreak(), 0)

    # Create two passing builds.
    for i in range(2):
      self.fake_db.InsertBuild(
          builder_name, None, i, builder_name, 'abcdelicious',
          status=constants.BUILDER_STATUS_PASSED)

    self.assertEqual(slave_pool._GetFailStreak(), 0)

    # Add a fail streak.
    for i in range(3, 6):
      self.fake_db.InsertBuild(
          builder_name, None, i, builder_name, 'abcdelicious',
          status=constants.BUILDER_STATUS_FAILED)

    self.assertEqual(slave_pool._GetFailStreak(), 3)

    # Add another success and failure.
    self.fake_db.InsertBuild(
        builder_name, None, 6, builder_name, 'abcdelicious',
        status=constants.BUILDER_STATUS_PASSED)
    self.fake_db.InsertBuild(
        builder_name, None, 7, builder_name, 'abcdelicious',
        status=constants.BUILDER_STATUS_FAILED)

    self.assertEqual(slave_pool._GetFailStreak(), 1)

    # Finally just add one last pass and make sure fail streak is wiped.
    self.fake_db.InsertBuild(
        builder_name, None, 8, builder_name, 'abcdelicious',
        status=constants.BUILDER_STATUS_PASSED)

    self.assertEqual(slave_pool._GetFailStreak(), 0)

  def testApplyWithTreeNotOpen(self):
    """Tests that we can correctly apply exponential fallback."""
    patches = self.GetPatches(4)

    # We mock out the shuffle so that we can deterministically test.
    self.mox.StubOutWithMock(random, 'shuffle')
    self.mox.StubOutWithMock(validation_pool.ValidationPool, '_GetFailStreak')

    # Fail streak of 1 halves the pool: at most 2 of 4 changes applied.
    slave_pool = self.GetPool(changes=patches, applied=patches[:2],
                              max_change_count=2,
                              tree_was_open=False, handlers=True)
    random.shuffle(patches) # Mock.
    # pylint: disable=no-value-for-parameter
    validation_pool.ValidationPool._GetFailStreak().AndReturn(1)

    self.mox.ReplayAll()
    self.runApply(slave_pool, True)
    self.assertEqual(len(slave_pool.changes), 2)
    self.mox.VerifyAll()
    self.mox.ResetAll()

    # Fail streak of 2 quarters the pool: at most 1 change applied.
    slave_pool = self.GetPool(changes=patches, applied=patches[:1],
                              max_change_count=1,
                              tree_was_open=False, handlers=True)
    random.shuffle(patches) # Mock.
    validation_pool.ValidationPool._GetFailStreak().AndReturn(2)

    self.mox.ReplayAll()
    self.runApply(slave_pool, True)
    self.assertEqual(len(slave_pool.changes), 1)
    self.mox.VerifyAll()
    self.mox.ResetAll()

    # A long fail streak still bottoms out at a single change.
    slave_pool = self.GetPool(changes=patches, applied=patches[:1],
                              max_change_count=1,
                              tree_was_open=False, handlers=True)
    random.shuffle(patches) # Mock.
    validation_pool.ValidationPool._GetFailStreak().AndReturn(10)

    self.mox.ReplayAll()
    self.runApply(slave_pool, True)
    self.assertEqual(len(slave_pool.changes), 1)
    self.mox.VerifyAll()
+
+
+class TestPickling(cros_test_lib.TempDirTestCase):
+  """Tests to validate pickling of ValidationPool, covering CQ's needs"""
+
+  def testSelfCompatibility(self):
+    """Verify compatibility of current git HEAD against itself."""
+    self._CheckTestData(self._GetTestData())
+
+  @cros_test_lib.NetworkTest()
+  def testToTCompatibility(self):
+    """Validate that ToT can use our pickles, and that we can use ToT's data."""
+    repo = os.path.join(self.tempdir, 'chromite')
+    reference = os.path.abspath(__file__)
+    reference = os.path.normpath(os.path.join(reference, '../../'))
+
+    repository.CloneGitRepo(
+        repo,
+        '%s/chromiumos/chromite' % site_config.params.EXTERNAL_GOB_URL,
+        reference=reference)
+
+    code = """
+import sys
+from chromite.cbuildbot import validation_pool_unittest
+if not hasattr(validation_pool_unittest, 'TestPickling'):
+  sys.exit(0)
+sys.stdout.write(validation_pool_unittest.TestPickling.%s)
+"""
+
+    # Verify ToT can take our pickle.
+    cros_build_lib.RunCommand(
+        ['python2', '-c', code % '_CheckTestData(sys.stdin.read())'],
+        cwd=self.tempdir, print_cmd=False, capture_output=True,
+        input=self._GetTestData())
+
+    # Verify we can handle ToT's pickle.
+    ret = cros_build_lib.RunCommand(
+        ['python2', '-c', code % '_GetTestData()'],
+        cwd=self.tempdir, print_cmd=False, capture_output=True)
+
+    self._CheckTestData(ret.output)
+
+  @staticmethod
+  def _GetCrosInternalPatch(patch_info):
+    return cros_patch.GerritPatch(
+        patch_info,
+        site_config.params.INTERNAL_REMOTE,
+        site_config.params.INTERNAL_GERRIT_URL)
+
+  @staticmethod
+  def _GetCrosPatch(patch_info):
+    return cros_patch.GerritPatch(
+        patch_info,
+        site_config.params.EXTERNAL_REMOTE,
+        site_config.params.EXTERNAL_GERRIT_URL)
+
+  @classmethod
+  def _GetTestData(cls):
+    ids = [cros_patch.MakeChangeId() for _ in xrange(3)]
+    changes = [cls._GetCrosInternalPatch(GetTestJson(ids[0]))]
+    non_os = [cls._GetCrosPatch(GetTestJson(ids[1]))]
+    conflicting = [cls._GetCrosInternalPatch(GetTestJson(ids[2]))]
+    conflicting = [cros_patch.PatchException(x)for x in conflicting]
+    pool = validation_pool.ValidationPool(
+        constants.PUBLIC_OVERLAYS,
+        '/fake/pathway', 1,
+        'testing', True, True,
+        changes=changes, non_os_changes=non_os,
+        conflicting_changes=conflicting)
+    return pickle.dumps([pool, changes, non_os, conflicting])
+
+  @staticmethod
+  def _CheckTestData(data):
+    results = pickle.loads(data)
+    pool, changes, non_os, conflicting = results
+    def _f(source, value, getter=None):
+      if getter is None:
+        getter = lambda x: x
+      assert len(source) == len(value)
+      for s_item, v_item in zip(source, value):
+        assert getter(s_item).id == getter(v_item).id
+        assert getter(s_item).remote == getter(v_item).remote
+    _f(pool.changes, changes)
+    _f(pool.non_manifest_changes, non_os)
+    _f(pool.changes_that_failed_to_apply_earlier, conflicting,
+       getter=lambda s: getattr(s, 'patch', s))
+    return ''
+
+
+class TestPrintLinks(MoxBase):
+  """Tests that change links can be printed."""
+  def testPrintLinks(self):
+    """Print links for a small batch of changes without raising."""
+    changes = self.GetPatches(3)
+    with parallel_unittest.ParallelMock():
+      validation_pool.ValidationPool.PrintLinksToChanges(changes)
+
+
+class TestCreateValidationFailureMessage(MoxBase):
+  """Tests validation_pool.ValidationPool._CreateValidationFailureMessage"""
+
+  def _AssertMessage(self, change, suspects, messages, sanity=True,
+                     infra_fail=False, lab_fail=False, no_stat=None,
+                     xretry=False):
+    """Call the _CreateValidationFailureMessage method.
+
+    Args:
+      change: The change we are commenting on.
+      suspects: List of suspected changes.
+      messages: List of messages should appear in the failure message.
+      sanity: Bool indicating sanity of build, default: True.
+      infra_fail: True if build failed due to infrastructure issues.
+      lab_fail: True if build failed due to lab infrastructure issues.
+      no_stat: List of builders that did not start.
+      xretry: Whether we expect the change to be retried.
+
+    Returns:
+      The generated failure message.
+    """
+    msg = validation_pool.ValidationPool._CreateValidationFailureMessage(
+        False, change, set(suspects), [], sanity=sanity,
+        infra_fail=infra_fail, lab_fail=lab_fail, no_stat=no_stat,
+        retry=xretry)
+    # Every expected fragment must be present, and the retry blurb must
+    # appear exactly when a retry is expected.
+    for x in messages:
+      self.assertTrue(x in msg)
+    self.assertEqual(xretry, 'retry your change automatically' in msg)
+    return msg
+
+  def testSuspectChange(self):
+    """Test case where 1 is the only change and is suspect."""
+    patch = self.GetPatches(1)
+    self._AssertMessage(patch, [patch], ['probably caused by your change'])
+
+  def testInnocentChange(self):
+    """Test case where 1 is innocent."""
+    patch1, patch2 = self.GetPatches(2)
+    self._AssertMessage(patch1, [patch2],
+                        ['This failure was probably caused by',
+                         'retry your change automatically'],
+                        xretry=True)
+
+  def testSuspectChanges(self):
+    """Test case where 1 is suspected, but so is 2."""
+    patches = self.GetPatches(2)
+    self._AssertMessage(patches[0], patches,
+                        ['may have caused this failure'])
+
+  def testInnocentChangeWithMultipleSuspects(self):
+    """Test case where 2 and 3 are suspected."""
+    patches = self.GetPatches(3)
+    self._AssertMessage(patches[0], patches[1:],
+                        ['One of the following changes is probably'],
+                        xretry=True)
+
+  def testNoMessages(self):
+    """Test case where there are no messages."""
+    patch1 = self.GetPatches(1)
+    self._AssertMessage(patch1, [patch1], [])
+
+  def testInsaneBuild(self):
+    """Test case where the build was not sane."""
+    patches = self.GetPatches(3)
+    # NOTE(review): 'consider not sane' appears to mirror the production
+    # message text verbatim -- confirm against
+    # _CreateValidationFailureMessage before "fixing" the wording here.
+    self._AssertMessage(
+        patches[0], patches, ['The build was consider not sane',
+                              'retry your change automatically'],
+        sanity=False, xretry=True)
+
+  def testLabFailMessage(self):
+    """Test case where the build failed due to lab failures."""
+    patches = self.GetPatches(3)
+    self._AssertMessage(
+        patches[0], patches, ['Lab infrastructure',
+                              'retry your change automatically'],
+        lab_fail=True, xretry=True)
+
+  def testInfraFailMessage(self):
+    """Test case where the build failed due to infrastructure failures."""
+    patches = self.GetPatches(2)
+    # A suspect change is still blamed despite the infra failure...
+    self._AssertMessage(
+        patches[0], [patches[0]],
+        ['may have been caused by infrastructure',
+         'This failure was probably caused by your change'],
+        infra_fail=True)
+    # ...while an innocent change is retried.
+    self._AssertMessage(
+        patches[1], [patches[0]], ['may have been caused by infrastructure',
+                                   'retry your change automatically'],
+        infra_fail=True, xretry=True)
+
+
+class TestCreateDisjointTransactions(MoxBase):
+  """Test the CreateDisjointTransactions function."""
+
+  def setUp(self):
+    """Install the mocked patch series."""
+    self.patch_mock = self.StartPatcher(MockPatchSeries())
+
+  def GetPatches(self, how_many, **kwargs):
+    """Get |how_many| patches, always returned as a list (even for one)."""
+    return super(TestCreateDisjointTransactions, self).GetPatches(
+        how_many, always_use_list=True, **kwargs)
+
+  def verifyTransactions(self, txns, max_txn_length=None, circular=False):
+    """Verify the specified list of transactions are processed correctly.
+
+    Args:
+      txns: List of transactions to process.
+      max_txn_length: Maximum length of any given transaction. This is passed
+        to the CreateDisjointTransactions function.
+      circular: Whether the transactions contain circular dependencies.
+    """
+    remove = self.PatchObject(gerrit.GerritHelper, 'RemoveReady')
+    patches = list(itertools.chain.from_iterable(txns))
+    expected_plans = txns
+    if max_txn_length is not None:
+      # When max_txn_length is specified, transactions should be truncated to
+      # the specified length, ignoring any remaining patches.
+      expected_plans = [txn[:max_txn_length] for txn in txns]
+
+    pool = MakePool(changes=patches)
+    plans = pool.CreateDisjointTransactions(None, pool.changes,
+                                            max_txn_length=max_txn_length)
+
+    # If the dependencies are circular, the order of the patches is not
+    # guaranteed, so compare them in sorted order.
+    if circular:
+      plans = [sorted(plan) for plan in plans]
+      expected_plans = [sorted(plan) for plan in expected_plans]
+
+    # Verify the plans match, and that no changes were rejected.
+    self.assertEqual(set(map(str, plans)), set(map(str, expected_plans)))
+    self.assertEqual(0, remove.call_count)
+
+  def testPlans(self, max_txn_length=None):
+    """Verify that independent sets are distinguished."""
+    # Try pools of 0..4 independent transactions of 0..4 patches each.
+    for num in range(0, 5):
+      txns = [self.GetPatches(num) for _ in range(0, num)]
+      self.verifyTransactions(txns, max_txn_length=max_txn_length)
+
+  def runUnresolvedPlan(self, changes, max_txn_length=None):
+    """Helper for testing unresolved plans.
+
+    Returns:
+      The number of RemoveReady calls, i.e. the number of rejected changes.
+    """
+    notify = self.PatchObject(validation_pool.ValidationPool,
+                              'SendNotification')
+    remove = self.PatchObject(gerrit.GerritHelper, 'RemoveReady')
+    pool = MakePool(changes=changes)
+    plans = pool.CreateDisjointTransactions(None, changes,
+                                            max_txn_length=max_txn_length)
+    # Nothing should be planned, and every rejection should be notified.
+    self.assertEqual(plans, [])
+    self.assertEqual(remove.call_count, notify.call_count)
+    return remove.call_count
+
+  def testUnresolvedPlan(self):
+    """Test plan with old approval_timestamp."""
+    # Dropping the first patch leaves the rest with an unresolvable
+    # dependency, so all four remaining changes are rejected.
+    changes = self.GetPatches(5)[1:]
+    with cros_test_lib.LoggingCapturer():
+      call_count = self.runUnresolvedPlan(changes)
+    self.assertEqual(4, call_count)
+
+  def testRecentUnresolvedPlan(self):
+    """Test plan with recent approval_timestamp."""
+    # Recently-approved changes are not rejected even when unresolvable.
+    changes = self.GetPatches(5)[1:]
+    for change in changes:
+      change.approval_timestamp = time.time()
+    with cros_test_lib.LoggingCapturer():
+      call_count = self.runUnresolvedPlan(changes)
+    self.assertEqual(0, call_count)
+
+  def testTruncatedPlan(self):
+    """Test that plans can be truncated correctly."""
+    # Long lists of patches should be truncated, and we should not see any
+    # errors when this happens.
+    self.testPlans(max_txn_length=3)
+
+  def testCircularPlans(self):
+    """Verify that circular plans are handled correctly."""
+    patches = self.GetPatches(5)
+    self.patch_mock.SetGerritDependencies(patches[0], [patches[-1]])
+
+    # Verify that all patches can be submitted normally.
+    self.verifyTransactions([patches], circular=True)
+
+    # It is not possible to truncate a circular plan. Verify that an error
+    # is reported in this case.
+    with cros_test_lib.LoggingCapturer():
+      call_count = self.runUnresolvedPlan(patches, max_txn_length=3)
+    self.assertEqual(5, call_count)
+
+
+class MockValidationPool(partial_mock.PartialMock):
+  """Mock out a ValidationPool instance."""
+
+  TARGET = 'chromite.cbuildbot.validation_pool.ValidationPool'
+  ATTRS = ('RemoveReady', '_SubmitChangeUsingGerrit', 'SendNotification')
+
+  def __init__(self, manager):
+    """Initialize shared state via |manager| (a multiprocessing manager)."""
+    partial_mock.PartialMock.__init__(self)
+    # Maps change -> submit result: True/False, or an Exception to raise.
+    self.submit_results = {}
+    # Max number of successful submits before failing; -1 means unlimited.
+    self.max_submits = manager.Value('i', -1)
+    self.submitted = manager.list()
+    self.notification_calls = manager.list()
+
+  def PreStart(self):
+    # Make patch "reloads" a no-op passthrough so no Gerrit query happens.
+    self.PatchObject(gerrit, 'GetGerritPatchInfoWithPatchQueries',
+                     side_effect=lambda x: x)
+
+  def GetSubmittedChanges(self):
+    """Return the changes submitted so far, in submission order."""
+    return list(self.submitted)
+
+  # pylint: disable=unused-argument
+  def _SubmitChangeUsingGerrit(self, _inst, change, reason=None):
+    """Fake submit: record |change| and honor submit_results/max_submits."""
+    result = self.submit_results.get(change, True)
+    self.submitted.append(change)
+    if isinstance(result, Exception):
+      raise result
+    # Once max_submits successful submits have happened, report failure.
+    if result and self.max_submits.value != -1:
+      if self.max_submits.value <= 0:
+        return False
+      self.max_submits.value -= 1
+    return result
+
+  def SendNotification(self, *args, **kwargs):
+    """Record notification calls for later inspection by tests."""
+    self.notification_calls.append((args, kwargs))
+
+  # NOTE(review): None presumably tells PartialMock to replace RemoveReady
+  # with a default stub -- confirm against partial_mock's ATTRS handling.
+  RemoveReady = None
+
+
+class BaseSubmitPoolTestCase(MoxBase):
+  """Test full ability to submit and reject CL pools."""
+
+  # Whether all slave builds passed. This would affect the submission
+  # logic.
+  ALL_BUILDS_PASSED = True
+
+  def setUp(self):
+    self.pool_mock = self.StartPatcher(MockValidationPool(self.manager))
+    self.patch_mock = self.StartPatcher(MockPatchSeries())
+    self.PatchObject(gerrit.GerritHelper, 'QuerySingleRecord')
+    # This is patched out for performance, not correctness.
+    self.PatchObject(validation_pool.PatchSeries, 'ReapplyChanges',
+                     lambda self, by_repo: (by_repo, {}))
+    self.patches = self.GetPatches(2)
+
+  def SetUpPatchPool(self, failed_to_apply=False):
+    pool = MakePool(changes=self.patches, dryrun=False)
+    if failed_to_apply:
+      # Create some phony errors and add them to the pool.
+      errors = []
+      for patch in self.GetPatches(2):
+        errors.append(validation_pool.InternalCQError(patch, str('foo')))
+      pool.changes_that_failed_to_apply_earlier = errors[:]
+    return pool
+
+  def SubmitPool(self, submitted=(), rejected=(), reason=None, **kwargs):
+    """Helper function for testing that we can submit a pool successfully.
+
+    Args:
+      submitted: List of changes that we expect to be submitted.
+      rejected: List of changes that we expect to be rejected.
+      reason: Expected reason for submitting changes.
+      **kwargs: Keyword arguments for SetUpPatchPool.
+    """
+    # Set up our pool and submit the patches.
+    pool = self.SetUpPatchPool(**kwargs)
+    mock_manifest = mock.MagicMock()
+    with mock.patch.object(git.ManifestCheckout, 'Cached', new=mock_manifest):
+      if not self.ALL_BUILDS_PASSED:
+        actually_rejected = sorted(pool.SubmitPartialPool(
+            pool.changes, mock.ANY, dict(), [], [], [], reason=reason))
+      else:
+        _, actually_rejected = pool.SubmitChanges(self.patches, reason=reason)
+
+    # Check that the right patches were submitted and rejected.
+    self.assertItemsEqual(map(str, rejected), map(str, actually_rejected))
+    actually_submitted = self.pool_mock.GetSubmittedChanges()
+    self.assertEqual(map(str, submitted), map(str, actually_submitted))
+
+  def GetNotifyArg(self, change, key):
+    """Look up a call to notify about |change| and grab |key| from it.
+
+    Args:
+      change: The change to look up.
+      key: The key to look up. If this is an integer, look up a positional
+        argument by index. Otherwise, look up a keyword argument.
+    """
+    names = []
+    for call in self.pool_mock.notification_calls:
+      call_args, call_kwargs = call
+      if change == call_args[1]:
+        if isinstance(key, int):
+          return call_args[key]
+        return call_kwargs[key]
+      names.append(call_args[1])
+
+    # Verify that |change| is present at all. This should always fail.
+    self.assertIn(change, names)
+
+  def assertEqualNotifyArg(self, value, change, idx):
+    """Verify that |value| equals self.GetNotifyArg(|change|, |idx|)."""
+    self.assertEqual(str(value), str(self.GetNotifyArg(change, idx)))
+
+
+class SubmitPoolTest(BaseSubmitPoolTestCase):
+  """Test suite related to the Submit Pool."""
+
+  def testSubmitPool(self):
+    """Test that we can submit a pool successfully."""
+    self.SubmitPool(submitted=self.patches)
+
+  def testRejectCLs(self):
+    """Test that we can reject a CL successfully."""
+    self.SubmitPool(submitted=self.patches, failed_to_apply=True)
+
+  def testSubmitCycle(self):
+    """Submit a cyclic set of dependencies"""
+    self.patch_mock.SetCQDependencies(self.patches[0], [self.patches[1]])
+    self.SubmitPool(submitted=self.patches)
+
+  def testSubmitReverseCycle(self):
+    """Submit a cyclic set of dependencies, specified in reverse order."""
+    self.patch_mock.SetCQDependencies(self.patches[1], [self.patches[0]])
+    self.patch_mock.SetGerritDependencies(self.patches[1], [])
+    self.patch_mock.SetGerritDependencies(self.patches[0], [self.patches[1]])
+    self.SubmitPool(submitted=self.patches[::-1])
+
+  def testSubmitEmptyDeps(self):
+    """Submit when one patch depends directly on many independent patches."""
+    # patches[4] depends on patches[0:3], but there are no other dependencies.
+    self.patches = self.GetPatches(5)
+    for p in self.patches[:-1]:
+      self.patch_mock.SetGerritDependencies(p, [])
+    self.patch_mock.SetGerritDependencies(self.patches[4], self.patches[::-1])
+    # Allow only one successful submit; subsequent submits report failure.
+    self.pool_mock.max_submits.value = 1
+    submitted = [self.patches[2], self.patches[1], self.patches[3],
+                 self.patches[0]]
+    rejected = self.patches[:2] + self.patches[3:]
+    self.SubmitPool(submitted=submitted, rejected=rejected)
+    # All but the dependent patch should report a plain submit failure...
+    for p in rejected[:-1]:
+      p_failed_submit = validation_pool.PatchFailedToSubmit(
+          p, validation_pool.ValidationPool.INCONSISTENT_SUBMIT_MSG)
+      self.assertEqualNotifyArg(p_failed_submit, p, 'error')
+    # ...while patches[4] is rejected because its dependency failed.
+    failed_submit = validation_pool.PatchFailedToSubmit(
+        self.patches[1], validation_pool.ValidationPool.INCONSISTENT_SUBMIT_MSG)
+    dep_failed = cros_patch.DependencyError(self.patches[4], failed_submit)
+    self.assertEqualNotifyArg(dep_failed, self.patches[4], 'error')
+
+  def testRedundantCQDepend(self):
+    """Submit a cycle with redundant CQ-DEPEND specifications."""
+    self.patches = self.GetPatches(4)
+    self.patch_mock.SetCQDependencies(self.patches[0], [self.patches[-1]])
+    self.patch_mock.SetCQDependencies(self.patches[1], [self.patches[-1]])
+    self.SubmitPool(submitted=self.patches)
+
+  def testSubmitPartialCycle(self):
+    """Submit a failed cyclic set of dependencies"""
+    # Only the first submit succeeds, so the second CL of the cycle fails.
+    self.pool_mock.max_submits.value = 1
+    self.patch_mock.SetCQDependencies(self.patches[0], self.patches)
+    self.SubmitPool(submitted=self.patches, rejected=[self.patches[1]])
+    # Both submits were attempted: the first succeeded, the second did not.
+    (submitted, rejected) = self.pool_mock.GetSubmittedChanges()
+    failed_submit = validation_pool.PatchFailedToSubmit(
+        rejected, validation_pool.ValidationPool.INCONSISTENT_SUBMIT_MSG)
+    bad_submit = validation_pool.PatchSubmittedWithoutDeps(
+        submitted, failed_submit)
+    self.assertEqualNotifyArg(failed_submit, rejected, 'error')
+    self.assertEqualNotifyArg(bad_submit, submitted, 'failure')
+
+  def testSubmitFailedCycle(self):
+    """Submit a failed cyclic set of dependencies"""
+    # No submits may succeed at all.
+    self.pool_mock.max_submits.value = 0
+    self.patch_mock.SetCQDependencies(self.patches[0], [self.patches[1]])
+    self.SubmitPool(submitted=[self.patches[0]], rejected=self.patches)
+    (attempted,) = self.pool_mock.GetSubmittedChanges()
+    (rejected,) = [x for x in self.patches if x.id != attempted.id]
+    failed_submit = validation_pool.PatchFailedToSubmit(
+        attempted, validation_pool.ValidationPool.INCONSISTENT_SUBMIT_MSG)
+    dep_failed = cros_patch.DependencyError(rejected, failed_submit)
+    self.assertEqualNotifyArg(failed_submit, attempted, 'error')
+    self.assertEqualNotifyArg(dep_failed, rejected, 'error')
+
+  def testConflict(self):
+    """Submit a change that conflicts with TOT."""
+    # An HTTP 409 (Conflict) from Gerrit flags the change as conflicting.
+    error = gob_util.GOBError(httplib.CONFLICT, 'Conflict')
+    self.pool_mock.submit_results[self.patches[0]] = error
+    self.SubmitPool(submitted=[self.patches[0]], rejected=self.patches[::-1])
+    notify_error = validation_pool.PatchConflict(self.patches[0])
+    self.assertEqualNotifyArg(notify_error, self.patches[0], 'error')
+
+  def testConflictAlreadyMerged(self):
+    """Submit a change that conflicts with TOT because it was already merged."""
+    # A 409 whose body says 'change is merged' is treated as a success.
+    error = gob_util.GOBError(httplib.CONFLICT, 'change is merged\n')
+    self.pool_mock.submit_results[self.patches[0]] = error
+    self.SubmitPool(submitted=self.patches, rejected=())
+
+  def testServerError(self):
+    """Test case where GOB returns a server error."""
+    error = gerrit.GerritException('Internal server error')
+    self.pool_mock.submit_results[self.patches[0]] = error
+    self.SubmitPool(submitted=[self.patches[0]], rejected=self.patches[::-1])
+    notify_error = validation_pool.PatchFailedToSubmit(self.patches[0], error)
+    self.assertEqualNotifyArg(notify_error, self.patches[0], 'error')
+
+  def testNotCommitReady(self):
+    """Test that a CL is rejected if its approvals were pulled."""
+    def _ReloadPatches(patches):
+      # Simulate the Verified vote disappearing when the 2nd CL is reloaded.
+      reloaded = copy.deepcopy(patches)
+      approvals = {('VRIF', '1'): False}
+      backup = reloaded[1].HasApproval
+      self.PatchObject(
+          reloaded[1], 'HasApproval',
+          side_effect=lambda *args: approvals.get(args, backup(*args)))
+      return reloaded
+    self.PatchObject(gerrit, 'GetGerritPatchInfoWithPatchQueries',
+                     _ReloadPatches)
+    self.SubmitPool(submitted=self.patches[:1], rejected=self.patches[1:])
+    message = 'CL:2 is not marked Verified=+1.'
+    self.assertEqualNotifyArg(message, self.patches[1], 'error')
+
+  def testAlreadyMerged(self):
+    """Test that a CL that was chumped during the run was not rejected."""
+    self.PatchObject(self.patches[0], 'IsAlreadyMerged', return_value=True)
+    self.SubmitPool(submitted=self.patches[1:], rejected=[])
+
+  def testModified(self):
+    """Test that a CL that was modified during the run is rejected."""
+    def _ReloadPatches(patches):
+      # Simulate a new patch set appearing when the 2nd CL is reloaded.
+      reloaded = copy.deepcopy(patches)
+      reloaded[1].patch_number += 1
+      return reloaded
+    self.PatchObject(gerrit, 'GetGerritPatchInfoWithPatchQueries',
+                     _ReloadPatches)
+    self.SubmitPool(submitted=self.patches[:1], rejected=self.patches[1:])
+    error = validation_pool.PatchModified(self.patches[1],
+                                          self.patches[1].patch_number + 1)
+    self.assertEqualNotifyArg(error, self.patches[1], 'error')
+
+
+class SubmitPartialPoolTest(BaseSubmitPoolTestCase):
+  """Test the SubmitPartialPool function."""
+
+  # Whether all slave builds passed. This would affect the submission
+  # logic.
+  ALL_BUILDS_PASSED = False
+
+  def setUp(self):
+    # Set up each patch to be in its own project, so that we can easily
+    # request to ignore failures for the specified patch.
+    for patch in self.patches:
+      patch.project = str(patch)
+
+    # By default, no change is considered fully verified.
+    self.verified_mock = self.PatchObject(
+        triage_lib.CalculateSuspects, 'GetFullyVerifiedChanges',
+        return_value=[])
+
+  def _MarkPatchesVerified(self, patches):
+    """Set up to mark |patches| as verified."""
+    self.verified_mock.return_value = patches
+
+  def testSubmitNone(self):
+    """Submit no changes."""
+    self.SubmitPool(submitted=(), rejected=self.patches)
+
+  def testSubmitAll(self):
+    """Submit all changes."""
+    self._MarkPatchesVerified(self.patches[:2])
+    self.SubmitPool(submitted=self.patches, rejected=[])
+
+  def testSubmitFirst(self):
+    """Submit the first change in a series."""
+    self._MarkPatchesVerified([self.patches[0]])
+    self.SubmitPool(submitted=[self.patches[0]], rejected=[self.patches[1]])
+    # The unsubmitted dependent change must not trigger a notification.
+    self.assertEqual(len(self.pool_mock.notification_calls), 0)
+
+  def testSubmitSecond(self):
+    """Attempt to submit the second change in a series."""
+    self._MarkPatchesVerified([self.patches[1]])
+    self.SubmitPool(submitted=[], rejected=[self.patches[0]])
+    # The second change is rejected because its dependency was rejected.
+    error = validation_pool.PatchRejected(self.patches[0])
+    dep_error = cros_patch.DependencyError(self.patches[1], error)
+    self.assertEqualNotifyArg(dep_error, self.patches[1], 'error')
+    self.assertEqual(len(self.pool_mock.notification_calls), 1)
+
+
+class LoadManifestTest(cros_test_lib.TempDirTestCase):
+  """Tests loading the manifest."""
+
+  # A minimal CQ manifest with a single pending commit entry.
+  manifest_content = (
+      '<?xml version="1.0" ?><manifest>'
+      '<pending_commit branch="master" '
+      'change_id="Ieeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee1" '
+      'commit="1ddddddddddddddddddddddddddddddddddddddd" '
+      'fail_count="2" gerrit_number="17000" owner_email="foo@chromium.org" '
+      'pass_count="0" patch_number="2" project="chromiumos/taco/bar" '
+      'project_url="https://base_url/chromiumos/taco/bar" '
+      'ref="refs/changes/51/17000/2" remote="cros" total_fail_count="3"/>'
+      '</manifest>')
+
+  def setUp(self):
+    """Sets up a pool."""
+    self.pool = MakePool()
+
+  def testAddPendingCommitsIntoPool(self):
+    """Test reading the pending commits and add them into the pool."""
+    with tempfile.NamedTemporaryFile() as f:
+      f.write(self.manifest_content)
+      f.flush()
+      self.pool.AddPendingCommitsIntoPool(f.name)
+
+    # Every attribute of the sole pending commit should be parsed through,
+    # with the *count fields converted to ints.
+    self.assertEqual(self.pool.changes[0].owner_email, 'foo@chromium.org')
+    self.assertEqual(self.pool.changes[0].tracking_branch, 'master')
+    self.assertEqual(self.pool.changes[0].remote, 'cros')
+    self.assertEqual(self.pool.changes[0].gerrit_number, '17000')
+    self.assertEqual(self.pool.changes[0].project, 'chromiumos/taco/bar')
+    self.assertEqual(self.pool.changes[0].project_url,
+                     'https://base_url/chromiumos/taco/bar')
+    self.assertEqual(self.pool.changes[0].change_id,
+                     'Ieeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee1')
+    self.assertEqual(self.pool.changes[0].commit,
+                     '1ddddddddddddddddddddddddddddddddddddddd')
+    self.assertEqual(self.pool.changes[0].fail_count, 2)
+    self.assertEqual(self.pool.changes[0].pass_count, 0)
+    self.assertEqual(self.pool.changes[0].total_fail_count, 3)
diff --git a/cidb/developer.readme b/cidb/developer.readme
new file mode 100644
index 0000000..6cc95b3
--- /dev/null
+++ b/cidb/developer.readme
@@ -0,0 +1,36 @@
+This README covers documentation on how to develop with and test against
+a local CIDB instance. Whenever you add or modify a cidb feature you should run
+cidb_integration_test.py. Specifically, there are two large sets of data that
+can be used for adding tests. Data for these are stored in individual json files
+that are stored in .test_data/. Each series has a readme that roughly describes
+what is in the data.
+
+However, when adding a new test, this may not be sufficient. The test launches a
+local mysqld instance that runs out of a temporary directory. To prevent this
+directory from disappearing at the end of the test, run the test with --no-wipe.
+$ lib/cidb_integration_test --debug --no-wipe
+The test logs the path to the temporary working directory at the end.
+
+You can launch the mysqld server again to play with the database in its final
+state. If `tmpdir` is the temporary directory left behind by the test, then
+inside the chroot, run:
+
+$ /usr/sbin/mysqld --no-defaults --datadir ${tmpdir}/mysqld_dir --socket \
+    ${tmpdir}/mysqld_dir/mysqld.socket --port 8440 --pid-file \
+    ${tmpdir}/mysqld_dir/mysqld.pid --tmpdir ${tmpdir}/mysqld_dir/tmp
+
+You can connect to this instance using mysql client.
+
+$ mysql -u root -S ${tmpdir}/mysqld_dir/mysqld.socket
+
+At this point you can run normal SQL. To double check, run `show tables;`
+You can then use the data here to create your own integration test to test
+something you added to CIDB.
+
+When you're done, remember to shutdown the mysqld instance, and delete the
+temporary directory.
+
+$ mysqladmin -u root -S ${tmpdir}/mysqld_dir/mysqld.socket shutdown
+$ rm -rf ${tmpdir}
+
+Please see ./schema.dump.readme for more information on how to read the schema.
diff --git a/cidb/migrations/00001_create_schema_table.sql b/cidb/migrations/00001_create_schema_table.sql
new file mode 100644
index 0000000..ed43162
--- /dev/null
+++ b/cidb/migrations/00001_create_schema_table.sql
@@ -0,0 +1,9 @@
+-- Tracks which schema migration scripts have been applied to this
+-- database; each migration script appends its own row (see INSERT below).
+CREATE TABLE schemaVersionTable (
+  schemaVersion INT NOT NULL,
+  scriptName VARCHAR(80),
+  timestamp TIMESTAMP,
+  PRIMARY KEY (schemaVersion)
+);
+
+-- Record this migration as applied.
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (1, '00001_create_schema_table.sql');
diff --git a/cidb/migrations/00002_create_build_table.sql b/cidb/migrations/00002_create_build_table.sql
new file mode 100644
index 0000000..9079bc8
--- /dev/null
+++ b/cidb/migrations/00002_create_build_table.sql
@@ -0,0 +1,37 @@
+-- One row per build; a slave build references its master build via
+-- master_build_id (a self-referential foreign key).
+CREATE TABLE buildTable (
+  id INT NOT NULL AUTO_INCREMENT,
+  master_build_id INT,
+  buildbot_generation INT NOT NULL,
+  builder_name VARCHAR(80) NOT NULL,
+  waterfall ENUM('chromeos', 'chromiumos', 'chromiumos.tryserver') NOT NULL,
+  build_number INT NOT NULL,
+  build_config VARCHAR(80) NOT NULL,
+  bot_hostname VARCHAR(80) NOT NULL,
+  -- Specifying a DEFAULT value without an ON UPDATE clause allows
+  -- UPDATE queries to other columns that do not automatically update
+  -- start_time to CURRENT_TIMESTAMP
+  start_time TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+  finish_time TIMESTAMP,
+  -- The following ENUM values should match
+  -- manifest_version.BuilderStatus.All_STATUSES
+  status ENUM('fail', 'pass', 'inflight', 'missing', 'aborted')
+    DEFAULT 'inflight' NOT NULL,
+  status_pickle BLOB,
+  build_type VARCHAR(80),
+  chrome_version VARCHAR(80),
+  milestone_version VARCHAR(80),
+  platform_version VARCHAR(80),
+  full_version VARCHAR(80),
+  sdk_version VARCHAR(80),
+  toolchain_url VARCHAR(240),
+  metadata_json BLOB,
+  -- Set to true once the build's row will no longer be updated.
+  final BOOL NOT NULL DEFAULT false,
+  PRIMARY KEY (id),
+  FOREIGN KEY (master_build_id)
+    REFERENCES buildTable(id),
+  UNIQUE INDEX (buildbot_generation, builder_name, waterfall, build_number),
+  INDEX (master_build_id)
+);
+
+-- Record this migration as applied.
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (2, '00002_create_build_table.sql');
diff --git a/cidb/migrations/00003_create_claction_table.sql b/cidb/migrations/00003_create_claction_table.sql
new file mode 100644
index 0000000..d709041
--- /dev/null
+++ b/cidb/migrations/00003_create_claction_table.sql
@@ -0,0 +1,19 @@
+-- One row per action taken on a CL patch (identified by change_number,
+-- patch_number, change_source) by a given build.
+CREATE TABLE clActionTable (
+  id INT NOT NULL AUTO_INCREMENT,
+  build_id INT NOT NULL,
+  change_number INT NOT NULL,
+  patch_number INT NOT NULL,
+  change_source ENUM('internal', 'external') NOT NULL,
+  -- The following ENUM values should match constants.py:CL_ACTIONS
+  action ENUM('picked_up', 'submitted', 'kicked_out', 'submit_failed') NOT NULL,
+  reason VARCHAR(80),
+  timestamp TIMESTAMP NOT NULL,
+  PRIMARY KEY (id),
+  FOREIGN KEY(build_id)
+    REFERENCES buildTable(id),
+  INDEX (change_number, change_source),
+  INDEX (change_number, patch_number, change_source)
+);
+
+-- Record this migration as applied.
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (3, '00003_create_claction_table.sql');
diff --git a/cidb/migrations/00004_create_buildstage_table.sql b/cidb/migrations/00004_create_buildstage_table.sql
new file mode 100644
index 0000000..15341df
--- /dev/null
+++ b/cidb/migrations/00004_create_buildstage_table.sql
@@ -0,0 +1,17 @@
+-- One row per (build, stage); status records the stage's pass/fail result.
+CREATE TABLE buildStageTable (
+  id INT NOT NULL AUTO_INCREMENT,
+  build_id INT NOT NULL,
+  name VARCHAR(80) NOT NULL,
+  board VARCHAR(80),
+  status ENUM('fail', 'pass') NOT NULL,
+  log_url VARCHAR(240),
+  duration_seconds INT NOT NULL,
+  summary BLOB,
+  PRIMARY KEY (id),
+  FOREIGN KEY (build_id)
+    REFERENCES buildTable(id),
+  INDEX (build_id)
+);
+
+-- Record this migration as applied.
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (4, '00004_create_buildstage_table.sql');
diff --git a/cidb/migrations/00005_create_users.sql b/cidb/migrations/00005_create_users.sql
new file mode 100644
index 0000000..279118c
--- /dev/null
+++ b/cidb/migrations/00005_create_users.sql
@@ -0,0 +1,29 @@
+USE mysql;
+
+-- Delete all non-root users and all their privileges
+DELETE from user where User!='root' and User!='';
+DELETE from db where User!='root' and User!='';
+DELETE from tables_priv where User!='root' and User!='';
+DELETE from columns_priv where User!='root' and User!='';
+DELETE from procs_priv where User!='root' and User!='';
+DELETE from proxies_priv where User!='root' and User!='';
+FLUSH PRIVILEGES;
+
+USE cidb;
+
+-- Create users
+CREATE USER readonly
+IDENTIFIED BY  PASSWORD '*12D90798C9984D0EFDDBF991FB2A92D7EEFC2E53';
+
+CREATE USER bot
+IDENTIFIED BY PASSWORD '*D17E032E3C8F0215AE62E2733FB66463F0746DAA';
+
+-- Give users correct privileges
+GRANT SELECT on cidb.* to readonly;
+
+GRANT SELECT, UPDATE, INSERT on cidb.* to bot;
+
+FLUSH PRIVILEGES;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (5, '00005_create_users.sql');
diff --git a/cidb/migrations/00006_create_boardperbuild_table.sql b/cidb/migrations/00006_create_boardperbuild_table.sql
new file mode 100644
index 0000000..f0e1a67
--- /dev/null
+++ b/cidb/migrations/00006_create_boardperbuild_table.sql
@@ -0,0 +1,13 @@
+CREATE TABLE boardPerBuildTable (
+  build_id INT NOT NULL,
+  board VARCHAR(80) NOT NULL,
+  main_firmware_version VARCHAR(80),
+  ec_firmware_version VARCHAR(80),
+  PRIMARY KEY (build_id, board),
+  FOREIGN KEY (build_id)
+    REFERENCES buildTable(id),
+  INDEX (build_id)
+);
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (6, '00006_create_boardperbuild_table.sql');
diff --git a/cidb/migrations/00007_create_childconfigperbuild_table.sql b/cidb/migrations/00007_create_childconfigperbuild_table.sql
new file mode 100644
index 0000000..96d83a4
--- /dev/null
+++ b/cidb/migrations/00007_create_childconfigperbuild_table.sql
@@ -0,0 +1,11 @@
+CREATE TABLE childConfigPerBuildTable (
+  build_id INT NOT NULL,
+  child_config VARCHAR(80) NOT NULL,
+  PRIMARY KEY (build_id, child_config),
+  FOREIGN KEY (build_id)
+    REFERENCES buildTable(id),
+  INDEX (build_id)
+);
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (7, '00007_create_childconfigperbuild_table.sql');
diff --git a/cidb/migrations/00008_create_build_config_index.sql b/cidb/migrations/00008_create_build_config_index.sql
new file mode 100644
index 0000000..a9cb95c
--- /dev/null
+++ b/cidb/migrations/00008_create_build_config_index.sql
@@ -0,0 +1,4 @@
+CREATE INDEX build_config_index on buildTable (build_config);
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (8, '00008_create_build_config_index.sql');
diff --git a/cidb/migrations/00009_alter_build_table_add_last_updated.sql b/cidb/migrations/00009_alter_build_table_add_last_updated.sql
new file mode 100644
index 0000000..003b5dd
--- /dev/null
+++ b/cidb/migrations/00009_alter_build_table_add_last_updated.sql
@@ -0,0 +1,13 @@
+-- We want to add an auto-updating last_updated column. In MySQL prior to 5.6,
+-- only one timestamp column in the table may reference CURRENT_TIMESTAMP,
+-- and for it to work as expected it must be the lowest-numbered column of type
+-- TIMESTAMP. Thus, this alter table simultaneously changes the type of start_time
+-- to no longer reference CURRENT_TIMESTAMP, adds the last_updated column as the table's
+-- second column, and creates an index on the new column.
+ALTER TABLE buildTable
+  MODIFY start_time TIMESTAMP DEFAULT 0,
+  ADD COLUMN last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP AFTER id,
+  ADD INDEX last_updated_index(last_updated);
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (9, '00009_alter_build_table_add_last_updated.sql');
diff --git a/cidb/migrations/00010_alter_claction_table_add_verified.sql b/cidb/migrations/00010_alter_claction_table_add_verified.sql
new file mode 100644
index 0000000..f82cad2
--- /dev/null
+++ b/cidb/migrations/00010_alter_claction_table_add_verified.sql
@@ -0,0 +1,6 @@
+ALTER TABLE clActionTable
+  MODIFY action ENUM('picked_up', 'submitted', 'kicked_out', 'submit_failed',
+                     'verified') NOT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (10, '00010_alter_claction_table_add_verified.sql');
diff --git a/cidb/migrations/00011_alter_build_table_add_metadata_url.sql b/cidb/migrations/00011_alter_build_table_add_metadata_url.sql
new file mode 100644
index 0000000..7194a1e
--- /dev/null
+++ b/cidb/migrations/00011_alter_build_table_add_metadata_url.sql
@@ -0,0 +1,5 @@
+ALTER TABLE buildTable
+  ADD COLUMN metadata_url varchar(240);
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (11, '00011_alter_build_table_add_metadata_url.sql');
diff --git a/cidb/migrations/00012_alter_claction_table_add_precq_actions.sql b/cidb/migrations/00012_alter_claction_table_add_precq_actions.sql
new file mode 100644
index 0000000..fd0d43c
--- /dev/null
+++ b/cidb/migrations/00012_alter_claction_table_add_precq_actions.sql
@@ -0,0 +1,16 @@
+ALTER TABLE clActionTable
+  MODIFY action ENUM('picked_up',
+                     'submitted',
+                     'kicked_out',
+                     'submit_failed',
+                     'verified',
+                     'pre_cq_inflight',
+                     'pre_cq_passed',
+                     'pre_cq_failed',
+                     'pre_cq_launching',
+                     'pre_cq_waiting',
+                     'pre_cq_ready_to_submit')
+    NOT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (12, '00012_alter_claction_table_add_precq_actions.sql');
diff --git a/cidb/migrations/00013_alter_build_table_drop_metadata_json.sql b/cidb/migrations/00013_alter_build_table_drop_metadata_json.sql
new file mode 100644
index 0000000..4564d93
--- /dev/null
+++ b/cidb/migrations/00013_alter_build_table_drop_metadata_json.sql
@@ -0,0 +1,5 @@
+ALTER TABLE buildTable
+  DROP COLUMN metadata_json;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (13, '00013_alter_build_table_drop_metadata_json.sql');
diff --git a/cidb/migrations/00014_alter_build_table_add_release_waterfall.sql b/cidb/migrations/00014_alter_build_table_add_release_waterfall.sql
new file mode 100644
index 0000000..5fb7967
--- /dev/null
+++ b/cidb/migrations/00014_alter_build_table_add_release_waterfall.sql
@@ -0,0 +1,6 @@
+ALTER TABLE buildTable
+  MODIFY waterfall ENUM('chromeos', 'chromiumos', 'chromiumos.tryserver',
+                        'chromeos_release') NOT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (14, '00014_alter_build_table_add_release_waterfall.sql');
diff --git a/cidb/migrations/00015_alter_boardperbuild_table_add_final.sql b/cidb/migrations/00015_alter_boardperbuild_table_add_final.sql
new file mode 100644
index 0000000..515cf1d
--- /dev/null
+++ b/cidb/migrations/00015_alter_boardperbuild_table_add_final.sql
@@ -0,0 +1,8 @@
+ALTER TABLE boardPerBuildTable
+  ADD COLUMN last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+    ON UPDATE CURRENT_TIMESTAMP,
+  ADD COLUMN final BOOL NOT NULL DEFAULT false,
+  ADD INDEX last_updated_index(last_updated);
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (15, '00015_alter_boardperbuild_table_add_final.sql');
diff --git a/cidb/migrations/00016_alter_childconfigperbuild_table_add_status.sql b/cidb/migrations/00016_alter_childconfigperbuild_table_add_status.sql
new file mode 100644
index 0000000..ee537b8
--- /dev/null
+++ b/cidb/migrations/00016_alter_childconfigperbuild_table_add_status.sql
@@ -0,0 +1,10 @@
+ALTER TABLE childConfigPerBuildTable
+  ADD COLUMN last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+    ON UPDATE CURRENT_TIMESTAMP,
+  ADD COLUMN status ENUM('fail','pass','inflight','missing','aborted')
+    NOT NULL DEFAULT 'inflight',
+  ADD COLUMN final BOOL NOT NULL DEFAULT false,
+  ADD INDEX last_updated_index(last_updated);
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (16, '00016_alter_childconfigperbuild_table_add_status.sql');
diff --git a/cidb/migrations/00017_alter_build_table_add_branch_waterfall.sql b/cidb/migrations/00017_alter_build_table_add_branch_waterfall.sql
new file mode 100644
index 0000000..9514458
--- /dev/null
+++ b/cidb/migrations/00017_alter_build_table_add_branch_waterfall.sql
@@ -0,0 +1,6 @@
+ALTER TABLE buildTable
+  MODIFY waterfall ENUM('chromeos', 'chromiumos', 'chromiumos.tryserver',
+                        'chromeos_release', 'chromeos.branch') NOT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (17, '00017_alter_build_table_add_branch_waterfall.sql');
diff --git a/cidb/migrations/00018_alter_build_table_add_chrome_waterfall.sql b/cidb/migrations/00018_alter_build_table_add_chrome_waterfall.sql
new file mode 100644
index 0000000..f2908f6
--- /dev/null
+++ b/cidb/migrations/00018_alter_build_table_add_chrome_waterfall.sql
@@ -0,0 +1,7 @@
+ALTER TABLE buildTable
+  MODIFY waterfall ENUM('chromeos', 'chromiumos', 'chromiumos.tryserver',
+                        'chromeos_release', 'chromeos.branch',
+                        'chromeos.chrome') NOT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (18, '00018_alter_build_table_add_chrome_waterfall.sql');
diff --git a/cidb/migrations/00019_alter_claction_table_add_requeued.sql b/cidb/migrations/00019_alter_claction_table_add_requeued.sql
new file mode 100644
index 0000000..2d3aa96
--- /dev/null
+++ b/cidb/migrations/00019_alter_claction_table_add_requeued.sql
@@ -0,0 +1,17 @@
+ALTER TABLE clActionTable
+  MODIFY action ENUM('picked_up',
+                     'submitted',
+                     'kicked_out',
+                     'submit_failed',
+                     'verified',
+                     'pre_cq_inflight',
+                     'pre_cq_passed',
+                     'pre_cq_failed',
+                     'pre_cq_launching',
+                     'pre_cq_waiting',
+                     'pre_cq_ready_to_submit',
+                     'requeued')
+    NOT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (19, '00019_alter_claction_table_add_requeued.sql');
diff --git a/cidb/migrations/00020_alter_claction_table_add_misc_actions.sql b/cidb/migrations/00020_alter_claction_table_add_misc_actions.sql
new file mode 100644
index 0000000..40b7053
--- /dev/null
+++ b/cidb/migrations/00020_alter_claction_table_add_misc_actions.sql
@@ -0,0 +1,20 @@
+ALTER TABLE clActionTable
+  MODIFY action ENUM('picked_up',
+                     'submitted',
+                     'kicked_out',
+                     'submit_failed',
+                     'verified',
+                     'pre_cq_inflight',
+                     'pre_cq_passed',
+                     'pre_cq_failed',
+                     'pre_cq_launching',
+                     'pre_cq_waiting',
+                     'pre_cq_ready_to_submit',
+                     'requeued',
+                     'screened_for_pre_cq',
+                     'validation_pending_pre_cq',
+                     'irrelevant_to_slave')
+    NOT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (20, '00020_alter_claction_table_add_misc_actions.sql');
diff --git a/cidb/migrations/00021_alter_claction_table_add_launching.sql b/cidb/migrations/00021_alter_claction_table_add_launching.sql
new file mode 100644
index 0000000..e9c06d2
--- /dev/null
+++ b/cidb/migrations/00021_alter_claction_table_add_launching.sql
@@ -0,0 +1,21 @@
+ALTER TABLE clActionTable
+  MODIFY action ENUM('picked_up',
+                     'submitted',
+                     'kicked_out',
+                     'submit_failed',
+                     'verified',
+                     'pre_cq_inflight',
+                     'pre_cq_passed',
+                     'pre_cq_failed',
+                     'pre_cq_launching',
+                     'pre_cq_waiting',
+                     'pre_cq_ready_to_submit',
+                     'requeued',
+                     'screened_for_pre_cq',
+                     'validation_pending_pre_cq',
+                     'irrelevant_to_slave',
+                     'trybot_launching')
+    NOT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (21, '00021_alter_claction_table_add_launching.sql');
diff --git a/cidb/migrations/00022_alter_claction_table_add_speculative.sql b/cidb/migrations/00022_alter_claction_table_add_speculative.sql
new file mode 100644
index 0000000..09d9a01
--- /dev/null
+++ b/cidb/migrations/00022_alter_claction_table_add_speculative.sql
@@ -0,0 +1,22 @@
+ALTER TABLE clActionTable
+  MODIFY action ENUM('picked_up',
+                     'submitted',
+                     'kicked_out',
+                     'submit_failed',
+                     'verified',
+                     'pre_cq_inflight',
+                     'pre_cq_passed',
+                     'pre_cq_failed',
+                     'pre_cq_launching',
+                     'pre_cq_waiting',
+                     'pre_cq_ready_to_submit',
+                     'requeued',
+                     'screened_for_pre_cq',
+                     'validation_pending_pre_cq',
+                     'irrelevant_to_slave',
+                     'trybot_launching',
+                     'speculative')
+    NOT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (22, '00022_alter_claction_table_add_speculative.sql');
diff --git a/cidb/migrations/00023_alter_claction_table_add_timestamp_index.sql b/cidb/migrations/00023_alter_claction_table_add_timestamp_index.sql
new file mode 100644
index 0000000..e5dc5c6
--- /dev/null
+++ b/cidb/migrations/00023_alter_claction_table_add_timestamp_index.sql
@@ -0,0 +1,6 @@
+ALTER TABLE clActionTable
+  ADD INDEX timestamp_index(timestamp);
+
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (23, '00023_alter_claction_table_add_timestamp_index.sql');
diff --git a/cidb/migrations/00024_alter_claction_table_add_forgiven.sql b/cidb/migrations/00024_alter_claction_table_add_forgiven.sql
new file mode 100644
index 0000000..4466f31
--- /dev/null
+++ b/cidb/migrations/00024_alter_claction_table_add_forgiven.sql
@@ -0,0 +1,23 @@
+ALTER TABLE clActionTable
+  MODIFY action ENUM('picked_up',
+                     'submitted',
+                     'kicked_out',
+                     'submit_failed',
+                     'verified',
+                     'pre_cq_inflight',
+                     'pre_cq_passed',
+                     'pre_cq_failed',
+                     'pre_cq_launching',
+                     'pre_cq_waiting',
+                     'pre_cq_ready_to_submit',
+                     'requeued',
+                     'screened_for_pre_cq',
+                     'validation_pending_pre_cq',
+                     'irrelevant_to_slave',
+                     'trybot_launching',
+                     'speculative',
+                     'forgiven')
+    NOT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (24, '00024_alter_claction_table_add_forgiven.sql');
diff --git a/cidb/migrations/00025_alter_build_table_add_summary.sql b/cidb/migrations/00025_alter_build_table_add_summary.sql
new file mode 100644
index 0000000..0ce91db
--- /dev/null
+++ b/cidb/migrations/00025_alter_build_table_add_summary.sql
@@ -0,0 +1,8 @@
+-- The summary field contains an overall human readable summary of the build.
+-- The master builders summarize failures from all their slaves.
+-- Slaves summarize only their own failures.
+ALTER TABLE buildTable
+  ADD COLUMN summary varchar(1024) DEFAULT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (25, '00025_alter_build_table_add_summary.sql');
diff --git a/cidb/migrations/00026_create_annotations_table.sql b/cidb/migrations/00026_create_annotations_table.sql
new file mode 100644
index 0000000..9e6a6ec
--- /dev/null
+++ b/cidb/migrations/00026_create_annotations_table.sql
@@ -0,0 +1,25 @@
+CREATE TABLE annotationsTable (
+  id INT NOT NULL AUTO_INCREMENT,
+  build_id INT NOT NULL,
+  last_updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  last_annotator VARCHAR(80),
+  -- The following ENUM values should match constants.FAILURE_CATEGORY_ALL_CATEGORIES
+  failure_category ENUM(
+      'bad_cl', 'bug_in_tot', 'merge_conflict', 'tree_closed',
+      'scheduled_abort', 'cl_not_ready', 'bad_chrome',
+      'infra_failure', 'test_flake', 'gerrit_failure', 'gs_failure',
+      'lab_failure', 'bad_binary_package', 'build_flake', 'mystery'
+  ) DEFAULT 'mystery',
+  failure_message VARCHAR(1024),
+  blame_url VARCHAR(512),
+  notes VARCHAR(1024),
+
+  PRIMARY KEY (id),
+  FOREIGN KEY (build_id)
+    REFERENCES buildTable(id),
+  INDEX (build_id),
+  INDEX (last_updated)
+);
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (26, '00026_create_annotations_table.sql');
diff --git a/cidb/migrations/00027_create_annotator_user.sql b/cidb/migrations/00027_create_annotator_user.sql
new file mode 100644
index 0000000..4b8a200
--- /dev/null
+++ b/cidb/migrations/00027_create_annotator_user.sql
@@ -0,0 +1,13 @@
+USE cidb;
+
+FLUSH PRIVILEGES;
+CREATE USER annotator
+IDENTIFIED BY PASSWORD '*54C06F8BEE226565D06E0AE8151E3C7381155E9B';
+
+GRANT SELECT on cidb.* to annotator;
+GRANT SELECT, UPDATE, INSERT on cidb.annotationsTable to annotator;
+
+FLUSH PRIVILEGES;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (27, '00027_create_annotator_user.sql');
diff --git a/cidb/migrations/00028_remake_buildstage_table.sql b/cidb/migrations/00028_remake_buildstage_table.sql
new file mode 100644
index 0000000..8b6cb4c
--- /dev/null
+++ b/cidb/migrations/00028_remake_buildstage_table.sql
@@ -0,0 +1,26 @@
+DROP TABLE buildStageTable;
+
+CREATE TABLE buildStageTable (
+  id INT NOT NULL AUTO_INCREMENT,
+  build_id INT NOT NULL,
+  name VARCHAR(80) NOT NULL,
+  board VARCHAR(80),
+  -- This should match constants.BUILDER_ALL_STATUSES
+  status ENUM('fail', 'pass', 'inflight', 'missing', 'aborted', 'planned',
+              'skipped'),
+  last_updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
+    ON UPDATE CURRENT_TIMESTAMP,
+  start_time TIMESTAMP DEFAULT 0,
+  finish_time TIMESTAMP DEFAULT 0,
+  final BOOL NOT NULL DEFAULT false,
+
+  PRIMARY KEY (id),
+  FOREIGN KEY (build_id)
+    REFERENCES buildTable(id),
+  INDEX (build_id),
+  INDEX (last_updated)
+);
+
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (28, '00028_remake_buildstage_table.sql');
diff --git a/cidb/migrations/00029_create_failure_table.sql b/cidb/migrations/00029_create_failure_table.sql
new file mode 100644
index 0000000..1dad8c5
--- /dev/null
+++ b/cidb/migrations/00029_create_failure_table.sql
@@ -0,0 +1,21 @@
+
+CREATE TABLE failureTable (
+  id INT NOT NULL AUTO_INCREMENT,
+  build_stage_id INT NOT NULL,
+  outer_failure_id INT,
+  exception_type VARCHAR(240),
+  exception_message VARCHAR(240),
+  -- This should match constants.EXCEPTION_CATEGORY_ALL_CATEGORIES
+  exception_category ENUM('unknown', 'build', 'test', 'infra', 'lab') NOT NULL
+    DEFAULT 'unknown',
+  extra_info VARCHAR(240),
+  timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (id),
+  FOREIGN KEY (build_stage_id)
+    REFERENCES buildStageTable(id),
+  FOREIGN KEY (outer_failure_id)
+    REFERENCES failureTable(id)
+);
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (29, '00029_create_failure_table.sql');
diff --git a/cidb/migrations/00030_alter_buildstage_table_add_status_forgiven.sql b/cidb/migrations/00030_alter_buildstage_table_add_status_forgiven.sql
new file mode 100644
index 0000000..a9944e5
--- /dev/null
+++ b/cidb/migrations/00030_alter_buildstage_table_add_status_forgiven.sql
@@ -0,0 +1,6 @@
+ALTER TABLE buildStageTable
+  MODIFY COLUMN status ENUM('fail', 'pass', 'inflight', 'missing', 'aborted',
+                            'planned', 'skipped', 'forgiven');
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (30, '00030_alter_buildstage_table_add_status_forgiven.sql');
diff --git a/cidb/migrations/00031_alter_claction_table_add_fully_verified.sql b/cidb/migrations/00031_alter_claction_table_add_fully_verified.sql
new file mode 100644
index 0000000..8837bfe
--- /dev/null
+++ b/cidb/migrations/00031_alter_claction_table_add_fully_verified.sql
@@ -0,0 +1,24 @@
+ALTER TABLE clActionTable
+  MODIFY action ENUM('picked_up',
+                     'submitted',
+                     'kicked_out',
+                     'submit_failed',
+                     'verified',
+                     'pre_cq_inflight',
+                     'pre_cq_passed',
+                     'pre_cq_failed',
+                     'pre_cq_launching',
+                     'pre_cq_waiting',
+                     'pre_cq_ready_to_submit',
+                     'requeued',
+                     'screened_for_pre_cq',
+                     'validation_pending_pre_cq',
+                     'irrelevant_to_slave',
+                     'trybot_launching',
+                     'speculative',
+                     'forgiven',
+                     'pre_cq_fully_verified')
+    NOT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (31, '00031_alter_claction_table_add_fully_verified.sql');
diff --git a/cidb/migrations/00032_alter_build_table_add_deadline.sql b/cidb/migrations/00032_alter_build_table_add_deadline.sql
new file mode 100644
index 0000000..eb192e8
--- /dev/null
+++ b/cidb/migrations/00032_alter_build_table_add_deadline.sql
@@ -0,0 +1,6 @@
+-- The deadline column records the latest expected finish_time for this build.
+ALTER TABLE buildTable
+  ADD COLUMN deadline TIMESTAMP DEFAULT 0;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (32, '00032_alter_build_table_add_deadline.sql');
diff --git a/cidb/migrations/00033_alter_user_create_localhost_users.sql b/cidb/migrations/00033_alter_user_create_localhost_users.sql
new file mode 100644
index 0000000..2e733dc
--- /dev/null
+++ b/cidb/migrations/00033_alter_user_create_localhost_users.sql
@@ -0,0 +1,18 @@
+-- Create @localhost versions of existing users.
+CREATE USER readonly@localhost
+IDENTIFIED BY  PASSWORD '*12D90798C9984D0EFDDBF991FB2A92D7EEFC2E53';
+CREATE USER bot@localhost
+IDENTIFIED BY PASSWORD '*D17E032E3C8F0215AE62E2733FB66463F0746DAA';
+CREATE USER annotator@localhost
+IDENTIFIED BY PASSWORD '*54C06F8BEE226565D06E0AE8151E3C7381155E9B';
+
+-- And grant them the same permissions as their existing counterparts.
+GRANT SELECT on cidb.* to readonly@localhost;
+GRANT SELECT, UPDATE, INSERT on cidb.* to bot@localhost;
+GRANT SELECT on cidb.* to annotator@localhost;
+GRANT SELECT, UPDATE, INSERT on cidb.annotationsTable to annotator@localhost;
+
+FLUSH PRIVILEGES;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (33, '00033_alter_user_create_localhost_users.sql');
diff --git a/cidb/migrations/00034_alter_claction_table_add_reset.sql b/cidb/migrations/00034_alter_claction_table_add_reset.sql
new file mode 100644
index 0000000..531bbf9
--- /dev/null
+++ b/cidb/migrations/00034_alter_claction_table_add_reset.sql
@@ -0,0 +1,25 @@
+ALTER TABLE clActionTable
+  MODIFY action ENUM('picked_up',
+                     'submitted',
+                     'kicked_out',
+                     'submit_failed',
+                     'verified',
+                     'pre_cq_inflight',
+                     'pre_cq_passed',
+                     'pre_cq_failed',
+                     'pre_cq_launching',
+                     'pre_cq_waiting',
+                     'pre_cq_ready_to_submit',
+                     'requeued',
+                     'screened_for_pre_cq',
+                     'validation_pending_pre_cq',
+                     'irrelevant_to_slave',
+                     'trybot_launching',
+                     'speculative',
+                     'forgiven',
+                     'pre_cq_fully_verified',
+                     'pre_cq_reset')
+    NOT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (34, '00034_alter_claction_table_add_reset.sql');
diff --git a/cidb/migrations/00035_create_failure_view.sql b/cidb/migrations/00035_create_failure_view.sql
new file mode 100644
index 0000000..53a5528
--- /dev/null
+++ b/cidb/migrations/00035_create_failure_view.sql
@@ -0,0 +1,12 @@
+CREATE VIEW failureView as
+  SELECT f.*, bs.name as stage_name, bs.board, bs.status as stage_status,
+         b.id as build_id, b.build_config, b.status as build_status,
+         b.final as build_final, b.full_version, b.chrome_version,
+         b.sdk_version, b.milestone_version, b.master_build_id
+  FROM failureTable f JOIN buildStageTable bs on f.build_stage_id = bs.id
+                      JOIN buildTable b on bs.build_id = b.id;
+
+GRANT SHOW VIEW ON cidb.* to bot, readonly, annotator;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (35, '00035_create_failure_view.sql')
diff --git a/cidb/migrations/00036_alter_failure_view.sql b/cidb/migrations/00036_alter_failure_view.sql
new file mode 100644
index 0000000..8cacead
--- /dev/null
+++ b/cidb/migrations/00036_alter_failure_view.sql
@@ -0,0 +1,22 @@
+-- The failureView consists of:
+-- all failureTable columns, by original name.
+-- all buildStageTable columns except build_id, and its own id (which come
+-- from f.*), with non-colliding names
+-- all buildTable columns, with non-colliding names
+ALTER VIEW failureView AS
+  SELECT f.*,
+    bs.name AS stage_name, bs.board, bs.status AS stage_status,
+    bs.last_updated AS stage_last_updated, bs.start_time AS stage_start_time,
+    bs.finish_time AS stage_finish_time, bs.final AS stage_final,
+    b.id AS build_id, b.last_updated AS build_last_updated, b.master_build_id,
+    b.buildbot_generation, b.builder_name, b.waterfall, b.build_number,
+    b.build_config, b.bot_hostname, b.start_time AS build_start_time,
+    b.finish_time AS build_finish_time, b.status AS build_status, b.build_type,
+    b.chrome_version, b.milestone_version, b.platform_version, b.full_version,
+    b.sdk_version, b.toolchain_url, b.final AS build_final, b.metadata_url,
+    b.summary, b.deadline
+  FROM failureTable f JOIN buildStageTable bs on f.build_stage_id = bs.id
+                      JOIN buildTable b on bs.build_id = b.id;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (36, '00036_alter_failure_view.sql')
diff --git a/cidb/migrations/00037_create_claction_view.sql b/cidb/migrations/00037_create_claction_view.sql
new file mode 100644
index 0000000..cecce50
--- /dev/null
+++ b/cidb/migrations/00037_create_claction_view.sql
@@ -0,0 +1,11 @@
+CREATE VIEW clActionView as
+  SELECT c.*,
+    b.last_updated, b.master_build_id, b.buildbot_generation, b.builder_name,
+    b.waterfall, b.build_number, b.build_config, b.bot_hostname, b.start_time,
+    b.finish_time, b.status, b.build_type, b.chrome_version,
+    b.milestone_version, b.platform_version, b.full_version, b.sdk_version,
+    b.toolchain_url, b.final, b.metadata_url, b.summary, b.deadline
+ FROM clActionTable c JOIN buildTable b on c.build_id = b.id;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (37, '00037_create_claction_view.sql')
diff --git a/cidb/migrations/00038_alter_build_table_add_chromium_waterfall.sql b/cidb/migrations/00038_alter_build_table_add_chromium_waterfall.sql
new file mode 100644
index 0000000..daa5dfd
--- /dev/null
+++ b/cidb/migrations/00038_alter_build_table_add_chromium_waterfall.sql
@@ -0,0 +1,7 @@
+ALTER TABLE buildTable
+  MODIFY waterfall ENUM('chromeos', 'chromiumos', 'chromiumos.tryserver',
+                        'chromeos_release', 'chromeos.branch',
+                        'chromeos.chrome', 'chromiumos.chromium') NOT NULL;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (38, '00038_alter_build_table_add_chromium_waterfall.sql');
diff --git a/cidb/migrations/00039_alter_annotations_table_add_deleted.sql b/cidb/migrations/00039_alter_annotations_table_add_deleted.sql
new file mode 100644
index 0000000..fddf871
--- /dev/null
+++ b/cidb/migrations/00039_alter_annotations_table_add_deleted.sql
@@ -0,0 +1,5 @@
+ALTER TABLE annotationsTable
+  ADD COLUMN deleted BOOL NOT NULL DEFAULT false;
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (39, '00039_alter_annotations_table_add_deleted.sql');
diff --git a/cidb/migrations/00040_create_keyval_table.sql b/cidb/migrations/00040_create_keyval_table.sql
new file mode 100644
index 0000000..f09d0b1
--- /dev/null
+++ b/cidb/migrations/00040_create_keyval_table.sql
@@ -0,0 +1,9 @@
+CREATE TABLE keyvalTable (
+  k VARCHAR(240) NOT NULL,
+  v VARCHAR(240),
+  PRIMARY KEY(k)
+);
+
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (40, '00040_create_keyval_table.sql');
diff --git a/cidb/migrations/00041_alter_build_table_stringify_waterfall.sql b/cidb/migrations/00041_alter_build_table_stringify_waterfall.sql
new file mode 100644
index 0000000..f6f1b29
--- /dev/null
+++ b/cidb/migrations/00041_alter_build_table_stringify_waterfall.sql
@@ -0,0 +1,5 @@
+ALTER TABLE buildTable
+  MODIFY COLUMN waterfall VARCHAR(80);
+
+INSERT INTO schemaVersionTable (schemaVersion, scriptName) VALUES
+  (41, '00041_alter_build_table_stringify_waterfall.sql');
diff --git a/cidb/schema.dump b/cidb/schema.dump
new file mode 100644
index 0000000..598bb3e
--- /dev/null
+++ b/cidb/schema.dump
@@ -0,0 +1,318 @@
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+/*!40101 SET NAMES utf8 */;
+/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
+/*!40103 SET TIME_ZONE='+00:00' */;
+/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+
+
+DROP TABLE IF EXISTS `annotationsTable`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `annotationsTable` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `build_id` int(11) NOT NULL,
+  `last_updated` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  `last_annotator` varchar(80) DEFAULT NULL,
+  `failure_category` enum('bad_cl','bug_in_tot','merge_conflict','tree_closed','scheduled_abort','cl_not_ready','bad_chrome','infra_failure','test_flake','gerrit_failure','gs_failure','lab_failure','bad_binary_package','build_flake','mystery') DEFAULT 'mystery',
+  `failure_message` varchar(1024) DEFAULT NULL,
+  `blame_url` varchar(512) DEFAULT NULL,
+  `notes` varchar(1024) DEFAULT NULL,
+  `deleted` tinyint(1) NOT NULL DEFAULT '0',
+  PRIMARY KEY (`id`),
+  KEY `build_id` (`build_id`),
+  KEY `last_updated` (`last_updated`),
+  CONSTRAINT `annotationsTable_ibfk_1` FOREIGN KEY (`build_id`) REFERENCES `buildTable` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+
+DROP TABLE IF EXISTS `boardPerBuildTable`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `boardPerBuildTable` (
+  `build_id` int(11) NOT NULL,
+  `board` varchar(80) NOT NULL,
+  `main_firmware_version` varchar(80) DEFAULT NULL,
+  `ec_firmware_version` varchar(80) DEFAULT NULL,
+  `last_updated` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  `final` tinyint(1) NOT NULL DEFAULT '0',
+  PRIMARY KEY (`build_id`,`board`),
+  KEY `build_id` (`build_id`),
+  KEY `last_updated_index` (`last_updated`),
+  CONSTRAINT `boardPerBuildTable_ibfk_1` FOREIGN KEY (`build_id`) REFERENCES `buildTable` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+
+DROP TABLE IF EXISTS `buildStageTable`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `buildStageTable` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `build_id` int(11) NOT NULL,
+  `name` varchar(80) NOT NULL,
+  `board` varchar(80) DEFAULT NULL,
+  `status` enum('fail','pass','inflight','missing','aborted','planned','skipped','forgiven') DEFAULT NULL,
+  `last_updated` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  `start_time` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
+  `finish_time` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
+  `final` tinyint(1) NOT NULL DEFAULT '0',
+  PRIMARY KEY (`id`),
+  KEY `build_id` (`build_id`),
+  KEY `last_updated` (`last_updated`),
+  CONSTRAINT `buildStageTable_ibfk_1` FOREIGN KEY (`build_id`) REFERENCES `buildTable` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+
+DROP TABLE IF EXISTS `buildTable`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `buildTable` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `last_updated` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  `master_build_id` int(11) DEFAULT NULL,
+  `buildbot_generation` int(11) NOT NULL,
+  `builder_name` varchar(80) NOT NULL,
+  `waterfall` varchar(80) DEFAULT NULL,
+  `build_number` int(11) NOT NULL,
+  `build_config` varchar(80) NOT NULL,
+  `bot_hostname` varchar(80) NOT NULL,
+  `start_time` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
+  `finish_time` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
+  `status` enum('fail','pass','inflight','missing','aborted') NOT NULL DEFAULT 'inflight',
+  `status_pickle` blob,
+  `build_type` varchar(80) DEFAULT NULL,
+  `chrome_version` varchar(80) DEFAULT NULL,
+  `milestone_version` varchar(80) DEFAULT NULL,
+  `platform_version` varchar(80) DEFAULT NULL,
+  `full_version` varchar(80) DEFAULT NULL,
+  `sdk_version` varchar(80) DEFAULT NULL,
+  `toolchain_url` varchar(240) DEFAULT NULL,
+  `final` tinyint(1) NOT NULL DEFAULT '0',
+  `metadata_url` varchar(240) DEFAULT NULL,
+  `summary` varchar(1024) DEFAULT NULL,
+  `deadline` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
+  PRIMARY KEY (`id`),
+  UNIQUE KEY `buildbot_generation` (`buildbot_generation`,`builder_name`,`waterfall`,`build_number`),
+  KEY `master_build_id` (`master_build_id`),
+  KEY `build_config_index` (`build_config`),
+  KEY `last_updated_index` (`last_updated`),
+  CONSTRAINT `buildTable_ibfk_1` FOREIGN KEY (`master_build_id`) REFERENCES `buildTable` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+
+DROP TABLE IF EXISTS `childConfigPerBuildTable`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `childConfigPerBuildTable` (
+  `build_id` int(11) NOT NULL,
+  `child_config` varchar(80) NOT NULL,
+  `last_updated` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  `status` enum('fail','pass','inflight','missing','aborted') NOT NULL DEFAULT 'inflight',
+  `final` tinyint(1) NOT NULL DEFAULT '0',
+  PRIMARY KEY (`build_id`,`child_config`),
+  KEY `build_id` (`build_id`),
+  KEY `last_updated_index` (`last_updated`),
+  CONSTRAINT `childConfigPerBuildTable_ibfk_1` FOREIGN KEY (`build_id`) REFERENCES `buildTable` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+
+DROP TABLE IF EXISTS `clActionTable`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `clActionTable` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `build_id` int(11) NOT NULL,
+  `change_number` int(11) NOT NULL,
+  `patch_number` int(11) NOT NULL,
+  `change_source` enum('internal','external') NOT NULL,
+  `action` enum('picked_up','submitted','kicked_out','submit_failed','verified','pre_cq_inflight','pre_cq_passed','pre_cq_failed','pre_cq_launching','pre_cq_waiting','pre_cq_ready_to_submit','requeued','screened_for_pre_cq','validation_pending_pre_cq','irrelevant_to_slave','trybot_launching','speculative','forgiven','pre_cq_fully_verified','pre_cq_reset') NOT NULL,
+  `reason` varchar(80) DEFAULT NULL,
+  `timestamp` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  PRIMARY KEY (`id`),
+  KEY `build_id` (`build_id`),
+  KEY `change_number` (`change_number`,`change_source`),
+  KEY `change_number_2` (`change_number`,`patch_number`,`change_source`),
+  KEY `timestamp_index` (`timestamp`),
+  CONSTRAINT `clActionTable_ibfk_1` FOREIGN KEY (`build_id`) REFERENCES `buildTable` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+
+DROP TABLE IF EXISTS `clActionView`;
+/*!50001 DROP VIEW IF EXISTS `clActionView`*/;
+SET @saved_cs_client     = @@character_set_client;
+SET character_set_client = utf8;
+/*!50001 CREATE TABLE `clActionView` (
+  `id` tinyint NOT NULL,
+  `build_id` tinyint NOT NULL,
+  `change_number` tinyint NOT NULL,
+  `patch_number` tinyint NOT NULL,
+  `change_source` tinyint NOT NULL,
+  `action` tinyint NOT NULL,
+  `reason` tinyint NOT NULL,
+  `timestamp` tinyint NOT NULL,
+  `last_updated` tinyint NOT NULL,
+  `master_build_id` tinyint NOT NULL,
+  `buildbot_generation` tinyint NOT NULL,
+  `builder_name` tinyint NOT NULL,
+  `waterfall` tinyint NOT NULL,
+  `build_number` tinyint NOT NULL,
+  `build_config` tinyint NOT NULL,
+  `bot_hostname` tinyint NOT NULL,
+  `start_time` tinyint NOT NULL,
+  `finish_time` tinyint NOT NULL,
+  `status` tinyint NOT NULL,
+  `build_type` tinyint NOT NULL,
+  `chrome_version` tinyint NOT NULL,
+  `milestone_version` tinyint NOT NULL,
+  `platform_version` tinyint NOT NULL,
+  `full_version` tinyint NOT NULL,
+  `sdk_version` tinyint NOT NULL,
+  `toolchain_url` tinyint NOT NULL,
+  `final` tinyint NOT NULL,
+  `metadata_url` tinyint NOT NULL,
+  `summary` tinyint NOT NULL,
+  `deadline` tinyint NOT NULL
+) ENGINE=MyISAM */;
+SET character_set_client = @saved_cs_client;
+
+
+DROP TABLE IF EXISTS `failureTable`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `failureTable` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `build_stage_id` int(11) NOT NULL,
+  `outer_failure_id` int(11) DEFAULT NULL,
+  `exception_type` varchar(240) DEFAULT NULL,
+  `exception_message` varchar(240) DEFAULT NULL,
+  `exception_category` enum('unknown','build','test','infra','lab') NOT NULL DEFAULT 'unknown',
+  `extra_info` varchar(240) DEFAULT NULL,
+  `timestamp` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (`id`),
+  KEY `build_stage_id` (`build_stage_id`),
+  KEY `outer_failure_id` (`outer_failure_id`),
+  CONSTRAINT `failureTable_ibfk_1` FOREIGN KEY (`build_stage_id`) REFERENCES `buildStageTable` (`id`),
+  CONSTRAINT `failureTable_ibfk_2` FOREIGN KEY (`outer_failure_id`) REFERENCES `failureTable` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+
+DROP TABLE IF EXISTS `failureView`;
+/*!50001 DROP VIEW IF EXISTS `failureView`*/;
+SET @saved_cs_client     = @@character_set_client;
+SET character_set_client = utf8;
+/*!50001 CREATE TABLE `failureView` (
+  `id` tinyint NOT NULL,
+  `build_stage_id` tinyint NOT NULL,
+  `outer_failure_id` tinyint NOT NULL,
+  `exception_type` tinyint NOT NULL,
+  `exception_message` tinyint NOT NULL,
+  `exception_category` tinyint NOT NULL,
+  `extra_info` tinyint NOT NULL,
+  `timestamp` tinyint NOT NULL,
+  `stage_name` tinyint NOT NULL,
+  `board` tinyint NOT NULL,
+  `stage_status` tinyint NOT NULL,
+  `stage_last_updated` tinyint NOT NULL,
+  `stage_start_time` tinyint NOT NULL,
+  `stage_finish_time` tinyint NOT NULL,
+  `stage_final` tinyint NOT NULL,
+  `build_id` tinyint NOT NULL,
+  `build_last_updated` tinyint NOT NULL,
+  `master_build_id` tinyint NOT NULL,
+  `buildbot_generation` tinyint NOT NULL,
+  `builder_name` tinyint NOT NULL,
+  `waterfall` tinyint NOT NULL,
+  `build_number` tinyint NOT NULL,
+  `build_config` tinyint NOT NULL,
+  `bot_hostname` tinyint NOT NULL,
+  `build_start_time` tinyint NOT NULL,
+  `build_finish_time` tinyint NOT NULL,
+  `build_status` tinyint NOT NULL,
+  `build_type` tinyint NOT NULL,
+  `chrome_version` tinyint NOT NULL,
+  `milestone_version` tinyint NOT NULL,
+  `platform_version` tinyint NOT NULL,
+  `full_version` tinyint NOT NULL,
+  `sdk_version` tinyint NOT NULL,
+  `toolchain_url` tinyint NOT NULL,
+  `build_final` tinyint NOT NULL,
+  `metadata_url` tinyint NOT NULL,
+  `summary` tinyint NOT NULL,
+  `deadline` tinyint NOT NULL
+) ENGINE=MyISAM */;
+SET character_set_client = @saved_cs_client;
+
+
+DROP TABLE IF EXISTS `keyvalTable`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `keyvalTable` (
+  `k` varchar(240) NOT NULL,
+  `v` varchar(240) DEFAULT NULL,
+  PRIMARY KEY (`k`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+
+DROP TABLE IF EXISTS `schemaVersionTable`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `schemaVersionTable` (
+  `schemaVersion` int(11) NOT NULL,
+  `scriptName` varchar(80) DEFAULT NULL,
+  `timestamp` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  PRIMARY KEY (`schemaVersion`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+
+/*!50001 DROP TABLE IF EXISTS `clActionView`*/;
+/*!50001 DROP VIEW IF EXISTS `clActionView`*/;
+/*!50001 SET @saved_cs_client          = @@character_set_client */;
+/*!50001 SET @saved_cs_results         = @@character_set_results */;
+/*!50001 SET @saved_col_connection     = @@collation_connection */;
+/*!50001 SET character_set_client      = utf8 */;
+/*!50001 SET character_set_results     = utf8 */;
+/*!50001 SET collation_connection      = utf8_general_ci */;
+/*!50001 CREATE ALGORITHM=UNDEFINED */
+/*!50001 VIEW `clActionView` AS select `c`.`id` AS `id`,`c`.`build_id` AS `build_id`,`c`.`change_number` AS `change_number`,`c`.`patch_number` AS `patch_number`,`c`.`change_source` AS `change_source`,`c`.`action` AS `action`,`c`.`reason` AS `reason`,`c`.`timestamp` AS `timestamp`,`b`.`last_updated` AS `last_updated`,`b`.`master_build_id` AS `master_build_id`,`b`.`buildbot_generation` AS `buildbot_generation`,`b`.`builder_name` AS `builder_name`,`b`.`waterfall` AS `waterfall`,`b`.`build_number` AS `build_number`,`b`.`build_config` AS `build_config`,`b`.`bot_hostname` AS `bot_hostname`,`b`.`start_time` AS `start_time`,`b`.`finish_time` AS `finish_time`,`b`.`status` AS `status`,`b`.`build_type` AS `build_type`,`b`.`chrome_version` AS `chrome_version`,`b`.`milestone_version` AS `milestone_version`,`b`.`platform_version` AS `platform_version`,`b`.`full_version` AS `full_version`,`b`.`sdk_version` AS `sdk_version`,`b`.`toolchain_url` AS `toolchain_url`,`b`.`final` AS `final`,`b`.`metadata_url` AS `metadata_url`,`b`.`summary` AS `summary`,`b`.`deadline` AS `deadline` from (`clActionTable` `c` join `buildTable` `b` on((`c`.`build_id` = `b`.`id`))) */;
+/*!50001 SET character_set_client      = @saved_cs_client */;
+/*!50001 SET character_set_results     = @saved_cs_results */;
+/*!50001 SET collation_connection      = @saved_col_connection */;
+
+
+/*!50001 DROP TABLE IF EXISTS `failureView`*/;
+/*!50001 DROP VIEW IF EXISTS `failureView`*/;
+/*!50001 SET @saved_cs_client          = @@character_set_client */;
+/*!50001 SET @saved_cs_results         = @@character_set_results */;
+/*!50001 SET @saved_col_connection     = @@collation_connection */;
+/*!50001 SET character_set_client      = utf8 */;
+/*!50001 SET character_set_results     = utf8 */;
+/*!50001 SET collation_connection      = utf8_general_ci */;
+/*!50001 CREATE ALGORITHM=UNDEFINED */
+/*!50001 VIEW `failureView` AS select `f`.`id` AS `id`,`f`.`build_stage_id` AS `build_stage_id`,`f`.`outer_failure_id` AS `outer_failure_id`,`f`.`exception_type` AS `exception_type`,`f`.`exception_message` AS `exception_message`,`f`.`exception_category` AS `exception_category`,`f`.`extra_info` AS `extra_info`,`f`.`timestamp` AS `timestamp`,`bs`.`name` AS `stage_name`,`bs`.`board` AS `board`,`bs`.`status` AS `stage_status`,`bs`.`last_updated` AS `stage_last_updated`,`bs`.`start_time` AS `stage_start_time`,`bs`.`finish_time` AS `stage_finish_time`,`bs`.`final` AS `stage_final`,`b`.`id` AS `build_id`,`b`.`last_updated` AS `build_last_updated`,`b`.`master_build_id` AS `master_build_id`,`b`.`buildbot_generation` AS `buildbot_generation`,`b`.`builder_name` AS `builder_name`,`b`.`waterfall` AS `waterfall`,`b`.`build_number` AS `build_number`,`b`.`build_config` AS `build_config`,`b`.`bot_hostname` AS `bot_hostname`,`b`.`start_time` AS `build_start_time`,`b`.`finish_time` AS `build_finish_time`,`b`.`status` AS `build_status`,`b`.`build_type` AS `build_type`,`b`.`chrome_version` AS `chrome_version`,`b`.`milestone_version` AS `milestone_version`,`b`.`platform_version` AS `platform_version`,`b`.`full_version` AS `full_version`,`b`.`sdk_version` AS `sdk_version`,`b`.`toolchain_url` AS `toolchain_url`,`b`.`final` AS `build_final`,`b`.`metadata_url` AS `metadata_url`,`b`.`summary` AS `summary`,`b`.`deadline` AS `deadline` from ((`failureTable` `f` join `buildStageTable` `bs` on((`f`.`build_stage_id` = `bs`.`id`))) join `buildTable` `b` on((`bs`.`build_id` = `b`.`id`))) */;
+/*!50001 SET character_set_client      = @saved_cs_client */;
+/*!50001 SET character_set_results     = @saved_cs_results */;
+/*!50001 SET collation_connection      = @saved_col_connection */;
+/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
+
+/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
+/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
+/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
+
diff --git a/cidb/schema.dump.readme b/cidb/schema.dump.readme
new file mode 100644
index 0000000..d8e2aef
--- /dev/null
+++ b/cidb/schema.dump.readme
@@ -0,0 +1,15 @@
+These instructions assume you have a full chromiumos checkout at
+~/chromiumos/
+
+To generate a schema dump, run
+  $ lib/cidb_integration_test.py CIDBMigrationsTest.testMigrations --no-wipe
+to bring the test database instance to the latest schema (as reflected in your
+source tree).
+Then, follow the instructions in the developer.readme to re-launch the mysqld
+daemon from the temporary directory and dump the schema using:
+
+$ mysqldump -u root -S ${tmpdir}/mysqld_dir/mysqld.socket --no-data \
+    --single-transaction cidb | grep -v '^--' \
+    > ~/chromiumos/chromite/cidb/schema.dump
+
+Remember to clean up the temporary directory when you're done.
diff --git a/cidb/test_data/series_0/README b/cidb/test_data/series_0/README
new file mode 100644
index 0000000..56d5057
--- /dev/null
+++ b/cidb/test_data/series_0/README
@@ -0,0 +1,5 @@
+This test data is taken from master-paladin build numbers 1909-1929, as well
+as all of the corresponding slave builds.
+
+They have had a build_type key added, since that metadata key did not exist
+when they were generated.
diff --git a/cidb/test_data/series_0/metadata-1.json b/cidb/test_data/series_0/metadata-1.json
new file mode 100644
index 0000000..faffff1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-1.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 14:51:38 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1267", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843", "reason": "None"}}, "boards": [], "changes": [{"patch_number": "2", "total_pass": 1, "gerrit_number": "206186", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206187", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206188", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206593", "total_fail": 0, "pass": 1, "fail": 0, "internal": 
false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206363", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "168185", "total_fail": 0, "pass": 1, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "167757", "total_fail": 0, "pass": 1, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206504", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206800", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206546", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 1, "gerrit_number": "205980", "total_fail": 2, "pass": 1, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 1, "gerrit_number": "205561", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "205472", "total_fail": 5, "pass": 1, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206792", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}], "metadata-version": "2", "child-configs": [], "build-number": 1929, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [[{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761215, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761215, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761215, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761215, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761215, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, 
"picked_up", 1404761215, ""], [{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761215, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761215, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761215, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761215, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761215, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761215, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761215, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761215, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "submitted", 1404769567, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "submitted", 1404769572, ""], [{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "submitted", 1404769577, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "submitted", 1404769582, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "submitted", 1404769755, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "submitted", 1404769760, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "submitted", 1404769767, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "submitted", 1404769775, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "submitted", 1404769781, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "submitted", 1404769810, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "submitted", 1404769814, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, 
"submitted", 1404769817, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "submitted", 1404769823, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "submitted", 1404769827, ""]], "builder-name": "CQ master", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "time": {"duration": "2:22:53.656036", "start": "Mon, 07 Jul 2014 12:28:44 -0700 (PST)", "finish": "Mon, 07 Jul 2014 14:51:38 -0700 (PST)"}, "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1929/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1929/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:06", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1929/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1929/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:06", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1929/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1929/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1929/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1929/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "2:18:01", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1929/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:22", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-10.json b/cidb/test_data/series_0/metadata-10.json
new file mode 100644
index 0000000..e7d875b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-10.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 10:40:52 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1258", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834", "reason": "None"}}, "boards": [], "changes": [{"patch_number": "1", "total_pass": 1, "gerrit_number": "205441", "total_fail": 1, "pass": 1, "fail": 1, "internal": false}], "metadata-version": "2", "child-configs": [], "build-number": 1920, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662131, ""], [{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "submitted", 1404668387, ""]], 
"builder-name": "CQ master", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "time": {"duration": "1:44:13.348580", "start": "Sun, 06 Jul 2014 08:56:39 -0700 (PST)", "finish": "Sun, 06 Jul 2014 10:40:52 -0700 (PST)"}, "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1920/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1920/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:30", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1920/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1920/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1920/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1920/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1920/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1920/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:39:33", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1920/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-100.json b/cidb/test_data/series_0/metadata-100.json
new file mode 100644
index 0000000..4f74b12
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-100.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:31:23 -0700 (PST)"}, "boards": ["falco"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:57:30.082516", "start": "Mon, 07 Jul 2014 12:33:53 -0700 (PST)", "finish": "Mon, 07 Jul 
2014 13:31:23 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:11", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:43", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:32:17", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:10:49", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/UnitTest/logs/stdio", 
"summary": "Stage was successful", "board": "falco", "duration": "0:08:33", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:20", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:42", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:09:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4302/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": 
"CommitQueueCompletion"}], "build-number": 4302, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761605, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761605, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761605, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761605, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761605, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761605, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761605, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761605, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761605, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761605, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761605, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761605, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761605, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761605, ""]], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-101.json b/cidb/test_data/series_0/metadata-101.json
new file mode 100644
index 0000000..3558c3f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-101.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 12:58:48 -0700 (PST)"}, "boards": ["gizmo"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:23:59.711135", "start": "Mon, 07 Jul 2014 12:34:48 -0700 (PST)", "finish": "Mon, 07 Jul 
2014 12:58:48 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:08:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:06:48", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/UploadPrebuilts/logs/stdio", 
"summary": "Stage was successful", "board": "gizmo", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:09", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:22", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/843/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:22", "name": "CommitQueueCompletion"}], "build-number": 843, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", 
"i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761661, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761661, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761661, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761661, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761661, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761661, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761661, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761661, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761661, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761661, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761661, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761661, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761661, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761661, ""]], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-102.json b/cidb/test_data/series_0/metadata-102.json
new file mode 100644
index 0000000..0f2e1ed
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-102.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:10:48 -0700 (PST)"}, "boards": ["rambi"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:37:00.668130", "start": "Mon, 07 Jul 2014 12:33:47 -0700 (PST)", "finish": "Mon, 07 Jul 
2014 13:10:48 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:17", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:00", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:33", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:12:41", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:12:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/UnitTest/logs/stdio", 
"summary": "Stage was successful", "board": "rambi", "duration": "0:09:50", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:27", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:04", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:55", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:45", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2340/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": 
"CommitQueueCompletion"}], "build-number": 2340, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761598, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761598, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761598, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761598, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761598, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761598, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761598, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761598, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761598, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761598, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761598, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761598, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761598, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761598, ""]], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-103.json b/cidb/test_data/series_0/metadata-103.json
new file mode 100644
index 0000000..e1d173f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-103.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:02:17 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:12:32.356069", "start": "Mon, 07 Jul 2014 12:49:44 -0700 (PST)", "finish": "Mon, 07 Jul 
2014 13:02:17 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1267/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1267/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:18:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1267/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1267/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1267/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1267/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:41", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1267/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1267/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1267/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:10:08", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1267/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1267/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1267, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404762559, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404762559, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404762559, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404762559, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404762559, ""], 
[{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404762559, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404762559, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404762559, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404762559, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404762559, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404762559, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404762559, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404762559, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404762559, ""]], "bot-hostname": "build168-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-104.json b/cidb/test_data/series_0/metadata-104.json
new file mode 100644
index 0000000..25e2777
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-104.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 12:54:57 -0700 (PST)"}, "boards": ["duck"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:22:14.661038", "start": "Mon, 07 Jul 2014 12:32:43 -0700 (PST)", "finish": "Mon, 07 Jul 
2014 12:54:57 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:07:45", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:11", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:19", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/UploadPrebuilts/logs/stdio", 
"summary": "Stage was successful", "board": "duck", "duration": "0:00:44", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:28", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:57", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:29", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:53", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1390/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1390, "child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", 
"i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761535, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761535, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761535, ""]], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-105.json b/cidb/test_data/series_0/metadata-105.json
new file mode 100644
index 0000000..5bc0838
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-105.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:09:00 -0700 (PST)"}, "boards": ["peppy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:35:56.518883", "start": "Mon, 07 Jul 2014 12:33:03 -0700 (PST)", "finish": "Mon, 07 Jul 
2014 13:09:00 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:34", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:00", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:12:09", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:43", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:12:02", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:45", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:10", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:23", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:58", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:09:11", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:53", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4289/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 4289, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761555, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761555, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761555, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761555, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761555, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761555, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761555, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761555, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761555, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761555, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761555, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761555, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761555, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761555, ""]], 
"bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-106.json b/cidb/test_data/series_0/metadata-106.json
new file mode 100644
index 0000000..dd0dfb2
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-106.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:33:46 -0700 (PST)"}, "boards": ["butterfly"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:58:17.567320", "start": "Mon, 07 Jul 2014 12:35:29 -0700 (PST)", "finish": "Mon, 07 
Jul 2014 13:33:46 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:08", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:59", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:48", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:32:09", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:11:35", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:43", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:14", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:15", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:15", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:10:31", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11470/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:23", "name": "CommitQueueCompletion"}], "build-number": 11470, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761702, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761702, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761702, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761702, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761702, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761702, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761702, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761702, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761702, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761702, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761702, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761702, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761702, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 
1404761702, ""]], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-107.json b/cidb/test_data/series_0/metadata-107.json
new file mode 100644
index 0000000..044536b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-107.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:11:53 -0700 (PST)"}, "boards": ["monroe"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:36:48.236037", "start": "Mon, 07 Jul 2014 12:35:05 -0700 (PST)", "finish": "Mon, 07 
Jul 2014 13:11:53 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:14", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:00", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:12:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:12:55", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:13:22", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:26", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:53", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:43", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:06", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2344/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2344, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761674, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761674, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761674, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761674, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761674, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761674, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761674, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761674, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761674, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761674, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761674, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761674, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761674, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761674, 
""]], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-108.json b/cidb/test_data/series_0/metadata-108.json
new file mode 100644
index 0000000..bad5280
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-108.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:14:56 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:40:57.713498", "start": "Mon, 07 Jul 2014 12:33:58 -0700 (PST)", "finish": 
"Mon, 07 Jul 2014 13:14:56 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:12:32", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:15", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:05", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:25", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:21", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:24", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:09:56", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:35", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:15:12", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1389/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1389, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761611, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761611, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761611, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761611, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761611, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761611, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761611, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761611, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761611, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761611, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761611, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761611, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761611, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, 
"picked_up", 1404761611, ""]], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-109.json b/cidb/test_data/series_0/metadata-109.json
new file mode 100644
index 0000000..b0d9159
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-109.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 17:22:34 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [], "time": {"duration": "1:05:27.081165", "start": "Fri, 04 Jul 2014 16:17:06 -0700 (PST)", "finish": "Fri, 04 Jul 2014 17:22:34 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:16", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:24", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:30", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:25", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:42:44", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:43", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:06", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:40", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:11", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2640, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-11.json b/cidb/test_data/series_0/metadata-11.json
new file mode 100644
index 0000000..9c9df55
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-11.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:54:33 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1257", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833", "reason": "None"}}, "boards": [], "changes": [], "metadata-version": "2", "child-configs": [], "build-number": 1919, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [], "builder-name": "CQ master", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "time": {"duration": "1:44:25.062561", "start": "Sun, 06 Jul 2014 07:10:08 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:54:33 -0700 (PST)"}, "toolchain-url": 
"2014/07/%(target)s-2014.07.06.014406.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1919/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1919/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "4:01:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1919/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1919/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:03", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1919/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1919/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1919/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1919/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:39:42", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1919/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-110.json b/cidb/test_data/series_0/metadata-110.json
new file mode 100644
index 0000000..3b3dbd3
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-110.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 16:33:10 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [], "time": {"duration": "0:14:57.907240", "start": "Fri, 04 Jul 2014 16:18:12 -0700 (PST)", "finish": "Fri, 04 Jul 2014 16:33:10 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:11", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:59", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:41", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:21", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:30", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:04:39", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2460, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-111.json b/cidb/test_data/series_0/metadata-111.json
new file mode 100644
index 0000000..a0a1bb4
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-111.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 17:59:57 -0700 (PST)"}, "boards": ["wolf"], "changes": [], "time": {"duration": "1:42:23.730013", "start": "Fri, 04 Jul 2014 16:17:33 -0700 (PST)", "finish": "Fri, 04 Jul 2014 17:59:57 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:16", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:14", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:50", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:50", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:36", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:55", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:37", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:51", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "1:10:33", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 3437, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-112.json b/cidb/test_data/series_0/metadata-112.json
new file mode 100644
index 0000000..f2a0a28
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-112.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 16:54:01 -0700 (PST)"}, "boards": ["leon"], "changes": [], "time": {"duration": "0:35:16.646661", "start": "Fri, 04 Jul 2014 16:18:44 -0700 (PST)", "finish": "Fri, 04 Jul 2014 16:54:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:12:37", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:52", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:10:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:53", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:08:04", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:45", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:09:52", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2875, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-113.json b/cidb/test_data/series_0/metadata-113.json
new file mode 100644
index 0000000..53f0df6
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-113.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 17:56:42 -0700 (PST)"}, "boards": ["lumpy"], "changes": [], "time": {"duration": "1:40:09.286763", "start": "Fri, 04 Jul 2014 16:16:32 -0700 (PST)", "finish": "Fri, 04 Jul 2014 17:56:42 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:20", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:12:38", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:22", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:25", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:20", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:27", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:10:29", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "1:07:37", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17388, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-114.json b/cidb/test_data/series_0/metadata-114.json
new file mode 100644
index 0000000..c4d0503
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-114.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 17:55:04 -0700 (PST)"}, "boards": ["parrot"], "changes": [], "time": {"duration": "1:36:48.405728", "start": "Fri, 04 Jul 2014 16:18:16 -0700 (PST)", "finish": "Fri, 04 Jul 2014 17:55:04 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:52", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:59", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:36", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:37", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:39", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:55", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "1:03:37", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 12107, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-115.json b/cidb/test_data/series_0/metadata-115.json
new file mode 100644
index 0000000..8955883
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-115.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 18:13:18 -0700 (PST)"}, "boards": ["stumpy"], "changes": [], "time": {"duration": "1:55:35.317014", "start": "Fri, 04 Jul 2014 16:17:43 -0700 (PST)", "finish": "Fri, 04 Jul 2014 18:13:18 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:18", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:09", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:12:38", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:11", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:46", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:10:36", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:23:05", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 17432, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-116.json b/cidb/test_data/series_0/metadata-116.json
new file mode 100644
index 0000000..003097a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-116.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 17:14:25 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [], "time": {"duration": "0:58:52.638270", "start": "Fri, 04 Jul 2014 16:15:32 -0700 (PST)", "finish": "Fri, 04 Jul 2014 17:14:25 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:39", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:08", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:15", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:12", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:46", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:39:01", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:17", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:55", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:37", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:00", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:40", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 18710, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-117.json b/cidb/test_data/series_0/metadata-117.json
new file mode 100644
index 0000000..c971108
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-117.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 18:07:56 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [], "time": {"duration": "1:50:03.425615", "start": "Fri, 04 Jul 2014 16:17:52 -0700 (PST)", "finish": "Fri, 04 Jul 2014 18:07:56 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:33", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:49", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:36", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:00:53", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:23", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:52", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:10:43", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:17:12", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 16871, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-118.json b/cidb/test_data/series_0/metadata-118.json
new file mode 100644
index 0000000..9de3f38
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-118.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 17:14:39 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [], "time": {"duration": "0:59:06.184390", "start": "Fri, 04 Jul 2014 16:15:33 -0700 (PST)", "finish": "Fri, 04 Jul 2014 17:14:39 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:37", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:10", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:14", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:20", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:49", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:39:11", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:57", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:58", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:29", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CommitQueueCompletion"}], "build-number": 18506, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-119.json b/cidb/test_data/series_0/metadata-119.json
new file mode 100644
index 0000000..03d79d3
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-119.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 16:55:37 -0700 (PST)"}, "boards": ["stout"], "changes": [], "time": {"duration": "0:35:17.534305", "start": "Fri, 04 Jul 2014 16:20:19 -0700 (PST)", "finish": "Fri, 04 Jul 2014 16:55:37 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:33", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:37", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:12:15", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:43", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:13", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:58", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:23", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:11", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:19", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11453, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-12.json b/cidb/test_data/series_0/metadata-12.json
new file mode 100644
index 0000000..8c97251
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-12.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 03:07:35 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1256", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832", "reason": "None"}}, "boards": [], "changes": [{"patch_number": "2", "total_pass": 1, "gerrit_number": "206614", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}], "metadata-version": "2", "child-configs": [], "build-number": 1918, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404634800, ""], [{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "submitted", 1404641191, ""]], 
"builder-name": "CQ master", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "time": {"duration": "1:46:26.889674", "start": "Sun, 06 Jul 2014 01:21:08 -0700 (PST)", "finish": "Sun, 06 Jul 2014 03:07:35 -0700 (PST)"}, "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1918/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:05", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1918/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:34:25", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1918/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1918/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1918/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1918/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1918/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1918/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:41:53", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1918/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-120.json b/cidb/test_data/series_0/metadata-120.json
new file mode 100644
index 0000000..379a53e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-120.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 17:54:34 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [], "time": {"duration": "1:35:33.788021", "start": "Fri, 04 Jul 2014 16:19:00 -0700 (PST)", "finish": "Fri, 04 Jul 2014 17:54:34 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:43", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:33:47", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:19", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:46", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:15", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:41:19", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4376, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-121.json b/cidb/test_data/series_0/metadata-121.json
new file mode 100644
index 0000000..32ace24
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-121.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 18:09:55 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:50:32.946353", "start": "Fri, 04 Jul 2014 16:19:22 -0700 (PST)", "finish": "Fri, 04 Jul 2014 18:09:55 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:37", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:52", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:49", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:49", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:18:15", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1884, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-122.json b/cidb/test_data/series_0/metadata-122.json
new file mode 100644
index 0000000..f7f4f30
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-122.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 16:45:38 -0700 (PST)"}, "boards": ["nyan"], "changes": [], "time": {"duration": "0:28:44.484119", "start": "Fri, 04 Jul 2014 16:16:54 -0700 (PST)", "finish": "Fri, 04 Jul 2014 16:45:38 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:16", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:33", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:41", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:42", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:07", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:42", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:06:33", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1880, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-123.json b/cidb/test_data/series_0/metadata-123.json
new file mode 100644
index 0000000..7728c67
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-123.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 17:50:00 -0700 (PST)"}, "boards": ["daisy"], "changes": [], "time": {"duration": "1:32:10.984875", "start": "Fri, 04 Jul 2014 16:17:49 -0700 (PST)", "finish": "Fri, 04 Jul 2014 17:50:00 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:32", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:11:10", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:40", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:36", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:04:01", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:36", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:03", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "1:04:04", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 15123, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-124.json b/cidb/test_data/series_0/metadata-124.json
new file mode 100644
index 0000000..333c6e9
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-124.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 16:50:12 -0700 (PST)"}, "boards": ["samus"], "changes": [], "time": {"duration": "0:34:02.925237", "start": "Fri, 04 Jul 2014 16:16:09 -0700 (PST)", "finish": "Fri, 04 Jul 2014 16:50:12 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:21", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:55", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:10:04", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:00:46", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:36", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:02", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:37", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:10:20", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2874, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-125.json b/cidb/test_data/series_0/metadata-125.json
new file mode 100644
index 0000000..502bae4
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-125.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 16:54:15 -0700 (PST)"}, "boards": ["panther"], "changes": [], "time": {"duration": "0:34:09.562568", "start": "Fri, 04 Jul 2014 16:20:05 -0700 (PST)", "finish": "Fri, 04 Jul 2014 16:54:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:30", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:53", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:12", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:14", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:52", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:09:59", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2319, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-126.json b/cidb/test_data/series_0/metadata-126.json
new file mode 100644
index 0000000..60dca9c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-126.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 17:47:49 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [], "time": {"duration": "1:28:49.034911", "start": "Fri, 04 Jul 2014 16:19:00 -0700 (PST)", "finish": "Fri, 04 Jul 2014 17:47:49 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:16", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:11:05", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:59", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:42", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:37", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:37", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:19", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "1:00:36", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4838, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-127.json b/cidb/test_data/series_0/metadata-127.json
new file mode 100644
index 0000000..2a66aab
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-127.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 18:06:59 -0700 (PST)"}, "boards": ["link"], "changes": [], "time": {"duration": "1:49:38.098836", "start": "Fri, 04 Jul 2014 16:17:21 -0700 (PST)", "finish": "Fri, 04 Jul 2014 18:06:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:12:20", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:48", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:49", "name": "UploadTestArtifacts"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:16:25", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:29", "name": "CommitQueueCompletion"}], "build-number": 17430, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-128.json b/cidb/test_data/series_0/metadata-128.json
new file mode 100644
index 0000000..19c688a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-128.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 16:51:24 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [], "time": {"duration": "0:33:17.517192", "start": "Fri, 04 Jul 2014 16:18:06 -0700 (PST)", "finish": "Fri, 04 Jul 2014 16:51:24 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:19", "name": "SetupBoard"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:40", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:50", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:04", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:23", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:06", "name": 
"Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:13", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2140, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-129.json b/cidb/test_data/series_0/metadata-129.json
new file mode 100644
index 0000000..567f0e5
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-129.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 17:18:04 -0700 (PST)"}, "boards": ["falco"], "changes": [], "time": {"duration": "0:57:11.798821", "start": "Fri, 04 Jul 2014 16:20:52 -0700 (PST)", "finish": "Fri, 04 Jul 2014 17:18:04 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:42", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:32:18", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:10:45", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:30", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:06", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:30", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:30", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:09:22", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 4282, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-13.json b/cidb/test_data/series_0/metadata-13.json
new file mode 100644
index 0000000..e3c44b8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-13.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 23:46:00 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1255", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831", "reason": "None"}}, "boards": [], "changes": [{"patch_number": "2", "total_pass": 1, "gerrit_number": "206692", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}], "metadata-version": "2", "child-configs": [], "build-number": 1917, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622260, ""], [{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "submitted", 1404629097, ""]], 
"builder-name": "CQ master", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "time": {"duration": "1:53:52.034566", "start": "Sat, 05 Jul 2014 21:52:08 -0700 (PST)", "finish": "Sat, 05 Jul 2014 23:46:00 -0700 (PST)"}, "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1917/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1917/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:31", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1917/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1917/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1917/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1917/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1917/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1917/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:49:16", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1917/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-130.json b/cidb/test_data/series_0/metadata-130.json
new file mode 100644
index 0000000..897823e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-130.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 16:40:46 -0700 (PST)"}, "boards": ["gizmo"], "changes": [], "time": {"duration": "0:21:41.890520", "start": "Fri, 04 Jul 2014 16:19:04 -0700 (PST)", "finish": "Fri, 04 Jul 2014 16:40:46 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:06:00", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/UnitTest/logs/stdio", "summary": 
"Stage was successful", "board": "gizmo", "duration": "0:06:53", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:31", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:06", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:07", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], 
"build-number": 823, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-131.json b/cidb/test_data/series_0/metadata-131.json
new file mode 100644
index 0000000..7ce92f2
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-131.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 16:52:19 -0700 (PST)"}, "boards": ["rambi"], "changes": [], "time": {"duration": "0:34:56.891892", "start": "Fri, 04 Jul 2014 16:17:22 -0700 (PST)", "finish": "Fri, 04 Jul 2014 16:52:19 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:11:34", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:12:01", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:00:53", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:21", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:41", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:31", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "CommitQueueCompletion"}], "build-number": 2320, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-132.json b/cidb/test_data/series_0/metadata-132.json
new file mode 100644
index 0000000..90ed651
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-132.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 16:35:01 -0700 (PST)"}, "boards": ["lumpy"], "changes": [], "time": {"duration": "0:03:10.153032", "start": "Fri, 04 Jul 2014 16:31:51 -0700 (PST)", "finish": "Fri, 04 Jul 2014 16:35:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1247/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1247/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:38", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1247/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1247/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1247/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1247/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:40", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1247/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1247/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1247/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1247/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1247/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1247, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build168-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-133.json b/cidb/test_data/series_0/metadata-133.json
new file mode 100644
index 0000000..9d0e9e8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-133.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 16:36:25 -0700 (PST)"}, "boards": ["duck"], "changes": [], "time": {"duration": "0:20:00.055838", "start": "Fri, 04 Jul 2014 16:16:25 -0700 (PST)", "finish": "Fri, 04 Jul 2014 16:36:25 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:08", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:36", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/UnitTest/logs/stdio", "summary": "Stage 
was successful", "board": "duck", "duration": "0:05:32", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:30", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:59", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:12", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:55", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1370, 
"child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-134.json b/cidb/test_data/series_0/metadata-134.json
new file mode 100644
index 0000000..ae8a543
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-134.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 16:52:05 -0700 (PST)"}, "boards": ["peppy"], "changes": [], "time": {"duration": "0:35:08.131557", "start": "Fri, 04 Jul 2014 16:16:57 -0700 (PST)", "finish": "Fri, 04 Jul 2014 16:52:05 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:17", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:37", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:12:22", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:42", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:24", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:32", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:02", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:53", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4269, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-135.json b/cidb/test_data/series_0/metadata-135.json
new file mode 100644
index 0000000..77dcbb9
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-135.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 17:18:23 -0700 (PST)"}, "boards": ["butterfly"], "changes": [], "time": {"duration": "0:59:06.663144", "start": "Fri, 04 Jul 2014 16:19:16 -0700 (PST)", "finish": "Fri, 04 Jul 2014 17:18:23 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:41", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:32:04", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:11:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:47", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:22", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:03", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:05", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:06", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:10:13", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:34", "name": "CommitQueueCompletion"}], "build-number": 11450, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-136.json b/cidb/test_data/series_0/metadata-136.json
new file mode 100644
index 0000000..4e62ef9
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-136.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 16:51:36 -0700 (PST)"}, "boards": ["monroe"], "changes": [], "time": {"duration": "0:34:43.909272", "start": "Fri, 04 Jul 2014 16:16:52 -0700 (PST)", "finish": "Fri, 04 Jul 2014 16:51:36 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:12:15", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:52", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:49", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:39", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2324, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-137.json b/cidb/test_data/series_0/metadata-137.json
new file mode 100644
index 0000000..c54e288
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-137.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 17:05:19 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [], "time": {"duration": "0:46:44.307301", "start": "Fri, 04 Jul 2014 16:18:35 -0700 (PST)", "finish": "Fri, 04 Jul 2014 17:05:19 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:11", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:15", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:22:34", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:00:44", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:32", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:10:03", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:34", "name": "Archive"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:16:47", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1369, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.105605.tar.xz", "cl_actions": [], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-138.json b/cidb/test_data/series_0/metadata-138.json
new file mode 100644
index 0000000..4355364
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-138.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 20:10:18 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:04:34.623644", "start": "Sun, 06 Jul 2014 19:05:44 -0700 (PST)", "finish": "Sun, 06 Jul 2014 20:10:18 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": 
"0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:16", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:31", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:40:51", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:25", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:10", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:02", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:52", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:30", "name": "CommitQueueCompletion"}], "build-number": 2654, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698737, ""]], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-139.json b/cidb/test_data/series_0/metadata-139.json
new file mode 100644
index 0000000..9fe560c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-139.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:18:20 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:13:46.168279", "start": "Sun, 06 Jul 2014 19:04:34 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:18:20 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", 
"duration": "0:01:16", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:58", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:39", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:23", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:55", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:14", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:03:29", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:24", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 2474, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698668, ""]], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-14.json b/cidb/test_data/series_0/metadata-14.json
new file mode 100644
index 0000000..fd3d9a3
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-14.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:49:58 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1254", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830", "reason": "None"}}, "boards": [], "changes": [], "metadata-version": "2", "child-configs": [], "build-number": 1916, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [], "builder-name": "CQ master", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "time": {"duration": "1:44:45.620045", "start": "Sat, 05 Jul 2014 20:05:12 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:49:58 -0700 (PST)"}, "toolchain-url": 
"2014/07/%(target)s-2014.07.05.185337.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1916/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1916/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "4:03:13", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1916/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1916/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:03", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1916/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1916/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1916/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1916/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:39:57", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1916/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:23", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-140.json b/cidb/test_data/series_0/metadata-140.json
new file mode 100644
index 0000000..dfc10e2
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-140.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 20:36:18 -0700 (PST)"}, "boards": ["wolf"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:30:39.744214", "start": "Sun, 06 Jul 2014 19:05:38 -0700 (PST)", "finish": "Sun, 06 Jul 2014 20:36:18 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:21", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:25", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:53", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:48", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:53", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:47", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:05:20", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:50", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "wolf", "duration": "0:07:27", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "1:01:13", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 3451, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698732, ""]], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-141.json b/cidb/test_data/series_0/metadata-141.json
new file mode 100644
index 0000000..af16d24
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-141.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:34:08 -0700 (PST)"}, "boards": ["leon"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:30:06.504564", "start": "Sun, 06 Jul 2014 19:04:01 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:34:08 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:22", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:08:01", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:55", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:25", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:48", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "leon", "duration": "0:07:18", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2889, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698635, ""]], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-142.json b/cidb/test_data/series_0/metadata-142.json
new file mode 100644
index 0000000..beb77fc
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-142.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 20:38:42 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:29:18.334015", "start": "Sun, 06 Jul 2014 19:09:23 -0700 (PST)", "finish": "Sun, 06 Jul 2014 20:38:42 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:25", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:09:05", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:06", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:39", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:06", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:44", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:58:30", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17402, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698956, ""]], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-143.json b/cidb/test_data/series_0/metadata-143.json
new file mode 100644
index 0000000..1e0bf1a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-143.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 20:38:56 -0700 (PST)"}, "boards": ["parrot"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:29:41.872524", "start": "Sun, 06 Jul 2014 19:09:14 -0700 (PST)", "finish": "Sun, 06 Jul 2014 20:38:56 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:39", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:20", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:14", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:13:00", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:35", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:18", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:41", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:40", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:56:44", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 12121, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698946, ""]], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-144.json b/cidb/test_data/series_0/metadata-144.json
new file mode 100644
index 0000000..09b1964
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-144.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 20:49:20 -0700 (PST)"}, "boards": ["stumpy"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:40:58.790611", "start": "Sun, 06 Jul 2014 19:08:21 -0700 (PST)", "finish": "Sun, 06 Jul 2014 20:49:20 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:20", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:35", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:11", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:04", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:47", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:32", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:57", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:07:29", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:11:16", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 17446, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698894, ""]], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-145.json b/cidb/test_data/series_0/metadata-145.json
new file mode 100644
index 0000000..9a51842
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-145.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 20:02:57 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:59:18.916065", "start": "Sun, 06 Jul 2014 19:03:38 -0700 (PST)", "finish": "Sun, 06 Jul 2014 20:02:57 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:37", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:04", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:19", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:20", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:15", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:39:27", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:35", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:56", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:50", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CommitQueueCompletion"}], "build-number": 18724, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698613, ""]], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-146.json b/cidb/test_data/series_0/metadata-146.json
new file mode 100644
index 0000000..dd9793b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-146.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 20:46:54 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:41:38.352838", "start": "Sun, 06 Jul 2014 19:05:16 -0700 (PST)", "finish": "Sun, 06 Jul 2014 20:46:54 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": 
"0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:19", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:49", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:43", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:07", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:07", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:46", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:12:03", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 16885, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698710, ""]], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-147.json b/cidb/test_data/series_0/metadata-147.json
new file mode 100644
index 0000000..3bf4bfd
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-147.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 20:01:55 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:58:17.498714", "start": "Sun, 06 Jul 2014 19:03:37 -0700 (PST)", "finish": "Sun, 06 Jul 2014 20:01:55 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:08", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:37", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:04", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:19", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:18", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:20", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:38:21", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:31", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:06:03", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:58", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CommitQueueCompletion"}], "build-number": 18520, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698611, ""]], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-148.json b/cidb/test_data/series_0/metadata-148.json
new file mode 100644
index 0000000..17de852
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-148.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:38:01 -0700 (PST)"}, "boards": ["stout"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:31:18.408561", "start": "Sun, 06 Jul 2014 19:06:43 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:38:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:25", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:38", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:37", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:08", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:01", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:03", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:32", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:24", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:33", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:29", "name": "CommitQueueCompletion"}], "build-number": 11467, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698795, ""]], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-149.json b/cidb/test_data/series_0/metadata-149.json
new file mode 100644
index 0000000..8b234da
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-149.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 20:31:31 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:23:20.720274", "start": "Sun, 06 Jul 2014 19:08:10 -0700 (PST)", "finish": "Sun, 06 Jul 2014 20:31:31 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": 
"daisy_spring", "duration": "0:04:40", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:21:10", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": 
"SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:11", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:43", "name": "Archive"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:22", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:41:41", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 4390, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698883, ""]], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-15.json b/cidb/test_data/series_0/metadata-15.json
new file mode 100644
index 0000000..bcaf365
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-15.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 16:00:56 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1253", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829", "reason": "None"}}, "boards": [], "changes": [{"patch_number": "1", "total_pass": 1, "gerrit_number": "206395", "total_fail": 4, "pass": 1, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206396", "total_fail": 1, "pass": 1, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206373", "total_fail": 4, "pass": 1, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206613", "total_fail": 0, "pass": 1, "fail": 0, "internal": 
false}], "metadata-version": "2", "child-configs": [], "build-number": 1915, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404594720, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404594720, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404594720, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404594720, ""], [{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "submitted", 1404601174, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "submitted", 1404601180, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "submitted", 1404601187, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "submitted", 1404601192, ""]], "builder-name": "CQ master", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "time": {"duration": "1:47:41.259220", "start": "Sat, 05 Jul 2014 14:13:14 -0700 (PST)", "finish": "Sat, 05 Jul 2014 16:00:56 -0700 (PST)"}, "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1915/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1915/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "2:55:25", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1915/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1915/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:25", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1915/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1915/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1915/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1915/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:39:33", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1915/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-150.json b/cidb/test_data/series_0/metadata-150.json
new file mode 100644
index 0000000..d5a5afe
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-150.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 20:42:09 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:36:07.661563", "start": "Sun, 06 Jul 2014 19:06:01 -0700 (PST)", "finish": "Sun, 06 Jul 2014 20:42:09 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:47", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:12:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:01", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:56", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:30", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:56", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:14", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:05:43", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 1898, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-151.json b/cidb/test_data/series_0/metadata-151.json
new file mode 100644
index 0000000..8eb3382
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-151.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:38:37 -0700 (PST)"}, "boards": ["nyan"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:29:06.240079", "start": "Sun, 06 Jul 2014 19:09:31 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:38:37 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:08", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:32", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:17", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:11:33", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:52", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:40", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:42", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:04", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:42", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "nyan", "duration": "0:06:44", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1894, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698965, ""]], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-152.json b/cidb/test_data/series_0/metadata-152.json
new file mode 100644
index 0000000..a4bf598
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-152.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 20:34:12 -0700 (PST)"}, "boards": ["daisy"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:27:33.388133", "start": "Sun, 06 Jul 2014 19:06:39 -0700 (PST)", "finish": "Sun, 06 Jul 2014 20:34:12 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:35", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:19", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:11:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:10", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:46", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:42", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:59", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:53", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:01", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:59:20", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:24", "name": "CommitQueueCompletion"}], "build-number": 15137, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698792, ""]], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-153.json b/cidb/test_data/series_0/metadata-153.json
new file mode 100644
index 0000000..9e12ec4
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-153.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:36:36 -0700 (PST)"}, "boards": ["samus"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:30:25.352294", "start": "Sun, 06 Jul 2014 19:06:10 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:36:36 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:44", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:54", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:03", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:00", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:26", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:00", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:07:20", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2888, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698764, ""]], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-154.json b/cidb/test_data/series_0/metadata-154.json
new file mode 100644
index 0000000..27b02d3
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-154.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:39:26 -0700 (PST)"}, "boards": ["panther"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:34:28.365508", "start": "Sun, 06 Jul 2014 19:04:58 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:39:26 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:20", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:46", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:00", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:00:56", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:45", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:03", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2333, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698691, ""]], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-155.json b/cidb/test_data/series_0/metadata-155.json
new file mode 100644
index 0000000..e27893f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-155.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 20:25:07 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:19:33.708308", "start": "Sun, 06 Jul 2014 19:05:34 -0700 (PST)", "finish": "Sun, 06 Jul 2014 20:25:07 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:12", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": 
"0:01:16", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:11:28", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:52", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:40", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:11", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:38", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:40", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:50:38", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:29", "name": "CommitQueueCompletion"}], "build-number": 4852, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698727, ""]], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-156.json b/cidb/test_data/series_0/metadata-156.json
new file mode 100644
index 0000000..c37bb38
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-156.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 20:43:35 -0700 (PST)"}, "boards": ["link"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:36:43.106287", "start": "Sun, 06 Jul 2014 19:06:52 -0700 (PST)", "finish": "Sun, 06 Jul 2014 20:43:35 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:30", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:07", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:48", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:22", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:07:07", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17444, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698806, ""]], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-157.json b/cidb/test_data/series_0/metadata-157.json
new file mode 100644
index 0000000..854cf76
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-157.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:38:20 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:29:44.932846", "start": "Sun, 06 Jul 2014 19:08:35 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:38:20 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:25", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:25", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:49", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:26", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:55", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:03", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:06", "name": "CPEExport"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:03", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:46", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2154, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698909, ""]], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-158.json b/cidb/test_data/series_0/metadata-158.json
new file mode 100644
index 0000000..0b7ac34
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-158.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:52:55 -0700 (PST)"}, "boards": ["falco"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:44:06.739283", "start": "Sun, 06 Jul 2014 19:08:48 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:52:55 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:31", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:44", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:21:24", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:27", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:22", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:08", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:53", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:08", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:50", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:30", "name": "CommitQueueCompletion"}], "build-number": 4296, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698922, ""]], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-159.json b/cidb/test_data/series_0/metadata-159.json
new file mode 100644
index 0000000..cce1b07
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-159.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:28:28 -0700 (PST)"}, "boards": ["gizmo"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:21:45.427935", "start": "Sun, 06 Jul 2014 19:06:43 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:28:28 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:21", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:56", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:07:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:31", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:02", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:07", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:08", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 837, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698795, ""]], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-16.json b/cidb/test_data/series_0/metadata-16.json
new file mode 100644
index 0000000..6c3396c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-16.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 11:17:09 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1252", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828", "reason": "None"}}, "boards": [], "changes": [{"patch_number": "1", "total_pass": 1, "gerrit_number": "206684", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}], "metadata-version": "2", "child-configs": [], "build-number": 1914, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578190, ""], [{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "submitted", 1404584167, ""]], 
"builder-name": "CQ master", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "time": {"duration": "1:39:31.080128", "start": "Sat, 05 Jul 2014 09:37:38 -0700 (PST)", "finish": "Sat, 05 Jul 2014 11:17:09 -0700 (PST)"}, "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1914/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1914/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1914/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1914/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1914/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1914/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1914/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1914/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:34:56", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1914/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-160.json b/cidb/test_data/series_0/metadata-160.json
new file mode 100644
index 0000000..7df39f4
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-160.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:38:55 -0700 (PST)"}, "boards": ["rambi"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:31:40.599109", "start": "Sun, 06 Jul 2014 19:07:14 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:38:55 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:11:21", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:21", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:04", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:51", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:39", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:51", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:39", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueCompletion"}], "build-number": 2334, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698828, ""]], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-161.json b/cidb/test_data/series_0/metadata-161.json
new file mode 100644
index 0000000..50bc29f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-161.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:27:01 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:03:28.127196", "start": "Sun, 06 Jul 2014 19:23:33 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:27:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1261/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1261/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1261/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1261/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1261/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1261/steps/SetupBoard/logs/stdio", "summary": "Stage was 
successful", "board": "lumpy", "duration": "0:00:40", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1261/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1261/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1261/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:08", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1261/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1261/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1261, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404699806, ""]], "bot-hostname": "build168-m2.golo.chromium.org", 
"sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-162.json b/cidb/test_data/series_0/metadata-162.json
new file mode 100644
index 0000000..e3d006b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-162.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:23:12 -0700 (PST)"}, "boards": ["duck"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:17:57.231941", "start": "Sun, 06 Jul 2014 19:05:15 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:23:12 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:35", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:10", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:25", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:32", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:54", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:05", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:09", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384/steps/CommitQueueCompletion/logs/stdio", 
"summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1384, "child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698709, ""]], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-163.json b/cidb/test_data/series_0/metadata-163.json
new file mode 100644
index 0000000..a5bf654
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-163.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:39:56 -0700 (PST)"}, "boards": ["peppy"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:31:12.280836", "start": "Sun, 06 Jul 2014 19:08:44 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:39:56 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:21", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:40", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:20", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:04:58", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:11", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:04:58", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:51", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:57", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:10", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 4283, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698918, ""]], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-164.json b/cidb/test_data/series_0/metadata-164.json
new file mode 100644
index 0000000..046816f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-164.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:52:42 -0700 (PST)"}, "boards": ["butterfly"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:46:23.556373", "start": "Sun, 06 Jul 2014 19:06:18 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:52:42 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:41", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", 
"duration": "0:04:46", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:22:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:22", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:39", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:35", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:18", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:47", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:22", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 11464, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698772, ""]], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-165.json b/cidb/test_data/series_0/metadata-165.json
new file mode 100644
index 0000000..8036c68
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-165.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:37:29 -0700 (PST)"}, "boards": ["monroe"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:29:41.454558", "start": "Sun, 06 Jul 2014 19:07:48 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:37:29 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:19", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:11", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:48", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:52", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:47", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:13", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2338, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698861, ""]], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-166.json b/cidb/test_data/series_0/metadata-166.json
new file mode 100644
index 0000000..0f777b2
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-166.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:46:43 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [{"patch_number": "3", "total_pass": 0, "gerrit_number": "205753", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:38:33.048673", "start": "Sun, 06 Jul 2014 19:08:10 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:46:43 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:27", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", 
"board": "stumpy_moblab", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:10", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:29", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": 
"0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:42", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:00:43", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:20", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:10:03", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", 
"duration": "0:06:22", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:35", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1383, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698884, ""]], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-167.json b/cidb/test_data/series_0/metadata-167.json
new file mode 100644
index 0000000..c28d40b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-167.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:13:17 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [], "time": {"duration": "1:06:13.891541", "start": "Sat, 05 Jul 2014 20:07:03 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:13:17 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:21", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:15", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:31", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:19", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:43:37", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:42", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:39", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:20", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:46", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2647/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2647, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-168.json b/cidb/test_data/series_0/metadata-168.json
new file mode 100644
index 0000000..450342b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-168.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 20:24:44 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [], "time": {"duration": "0:14:59.898689", "start": "Sat, 05 Jul 2014 20:09:44 -0700 (PST)", "finish": "Sat, 05 Jul 2014 20:24:44 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:13", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:59", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:41", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:22", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:54", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:31", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:04:34", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:30", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2467/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 2467, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-169.json b/cidb/test_data/series_0/metadata-169.json
new file mode 100644
index 0000000..28c9425
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-169.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:35:59 -0700 (PST)"}, "boards": ["wolf"], "changes": [], "time": {"duration": "1:27:26.411385", "start": "Sat, 05 Jul 2014 20:08:33 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:35:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:44", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:59", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:49", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:50", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:55:25", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3444/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 3444, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-17.json b/cidb/test_data/series_0/metadata-17.json
new file mode 100644
index 0000000..0ae32b2
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-17.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 09:35:27 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1251", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827", "reason": "None"}}, "boards": [], "changes": [], "metadata-version": "2", "child-configs": [], "build-number": 1913, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [], "builder-name": "CQ master", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "time": {"duration": "1:39:28.429714", "start": "Sat, 05 Jul 2014 07:55:58 -0700 (PST)", "finish": "Sat, 05 Jul 2014 09:35:27 -0700 (PST)"}, "toolchain-url": 
"2014/07/%(target)s-2014.07.05.002952.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1913/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1913/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "4:02:24", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1913/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1913/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1913/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1913/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1913/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:05", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1913/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:34:54", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1913/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-170.json b/cidb/test_data/series_0/metadata-170.json
new file mode 100644
index 0000000..80759c3
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-170.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 20:43:01 -0700 (PST)"}, "boards": ["leon"], "changes": [], "time": {"duration": "0:33:57.393051", "start": "Sat, 05 Jul 2014 20:09:04 -0700 (PST)", "finish": "Sat, 05 Jul 2014 20:43:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:14", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:10:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:58", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:08:07", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:43", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:09:51", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2882/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2882, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-171.json b/cidb/test_data/series_0/metadata-171.json
new file mode 100644
index 0000000..86e51bc
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-171.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:34:29 -0700 (PST)"}, "boards": ["lumpy"], "changes": [], "time": {"duration": "1:25:15.745961", "start": "Sat, 05 Jul 2014 20:09:13 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:34:29 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:18", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:12:55", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:16", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:26", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:50", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:10:23", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:52:44", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17395/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 17395, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-172.json b/cidb/test_data/series_0/metadata-172.json
new file mode 100644
index 0000000..dc22a8b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-172.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:32:59 -0700 (PST)"}, "boards": ["parrot"], "changes": [], "time": {"duration": "1:24:17.327441", "start": "Sat, 05 Jul 2014 20:08:42 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:32:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:59", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:11", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:54", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:58", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:41", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:00:57", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:23", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:45", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:40", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:51:12", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12114/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 12114, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-173.json b/cidb/test_data/series_0/metadata-173.json
new file mode 100644
index 0000000..0db5894
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-173.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:48:36 -0700 (PST)"}, "boards": ["stumpy"], "changes": [], "time": {"duration": "1:39:04.153514", "start": "Sat, 05 Jul 2014 20:09:32 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:48:36 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:13", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:12:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:55", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:17", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:43", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:10:30", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:06:36", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17439/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17439, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-174.json b/cidb/test_data/series_0/metadata-174.json
new file mode 100644
index 0000000..76a3bb7
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-174.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:07:24 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [], "time": {"duration": "1:00:30.986635", "start": "Sat, 05 Jul 2014 20:06:53 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:07:24 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:19", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:38", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:11:03", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:14", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:35", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:39:23", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:16", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:54", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:35", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:53", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:41", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18717/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CommitQueueCompletion"}], "build-number": 18717, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-175.json b/cidb/test_data/series_0/metadata-175.json
new file mode 100644
index 0000000..06b850b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-175.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:46:29 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [], "time": {"duration": "1:36:13.287226", "start": "Sat, 05 Jul 2014 20:10:16 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:46:29 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:41", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:13", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:51", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:55", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:44", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:30", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:52", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:10:43", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:03:33", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16878/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 16878, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-176.json b/cidb/test_data/series_0/metadata-176.json
new file mode 100644
index 0000000..5592467
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-176.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:05:35 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [], "time": {"duration": "0:59:03.122564", "start": "Sat, 05 Jul 2014 20:06:32 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:05:35 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:58", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:39", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:18", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:23", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:48", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:38:26", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:51", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:28", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:52", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:39", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18513/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 18513, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-177.json b/cidb/test_data/series_0/metadata-177.json
new file mode 100644
index 0000000..a899fab
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-177.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 20:46:57 -0700 (PST)"}, "boards": ["stout"], "changes": [], "time": {"duration": "0:36:06.078995", "start": "Sat, 05 Jul 2014 20:10:51 -0700 (PST)", "finish": "Sat, 05 Jul 2014 20:46:57 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:12:24", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:34", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:12:14", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:46", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:19", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:00:55", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:54", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:20", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11460/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 11460, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-178.json b/cidb/test_data/series_0/metadata-178.json
new file mode 100644
index 0000000..1f86a90
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-178.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:35:25 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [], "time": {"duration": "1:28:21.180666", "start": "Sat, 05 Jul 2014 20:07:04 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:35:25 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:56", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:32:07", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:44", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:26", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:46", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:14", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:35:42", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4383/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 4383, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-179.json b/cidb/test_data/series_0/metadata-179.json
new file mode 100644
index 0000000..c0c5c72
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-179.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:40:59 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:31:35.979475", "start": "Sat, 05 Jul 2014 20:09:23 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:40:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:20", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:48", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:34", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:36", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:42", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:59:41", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1891/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1891, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-18.json b/cidb/test_data/series_0/metadata-18.json
new file mode 100644
index 0000000..ef52a66
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-18.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:52:58 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1250", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826", "reason": "None"}}, "boards": [], "changes": [], "metadata-version": "2", "child-configs": [], "build-number": 1912, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [], "builder-name": "CQ master", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "time": {"duration": "1:42:40.393708", "start": "Sat, 05 Jul 2014 02:10:18 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:52:58 -0700 (PST)"}, "toolchain-url": 
"2014/07/%(target)s-2014.07.05.002952.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1912/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1912/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "4:04:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1912/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1912/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:59", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1912/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1912/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1912/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1912/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:38:02", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1912/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-180.json b/cidb/test_data/series_0/metadata-180.json
new file mode 100644
index 0000000..e455736
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-180.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 20:40:28 -0700 (PST)"}, "boards": ["nyan"], "changes": [], "time": {"duration": "0:29:16.200175", "start": "Sat, 05 Jul 2014 20:11:12 -0700 (PST)", "finish": "Sat, 05 Jul 2014 20:40:28 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:33", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:16", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:11:54", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:31", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:39", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:40", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:06:30", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1887/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1887, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-181.json b/cidb/test_data/series_0/metadata-181.json
new file mode 100644
index 0000000..e2ff3cd
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-181.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:27:46 -0700 (PST)"}, "boards": ["daisy"], "changes": [], "time": {"duration": "1:20:42.555987", "start": "Sat, 05 Jul 2014 20:07:04 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:27:46 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:13", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:12:26", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:09", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:48", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:59", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:40", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:06", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:51:11", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15130/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 15130, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-182.json b/cidb/test_data/series_0/metadata-182.json
new file mode 100644
index 0000000..8400732
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-182.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 20:43:44 -0700 (PST)"}, "boards": ["samus"], "changes": [], "time": {"duration": "0:34:15.062188", "start": "Sat, 05 Jul 2014 20:09:29 -0700 (PST)", "finish": "Sat, 05 Jul 2014 20:43:44 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:58", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:10:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:03", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:42", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:09:51", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2881/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "CommitQueueCompletion"}], "build-number": 2881, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-183.json b/cidb/test_data/series_0/metadata-183.json
new file mode 100644
index 0000000..99fd038
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-183.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 20:44:56 -0700 (PST)"}, "boards": ["panther"], "changes": [], "time": {"duration": "0:35:36.387359", "start": "Sat, 05 Jul 2014 20:09:19 -0700 (PST)", "finish": "Sat, 05 Jul 2014 20:44:56 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:31", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:05", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:00:56", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:15", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:42", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:05", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2326/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "CommitQueueCompletion"}], "build-number": 2326, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-184.json b/cidb/test_data/series_0/metadata-184.json
new file mode 100644
index 0000000..d2ecf44
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-184.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:24:06 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [], "time": {"duration": "1:15:19.778493", "start": "Sat, 05 Jul 2014 20:08:47 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:24:06 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:59", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:59", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:14", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:56", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:39", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:35", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:15", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:35", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:44", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:46:19", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4845/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 4845, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-185.json b/cidb/test_data/series_0/metadata-185.json
new file mode 100644
index 0000000..0053476
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-185.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:41:46 -0700 (PST)"}, "boards": ["link"], "changes": [], "time": {"duration": "1:32:17.408604", "start": "Sat, 05 Jul 2014 20:09:29 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:41:46 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:59", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:50", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:00:53", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:33", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:15", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:35", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:46", "name": "UploadTestArtifacts"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:00:09", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17437/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17437, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-186.json b/cidb/test_data/series_0/metadata-186.json
new file mode 100644
index 0000000..2f09643
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-186.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 20:42:36 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [], "time": {"duration": "0:33:22.618712", "start": "Sat, 05 Jul 2014 20:09:13 -0700 (PST)", "finish": "Sat, 05 Jul 2014 20:42:36 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:19", "name": "SetupBoard"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:35", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:51", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:03", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:53", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:26", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:51", "name": 
"Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:09:43", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2147/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2147, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-187.json b/cidb/test_data/series_0/metadata-187.json
new file mode 100644
index 0000000..a07bd5c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-187.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:06:07 -0700 (PST)"}, "boards": ["falco"], "changes": [], "time": {"duration": "0:57:33.318459", "start": "Sat, 05 Jul 2014 20:08:34 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:06:07 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:42", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:32:29", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:10:50", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:31", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:10", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:41", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:09:23", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4289/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4289, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-188.json b/cidb/test_data/series_0/metadata-188.json
new file mode 100644
index 0000000..50bee07
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-188.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 20:30:04 -0700 (PST)"}, "boards": ["gizmo"], "changes": [], "time": {"duration": "0:21:48.779582", "start": "Sat, 05 Jul 2014 20:08:15 -0700 (PST)", "finish": "Sat, 05 Jul 2014 20:30:04 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:58", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/UnitTest/logs/stdio", "summary": 
"Stage was successful", "board": "gizmo", "duration": "0:07:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:32", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:58", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:07", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:06", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:04", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/830/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], 
"build-number": 830, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-189.json b/cidb/test_data/series_0/metadata-189.json
new file mode 100644
index 0000000..2400c91
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-189.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 20:44:23 -0700 (PST)"}, "boards": ["rambi"], "changes": [], "time": {"duration": "0:35:15.724844", "start": "Sat, 05 Jul 2014 20:09:08 -0700 (PST)", "finish": "Sat, 05 Jul 2014 20:44:23 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:03", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:11:16", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:12:06", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:35", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:21", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:37", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:02", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2327/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueCompletion"}], "build-number": 2327, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-19.json b/cidb/test_data/series_0/metadata-19.json
new file mode 100644
index 0000000..baeef32
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-19.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 22:04:31 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1249", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825", "reason": "None"}}, "boards": [], "changes": [{"patch_number": "1", "total_pass": 1, "gerrit_number": "168144", "total_fail": 0, "pass": 1, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "168141", "total_fail": 0, "pass": 1, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206678", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}], "metadata-version": "2", "child-configs": [], "build-number": 1911, "bot-hostname": "build170-m2.golo.chromium.org", 
"bot-config": "master-paladin", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530498, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530498, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530498, ""], [{"gerrit_number": "206515", "patch_number": "1", "internal": false}, "picked_up", 1404530498, ""], [{"gerrit_number": "206515", "patch_number": "1", "internal": false}, "kicked_out", 1404530553, ""], [{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "submitted", 1404536600, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "submitted", 1404536604, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "submitted", 1404536608, ""]], "builder-name": "CQ master", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "time": {"duration": "1:41:36.287377", "start": "Fri, 04 Jul 2014 20:22:55 -0700 (PST)", "finish": "Fri, 04 Jul 2014 22:04:31 -0700 (PST)"}, "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1911/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1911/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:04:21", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1911/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1911/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1911/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1911/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1911/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1911/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:36:58", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1911/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:22", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-190.json b/cidb/test_data/series_0/metadata-190.json
new file mode 100644
index 0000000..ada3097
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-190.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 20:27:45 -0700 (PST)"}, "boards": ["lumpy"], "changes": [], "time": {"duration": "0:03:10.808916", "start": "Sat, 05 Jul 2014 20:24:34 -0700 (PST)", "finish": "Sat, 05 Jul 2014 20:27:45 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1254/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1254/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:42", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1254/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1254/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1254/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1254/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:40", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1254/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1254/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1254/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:49", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1254/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1254/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 1254, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build168-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-191.json b/cidb/test_data/series_0/metadata-191.json
new file mode 100644
index 0000000..5710001
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-191.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 20:27:08 -0700 (PST)"}, "boards": ["duck"], "changes": [], "time": {"duration": "0:20:07.021361", "start": "Sat, 05 Jul 2014 20:07:01 -0700 (PST)", "finish": "Sat, 05 Jul 2014 20:27:08 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:03", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:34", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:10", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/UnitTest/logs/stdio", "summary": "Stage 
was successful", "board": "duck", "duration": "0:05:28", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:33", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:04", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:59", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:12", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1377/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 1377, 
"child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-192.json b/cidb/test_data/series_0/metadata-192.json
new file mode 100644
index 0000000..7581814
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-192.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 20:43:33 -0700 (PST)"}, "boards": ["peppy"], "changes": [], "time": {"duration": "0:35:17.422427", "start": "Sat, 05 Jul 2014 20:08:16 -0700 (PST)", "finish": "Sat, 05 Jul 2014 20:43:33 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:20", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:41", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:12:24", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:43", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:22", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:59", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:34", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:56", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:18", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4276/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4276, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-193.json b/cidb/test_data/series_0/metadata-193.json
new file mode 100644
index 0000000..125d7c7
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-193.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 21:08:19 -0700 (PST)"}, "boards": ["butterfly"], "changes": [], "time": {"duration": "0:57:53.053889", "start": "Sat, 05 Jul 2014 20:10:26 -0700 (PST)", "finish": "Sat, 05 Jul 2014 21:08:19 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:31:54", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:11:40", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:48", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:37", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:05", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:01", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:10:11", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11457/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 11457, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-194.json b/cidb/test_data/series_0/metadata-194.json
new file mode 100644
index 0000000..5da382a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-194.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 20:44:06 -0700 (PST)"}, "boards": ["monroe"], "changes": [], "time": {"duration": "0:33:48.370546", "start": "Sat, 05 Jul 2014 20:10:18 -0700 (PST)", "finish": "Sat, 05 Jul 2014 20:44:06 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:10", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:32", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:56", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:48", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:48", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2331/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:37", "name": "CommitQueueCompletion"}], "build-number": 2331, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-195.json b/cidb/test_data/series_0/metadata-195.json
new file mode 100644
index 0000000..e643437
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-195.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 20:49:10 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [], "time": {"duration": "0:39:11.700703", "start": "Sat, 05 Jul 2014 20:09:58 -0700 (PST)", "finish": "Sat, 05 Jul 2014 20:49:10 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:59", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:09", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:08", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:18", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:23", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:23", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:10:01", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:25", "name": "Archive"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:59", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1376/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1376, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc1", "full": "R38-6023.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-196.json b/cidb/test_data/series_0/metadata-196.json
new file mode 100644
index 0000000..5f4dd64
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-196.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:43:47 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:02:37.885759", "start": "Mon, 07 Jul 2014 08:41:10 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:43:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:57", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": 
"0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:30", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:29", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:39:47", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:19", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:10", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:14", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:58", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:04", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 2658, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747663, ""]], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-197.json b/cidb/test_data/series_0/metadata-197.json
new file mode 100644
index 0000000..15af70b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-197.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 08:54:56 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:13:34.194930", "start": "Mon, 07 Jul 2014 08:41:22 -0700 (PST)", "finish": "Mon, 07 Jul 2014 08:54:56 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", 
"duration": "0:01:12", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:53", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:43", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:22", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:03:22", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:21", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 2478, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747676, ""]], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-198.json b/cidb/test_data/series_0/metadata-198.json
new file mode 100644
index 0000000..be9bbdc
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-198.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 10:07:32 -0700 (PST)"}, "boards": ["wolf"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:25:20.037141", "start": "Mon, 07 Jul 2014 08:42:12 -0700 (PST)", "finish": "Mon, 07 Jul 2014 10:07:32 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:23", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:59", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:54", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:15", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:05:19", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:46", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "wolf", "duration": "0:07:40", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:55:32", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 3455, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747726, ""]], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-199.json b/cidb/test_data/series_0/metadata-199.json
new file mode 100644
index 0000000..1996cb0
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-199.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:13:53 -0700 (PST)"}, "boards": ["leon"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:30:22.336238", "start": "Mon, 07 Jul 2014 08:43:31 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:13:53 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:39", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:07:56", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:17", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:25", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:52", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "leon", "duration": "0:07:33", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2893, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747804, ""]], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-2.json b/cidb/test_data/series_0/metadata-2.json
new file mode 100644
index 0000000..5a6b427
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-2.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 12:23:40 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1266", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842", "reason": "None"}}, "boards": [], "changes": [{"patch_number": "4", "total_pass": 1, "gerrit_number": "200037", "total_fail": 5, "pass": 1, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 1, "gerrit_number": "201937", "total_fail": 5, "pass": 1, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 1, "gerrit_number": "202162", "total_fail": 5, "pass": 1, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 1, "gerrit_number": "202208", "total_fail": 5, "pass": 1, "fail": 5, "internal": 
false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "202971", "total_fail": 5, "pass": 1, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206588", "total_fail": 1, "pass": 1, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206171", "total_fail": 6, "pass": 1, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206142", "total_fail": 6, "pass": 1, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206840", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 1, "gerrit_number": "206082", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206410", "total_fail": 4, "pass": 1, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206182", "total_fail": 3, "pass": 1, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206055", "total_fail": 4, "pass": 1, "fail": 4, "internal": false}], "metadata-version": "2", "child-configs": [], "build-number": 1928, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [[{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404753844, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404753844, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404753844, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404753844, ""], [{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404753844, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404753844, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404753844, ""], 
[{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404753844, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404753844, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404753844, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404753844, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404753844, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404753844, ""], [{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "submitted", 1404760880, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "submitted", 1404760885, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "submitted", 1404760890, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "submitted", 1404760894, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "submitted", 1404760899, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "submitted", 1404760904, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "submitted", 1404760909, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "submitted", 1404760914, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "submitted", 1404760920, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "submitted", 1404760924, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "submitted", 1404760928, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "submitted", 1404760941, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "submitted", 1404760946, ""]], "builder-name": "CQ master", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", 
"full": "R38-6029.0.0-rc1", "milestone": "38"}, "time": {"duration": "1:58:08.318646", "start": "Mon, 07 Jul 2014 10:25:32 -0700 (PST)", "finish": "Mon, 07 Jul 2014 12:23:40 -0700 (PST)"}, "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1928/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1928/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:04:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1928/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1928/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1928/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1928/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1928/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", 
"board": "", "duration": "0:00:05", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1928/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:53:24", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1928/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:22", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-20.json b/cidb/test_data/series_0/metadata-20.json
new file mode 100644
index 0000000..615c295
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-20.json
@@ -0,0 +1 @@
+{"status": {"status": "failed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:17:22 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1248", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824", "reason": "None"}}, "boards": [], "changes": [{"patch_number": "2", "total_pass": 1, "gerrit_number": "168017", "total_fail": 0, "pass": 1, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206364", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "168150", "total_fail": 0, "pass": 1, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206592", "total_fail": 0, "pass": 1, "fail": 0, "internal": 
false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206548", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206581", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}], "metadata-version": "2", "child-configs": [], "build-number": 1910, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [[{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404523805, ""], [{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404523805, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404523805, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404523805, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404523805, ""], [{"gerrit_number": "206515", "patch_number": "1", "internal": false}, "picked_up", 1404523805, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404523805, ""], [{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "submitted", 1404530090, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "submitted", 1404530094, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "submitted", 1404530142, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "submitted", 1404530146, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "submitted", 1404530150, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "kicked_out", 1404530152, ""]], "builder-name": "CQ master", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "time": {"duration": "1:46:04.448388", "start": "Fri, 04 Jul 2014 18:31:18 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:17:22 -0700 
(PST)"}, "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1910/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1910/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:15:57", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1910/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1910/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1910/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1910/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1910/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:05", "name": "MasterUploadPrebuilts"}, {"status": "failed", "description": "FAILED TO SUBMIT ALL CHANGES:  Could not verify that changes 
avakulenko:*168150:*d314d198 were submitted", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1910/steps/CommitQueueCompletion/logs/stdio", "summary": "FAILED TO SUBMIT ALL CHANGES:  Could not verify that changes avakulenko:*168150:*d314d198 were submitted", "board": "", "duration": "1:40:14", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1910/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:31", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-200.json b/cidb/test_data/series_0/metadata-200.json
new file mode 100644
index 0000000..e126865
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-200.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 10:04:57 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:21:20.145781", "start": "Mon, 07 Jul 2014 08:43:37 -0700 (PST)", "finish": "Mon, 07 Jul 2014 10:04:57 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:19", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:42", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:29", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:20", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:08", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:46", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:08", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:07:41", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:51:12", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17406, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747810, ""]], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-201.json b/cidb/test_data/series_0/metadata-201.json
new file mode 100644
index 0000000..6bb7be3
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-201.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 10:05:59 -0700 (PST)"}, "boards": ["parrot"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:24:02.256860", "start": "Mon, 07 Jul 2014 08:41:57 -0700 (PST)", "finish": "Mon, 07 Jul 2014 10:05:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:03", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:32", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:22", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:48", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:13:12", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:38", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:22", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:07:05", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:07:07", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:02", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:49:58", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 12125, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747710, ""]], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-202.json b/cidb/test_data/series_0/metadata-202.json
new file mode 100644
index 0000000..371d0e2
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-202.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 10:18:03 -0700 (PST)"}, "boards": ["stumpy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:35:43.261554", "start": "Mon, 07 Jul 2014 08:42:20 -0700 (PST)", "finish": "Mon, 07 Jul 2014 10:18:03 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:21", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:12:37", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:29", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:09", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:12", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:19", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:37", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:19", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:07:43", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:04:39", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17450, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747734, ""]], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-203.json b/cidb/test_data/series_0/metadata-203.json
new file mode 100644
index 0000000..f5d54ed
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-203.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:40:04 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:59:18.647510", "start": "Mon, 07 Jul 2014 08:40:45 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:40:04 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:14", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:18", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:47", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:16", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:38:46", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:09", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:07", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:10", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:38", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:13", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:35", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CommitQueueCompletion"}], "build-number": 18728, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747639, ""]], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-204.json b/cidb/test_data/series_0/metadata-204.json
new file mode 100644
index 0000000..6a9b094
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-204.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 10:17:15 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:34:13.205369", "start": "Mon, 07 Jul 2014 08:43:01 -0700 (PST)", "finish": "Mon, 07 Jul 2014 10:17:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": 
"0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:53", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:37", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:21", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:22", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:11", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:22", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:08:24", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:02:06", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:35", "name": "CommitQueueCompletion"}], "build-number": 16889, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747775, ""]], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-205.json b/cidb/test_data/series_0/metadata-205.json
new file mode 100644
index 0000000..a39f99d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-205.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:40:15 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:59:30.082235", "start": "Mon, 07 Jul 2014 08:40:45 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:40:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:15", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:20", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:52", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:22", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:38:38", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:11", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:01", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:54", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:31", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:06:20", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:12:39", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 18524, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747639, ""]], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-206.json b/cidb/test_data/series_0/metadata-206.json
new file mode 100644
index 0000000..6d9ade6
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-206.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:15:16 -0700 (PST)"}, "boards": ["stout"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:31:26.645981", "start": "Mon, 07 Jul 2014 08:43:50 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:15:16 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:38", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:21", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:33", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:40", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:16", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:04", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:25", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:27", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:34", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:18", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:42", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "CommitQueueCompletion"}], "build-number": 11471, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747823, ""]], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-207.json b/cidb/test_data/series_0/metadata-207.json
new file mode 100644
index 0000000..23b3abe
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-207.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 10:04:00 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:21:47.017210", "start": "Mon, 07 Jul 2014 08:42:13 -0700 (PST)", "finish": "Mon, 07 Jul 2014 10:04:00 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": 
"daisy_spring", "duration": "0:04:41", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:21:42", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:57", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": 
"SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:22", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:46", "name": "Archive"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:12", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:39:29", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:29", "name": "CommitQueueCompletion"}], "build-number": 4394, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747727, ""]], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-208.json b/cidb/test_data/series_0/metadata-208.json
new file mode 100644
index 0000000..61fcf18
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-208.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 10:12:30 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:30:22.357310", "start": "Mon, 07 Jul 2014 08:42:08 -0700 (PST)", "finish": "Mon, 07 Jul 2014 10:12:30 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:12", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:52", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:00:32", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1902, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-209.json b/cidb/test_data/series_0/metadata-209.json
new file mode 100644
index 0000000..ba676cb
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-209.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:12:01 -0700 (PST)"}, "boards": ["nyan"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:29:30.669710", "start": "Mon, 07 Jul 2014 08:42:30 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:12:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:17", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:34", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:01", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:06:26", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:04", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:06:27", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "nyan", "duration": "0:07:18", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1898, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747744, ""]], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-21.json b/cidb/test_data/series_0/metadata-21.json
new file mode 100644
index 0000000..376831b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-21.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 18:14:42 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2640", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2460", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3437", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2875", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17388", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12107", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17432", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18710", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16871", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18506", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11453", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4376", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1884", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1880", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15123", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2874", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2319", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4838", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17430", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2140", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4282", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1369", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2320", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1247", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1370", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4269", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11450", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2324", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/823", "reason": "None"}}, "boards": [], "changes": [], "metadata-version": "2", "child-configs": [], "build-number": 1909, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [], "builder-name": "CQ master", "version": {"chrome": "38.0.2082.1", "platform": "6019.0.0-rc2", "full": "R38-6019.0.0-rc2", "milestone": "38"}, "time": {"duration": "2:00:13.281983", "start": "Fri, 04 Jul 2014 16:14:28 -0700 (PST)", "finish": "Fri, 04 Jul 2014 18:14:42 -0700 (PST)"}, "toolchain-url": 
"2014/07/%(target)s-2014.07.04.105605.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1909/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1909/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "4:01:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1909/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1909/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:00", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1909/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1909/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1909/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1909/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:55:32", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1909/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.04.105605", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-210.json b/cidb/test_data/series_0/metadata-210.json
new file mode 100644
index 0000000..ebb7208
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-210.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 10:02:27 -0700 (PST)"}, "boards": ["daisy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:19:44.102108", "start": "Mon, 07 Jul 2014 08:42:43 -0700 (PST)", "finish": "Mon, 07 Jul 2014 10:02:27 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:12:10", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:45", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:05", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:56", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:53", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:27", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:50:15", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 15141, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747757, ""]], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-211.json b/cidb/test_data/series_0/metadata-211.json
new file mode 100644
index 0000000..92fb14f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-211.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:13:16 -0700 (PST)"}, "boards": ["samus"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:30:27.241528", "start": "Mon, 07 Jul 2014 08:42:48 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:13:16 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:28", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:06", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:17", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:26", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:48", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:07:42", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2892, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747762, ""]], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-212.json b/cidb/test_data/series_0/metadata-212.json
new file mode 100644
index 0000000..e6e23db
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-212.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:20:12 -0700 (PST)"}, "boards": ["panther"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:34:35.177519", "start": "Mon, 07 Jul 2014 08:45:37 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:20:12 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:21", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:21", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:37", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:12", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:12", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:14", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:33", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:35", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:21", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2337, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747930, ""]], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-213.json b/cidb/test_data/series_0/metadata-213.json
new file mode 100644
index 0000000..18319af
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-213.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 10:01:48 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:16:26.718250", "start": "Mon, 07 Jul 2014 08:45:22 -0700 (PST)", "finish": "Mon, 07 Jul 2014 10:01:48 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": 
"0:01:16", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:11:51", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:53", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:04", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:56", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:56", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:07:07", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:46:44", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4856, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747915, ""]], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-214.json b/cidb/test_data/series_0/metadata-214.json
new file mode 100644
index 0000000..74d2b47
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-214.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 10:16:15 -0700 (PST)"}, "boards": ["link"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:28:10.650924", "start": "Mon, 07 Jul 2014 08:48:05 -0700 (PST)", "finish": "Mon, 07 Jul 2014 10:16:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:20", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:40", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:10", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:03", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:15", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:35", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:52", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:41", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:57:55", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17448, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404748079, ""]], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-215.json b/cidb/test_data/series_0/metadata-215.json
new file mode 100644
index 0000000..0a431da
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-215.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:17:44 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:31:44.052250", "start": "Mon, 07 Jul 2014 08:46:00 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:17:44 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:14", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:49", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:37", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:20", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:07", "name": "CPEExport"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:20", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2158, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747954, ""]], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-216.json b/cidb/test_data/series_0/metadata-216.json
new file mode 100644
index 0000000..43b7698
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-216.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:27:26 -0700 (PST)"}, "boards": ["falco"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:43:58.639288", "start": "Mon, 07 Jul 2014 08:43:28 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:27:26 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:42", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:21:28", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:22", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:39", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:08", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:47", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:08", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:52", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4300, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747801, ""]], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-217.json b/cidb/test_data/series_0/metadata-217.json
new file mode 100644
index 0000000..d8e1d11
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-217.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:08:15 -0700 (PST)"}, "boards": ["gizmo"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:25:24.316575", "start": "Mon, 07 Jul 2014 08:42:51 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:08:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:08:45", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:07:54", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:40", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:54", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:11", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:11", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:09", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 841, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747764, ""]], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-218.json b/cidb/test_data/series_0/metadata-218.json
new file mode 100644
index 0000000..43ee49d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-218.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:19:00 -0700 (PST)"}, "boards": ["rambi"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:31:14.210328", "start": "Mon, 07 Jul 2014 08:47:46 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:19:00 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:35", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:32", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:21", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:11:38", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:19", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:15", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:54", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:50", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:56", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:04", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 2338, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404748059, ""]], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-219.json b/cidb/test_data/series_0/metadata-219.json
new file mode 100644
index 0000000..4d1b127
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-219.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:10:30 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:12:15.286412", "start": "Mon, 07 Jul 2014 08:58:15 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:10:30 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1265/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1265/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:25", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1265/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1265/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1265/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1265/steps/SetupBoard/logs/stdio", "summary": "Stage was 
successful", "board": "lumpy", "duration": "0:00:41", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1265/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1265/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1265/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:09:54", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1265/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1265/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1265, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404748689, ""]], "bot-hostname": "build168-m2.golo.chromium.org", 
"sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-22.json b/cidb/test_data/series_0/metadata-22.json
new file mode 100644
index 0000000..9b2e4d0
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-22.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:39:30 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:05:48.852277", "start": "Fri, 04 Jul 2014 18:33:41 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:39:30 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:20", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:31", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/ChromeSDK/logs/stdio", "summary": "Stage was 
successful", "board": "x86-mario", "duration": "0:12:27", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:43:10", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:49", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:18", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": 
"x86-mario", "duration": "0:07:11", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:41", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:13", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:00", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2641/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2641, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524013, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524013, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524013, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524013, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, 
"picked_up", 1404524013, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524013, ""]], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-220.json b/cidb/test_data/series_0/metadata-220.json
new file mode 100644
index 0000000..2299d96
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-220.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:03:13 -0700 (PST)"}, "boards": ["duck"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:20:52.817474", "start": "Mon, 07 Jul 2014 08:42:20 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:03:13 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:18", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:22", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:08:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:10", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:22", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:37", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:05", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:08", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388/steps/CommitQueueCompletion/logs/stdio", 
"summary": "Stage was successful", "board": "", "duration": "0:00:22", "name": "CommitQueueCompletion"}], "build-number": 1388, "child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747734, ""]], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-221.json b/cidb/test_data/series_0/metadata-221.json
new file mode 100644
index 0000000..241d3db
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-221.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:16:33 -0700 (PST)"}, "boards": ["peppy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:33:05.666699", "start": "Mon, 07 Jul 2014 08:43:27 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:16:33 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:45", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:44", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:38", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:04:50", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:10", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:21", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:46", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:09:47", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4287, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747800, ""]], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-222.json b/cidb/test_data/series_0/metadata-222.json
new file mode 100644
index 0000000..c5bdbbd
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-222.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:36:24 -0700 (PST)"}, "boards": ["butterfly"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:48:19.992246", "start": "Mon, 07 Jul 2014 08:48:04 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:36:24 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:36", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", 
"duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:23:33", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:08", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:21", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:10", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:39", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:10:34", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 11468, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404748077, ""]], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-223.json b/cidb/test_data/series_0/metadata-223.json
new file mode 100644
index 0000000..26be10e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-223.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:14:56 -0700 (PST)"}, "boards": ["monroe"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:30:20.302953", "start": "Mon, 07 Jul 2014 08:44:35 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:14:56 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:31", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:08:04", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:56", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:19", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:57", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:47", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2342, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747869, ""]], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-224.json b/cidb/test_data/series_0/metadata-224.json
new file mode 100644
index 0000000..aa90613
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-224.json
@@ -0,0 +1 @@
+{"status": {"status": "failed", "summary": "", "current-time": "Mon, 07 Jul 2014 09:33:47 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:51:28.508704", "start": "Mon, 07 Jul 2014 08:42:19 -0700 (PST)", "finish": "Mon, 07 Jul 2014 09:33:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", 
"board": "stumpy_moblab", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:30", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:11", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:24", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": 
"0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "failed", "description": "Packages failed in cros_run_unit_tests: chromeos-base/autotest-server", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/UnitTest/logs/stdio", "summary": "Packages failed in cros_run_unit_tests: chromeos-base/autotest-server", "board": "stumpy_moblab", "duration": "0:26:38", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:37", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:19", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:09:53", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:21", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:16:55", "name": "UploadTestArtifacts"}, {"status": "passed", "description": "", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage failed but was optional", "board": "", "duration": "0:00:37", "name": "CommitQueueCompletion"}], "build-number": 1387, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747732, ""]], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-225.json b/cidb/test_data/series_0/metadata-225.json
new file mode 100644
index 0000000..d7aca95
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-225.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:28:07 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:10:45.874653", "start": "Sat, 05 Jul 2014 14:17:21 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:28:07 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:22", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:03:06", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:35", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:25", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:53", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:41:40", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:13", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:10", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:19", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:49", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:42", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2646/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2646, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595029, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595029, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595029, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595029, ""]], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-226.json b/cidb/test_data/series_0/metadata-226.json
new file mode 100644
index 0000000..00ab782
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-226.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 14:40:54 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:22:29.420648", "start": "Sat, 05 Jul 2014 14:18:25 -0700 (PST)", "finish": "Sat, 05 Jul 2014 14:40:54 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:05", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:04", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:16", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:55", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:05:38", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:39", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:30", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:03:23", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2466/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2466, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595092, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595092, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595092, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595092, ""]], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-227.json b/cidb/test_data/series_0/metadata-227.json
new file mode 100644
index 0000000..c97f645
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-227.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:50:59 -0700 (PST)"}, "boards": ["wolf"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:32:21.289249", "start": "Sat, 05 Jul 2014 14:18:38 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:50:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:59", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:24", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:03:03", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:12:30", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:46", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:07", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", 
"duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:12", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:06", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:05:31", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:20", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:05:31", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:08:00", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:56:01", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3443/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 3443, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595106, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595106, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595106, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595106, ""]], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-228.json b/cidb/test_data/series_0/metadata-228.json
new file mode 100644
index 0000000..5b7ab18
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-228.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 14:55:30 -0700 (PST)"}, "boards": ["leon"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:36:38.647468", "start": "Sat, 05 Jul 2014 14:18:52 -0700 (PST)", "finish": "Sat, 05 Jul 2014 14:55:30 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:21", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:03:00", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:23", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:49", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:09:25", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", 
"duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:06", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:25", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:52", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:08:11", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2881/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2881, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595121, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595121, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595121, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595121, ""]], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-229.json b/cidb/test_data/series_0/metadata-229.json
new file mode 100644
index 0000000..12bc7c4
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-229.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:44:31 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:29:37.679930", "start": "Sat, 05 Jul 2014 14:14:53 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:44:31 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:00", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:27", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:03:08", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:19", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:51", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:09:25", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", 
"board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:30", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:13", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:06", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:39", "name": "CPEExport"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:06", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:35", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:53:39", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17394/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 17394, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404594878, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404594878, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404594878, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404594878, ""]], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-23.json b/cidb/test_data/series_0/metadata-23.json
new file mode 100644
index 0000000..15ed309
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-23.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 18:48:21 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:14:53.129042", "start": "Fri, 04 Jul 2014 18:33:28 -0700 (PST)", "finish": "Fri, 04 Jul 2014 18:48:21 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:05", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:12", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:54", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:42", "name": "BuildImage"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:26", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:42", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:31", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:04:38", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:24", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2461/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2461, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524000, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524000, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524000, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524000, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524000, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, 
"picked_up", 1404524000, ""]], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-230.json b/cidb/test_data/series_0/metadata-230.json
new file mode 100644
index 0000000..d6ba03b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-230.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:47:06 -0700 (PST)"}, "boards": ["parrot"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:31:39.583158", "start": "Sat, 05 Jul 2014 14:15:26 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:47:06 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:21", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:32", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:03:05", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:46", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:50", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:22", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:32", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:09", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:23", "name": "CPEExport"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:42", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:19", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:53:17", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12113/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 12113, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404594913, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404594913, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404594913, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404594913, ""]], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-231.json b/cidb/test_data/series_0/metadata-231.json
new file mode 100644
index 0000000..d94fbcc
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-231.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:59:08 -0700 (PST)"}, "boards": ["stumpy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206613", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}], "time": {"duration": "1:43:19.524985", "start": "Sat, 05 Jul 2014 14:15:49 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:59:08 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:03", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:20", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:03:08", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:51", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:09:28", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:34", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:08", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:00", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:31", "name": "CPEExport"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:00", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:31", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:07:01", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17438/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 17438, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404594930, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404594930, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404594930, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404594930, ""]], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-232.json b/cidb/test_data/series_0/metadata-232.json
new file mode 100644
index 0000000..fe82568
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-232.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:21:53 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:04:51.171332", "start": "Sat, 05 Jul 2014 14:17:02 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:21:53 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:25", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:07", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:03:05", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:11:10", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:11:43", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", 
"board": "x86-generic", "duration": "0:38:26", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:13", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:55", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:34", "name": "CPEExport"}, 
{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:15", "name": "Archive"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:07:55", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18716/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CommitQueueCompletion"}], "build-number": 18716, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595009, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595009, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595009, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595009, ""]], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-233.json b/cidb/test_data/series_0/metadata-233.json
new file mode 100644
index 0000000..b23f9a2
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-233.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:57:53 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:39:31.973116", "start": "Sat, 05 Jul 2014 14:18:21 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:57:53 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:58", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:20", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:03:03", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:48", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:43", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:08:46", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/VMTest%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:12", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": 
"x86-alex", "duration": "0:06:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:57", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:08:26", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:03:36", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16877/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 16877, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595088, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595088, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595088, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595088, ""]], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": 
"2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-234.json b/cidb/test_data/series_0/metadata-234.json
new file mode 100644
index 0000000..7ed1525
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-234.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:21:21 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:04:21.682706", "start": "Sat, 05 Jul 2014 14:16:59 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:21:21 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:24", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:08", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:03:04", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:41", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:15", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:22", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was successful", "board": "amd64-generic", "duration": "0:38:16", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:18", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:00:59", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": 
"amd64-generic", "duration": "0:07:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:06:05", "name": "Archive"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:22", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18512/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:16", "name": "CommitQueueCompletion"}], "build-number": 18512, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595008, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595008, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595008, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595008, ""]], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-235.json b/cidb/test_data/series_0/metadata-235.json
new file mode 100644
index 0000000..edf5896
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-235.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 14:59:06 -0700 (PST)"}, "boards": ["stout"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:37:51.691536", "start": "Sat, 05 Jul 2014 14:21:15 -0700 (PST)", "finish": "Sat, 05 Jul 2014 14:59:06 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:20", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:03:05", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:31", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:09:28", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", 
"board": "stout", "duration": "0:07:02", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:27", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:13", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:38", "name": "CPEExport"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:43", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:28", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11459/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11459, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595263, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595263, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595263, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595263, ""]], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-236.json b/cidb/test_data/series_0/metadata-236.json
new file mode 100644
index 0000000..814ac29
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-236.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:39:19 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:22:40.728408", "start": "Sat, 05 Jul 2014 14:16:38 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:39:19 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:59", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:23", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:26", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:21:52", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:52", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:46", "name": "DebugSymbols"}, {"status": "passed", "description": 
null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:47", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:17", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:35:01", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4382/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4382, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404594985, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404594985, ""], [{"gerrit_number": "206373", "patch_number": 
"1", "internal": false}, "picked_up", 1404594985, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404594985, ""]], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-237.json b/cidb/test_data/series_0/metadata-237.json
new file mode 100644
index 0000000..f96275a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-237.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:48:10 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:32:39.539924", "start": "Sat, 05 Jul 2014 14:15:30 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:48:10 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:19", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:27", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:31", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:43", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:10", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:03:23", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1890/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1890, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-238.json b/cidb/test_data/series_0/metadata-238.json
new file mode 100644
index 0000000..ee16e6a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-238.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 14:50:47 -0700 (PST)"}, "boards": ["nyan"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:35:00.540199", "start": "Sat, 05 Jul 2014 14:15:46 -0700 (PST)", "finish": "Sat, 05 Jul 2014 14:50:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:00", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:23", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:00", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:12:02", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:34", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", 
"duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:38", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:06:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1886/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1886, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404594933, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404594933, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404594933, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404594933, ""]], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-239.json b/cidb/test_data/series_0/metadata-239.json
new file mode 100644
index 0000000..ae165a0
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-239.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:45:14 -0700 (PST)"}, "boards": ["daisy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:26:09.340746", "start": "Sat, 05 Jul 2014 14:19:04 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:45:14 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:58", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:28", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:04", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:11:47", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", 
"board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:59", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:58", "name": "CPEExport"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:40", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:07", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:52:05", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15129/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 15129, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595133, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595133, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595133, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595133, ""]], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-24.json b/cidb/test_data/series_0/metadata-24.json
new file mode 100644
index 0000000..383e69b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-24.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:09:40 -0700 (PST)"}, "boards": ["wolf"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:29:42.747254", "start": "Fri, 04 Jul 2014 18:39:58 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:09:40 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:27", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:12:00", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:47", 
"name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:47", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:10", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:56", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:43", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:10:10", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:56:49", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3438/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 3438, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524388, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524388, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524388, ""], [{"gerrit_number": "206592", 
"patch_number": "1", "internal": false}, "picked_up", 1404524388, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524388, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524388, ""]], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-240.json b/cidb/test_data/series_0/metadata-240.json
new file mode 100644
index 0000000..7fe93ac
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-240.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 14:53:17 -0700 (PST)"}, "boards": ["samus"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:36:53.449066", "start": "Sat, 05 Jul 2014 14:16:23 -0700 (PST)", "finish": "Sat, 05 Jul 2014 14:53:17 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:57", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:23", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:03:07", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:39", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:52", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:09:13", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": 
"samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:23", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:01", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:23", "name": "CPEExport"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:39", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:14", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2880/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2880, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404594971, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404594971, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404594971, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404594971, ""]], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-241.json b/cidb/test_data/series_0/metadata-241.json
new file mode 100644
index 0000000..87bc2a1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-241.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 14:56:54 -0700 (PST)"}, "boards": ["panther"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:38:56.409419", "start": "Sat, 05 Jul 2014 14:17:58 -0700 (PST)", "finish": "Sat, 05 Jul 2014 14:56:54 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:32", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:18", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:03:06", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:41", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:52", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:20", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage 
was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:09:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:08", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:36", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:07:44", "name": 
"CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:38", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:09:34", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2325/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2325, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595065, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595065, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595065, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595065, ""]], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-242.json b/cidb/test_data/series_0/metadata-242.json
new file mode 100644
index 0000000..8f83625
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-242.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:41:40 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:22:57.250988", "start": "Sat, 05 Jul 2014 14:18:43 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:41:40 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:58", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:21", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:03", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:11:48", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:58", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:53", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:38", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:41", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:48:30", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4844/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4844, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595112, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595112, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 
1404595112, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595112, ""]], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-243.json b/cidb/test_data/series_0/metadata-243.json
new file mode 100644
index 0000000..70b2021
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-243.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:53:11 -0700 (PST)"}, "boards": ["link"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:36:05.594113", "start": "Sat, 05 Jul 2014 14:17:06 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:53:11 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:56", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:21", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:03:08", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:37", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:12", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", 
"duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:04", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:07", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:30", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:43", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:07", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:00:19", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17436/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17436, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595013, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595013, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595013, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595013, ""]], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-244.json b/cidb/test_data/series_0/metadata-244.json
new file mode 100644
index 0000000..9e4dd43
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-244.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 14:54:17 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:36:04.984308", "start": "Sat, 05 Jul 2014 14:18:12 -0700 (PST)", "finish": "Sat, 05 Jul 2014 14:54:17 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:02", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:22", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:03:06", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:46", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:49", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:57", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:59", "name": 
"DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:59", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2146/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2146, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595079, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595079, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595079, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595079, ""]], "bot-hostname": "build114-m2.golo.chromium.org", 
"sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-245.json b/cidb/test_data/series_0/metadata-245.json
new file mode 100644
index 0000000..1706c7c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-245.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:11:00 -0700 (PST)"}, "boards": ["falco"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:52:26.224371", "start": "Sat, 05 Jul 2014 14:18:34 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:11:00 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:58", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:21", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:23:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:09:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": 
"falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:14", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:09", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:56", "name": "CPEExport"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:09", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:59", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4288/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4288, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595102, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595102, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595102, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595102, ""]], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-246.json b/cidb/test_data/series_0/metadata-246.json
new file mode 100644
index 0000000..4103c3c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-246.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 14:45:23 -0700 (PST)"}, "boards": ["gizmo"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:26:06.740002", "start": "Sat, 05 Jul 2014 14:19:17 -0700 (PST)", "finish": "Sat, 05 Jul 2014 14:45:23 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:08", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:04", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:23", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:05", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:06:08", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": 
"gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:55", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:41", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:58", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:07", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:10", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:18", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/829/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 829, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595145, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595145, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595145, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595145, ""]], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-247.json b/cidb/test_data/series_0/metadata-247.json
new file mode 100644
index 0000000..4f14c16
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-247.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 14:54:05 -0700 (PST)"}, "boards": ["rambi"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:37:27.775647", "start": "Sat, 05 Jul 2014 14:16:38 -0700 (PST)", "finish": "Sat, 05 Jul 2014 14:54:05 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:23", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:31", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:03:05", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:11:34", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:49", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:09:33", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": 
"rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:18", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:06", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:54", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:44", "name": "CPEExport"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:54", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:46", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2326/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2326, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404594985, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404594985, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404594985, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404594985, ""]], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-248.json b/cidb/test_data/series_0/metadata-248.json
new file mode 100644
index 0000000..ce1fafe
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-248.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 14:48:33 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:16:36.988902", "start": "Sat, 05 Jul 2014 14:31:56 -0700 (PST)", "finish": "Sat, 05 Jul 2014 14:48:33 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1253/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1253/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1253/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1253/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1253/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1253/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:26", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1253/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1253/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1253/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:29", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1253/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1253/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1253, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy 
incremental-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595905, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595905, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595905, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595905, ""]], "bot-hostname": "build168-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-249.json b/cidb/test_data/series_0/metadata-249.json
new file mode 100644
index 0000000..395e093
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-249.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 14:42:15 -0700 (PST)"}, "boards": ["duck"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:23:42.253149", "start": "Sat, 05 Jul 2014 14:18:33 -0700 (PST)", "finish": "Sat, 05 Jul 2014 14:42:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:01", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:23", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:05", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:47", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:13", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", 
"duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:36", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:39", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:05", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:18", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:08", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:18", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1376/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1376, "child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595101, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595101, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595101, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595101, ""]], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-25.json b/cidb/test_data/series_0/metadata-25.json
new file mode 100644
index 0000000..1f31b5e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-25.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:08:20 -0700 (PST)"}, "boards": ["leon"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:34:10.145057", "start": "Fri, 04 Jul 2014 18:34:10 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:08:20 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:35", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:51", 
"name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:10:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:13", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:08:05", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:51", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:09:50", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2876/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 2876, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524042, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524042, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524042, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524042, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524042, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524042, ""]], 
"bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-250.json b/cidb/test_data/series_0/metadata-250.json
new file mode 100644
index 0000000..32c4911
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-250.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 14:55:52 -0700 (PST)"}, "boards": ["peppy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:37:42.691640", "start": "Sat, 05 Jul 2014 14:18:09 -0700 (PST)", "finish": "Sat, 05 Jul 2014 14:55:52 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:26", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:23", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:03:00", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:32", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:31", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:09:38", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": 
"peppy", "duration": "0:05:52", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:31", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:01", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:49", "name": "CPEExport"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:25", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:39", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4275/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4275, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595078, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595078, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595078, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595078, ""]], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-251.json b/cidb/test_data/series_0/metadata-251.json
new file mode 100644
index 0000000..3bce3e1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-251.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:12:06 -0700 (PST)"}, "boards": ["butterfly"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:54:57.102670", "start": "Sat, 05 Jul 2014 14:17:09 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:12:06 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:56", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:27", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:30", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:24:41", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:10:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:21", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:24", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:25", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:45", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:43", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11456/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 11456, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595017, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595017, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595017, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595017, ""]], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-252.json b/cidb/test_data/series_0/metadata-252.json
new file mode 100644
index 0000000..780c3e8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-252.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 14:54:46 -0700 (PST)"}, "boards": ["monroe"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:36:56.026808", "start": "Sat, 05 Jul 2014 14:17:50 -0700 (PST)", "finish": "Sat, 05 Jul 2014 14:54:46 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:25", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:22", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:03:04", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:49", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:53", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:06", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", 
"board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:09", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:08", "name": "CPEExport"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:39", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:08:02", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2330/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2330, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595058, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595058, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595058, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595058, ""]], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-253.json b/cidb/test_data/series_0/metadata-253.json
new file mode 100644
index 0000000..717844d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-253.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 15:03:45 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206395", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206396", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206373", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206613", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:44:26.275753", "start": "Sat, 05 Jul 2014 14:19:18 -0700 (PST)", "finish": "Sat, 05 Jul 2014 15:03:45 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:21", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:03:03", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:48", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:12:33", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:12:17", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:11", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:37", "name": "DebugSymbols"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:09:41", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:39", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:31", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1375/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1375, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc2", "full": "R38-6022.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206395", "patch_number": "1", "internal": false}, "picked_up", 1404595146, ""], [{"gerrit_number": "206396", "patch_number": "2", "internal": false}, "picked_up", 1404595146, ""], [{"gerrit_number": "206373", "patch_number": "1", "internal": false}, "picked_up", 1404595146, ""], [{"gerrit_number": "206613", "patch_number": "1", "internal": false}, "picked_up", 1404595146, ""]], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", 
"build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-254.json b/cidb/test_data/series_0/metadata-254.json
new file mode 100644
index 0000000..7eb5a9a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-254.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 12:24:17 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "1:03:05.683619", "start": "Sun, 06 Jul 2014 11:21:11 -0700 (PST)", "finish": "Sun, 06 Jul 2014 12:24:17 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": 
"0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:23", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:30", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:40:24", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:09", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:22", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:44", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2652, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404670864, ""]], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-255.json b/cidb/test_data/series_0/metadata-255.json
new file mode 100644
index 0000000..349f5ce
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-255.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 11:37:09 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:13:42.065991", "start": "Sun, 06 Jul 2014 11:23:27 -0700 (PST)", "finish": "Sun, 06 Jul 2014 11:37:09 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:05", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", 
"duration": "0:01:14", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:52", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:38", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:22", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:03:32", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:23", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 2472, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671001, ""]], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-256.json b/cidb/test_data/series_0/metadata-256.json
new file mode 100644
index 0000000..ae124bf
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-256.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 12:52:39 -0700 (PST)"}, "boards": ["wolf"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "1:30:31.269123", "start": "Sun, 06 Jul 2014 11:22:07 -0700 (PST)", "finish": "Sun, 06 Jul 2014 12:52:39 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:46", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:55", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:05:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:48", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "wolf", "duration": "0:07:14", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "1:01:08", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 3449, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404670920, ""]], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-257.json b/cidb/test_data/series_0/metadata-257.json
new file mode 100644
index 0000000..2f60b70
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-257.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 11:52:43 -0700 (PST)"}, "boards": ["leon"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:30:20.059587", "start": "Sun, 06 Jul 2014 11:22:23 -0700 (PST)", "finish": "Sun, 06 Jul 2014 11:52:43 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:56", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:21", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:28", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:08:06", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:46", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "leon", "duration": "0:07:19", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2887, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404670937, ""]], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-258.json b/cidb/test_data/series_0/metadata-258.json
new file mode 100644
index 0000000..4919792
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-258.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 12:55:07 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "1:30:05.515787", "start": "Sun, 06 Jul 2014 11:25:01 -0700 (PST)", "finish": "Sun, 06 Jul 2014 12:55:07 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:20", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:58", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:42", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:58", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:07:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "1:00:18", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 17400, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671095, ""]], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-259.json b/cidb/test_data/series_0/metadata-259.json
new file mode 100644
index 0000000..6745dab
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-259.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 12:55:30 -0700 (PST)"}, "boards": ["parrot"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "1:29:14.251294", "start": "Sun, 06 Jul 2014 11:26:16 -0700 (PST)", "finish": "Sun, 06 Jul 2014 12:55:30 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:30", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:21", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:28", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:58", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:36", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:23", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:47", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:13", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:55:25", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 12119, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671169, ""]], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-26.json b/cidb/test_data/series_0/metadata-26.json
new file mode 100644
index 0000000..9084c3b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-26.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:03:36 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:26:19.678206", "start": "Fri, 04 Jul 2014 18:37:16 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:03:36 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:20", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", 
"duration": "0:12:40", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:23", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:11", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:35", "name": "DebugSymbols"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:24", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:37", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:10:29", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:53:44", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17389/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17389, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524228, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524228, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 
1404524228, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524228, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524228, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524228, ""]], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-260.json b/cidb/test_data/series_0/metadata-260.json
new file mode 100644
index 0000000..3b30b06
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-260.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 13:03:17 -0700 (PST)"}, "boards": ["stumpy"], "changes": [{"patch_number": "2", "total_pass": 1, "gerrit_number": "206113", "total_fail": 2, "pass": 1, "fail": 2, "internal": false}], "time": {"duration": "1:39:32.444651", "start": "Sun, 06 Jul 2014 11:23:44 -0700 (PST)", "finish": "Sun, 06 Jul 2014 13:03:17 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:08", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:20", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:12", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:14", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:34", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:14", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:07:32", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:09:56", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 17444, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671017, ""]], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-261.json b/cidb/test_data/series_0/metadata-261.json
new file mode 100644
index 0000000..504a304
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-261.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 12:20:15 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:59:18.454697", "start": "Sun, 06 Jul 2014 11:20:57 -0700 (PST)", "finish": "Sun, 06 Jul 2014 12:20:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:08", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:38", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:23", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:18", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:21", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:17", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:37", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:39:03", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:08", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:58", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:37", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:11", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:40", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CommitQueueCompletion"}], "build-number": 18722, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404670851, ""]], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-262.json b/cidb/test_data/series_0/metadata-262.json
new file mode 100644
index 0000000..0d103bb
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-262.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 13:00:34 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "1:37:30.506060", "start": "Sun, 06 Jul 2014 11:23:03 -0700 (PST)", "finish": "Sun, 06 Jul 2014 13:00:34 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": 
"0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:51", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:50", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:32", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:05", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:05", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:39", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:06:22", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 16883, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404670976, ""]], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-263.json b/cidb/test_data/series_0/metadata-263.json
new file mode 100644
index 0000000..662ae30
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-263.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 12:20:38 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:59:42.233969", "start": "Sun, 06 Jul 2014 11:20:56 -0700 (PST)", "finish": "Sun, 06 Jul 2014 12:20:38 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:36", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:24", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:20", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:26", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:23", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:47", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:39:12", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:11", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:31", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:03", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:09:08", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CommitQueueCompletion"}], "build-number": 18518, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404670850, ""]], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-264.json b/cidb/test_data/series_0/metadata-264.json
new file mode 100644
index 0000000..cb19659
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-264.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 11:52:57 -0700 (PST)"}, "boards": ["stout"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:31:47.890301", "start": "Sun, 06 Jul 2014 11:21:09 -0700 (PST)", "finish": "Sun, 06 Jul 2014 11:52:57 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:21", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:41", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:41", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:10", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:04", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:04", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:32", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:53", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:33", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueCompletion"}], "build-number": 11465, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404670862, ""]], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-265.json b/cidb/test_data/series_0/metadata-265.json
new file mode 100644
index 0000000..80766ea
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-265.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 12:45:58 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "1:21:48.676411", "start": "Sun, 06 Jul 2014 11:24:09 -0700 (PST)", "finish": "Sun, 06 Jul 2014 12:45:58 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": 
"daisy_spring", "duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:21:46", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": 
"SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:59", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:48", "name": "Archive"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:47", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:39:03", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 4388, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671043, ""]], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-266.json b/cidb/test_data/series_0/metadata-266.json
new file mode 100644
index 0000000..ffdee0d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-266.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 13:00:59 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:37:39.716957", "start": "Sun, 06 Jul 2014 11:23:19 -0700 (PST)", "finish": "Sun, 06 Jul 2014 13:00:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:09", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:16", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:46", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:03", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:42", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:05:58", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1896, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-267.json b/cidb/test_data/series_0/metadata-267.json
new file mode 100644
index 0000000..09c5e41
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-267.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 11:54:05 -0700 (PST)"}, "boards": ["nyan"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:28:52.889088", "start": "Sun, 06 Jul 2014 11:25:12 -0700 (PST)", "finish": "Sun, 06 Jul 2014 11:54:05 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:16", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:11:13", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:10", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:34", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:40", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:40", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "nyan", "duration": "0:06:43", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:24", "name": "CommitQueueCompletion"}], "build-number": 1892, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671105, ""]], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-268.json b/cidb/test_data/series_0/metadata-268.json
new file mode 100644
index 0000000..91f9cf0
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-268.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 12:51:14 -0700 (PST)"}, "boards": ["daisy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "1:26:10.768640", "start": "Sun, 06 Jul 2014 11:25:03 -0700 (PST)", "finish": "Sun, 06 Jul 2014 12:51:14 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:11:42", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:11", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:04:02", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:44", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:08", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:57:25", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 15135, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671097, ""]], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-269.json b/cidb/test_data/series_0/metadata-269.json
new file mode 100644
index 0000000..c33d2ce
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-269.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 11:54:36 -0700 (PST)"}, "boards": ["samus"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:30:30.452100", "start": "Sun, 06 Jul 2014 11:24:06 -0700 (PST)", "finish": "Sun, 06 Jul 2014 11:54:36 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:21", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:41", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:04", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:00:56", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:31", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:59", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:07:26", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2886, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671039, ""]], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-27.json b/cidb/test_data/series_0/metadata-27.json
new file mode 100644
index 0000000..575474a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-27.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:59:19 -0700 (PST)"}, "boards": ["parrot"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:25:34.734329", "start": "Fri, 04 Jul 2014 18:33:45 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:59:19 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:01", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": 
"parrot", "duration": "0:13:05", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:41", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:13", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:40", "name": "DebugSymbols"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:23", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:42", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:36", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:52:40", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12108/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 12108, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524014, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524014, ""], [{"gerrit_number": "168150", "patch_number": "1", 
"internal": true}, "picked_up", 1404524014, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524014, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524014, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524014, ""]], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-270.json b/cidb/test_data/series_0/metadata-270.json
new file mode 100644
index 0000000..07be034
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-270.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 11:58:26 -0700 (PST)"}, "boards": ["panther"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:34:37.577194", "start": "Sun, 06 Jul 2014 11:23:48 -0700 (PST)", "finish": "Sun, 06 Jul 2014 11:58:26 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:22", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:42", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:09", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:08", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:51", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:14", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:53", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:18", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2331, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671020, ""]], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-271.json b/cidb/test_data/series_0/metadata-271.json
new file mode 100644
index 0000000..fa2b702
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-271.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 12:49:50 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "1:24:03.816176", "start": "Sun, 06 Jul 2014 11:25:46 -0700 (PST)", "finish": "Sun, 06 Jul 2014 12:49:50 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": 
"0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:12:40", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:42", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:17", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:40", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:40", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:53:55", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4850, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671140, ""]], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-272.json b/cidb/test_data/series_0/metadata-272.json
new file mode 100644
index 0000000..287b28e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-272.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 13:02:47 -0700 (PST)"}, "boards": ["link"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "1:38:21.122673", "start": "Sun, 06 Jul 2014 11:24:26 -0700 (PST)", "finish": "Sun, 06 Jul 2014 13:02:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:22", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:33", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:03", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:33", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:47", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:40", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:08:16", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17442, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671060, ""]], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-273.json b/cidb/test_data/series_0/metadata-273.json
new file mode 100644
index 0000000..7816b5a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-273.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 11:54:17 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:30:19.067421", "start": "Sun, 06 Jul 2014 11:23:57 -0700 (PST)", "finish": "Sun, 06 Jul 2014 11:54:17 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:54", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:50", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:36", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:02", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:06", "name": "CPEExport"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:02", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:41", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "CommitQueueCompletion"}], "build-number": 2152, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671031, ""]], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-274.json b/cidb/test_data/series_0/metadata-274.json
new file mode 100644
index 0000000..2caff33
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-274.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 12:12:44 -0700 (PST)"}, "boards": ["falco"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:44:14.436227", "start": "Sun, 06 Jul 2014 11:28:30 -0700 (PST)", "finish": "Sun, 06 Jul 2014 12:12:44 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:27", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:44", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:21:36", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:29", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:41", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:21", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:52", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:21", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:46", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 4294, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671303, ""]], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-275.json b/cidb/test_data/series_0/metadata-275.json
new file mode 100644
index 0000000..a7b1814
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-275.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 11:43:46 -0700 (PST)"}, "boards": ["gizmo"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:21:44.134655", "start": "Sun, 06 Jul 2014 11:22:02 -0700 (PST)", "finish": "Sun, 06 Jul 2014 11:43:46 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:19", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:56", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:06:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:32", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:56", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:10", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:12", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 835, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404670916, ""]], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-276.json b/cidb/test_data/series_0/metadata-276.json
new file mode 100644
index 0000000..16738b1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-276.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 11:52:33 -0700 (PST)"}, "boards": ["rambi"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:31:22.761934", "start": "Sun, 06 Jul 2014 11:21:10 -0700 (PST)", "finish": "Sun, 06 Jul 2014 11:52:33 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:30", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:11:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:20", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:03", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:40", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:03", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:07:43", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2332, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404670864, ""]], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-277.json b/cidb/test_data/series_0/metadata-277.json
new file mode 100644
index 0000000..c55bdcd
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-277.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 11:48:52 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:06:00.041932", "start": "Sun, 06 Jul 2014 11:42:52 -0700 (PST)", "finish": "Sun, 06 Jul 2014 11:48:52 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1259/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:03", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1259/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:19:17", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1259/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1259/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1259/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1259/steps/SetupBoard/logs/stdio", "summary": "Stage was 
successful", "board": "lumpy", "duration": "0:00:41", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1259/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1259/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1259/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:03:38", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1259/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1259/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1259, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404672167, ""]], "bot-hostname": "build168-m2.golo.chromium.org", 
"sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-278.json b/cidb/test_data/series_0/metadata-278.json
new file mode 100644
index 0000000..d1d6dc4
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-278.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 11:42:40 -0700 (PST)"}, "boards": ["duck"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:18:00.041002", "start": "Sun, 06 Jul 2014 11:24:40 -0700 (PST)", "finish": "Sun, 06 Jul 2014 11:42:40 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:22", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:33", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:15", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:23", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:29", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:55", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:11", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:17", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:09", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382/steps/CommitQueueCompletion/logs/stdio", 
"summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1382, "child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671074, ""]], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-279.json b/cidb/test_data/series_0/metadata-279.json
new file mode 100644
index 0000000..c552180
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-279.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 11:53:26 -0700 (PST)"}, "boards": ["peppy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:32:09.048163", "start": "Sun, 06 Jul 2014 11:21:17 -0700 (PST)", "finish": "Sun, 06 Jul 2014 11:53:26 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:21", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:21", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:36", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:34", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:01", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:08", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:10", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:48", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:07", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:06", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "CommitQueueCompletion"}], "build-number": 4281, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404670870, ""]], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-28.json b/cidb/test_data/series_0/metadata-28.json
new file mode 100644
index 0000000..df6ea95
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-28.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:14:24 -0700 (PST)"}, "boards": ["stumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206592", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206548", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206581", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}], "time": {"duration": "1:40:27.289422", "start": "Fri, 04 Jul 2014 18:33:57 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:14:24 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:12:38", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:53", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": 
"stumpy", "duration": "0:12:43", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:04", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:13", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:34", "name": "DebugSymbols"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:36", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:10:29", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:06:40", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17433/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17433, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524029, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524029, ""], [{"gerrit_number": "168150", "patch_number": "1", 
"internal": true}, "picked_up", 1404524029, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524029, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524029, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524029, ""]], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-280.json b/cidb/test_data/series_0/metadata-280.json
new file mode 100644
index 0000000..f63c30d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-280.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 12:10:01 -0700 (PST)"}, "boards": ["butterfly"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:47:04.920770", "start": "Sun, 06 Jul 2014 11:22:56 -0700 (PST)", "finish": "Sun, 06 Jul 2014 12:10:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", 
"duration": "0:04:45", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:23:15", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:34", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:04", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:22", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:47", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:19", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 11462, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404670969, ""]], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-281.json b/cidb/test_data/series_0/metadata-281.json
new file mode 100644
index 0000000..6bb4b14
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-281.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 11:54:54 -0700 (PST)"}, "boards": ["monroe"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:30:21.532349", "start": "Sun, 06 Jul 2014 11:24:32 -0700 (PST)", "finish": "Sun, 06 Jul 2014 11:54:54 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:37", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:47", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:51", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:57", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:20", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:30", "name": "CommitQueueCompletion"}], "build-number": 2336, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671066, ""]], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-282.json b/cidb/test_data/series_0/metadata-282.json
new file mode 100644
index 0000000..a4de0c0
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-282.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 12:03:53 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206113", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}], "time": {"duration": "0:39:21.493396", "start": "Sun, 06 Jul 2014 11:24:32 -0700 (PST)", "finish": "Sun, 06 Jul 2014 12:03:53 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", 
"board": "stumpy_moblab", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:35", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:13", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": 
"0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:00:43", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:24", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:10:00", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", 
"duration": "0:06:26", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:54", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1381, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404671065, ""]], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-283.json b/cidb/test_data/series_0/metadata-283.json
new file mode 100644
index 0000000..e836936
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-283.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 08:02:20 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:06:33.452832", "start": "Mon, 07 Jul 2014 06:55:47 -0700 (PST)", "finish": "Mon, 07 Jul 2014 08:02:20 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:01", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/BuildPackages/logs/stdio", "summary": "Stage 
was successful", "board": "x86-mario", "duration": "0:12:26", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:35", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:43", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:42:41", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:42", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": 
"0:01:01", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:07", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:37", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:22", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:13:09", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2657, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], 
"toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741326, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741326, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741326, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741326, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741326, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741326, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741326, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741326, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741326, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741326, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741326, ""]], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-284.json b/cidb/test_data/series_0/metadata-284.json
new file mode 100644
index 0000000..f29c9f8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-284.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:10:52 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:15:14.252139", "start": "Mon, 07 Jul 2014 06:55:38 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:10:52 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:05", "name": "CleanUp"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:03", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:15", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/BuildPackages/logs/stdio", 
"summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:49", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:43", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:24", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was 
skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:30", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:04:56", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:27", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2477, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741318, ""], [{"gerrit_number": "167453", "patch_number": "2", 
"internal": true}, "picked_up", 1404741318, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741318, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741318, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741318, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741318, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741318, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741318, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741318, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741318, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741318, ""]], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-285.json b/cidb/test_data/series_0/metadata-285.json
new file mode 100644
index 0000000..ad8d2c3
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-285.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 08:23:43 -0700 (PST)"}, "boards": ["wolf"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:29:01.727181", "start": "Mon, 07 Jul 2014 06:54:42 -0700 (PST)", "finish": "Mon, 07 Jul 2014 08:23:43 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:00", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": 
"0:11:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:41", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:46", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:08", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:42", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:57", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:44", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:10:15", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:56:28", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 3454, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2084.1", 
"platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741262, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741262, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741262, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741262, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741262, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741262, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741262, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741262, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741262, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741262, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741262, ""]], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-286.json b/cidb/test_data/series_0/metadata-286.json
new file mode 100644
index 0000000..1435b7b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-286.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:35:45 -0700 (PST)"}, "boards": ["leon"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:34:32.012931", "start": "Mon, 07 Jul 2014 07:01:13 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:35:45 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": 
"0:11:40", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:58", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:10:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:57", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:07:00", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:08:07", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:07:02", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:09:57", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2892, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", 
"internal": true}, "picked_up", 1404741654, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741654, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741654, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741654, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741654, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741654, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741654, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741654, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741654, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741654, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741654, ""]], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-287.json b/cidb/test_data/series_0/metadata-287.json
new file mode 100644
index 0000000..fd229c7
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-287.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 08:21:09 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:26:09.755034", "start": "Mon, 07 Jul 2014 06:54:59 -0700 (PST)", "finish": "Mon, 07 Jul 2014 08:21:09 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", 
"duration": "0:12:58", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:12:39", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:17", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:57", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:26", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:50", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:10:36", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:51:50", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17405, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": 
{"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741277, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741277, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741277, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741277, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741277, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741277, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741277, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741277, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741277, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741277, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741277, ""]], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-288.json b/cidb/test_data/series_0/metadata-288.json
new file mode 100644
index 0000000..1fb4152
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-288.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 08:28:48 -0700 (PST)"}, "boards": ["parrot"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:27:51.859100", "start": "Mon, 07 Jul 2014 07:00:56 -0700 (PST)", "finish": "Mon, 07 Jul 2014 08:28:48 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:04:08", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:32", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": 
"parrot", "duration": "0:11:31", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:53", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:53", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:29", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:05", "name": "UploadPrebuilts"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:28", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:46", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:17:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:47:32", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 12124, "child-configs": [], "bot-config": "parrot-paladin", 
"builder-name": "parrot paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741633, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741633, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741633, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741633, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741633, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741633, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741633, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741633, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741633, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741633, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741633, ""]], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-289.json b/cidb/test_data/series_0/metadata-289.json
new file mode 100644
index 0000000..957139a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-289.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 08:32:19 -0700 (PST)"}, "boards": ["stumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "167453", "total_fail": 1, "pass": 1, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206786", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206787", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "205535", "total_fail": 1, "pass": 1, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "205534", "total_fail": 1, "pass": 1, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206648", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 1, "gerrit_number": "206764", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206647", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206649", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206780", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}], "time": {"duration": "1:37:16.679515", "start": "Mon, 07 Jul 2014 06:55:03 -0700 (PST)", "finish": "Mon, 07 Jul 2014 08:32:19 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": 
"stumpy", "duration": "0:11:39", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:12:33", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:00", "name": "UploadPrebuilts"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:55", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:18", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:57", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:10:53", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:03:53", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17449, "child-configs": [], "bot-config": "stumpy-paladin", 
"builder-name": "stumpy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741282, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741282, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741282, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741282, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741282, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741282, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741282, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741282, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741282, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741282, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741282, ""]], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-29.json b/cidb/test_data/series_0/metadata-29.json
new file mode 100644
index 0000000..177031e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-29.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:32:00 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:59:40.732776", "start": "Fri, 04 Jul 2014 18:32:19 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:32:00 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:08", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:39", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:08", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:13", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:18", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", 
"board": "x86-generic", "duration": "0:39:48", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:23", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:02:02", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:00", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:43", "name": "CPEExport"}, 
{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:58", "name": "Archive"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:36", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18711/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CommitQueueCompletion"}], "build-number": 18711, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404523931, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404523931, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404523931, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404523931, ""]], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-290.json b/cidb/test_data/series_0/metadata-290.json
new file mode 100644
index 0000000..ea3b632
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-290.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:54:16 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:00:13.715131", "start": "Mon, 07 Jul 2014 06:54:02 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:54:16 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", 
"description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:25", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:29", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/BuildImage/logs/stdio", "summary": "Stage was successful", 
"board": "x86-generic", "duration": "0:05:13", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:46", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:39:50", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:16", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": 
"DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:12", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:35", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:23", "name": "Archive"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:09:13", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:16", "name": "CommitQueueCompletion"}], "build-number": 18727, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741225, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741225, ""], [{"gerrit_number": "205535", 
"patch_number": "1", "internal": false}, "picked_up", 1404741225, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741225, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741225, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741225, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741225, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741225, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741225, ""]], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-291.json b/cidb/test_data/series_0/metadata-291.json
new file mode 100644
index 0000000..7d31cf0
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-291.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 08:31:33 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206649", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206780", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}], "time": {"duration": "1:35:16.227318", "start": "Mon, 07 Jul 2014 06:56:17 -0700 (PST)", "finish": "Mon, 07 Jul 2014 08:31:33 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:01", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/BuildPackages/logs/stdio", "summary": "Stage was 
successful", "board": "x86-alex", "duration": "0:11:28", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:51", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:08", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:42", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:21", 
"name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:55", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:07", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:01:59", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], 
"build-number": 16888, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741358, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741358, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""]], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-292.json b/cidb/test_data/series_0/metadata-292.json
new file mode 100644
index 0000000..dc7deab
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-292.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:52:59 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:58:51.939674", "start": "Mon, 07 Jul 2014 06:54:07 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:52:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", 
"description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:24", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:39", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/BuildImage/logs/stdio", "summary": "Stage 
was successful", "board": "amd64-generic", "duration": "0:05:26", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:38:07", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:19", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": 
"amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:54", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:31", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:28", "name": "Archive"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:52", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:16", "name": "CommitQueueCompletion"}], "build-number": 18523, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741229, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": 
false}, "picked_up", 1404741229, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741229, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741229, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741229, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741229, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741229, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741229, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741229, ""]], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-293.json b/cidb/test_data/series_0/metadata-293.json
new file mode 100644
index 0000000..c8e43d9
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-293.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:31:43 -0700 (PST)"}, "boards": ["stout"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:35:24.582854", "start": "Mon, 07 Jul 2014 06:56:19 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:31:43 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", 
"duration": "0:11:31", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:40", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:12:21", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:58", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:25", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:56", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:23", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:27", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:29", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11470, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", 
"patch_number": "2", "internal": true}, "picked_up", 1404741358, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741358, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741358, ""]], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-294.json b/cidb/test_data/series_0/metadata-294.json
new file mode 100644
index 0000000..d3c6f1e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-294.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 08:23:54 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:29:23.130441", "start": "Mon, 07 Jul 2014 06:54:31 -0700 (PST)", "finish": "Mon, 07 Jul 2014 08:23:54 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:45", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:31:51", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:59", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:16", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:49", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:18", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:36:52", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 4393, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741249, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741249, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""]], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-295.json b/cidb/test_data/series_0/metadata-295.json
new file mode 100644
index 0000000..c4616ae
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-295.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 08:26:15 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:30:36.720639", "start": "Mon, 07 Jul 2014 06:55:39 -0700 (PST)", "finish": "Mon, 07 Jul 2014 08:26:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:43", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:34", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:50", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:20", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:14", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:58:00", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1901, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-296.json b/cidb/test_data/series_0/metadata-296.json
new file mode 100644
index 0000000..d28a7d2
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-296.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:28:36 -0700 (PST)"}, "boards": ["nyan"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:29:59.605111", "start": "Mon, 07 Jul 2014 06:58:37 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:28:36 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:00", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:17", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": 
"0:11:47", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:40", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:57", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:06", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:48", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:07:18", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 1897, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, 
"picked_up", 1404741496, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741496, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741496, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741496, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741496, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741496, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741496, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741496, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741496, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741496, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741496, ""]], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-297.json b/cidb/test_data/series_0/metadata-297.json
new file mode 100644
index 0000000..d5c1071
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-297.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 08:13:48 -0700 (PST)"}, "boards": ["daisy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:18:50.462782", "start": "Mon, 07 Jul 2014 06:54:57 -0700 (PST)", "finish": "Mon, 07 Jul 2014 08:13:48 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", 
"duration": "0:12:02", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:08", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:40", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:59", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:38", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:26", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:49:24", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 15140, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": 
{"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741276, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741276, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741276, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741276, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741276, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741276, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741276, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741276, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741276, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741276, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741276, ""]], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-298.json b/cidb/test_data/series_0/metadata-298.json
new file mode 100644
index 0000000..3a3805e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-298.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:28:48 -0700 (PST)"}, "boards": ["samus"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:34:15.990019", "start": "Mon, 07 Jul 2014 06:54:32 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:28:48 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", 
"duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:57", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:10:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:00:55", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:03", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:48", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:09:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2891, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": 
"2", "internal": true}, "picked_up", 1404741249, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741249, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741249, ""]], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-299.json b/cidb/test_data/series_0/metadata-299.json
new file mode 100644
index 0000000..41ef070
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-299.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:35:29 -0700 (PST)"}, "boards": ["panther"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:37:28.197262", "start": "Mon, 07 Jul 2014 06:58:01 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:35:29 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:05", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", 
"board": "panther", "duration": "0:12:57", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:00", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:00:56", "name": "UploadPrebuilts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:55", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:13:41", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2336, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": 
"2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741461, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741461, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741461, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741461, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741461, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741461, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741461, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741461, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741461, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741461, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741461, ""]], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-3.json b/cidb/test_data/series_0/metadata-3.json
new file mode 100644
index 0000000..a7378bc
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-3.json
@@ -0,0 +1 @@
+{"status": {"status": "failed", "summary": "", "current-time": "Mon, 07 Jul 2014 10:19:55 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2658", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2478", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3455", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2893", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17406", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12125", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17450", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18728", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16889", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18524", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11471", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4394", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1902", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1898", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15141", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2892", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2337", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4856", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17448", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2158", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4300", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "fail", "message": "stumpy_moblab-paladin: The UnitTest stage failed: Packages failed in cros_run_unit_tests: chromeos-base/autotest-server in https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1387", "reason": "The UnitTest stage failed: Packages failed in cros_run_unit_tests: chromeos-base/autotest-server"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2338", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1265", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1388", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4287", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11468", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2342", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/841", "reason": "None"}}, "boards": [], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}], "metadata-version": "2", "child-configs": [], "build-number": 
1927, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [[{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404747519, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "kicked_out", 1404753472, ""]], "builder-name": "CQ master", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc3", "full": "R38-6028.0.0-rc3", "milestone": "38"}, "time": {"duration": "1:40:10.129817", "start": "Mon, 07 Jul 2014 08:39:45 -0700 (PST)", "finish": "Mon, 07 Jul 2014 10:19:55 -0700 (PST)"}, "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1927/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1927/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:36", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1927/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1927/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1927/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1927/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1927/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "failed", "description": "", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1927/steps/CommitQueueCompletion/logs/stdio", "summary": "", "board": "", "duration": "1:34:04", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1927/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:47", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-30.json b/cidb/test_data/series_0/metadata-30.json
new file mode 100644
index 0000000..e2670bc
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-30.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:12:01 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:37:59.609101", "start": "Fri, 04 Jul 2014 18:34:01 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:12:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:48", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/ChromeSDK/logs/stdio", "summary": "Stage was 
successful", "board": "x86-alex", "duration": "0:12:03", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:42", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:12", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", 
"duration": "0:06:54", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:56", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:10:52", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:05:21", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16872/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 16872, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524032, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524032, ""], 
[{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524032, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524032, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524032, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524032, ""]], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-300.json b/cidb/test_data/series_0/metadata-300.json
new file mode 100644
index 0000000..073840d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-300.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 08:13:18 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:17:41.301628", "start": "Mon, 07 Jul 2014 06:55:37 -0700 (PST)", "finish": "Mon, 07 Jul 2014 08:13:18 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:17", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/BuildPackages/logs/stdio", "summary": "Stage 
was successful", "board": "peach_pit", "duration": "0:12:37", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:08", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:01", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": 
"0:00:47", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:47", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:08:19", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:45:48", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": 
"CommitQueueCompletion"}], "build-number": 4855, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741317, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741317, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741317, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741317, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741317, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741317, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741317, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741317, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741317, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741317, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741317, ""]], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-301.json b/cidb/test_data/series_0/metadata-301.json
new file mode 100644
index 0000000..15e7f9a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-301.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 08:27:55 -0700 (PST)"}, "boards": ["link"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:32:08.874629", "start": "Mon, 07 Jul 2014 06:55:46 -0700 (PST)", "finish": "Mon, 07 Jul 2014 08:27:55 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", 
"duration": "0:12:32", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:14:26", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:24", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:42", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:04", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:58:38", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17447, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": 
"38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741325, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741325, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741325, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741325, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741325, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741325, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741325, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741325, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741325, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741325, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741325, ""]], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-302.json b/cidb/test_data/series_0/metadata-302.json
new file mode 100644
index 0000000..ec897d9
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-302.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:30:42 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:33:41.573042", "start": "Mon, 07 Jul 2014 06:57:01 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:30:42 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": 
null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:02", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:49", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:52", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:07", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:54", "name": "UnitTest"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:55", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:24", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:57", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:09", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": 
"CommitQueueCompletion"}], "build-number": 2157, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741399, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741399, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741399, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741399, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741399, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741399, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741399, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741399, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741399, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741399, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741399, ""]], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-303.json b/cidb/test_data/series_0/metadata-303.json
new file mode 100644
index 0000000..cfd4bc1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-303.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:54:46 -0700 (PST)"}, "boards": ["falco"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:57:44.138991", "start": "Mon, 07 Jul 2014 06:57:02 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:54:46 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:00", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", 
"duration": "0:32:26", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:11:02", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:33", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:42", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:30", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:42", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:09:29", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 4299, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": 
"2", "internal": true}, "picked_up", 1404741401, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741401, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741401, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741401, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741401, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741401, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741401, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741401, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741401, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741401, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741401, ""]], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-304.json b/cidb/test_data/series_0/metadata-304.json
new file mode 100644
index 0000000..272f008
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-304.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:20:10 -0700 (PST)"}, "boards": ["gizmo"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:22:10.809708", "start": "Mon, 07 Jul 2014 06:58:00 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:20:10 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:03", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": 
"0:06:10", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:07:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:38", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:58", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:14", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:08", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 840, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741456, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741456, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741456, ""], [{"gerrit_number": "206787", 
"patch_number": "1", "internal": false}, "picked_up", 1404741456, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741456, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741456, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741456, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741456, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741456, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741456, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741456, ""]], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-305.json b/cidb/test_data/series_0/metadata-305.json
new file mode 100644
index 0000000..9601241
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-305.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:33:39 -0700 (PST)"}, "boards": ["rambi"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:37:09.950015", "start": "Mon, 07 Jul 2014 06:56:29 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:33:39 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:31", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", 
"duration": "0:12:31", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:13:13", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:09", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:07:16", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:07:18", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:39", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2337, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": 
"2", "internal": true}, "picked_up", 1404741368, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741368, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741368, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741368, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741368, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741368, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741368, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741368, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741368, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741368, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741368, ""]], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-306.json b/cidb/test_data/series_0/metadata-306.json
new file mode 100644
index 0000000..e8b420f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-306.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:20:22 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:07:59.307753", "start": "Mon, 07 Jul 2014 07:12:22 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:20:22 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1264/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:03", "name": "CleanUp"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1264/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:18:09", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1264/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1264/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1264/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1264/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:53", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1264/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1264/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1264/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:25", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1264/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1264/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1264, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404742323, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404742323, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404742323, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404742323, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404742323, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404742323, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404742323, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404742323, ""], 
[{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404742323, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404742323, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404742323, ""]], "bot-hostname": "build168-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-307.json b/cidb/test_data/series_0/metadata-307.json
new file mode 100644
index 0000000..0fafc52
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-307.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:18:37 -0700 (PST)"}, "boards": ["duck"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:20:08.936554", "start": "Mon, 07 Jul 2014 06:58:28 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:18:37 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:04", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": 
"0:05:36", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:09", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:29", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:38", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:04", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:58", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:29", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:00", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1387, "child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741487, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741487, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741487, ""], [{"gerrit_number": "206787", 
"patch_number": "1", "internal": false}, "picked_up", 1404741487, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741487, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741487, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741487, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741487, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741487, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741487, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741487, ""]], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-308.json b/cidb/test_data/series_0/metadata-308.json
new file mode 100644
index 0000000..7e7e47a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-308.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:32:24 -0700 (PST)"}, "boards": ["peppy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:37:47.752724", "start": "Mon, 07 Jul 2014 06:54:36 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:32:24 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:00", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", 
"duration": "0:11:31", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:41", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:12:36", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:44", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:27", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:56", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:36", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:18", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:14:45", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4286, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": 
"2", "internal": true}, "picked_up", 1404741255, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741255, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741255, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741255, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741255, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741255, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741255, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741255, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741255, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741255, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741255, ""]], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-309.json b/cidb/test_data/series_0/metadata-309.json
new file mode 100644
index 0000000..751be3c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-309.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:52:15 -0700 (PST)"}, "boards": ["butterfly"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:57:54.312118", "start": "Mon, 07 Jul 2014 06:54:21 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:52:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:04", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/BuildPackages/logs/stdio", 
"summary": "Stage was successful", "board": "butterfly", "duration": "0:31:56", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:11:41", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:45", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": 
"butterfly", "duration": "0:08:03", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:17", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:06", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:17", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:10:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 11467, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": 
["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741241, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741241, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741241, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741241, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741241, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741241, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741241, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741241, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741241, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741241, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741241, ""]], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-31.json b/cidb/test_data/series_0/metadata-31.json
new file mode 100644
index 0000000..5c9a443
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-31.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:30:56 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:58:37.976045", "start": "Fri, 04 Jul 2014 18:32:18 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:30:56 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:37", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:08", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:14", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:23", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:45", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was successful", "board": "amd64-generic", "duration": "0:38:37", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:14", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:57", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:03", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": 
"amd64-generic", "duration": "0:07:35", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:48", "name": "Archive"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:09:13", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18507/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 18507, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404523932, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404523932, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404523932, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404523932, ""]], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-310.json b/cidb/test_data/series_0/metadata-310.json
new file mode 100644
index 0000000..d86173d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-310.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:29:50 -0700 (PST)"}, "boards": ["monroe"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:34:11.272393", "start": "Mon, 07 Jul 2014 06:55:39 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:29:50 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:04", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": 
"monroe", "duration": "0:11:36", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:46", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:53", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": 
null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:49", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:46", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:10:21", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2341, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": 
"167452", "patch_number": "2", "internal": true}, "picked_up", 1404741319, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741319, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741319, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741319, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741319, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741319, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741319, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741319, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741319, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741319, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741319, ""]], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-311.json b/cidb/test_data/series_0/metadata-311.json
new file mode 100644
index 0000000..80cd916
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-311.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 07:38:11 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "167452", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "167453", "total_fail": 1, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206786", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206787", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206648", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "206764", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206647", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206649", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206780", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:39:19.614339", "start": "Mon, 07 Jul 2014 06:58:51 -0700 (PST)", "finish": "Mon, 07 Jul 2014 07:38:11 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": 
null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:04", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:16", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:08", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:16", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:56", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:27", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:10:04", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:29", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:15:04", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1386, 
"child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741512, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741512, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741512, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741512, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741512, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741512, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741512, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741512, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741512, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "picked_up", 1404741512, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741512, ""]], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-312.json b/cidb/test_data/series_0/metadata-312.json
new file mode 100644
index 0000000..97c1c32
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-312.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:35:00 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:08:30.357183", "start": "Sun, 06 Jul 2014 01:26:30 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:35:00 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": 
"0:03:01", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:57", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:26", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:59", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:42:39", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:10", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:04", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:17", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:54", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:47", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2649/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2649, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635184, ""]], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-313.json b/cidb/test_data/series_0/metadata-313.json
new file mode 100644
index 0000000..3b6978b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-313.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 01:43:07 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:18:02.586610", "start": "Sun, 06 Jul 2014 01:25:05 -0700 (PST)", "finish": "Sun, 06 Jul 2014 01:43:07 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", 
"duration": "0:02:57", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:04:34", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:43", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:27", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:03:21", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:23", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2469/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2469, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635100, ""]], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-314.json b/cidb/test_data/series_0/metadata-314.json
new file mode 100644
index 0000000..af68d3b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-314.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:58:38 -0700 (PST)"}, "boards": ["wolf"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:29:43.336024", "start": "Sun, 06 Jul 2014 01:28:54 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:58:38 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:18", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:03:05", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:53", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:04", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:04", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:07", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:05:28", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:14", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:05:28", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "wolf", "duration": "0:07:55", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:57:25", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3446/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 3446, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635328, ""]], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-315.json b/cidb/test_data/series_0/metadata-315.json
new file mode 100644
index 0000000..0024c92
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-315.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 01:58:43 -0700 (PST)"}, "boards": ["leon"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:34:49.414868", "start": "Sun, 06 Jul 2014 01:23:54 -0700 (PST)", "finish": "Sun, 06 Jul 2014 01:58:43 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:03:01", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:13:09", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:53", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:09:16", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:08", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:06", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:22", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:45", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "leon", "duration": "0:08:15", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2884/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 2884, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635028, ""]], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-316.json b/cidb/test_data/series_0/metadata-316.json
new file mode 100644
index 0000000..0b8a4f4
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-316.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:52:50 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:25:54.946238", "start": "Sun, 06 Jul 2014 01:26:56 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:52:50 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:03:00", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:39", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:09:29", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:29", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:06", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:42", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:39", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:42", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:22", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:53:27", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17397/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 17397, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635209, ""]], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-317.json b/cidb/test_data/series_0/metadata-317.json
new file mode 100644
index 0000000..b5d0080
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-317.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:53:31 -0700 (PST)"}, "boards": ["parrot"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:25:47.118689", "start": "Sun, 06 Jul 2014 01:27:44 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:53:31 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:04:04", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:32", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:03:01", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:51", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:21", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:30", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:09", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:26", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:07:01", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:50:43", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12116/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 12116, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635258, ""]], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-318.json b/cidb/test_data/series_0/metadata-318.json
new file mode 100644
index 0000000..c7baa97
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-318.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 03:05:58 -0700 (PST)"}, "boards": ["stumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:39:25.557926", "start": "Sun, 06 Jul 2014 01:26:32 -0700 (PST)", "finish": "Sun, 06 Jul 2014 03:05:58 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:26", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:03:05", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:45", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:09:27", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:40", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:07", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:28", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:48", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:20", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:06:55", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17441/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17441, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635187, ""]], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-319.json b/cidb/test_data/series_0/metadata-319.json
new file mode 100644
index 0000000..abe8f71
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-319.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:23:29 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:01:15.549787", "start": "Sun, 06 Jul 2014 01:22:14 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:23:29 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:40", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:31", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:03:04", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:44", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:14", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:11:27", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:38:47", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:39", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:38", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:59", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:08", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18719/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CommitQueueCompletion"}], "build-number": 18719, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404634930, ""]], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-32.json b/cidb/test_data/series_0/metadata-32.json
new file mode 100644
index 0000000..9a59517
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-32.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:12:28 -0700 (PST)"}, "boards": ["stout"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:35:13.699014", "start": "Fri, 04 Jul 2014 18:37:14 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:12:28 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:33", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:38", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", 
"duration": "0:12:15", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:47", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:14", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:14", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:08", "name": "DebugSymbols"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:10", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11454/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11454, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524226, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524226, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524226, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524226, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524226, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": 
false}, "picked_up", 1404524226, ""]], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-320.json b/cidb/test_data/series_0/metadata-320.json
new file mode 100644
index 0000000..a59814a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-320.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:59:07 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:36:31.245715", "start": "Sun, 06 Jul 2014 01:22:36 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:59:07 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": 
"0:03:02", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:47", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:50", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:09:08", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:57", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:10", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:01", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:01", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:08:31", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:03:47", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16880/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "CommitQueueCompletion"}], "build-number": 16880, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404634950, ""]], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-321.json b/cidb/test_data/series_0/metadata-321.json
new file mode 100644
index 0000000..6bfbffa
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-321.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:23:01 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:00:48.328762", "start": "Sun, 06 Jul 2014 01:22:13 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:23:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:38", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:29", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:03:03", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:38", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:21", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:01", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:38:19", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:09:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:00:58", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:20", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:06:04", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18515/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:16", "name": "CommitQueueCompletion"}], "build-number": 18515, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404634929, ""]], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-322.json b/cidb/test_data/series_0/metadata-322.json
new file mode 100644
index 0000000..c214221
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-322.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 01:58:55 -0700 (PST)"}, "boards": ["stout"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:34:34.121848", "start": "Sun, 06 Jul 2014 01:24:21 -0700 (PST)", "finish": "Sun, 06 Jul 2014 01:58:55 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:39", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:02:56", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:12:03", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:38", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:09:31", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:14", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:40", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:10", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:12", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:41", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:36", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:28", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11462/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 11462, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635056, ""]], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-323.json b/cidb/test_data/series_0/metadata-323.json
new file mode 100644
index 0000000..96b187d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-323.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:46:38 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:20:55.590998", "start": "Sun, 06 Jul 2014 01:25:42 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:46:38 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": 
"daisy_spring", "duration": "0:06:25", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:21:59", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:59", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": 
"SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:56", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:28", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:50", "name": "Archive"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:12", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:36:50", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4385/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 4385, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635137, ""]], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-324.json b/cidb/test_data/series_0/metadata-324.json
new file mode 100644
index 0000000..f088b81
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-324.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:51:54 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:29:05.685578", "start": "Sun, 06 Jul 2014 01:22:48 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:51:54 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:12:04", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:02", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:48", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:24", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:58:46", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1893/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1893, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-325.json b/cidb/test_data/series_0/metadata-325.json
new file mode 100644
index 0000000..69c0db0
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-325.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 01:54:30 -0700 (PST)"}, "boards": ["nyan"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:31:21.617828", "start": "Sun, 06 Jul 2014 01:23:08 -0700 (PST)", "finish": "Sun, 06 Jul 2014 01:54:30 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:01", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:12:16", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:36", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:42", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:05", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:42", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "nyan", "duration": "0:06:33", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1889/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1889, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404634982, ""]], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-326.json b/cidb/test_data/series_0/metadata-326.json
new file mode 100644
index 0000000..0780970
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-326.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:49:51 -0700 (PST)"}, "boards": ["daisy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:23:14.237530", "start": "Sun, 06 Jul 2014 01:26:37 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:49:51 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:06", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:13:08", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:46", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:42", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:04:00", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:42", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:05", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:51:23", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15132/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 15132, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635191, ""]], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-327.json b/cidb/test_data/series_0/metadata-327.json
new file mode 100644
index 0000000..7e7df32
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-327.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 01:57:11 -0700 (PST)"}, "boards": ["samus"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:33:25.596997", "start": "Sun, 06 Jul 2014 01:23:46 -0700 (PST)", "finish": "Sun, 06 Jul 2014 01:57:11 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:03:07", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:34", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:09:18", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:29", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:02", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:51", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:51", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:12", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2883/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2883, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635021, ""]], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-328.json b/cidb/test_data/series_0/metadata-328.json
new file mode 100644
index 0000000..4f7b284
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-328.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:00:16 -0700 (PST)"}, "boards": ["panther"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:35:42.426805", "start": "Sun, 06 Jul 2014 01:24:34 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:00:16 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:02:59", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:54", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:27", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:09:55", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:05", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:07:43", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:42", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:09:27", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2328/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2328, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635069, ""]], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-329.json b/cidb/test_data/series_0/metadata-329.json
new file mode 100644
index 0000000..e1ff84b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-329.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:44:47 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:19:45.458164", "start": "Sun, 06 Jul 2014 01:25:01 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:44:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": 
"0:03:00", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:12:59", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:55", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:33", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:33", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:39", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:47:41", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4847/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4847, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635096, ""]], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-33.json b/cidb/test_data/series_0/metadata-33.json
new file mode 100644
index 0000000..01e6a89
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-33.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:01:51 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:24:45.324841", "start": "Fri, 04 Jul 2014 18:37:06 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:01:51 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:31:59", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:47", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:41", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:02", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:32:34", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4377/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4377, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": 
"2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524218, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524218, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524218, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524218, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524218, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524218, ""]], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-330.json b/cidb/test_data/series_0/metadata-330.json
new file mode 100644
index 0000000..92c191d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-330.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:58:56 -0700 (PST)"}, "boards": ["link"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:33:19.820525", "start": "Sun, 06 Jul 2014 01:25:36 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:58:56 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:43", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:03:06", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:12:04", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:19", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:06", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:48", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:05", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:00:24", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17439/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:29", "name": "CommitQueueCompletion"}], "build-number": 17439, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635131, ""]], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-331.json b/cidb/test_data/series_0/metadata-331.json
new file mode 100644
index 0000000..2916629
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-331.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 01:59:12 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:33:02.181142", "start": "Sun, 06 Jul 2014 01:26:09 -0700 (PST)", "finish": "Sun, 06 Jul 2014 01:59:12 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:43", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:03:02", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:58", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:51", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:54", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:08", "name": "CPEExport"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:54", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:26", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2149/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2149, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635165, ""]], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-332.json b/cidb/test_data/series_0/metadata-332.json
new file mode 100644
index 0000000..2881cd8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-332.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:14:46 -0700 (PST)"}, "boards": ["falco"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:49:04.316001", "start": "Sun, 06 Jul 2014 01:25:42 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:14:46 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:43", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:23", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:23:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:09:49", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:25", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:05", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:50", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:05", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:46", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4291/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4291, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635137, ""]], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-333.json b/cidb/test_data/series_0/metadata-333.json
new file mode 100644
index 0000000..37b549a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-333.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 01:50:32 -0700 (PST)"}, "boards": ["gizmo"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:22:39.454590", "start": "Sun, 06 Jul 2014 01:27:53 -0700 (PST)", "finish": "Sun, 06 Jul 2014 01:50:32 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:57", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:06", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:06:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:33", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:05", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:07", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:07", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/832/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 832, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635268, ""]], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-334.json b/cidb/test_data/series_0/metadata-334.json
new file mode 100644
index 0000000..1cb529e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-334.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:00:27 -0700 (PST)"}, "boards": ["rambi"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:33:59.869480", "start": "Sun, 06 Jul 2014 01:26:27 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:00:27 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:31", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:02:57", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:11:38", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:09:37", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:14", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:02", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:40", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:43", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:51", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2329/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2329, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635182, ""]], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-335.json b/cidb/test_data/series_0/metadata-335.json
new file mode 100644
index 0000000..825e15f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-335.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 01:47:47 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:08:56.472210", "start": "Sun, 06 Jul 2014 01:38:51 -0700 (PST)", "finish": "Sun, 06 Jul 2014 01:47:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1256/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1256/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:41", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1256/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1256/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1256/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1256/steps/SetupBoard/logs/stdio", "summary": "Stage was 
successful", "board": "lumpy", "duration": "0:02:25", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1256/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1256/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1256/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1256/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1256/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1256, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635926, ""]], "bot-hostname": "build168-m2.golo.chromium.org", 
"sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-336.json b/cidb/test_data/series_0/metadata-336.json
new file mode 100644
index 0000000..ba62329
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-336.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 01:43:22 -0700 (PST)"}, "boards": ["duck"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:20:18.899666", "start": "Sun, 06 Jul 2014 01:23:04 -0700 (PST)", "finish": "Sun, 06 Jul 2014 01:43:22 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:00", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:51", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:10", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:38", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:31", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:55", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:17", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:07", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:26", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1379/steps/CommitQueueCompletion/logs/stdio", 
"summary": "Stage was successful", "board": "", "duration": "0:00:31", "name": "CommitQueueCompletion"}], "build-number": 1379, "child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404634979, ""]], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-337.json b/cidb/test_data/series_0/metadata-337.json
new file mode 100644
index 0000000..6175677
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-337.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:01:43 -0700 (PST)"}, "boards": ["peppy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:34:43.232296", "start": "Sun, 06 Jul 2014 01:27:00 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:01:43 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:03:03", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:54", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:37", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:09:41", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:57", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:29", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:06", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:55", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:50", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:32", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:48", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4278/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4278, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635215, ""]], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-338.json b/cidb/test_data/series_0/metadata-338.json
new file mode 100644
index 0000000..6d325a4
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-338.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:14:59 -0700 (PST)"}, "boards": ["butterfly"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:50:21.639209", "start": "Sun, 06 Jul 2014 01:24:37 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:14:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", 
"duration": "0:06:24", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:24:07", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:10:27", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:04", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:18", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:14", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:38", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:17", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11459/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 11459, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635072, ""]], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-339.json b/cidb/test_data/series_0/metadata-339.json
new file mode 100644
index 0000000..19bbc17
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-339.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 01:56:54 -0700 (PST)"}, "boards": ["monroe"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:33:10.832331", "start": "Sun, 06 Jul 2014 01:23:44 -0700 (PST)", "finish": "Sun, 06 Jul 2014 01:56:54 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:03:06", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:34", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:08", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:05", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:38", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:08:09", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2333/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2333, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635018, ""]], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-34.json b/cidb/test_data/series_0/metadata-34.json
new file mode 100644
index 0000000..0413ba1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-34.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:09:15 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:34:58.317005", "start": "Fri, 04 Jul 2014 18:34:17 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:09:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:35", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:59", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:32", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:11", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:33", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:21", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:02:15", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1885/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1885, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-340.json b/cidb/test_data/series_0/metadata-340.json
new file mode 100644
index 0000000..b3fa783
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-340.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 02:05:03 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206614", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:40:28.467226", "start": "Sun, 06 Jul 2014 01:24:34 -0700 (PST)", "finish": "Sun, 06 Jul 2014 02:05:03 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", 
"board": "stumpy_moblab", "duration": "0:03:05", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:38", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:12:39", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": 
"0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:12:22", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:00:57", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:23", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:09:34", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", 
"duration": "0:06:25", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:13", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1378/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1378, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc1", "full": "R38-6024.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206614", "patch_number": "2", "internal": false}, "picked_up", 1404635068, ""]], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-341.json b/cidb/test_data/series_0/metadata-341.json
new file mode 100644
index 0000000..1dbb31c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-341.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 09:03:30 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [], "time": {"duration": "1:02:17.302027", "start": "Sat, 05 Jul 2014 08:01:13 -0700 (PST)", "finish": "Sat, 05 Jul 2014 09:03:30 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:45", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:08", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:30", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:39:58", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:13", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:04", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:44", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2644/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2644, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-342.json b/cidb/test_data/series_0/metadata-342.json
new file mode 100644
index 0000000..bf00348
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-342.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:12:07 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [], "time": {"duration": "0:13:27.933714", "start": "Sat, 05 Jul 2014 07:58:40 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:12:07 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:11", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:55", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:40", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:22", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:03:20", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2464/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2464, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-343.json b/cidb/test_data/series_0/metadata-343.json
new file mode 100644
index 0000000..38824ad
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-343.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 09:22:01 -0700 (PST)"}, "boards": ["wolf"], "changes": [], "time": {"duration": "1:24:17.521619", "start": "Sat, 05 Jul 2014 07:57:43 -0700 (PST)", "finish": "Sat, 05 Jul 2014 09:22:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:48", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:51", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:05:17", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:52", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:09", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:55:14", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3441/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 3441, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-344.json b/cidb/test_data/series_0/metadata-344.json
new file mode 100644
index 0000000..ed26193
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-344.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:30:38 -0700 (PST)"}, "boards": ["leon"], "changes": [], "time": {"duration": "0:31:16.009160", "start": "Sat, 05 Jul 2014 07:59:22 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:30:38 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:12:35", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:07:58", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:48", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:07:22", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2879/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2879, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-345.json b/cidb/test_data/series_0/metadata-345.json
new file mode 100644
index 0000000..459dede
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-345.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 09:21:41 -0700 (PST)"}, "boards": ["lumpy"], "changes": [], "time": {"duration": "1:22:42.096778", "start": "Sat, 05 Jul 2014 07:58:59 -0700 (PST)", "finish": "Sat, 05 Jul 2014 09:21:41 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:45", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:37", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:16", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:04", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:38", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:04", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:07:27", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:52:56", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17392/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "CommitQueueCompletion"}], "build-number": 17392, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-346.json b/cidb/test_data/series_0/metadata-346.json
new file mode 100644
index 0000000..acf9ff8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-346.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 09:26:42 -0700 (PST)"}, "boards": ["parrot"], "changes": [], "time": {"duration": "1:23:50.008466", "start": "Sat, 05 Jul 2014 08:02:52 -0700 (PST)", "finish": "Sat, 05 Jul 2014 09:26:42 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:34", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:13:09", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:40", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:21", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:45", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:47", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:50:11", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12111/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:33", "name": "CommitQueueCompletion"}], "build-number": 12111, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-347.json b/cidb/test_data/series_0/metadata-347.json
new file mode 100644
index 0000000..9738f30
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-347.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 09:34:25 -0700 (PST)"}, "boards": ["stumpy"], "changes": [], "time": {"duration": "1:34:32.294855", "start": "Sat, 05 Jul 2014 07:59:53 -0700 (PST)", "finish": "Sat, 05 Jul 2014 09:34:25 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:14", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:53", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:15", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:53", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:00", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:39", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:00", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:07:36", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:05:05", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17436/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 17436, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-348.json b/cidb/test_data/series_0/metadata-348.json
new file mode 100644
index 0000000..c80cc79
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-348.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:56:18 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [], "time": {"duration": "0:59:13.667717", "start": "Sat, 05 Jul 2014 07:57:04 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:56:18 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:08", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:37", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:04", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:13", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:33", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:39:30", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:36", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:56", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:40", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18714/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CommitQueueCompletion"}], "build-number": 18714, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-349.json b/cidb/test_data/series_0/metadata-349.json
new file mode 100644
index 0000000..fd65b73
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-349.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 09:30:31 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [], "time": {"duration": "1:31:51.012923", "start": "Sat, 05 Jul 2014 07:58:40 -0700 (PST)", "finish": "Sat, 05 Jul 2014 09:30:31 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:13", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:48", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:40", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:07", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:11", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:07", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:46", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:02:21", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16875/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 16875, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-35.json b/cidb/test_data/series_0/metadata-35.json
new file mode 100644
index 0000000..e1449c0
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-35.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:05:38 -0700 (PST)"}, "boards": ["nyan"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:28:30.114650", "start": "Fri, 04 Jul 2014 18:37:08 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:05:38 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:16", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:11:06", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:31", 
"name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:59", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:39", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:06:35", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1881/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1881, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524220, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524220, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524220, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524220, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524220, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524220, ""]], "bot-hostname": 
"build125-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-350.json b/cidb/test_data/series_0/metadata-350.json
new file mode 100644
index 0000000..3c6945c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-350.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:56:04 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [], "time": {"duration": "0:58:59.684030", "start": "Sat, 05 Jul 2014 07:57:04 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:56:04 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:36", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:04", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:16", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:24", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:14:02", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:39:01", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:08", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:06:07", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:55", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18510/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:16", "name": "CommitQueueCompletion"}], "build-number": 18510, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-351.json b/cidb/test_data/series_0/metadata-351.json
new file mode 100644
index 0000000..5f47793
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-351.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:31:41 -0700 (PST)"}, "boards": ["stout"], "changes": [], "time": {"duration": "0:30:52.814794", "start": "Sat, 05 Jul 2014 08:00:48 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:31:41 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:31", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:33", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:04", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:04", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:10", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:06", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:32", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:13", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:37", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11457/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11457, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-352.json b/cidb/test_data/series_0/metadata-352.json
new file mode 100644
index 0000000..e2c0a5c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-352.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 09:14:47 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [], "time": {"duration": "1:16:04.139970", "start": "Sat, 05 Jul 2014 07:58:43 -0700 (PST)", "finish": "Sat, 05 Jul 2014 09:14:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:49", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:21:19", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:47", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:42", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:42", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:40", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:33:54", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4380/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4380, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-353.json b/cidb/test_data/series_0/metadata-353.json
new file mode 100644
index 0000000..9044d6a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-353.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 09:28:00 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:30:36.207229", "start": "Sat, 05 Jul 2014 07:57:24 -0700 (PST)", "finish": "Sat, 05 Jul 2014 09:28:00 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:00", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:39", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:04", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:30", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:44", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:23", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:00:45", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1888/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1888, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-354.json b/cidb/test_data/series_0/metadata-354.json
new file mode 100644
index 0000000..774ec37
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-354.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:26:40 -0700 (PST)"}, "boards": ["nyan"], "changes": [], "time": {"duration": "0:28:54.422713", "start": "Sat, 05 Jul 2014 07:57:46 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:26:40 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:16", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:11:19", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:47", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:40", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:42", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:05", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:42", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:06:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1884/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1884, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-355.json b/cidb/test_data/series_0/metadata-355.json
new file mode 100644
index 0000000..bd8ae35
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-355.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 09:19:11 -0700 (PST)"}, "boards": ["daisy"], "changes": [], "time": {"duration": "1:18:47.903456", "start": "Sat, 05 Jul 2014 08:00:23 -0700 (PST)", "finish": "Sat, 05 Jul 2014 09:19:11 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:08", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:11:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:37", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:57", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:41", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:58", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:50:49", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15127/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 15127, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-356.json b/cidb/test_data/series_0/metadata-356.json
new file mode 100644
index 0000000..7524385
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-356.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:29:37 -0700 (PST)"}, "boards": ["samus"], "changes": [], "time": {"duration": "0:30:02.723939", "start": "Sat, 05 Jul 2014 07:59:34 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:29:37 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:08", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:00:47", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:01", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:30", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:01", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:07:26", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2878/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 2878, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-357.json b/cidb/test_data/series_0/metadata-357.json
new file mode 100644
index 0000000..1a2f9f1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-357.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:35:38 -0700 (PST)"}, "boards": ["panther"], "changes": [], "time": {"duration": "0:34:11.153019", "start": "Sat, 05 Jul 2014 08:01:27 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:35:38 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:11", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:16", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:04", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:40", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:11", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2323/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:23", "name": "CommitQueueCompletion"}], "build-number": 2323, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-358.json b/cidb/test_data/series_0/metadata-358.json
new file mode 100644
index 0000000..ac1c59b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-358.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 09:12:41 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [], "time": {"duration": "1:14:53.874516", "start": "Sat, 05 Jul 2014 07:57:47 -0700 (PST)", "finish": "Sat, 05 Jul 2014 09:12:41 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:17", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:11:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:56", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:38", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:33", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:14", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:33", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:41", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:46:02", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4842/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 4842, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-359.json b/cidb/test_data/series_0/metadata-359.json
new file mode 100644
index 0000000..f52fcb0
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-359.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 09:26:27 -0700 (PST)"}, "boards": ["link"], "changes": [], "time": {"duration": "1:28:49.011823", "start": "Sat, 05 Jul 2014 07:57:38 -0700 (PST)", "finish": "Sat, 05 Jul 2014 09:26:27 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:12:09", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:04", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:51", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:35", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:52", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:20", "name": "UploadTestArtifacts"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:58:27", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17434/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17434, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-36.json b/cidb/test_data/series_0/metadata-36.json
new file mode 100644
index 0000000..c64c208
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-36.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:57:46 -0700 (PST)"}, "boards": ["daisy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:21:43.577444", "start": "Fri, 04 Jul 2014 18:36:02 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:57:46 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:11:19", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", 
"duration": "0:05:41", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:36", "name": "DebugSymbols"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:57", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:37", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:28", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:53:03", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15124/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 15124, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524154, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524154, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524154, ""], 
[{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524154, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524154, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524154, ""]], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-360.json b/cidb/test_data/series_0/metadata-360.json
new file mode 100644
index 0000000..157ee64
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-360.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:28:29 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [], "time": {"duration": "0:30:35.651696", "start": "Sat, 05 Jul 2014 07:57:54 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:28:29 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:20", "name": "SetupBoard"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:50", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:33", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:54", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:06", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:54", "name": 
"Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:40", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2144/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2144, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-361.json b/cidb/test_data/series_0/metadata-361.json
new file mode 100644
index 0000000..31befe3
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-361.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:41:57 -0700 (PST)"}, "boards": ["falco"], "changes": [], "time": {"duration": "0:44:14.577099", "start": "Sat, 05 Jul 2014 07:57:42 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:41:57 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:21:39", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:29", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:54", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:31", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:09", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:50", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:09", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:05", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4286/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4286, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-362.json b/cidb/test_data/series_0/metadata-362.json
new file mode 100644
index 0000000..e49f910
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-362.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:21:05 -0700 (PST)"}, "boards": ["gizmo"], "changes": [], "time": {"duration": "0:21:37.680500", "start": "Sat, 05 Jul 2014 07:59:28 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:21:05 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:58", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/UnitTest/logs/stdio", "summary": 
"Stage was successful", "board": "gizmo", "duration": "0:06:53", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:30", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:05", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:14", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/827/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], 
"build-number": 827, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-363.json b/cidb/test_data/series_0/metadata-363.json
new file mode 100644
index 0000000..6840e15
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-363.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:28:41 -0700 (PST)"}, "boards": ["rambi"], "changes": [], "time": {"duration": "0:31:02.415188", "start": "Sat, 05 Jul 2014 07:57:39 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:28:41 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:11:34", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:24", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:08", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:07", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:46", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:07", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:07:43", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2324/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2324, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-364.json b/cidb/test_data/series_0/metadata-364.json
new file mode 100644
index 0000000..5471afd
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-364.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:19:21 -0700 (PST)"}, "boards": ["lumpy"], "changes": [], "time": {"duration": "0:03:10.073527", "start": "Sat, 05 Jul 2014 08:16:11 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:19:21 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1251/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1251/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:11", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1251/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1251/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1251/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1251/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:41", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1251/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1251/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1251/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1251/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1251/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1251, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build168-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-365.json b/cidb/test_data/series_0/metadata-365.json
new file mode 100644
index 0000000..87192b2
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-365.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:15:44 -0700 (PST)"}, "boards": ["duck"], "changes": [], "time": {"duration": "0:17:56.534171", "start": "Sat, 05 Jul 2014 07:57:47 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:15:44 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:35", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:10", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/UnitTest/logs/stdio", "summary": "Stage 
was successful", "board": "duck", "duration": "0:03:27", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:29", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:55", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:15", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:05", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:15", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1374/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1374, 
"child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-366.json b/cidb/test_data/series_0/metadata-366.json
new file mode 100644
index 0000000..70c8883
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-366.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:29:46 -0700 (PST)"}, "boards": ["peppy"], "changes": [], "time": {"duration": "0:31:33.224323", "start": "Sat, 05 Jul 2014 07:58:12 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:29:46 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:31", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:39", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:41", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:04:52", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:11", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:05", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:44", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:20", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:52", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4273/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4273, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-367.json b/cidb/test_data/series_0/metadata-367.json
new file mode 100644
index 0000000..0fc8491
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-367.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:49:51 -0700 (PST)"}, "boards": ["butterfly"], "changes": [], "time": {"duration": "0:46:39.096203", "start": "Sat, 05 Jul 2014 08:03:12 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:49:51 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:16", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:47", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:23:41", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:46", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:09", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:36", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:19", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:59", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:19", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:09", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11454/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 11454, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-368.json b/cidb/test_data/series_0/metadata-368.json
new file mode 100644
index 0000000..e3584d4
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-368.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:29:00 -0700 (PST)"}, "boards": ["monroe"], "changes": [], "time": {"duration": "0:29:59.319219", "start": "Sat, 05 Jul 2014 07:59:01 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:29:00 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:20", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:52", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:53", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:53", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:12", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2328/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2328, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-369.json b/cidb/test_data/series_0/metadata-369.json
new file mode 100644
index 0000000..b48e8d3
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-369.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 08:40:46 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [], "time": {"duration": "0:41:01.630878", "start": "Sat, 05 Jul 2014 07:59:44 -0700 (PST)", "finish": "Sat, 05 Jul 2014 08:40:46 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:12:15", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:15", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:52", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:28", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:10:02", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:30", "name": "Archive"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:52", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1373/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:25", "name": "CommitQueueCompletion"}], "build-number": 1373, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc2", "full": "R38-6021.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-37.json b/cidb/test_data/series_0/metadata-37.json
new file mode 100644
index 0000000..7d0f69b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-37.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:09:02 -0700 (PST)"}, "boards": ["samus"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:34:01.698995", "start": "Fri, 04 Jul 2014 18:35:01 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:09:02 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:18", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:18", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:54", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": 
"0:11:57", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:10:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:06", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:01", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:43", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:09:53", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2875/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2875, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524093, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524093, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524093, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524093, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524093, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524093, ""]], 
"bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-370.json b/cidb/test_data/series_0/metadata-370.json
new file mode 100644
index 0000000..d2f6524
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-370.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:18:33 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [], "time": {"duration": "1:06:09.958319", "start": "Sun, 06 Jul 2014 07:12:23 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:18:33 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:17", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:09", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:29", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:38", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:43:41", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:46", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:00:46", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:55", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:57", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:13", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2650/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2650, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-371.json b/cidb/test_data/series_0/metadata-371.json
new file mode 100644
index 0000000..ae888bb
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-371.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 07:30:09 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [], "time": {"duration": "0:14:58.615481", "start": "Sun, 06 Jul 2014 07:15:10 -0700 (PST)", "finish": "Sun, 06 Jul 2014 07:30:09 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:12", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:00", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:40", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:23", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:35", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:04:35", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2470/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2470, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-372.json b/cidb/test_data/series_0/metadata-372.json
new file mode 100644
index 0000000..cacf6f1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-372.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:44:15 -0700 (PST)"}, "boards": ["wolf"], "changes": [], "time": {"duration": "1:30:04.696580", "start": "Sun, 06 Jul 2014 07:14:10 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:44:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:28", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:47", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:48", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:35", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:57", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:37", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:52", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:57:55", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3447/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 3447, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-373.json b/cidb/test_data/series_0/metadata-373.json
new file mode 100644
index 0000000..2935923
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-373.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 07:48:28 -0700 (PST)"}, "boards": ["leon"], "changes": [], "time": {"duration": "0:34:20.687100", "start": "Sun, 06 Jul 2014 07:14:07 -0700 (PST)", "finish": "Sun, 06 Jul 2014 07:48:28 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:38", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:48", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:10:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:53", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:08:06", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:40", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:10:23", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2885/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2885, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-374.json b/cidb/test_data/series_0/metadata-374.json
new file mode 100644
index 0000000..19f594e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-374.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:41:10 -0700 (PST)"}, "boards": ["lumpy"], "changes": [], "time": {"duration": "1:27:52.024180", "start": "Sun, 06 Jul 2014 07:13:18 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:41:10 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:26", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:12:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:21", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:48", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:10:56", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:54:40", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17398/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17398, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-375.json b/cidb/test_data/series_0/metadata-375.json
new file mode 100644
index 0000000..9cd76a4
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-375.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:43:05 -0700 (PST)"}, "boards": ["parrot"], "changes": [], "time": {"duration": "1:25:44.160945", "start": "Sun, 06 Jul 2014 07:17:21 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:43:05 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:04:10", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:47", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:13:10", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:35", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:00:56", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:23", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:55", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:42", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:51:01", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12117/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 12117, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-376.json b/cidb/test_data/series_0/metadata-376.json
new file mode 100644
index 0000000..1e64326
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-376.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:53:19 -0700 (PST)"}, "boards": ["stumpy"], "changes": [], "time": {"duration": "1:39:29.075021", "start": "Sun, 06 Jul 2014 07:13:50 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:53:19 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:13", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:12:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:08", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:51", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:10:24", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:07:05", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17442/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17442, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-377.json b/cidb/test_data/series_0/metadata-377.json
new file mode 100644
index 0000000..a2704a5
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-377.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:11:17 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [], "time": {"duration": "1:00:04.416528", "start": "Sun, 06 Jul 2014 07:11:13 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:11:17 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:36", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:32", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:13", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:15", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:45", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:39:51", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:23", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:37", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:58", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:36", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18720/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CommitQueueCompletion"}], "build-number": 18720, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-378.json b/cidb/test_data/series_0/metadata-378.json
new file mode 100644
index 0000000..60a5852
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-378.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:51:40 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [], "time": {"duration": "1:37:49.623154", "start": "Sun, 06 Jul 2014 07:13:50 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:51:40 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:13", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:52", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:12", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:39", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:24", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:54", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:10:51", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:04:02", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16881/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 16881, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-379.json b/cidb/test_data/series_0/metadata-379.json
new file mode 100644
index 0000000..d8b3080
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-379.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:10:52 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [], "time": {"duration": "0:59:38.152370", "start": "Sun, 06 Jul 2014 07:11:13 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:10:52 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:37", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:31", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:26", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:16", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:25", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:39:11", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:00:47", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:30", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:06:36", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:35", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18516/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:16", "name": "CommitQueueCompletion"}], "build-number": 18516, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-38.json b/cidb/test_data/series_0/metadata-38.json
new file mode 100644
index 0000000..51cf902
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-38.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:08:09 -0700 (PST)"}, "boards": ["panther"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:34:18.872760", "start": "Fri, 04 Jul 2014 18:33:50 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:08:09 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:41", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:17", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:31", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": 
"panther", "duration": "0:12:05", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:09", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:11", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:29", "name": "DebugSymbols"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:31", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:29", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2320/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2320, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524022, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524022, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524022, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524022, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524022, ""], [{"gerrit_number": "206581", 
"patch_number": "1", "internal": false}, "picked_up", 1404524022, ""]], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-380.json b/cidb/test_data/series_0/metadata-380.json
new file mode 100644
index 0000000..aacaa65
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-380.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 07:47:52 -0700 (PST)"}, "boards": ["stout"], "changes": [], "time": {"duration": "0:36:29.355540", "start": "Sun, 06 Jul 2014 07:11:23 -0700 (PST)", "finish": "Sun, 06 Jul 2014 07:47:52 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:12:42", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:37", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:12:21", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:48", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:17", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:22", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:22", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:21", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11463/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11463, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-381.json b/cidb/test_data/series_0/metadata-381.json
new file mode 100644
index 0000000..8358afe
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-381.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:41:47 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [], "time": {"duration": "1:28:38.097802", "start": "Sun, 06 Jul 2014 07:13:09 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:41:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:48", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:31:54", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:58", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:17", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:50", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:06", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:36:09", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4386/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:23", "name": "CommitQueueCompletion"}], "build-number": 4386, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-382.json b/cidb/test_data/series_0/metadata-382.json
new file mode 100644
index 0000000..3d9f881
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-382.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:43:34 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:31:44.981601", "start": "Sun, 06 Jul 2014 07:11:49 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:43:34 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:21", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:57", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:32", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:34", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:44", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:59:43", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1894/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1894, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-383.json b/cidb/test_data/series_0/metadata-383.json
new file mode 100644
index 0000000..78e5e99
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-383.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 07:42:18 -0700 (PST)"}, "boards": ["nyan"], "changes": [], "time": {"duration": "0:29:46.443229", "start": "Sun, 06 Jul 2014 07:12:32 -0700 (PST)", "finish": "Sun, 06 Jul 2014 07:42:18 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:43", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:15", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:11:01", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:45", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:44", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:42", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:04", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:42", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:07:51", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1890/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1890, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-384.json b/cidb/test_data/series_0/metadata-384.json
new file mode 100644
index 0000000..4009d23
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-384.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:36:21 -0700 (PST)"}, "boards": ["daisy"], "changes": [], "time": {"duration": "1:22:07.542930", "start": "Sun, 06 Jul 2014 07:14:13 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:36:21 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:12:42", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:40", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:43", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:42", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:56", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:42", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:52", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:52:22", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15133/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:29", "name": "CommitQueueCompletion"}], "build-number": 15133, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-385.json b/cidb/test_data/series_0/metadata-385.json
new file mode 100644
index 0000000..88e9226
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-385.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 07:50:05 -0700 (PST)"}, "boards": ["samus"], "changes": [], "time": {"duration": "0:34:09.806870", "start": "Sun, 06 Jul 2014 07:15:55 -0700 (PST)", "finish": "Sun, 06 Jul 2014 07:50:05 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:59", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:15", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:10:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:00:56", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:05", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:45", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:10:26", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2884/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "CommitQueueCompletion"}], "build-number": 2884, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-386.json b/cidb/test_data/series_0/metadata-386.json
new file mode 100644
index 0000000..97188de
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-386.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 07:49:50 -0700 (PST)"}, "boards": ["panther"], "changes": [], "time": {"duration": "0:33:51.921302", "start": "Sun, 06 Jul 2014 07:15:58 -0700 (PST)", "finish": "Sun, 06 Jul 2014 07:49:50 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:04", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:06", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:57", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:08", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:34", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:15", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:36", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:39", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2329/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2329, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-387.json b/cidb/test_data/series_0/metadata-387.json
new file mode 100644
index 0000000..cad5684
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-387.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:36:04 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [], "time": {"duration": "1:17:11.467405", "start": "Sun, 06 Jul 2014 07:18:52 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:36:04 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:29", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:15", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:11:35", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:57", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:40", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:14", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:50", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:23", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:48:20", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4848/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 4848, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-388.json b/cidb/test_data/series_0/metadata-388.json
new file mode 100644
index 0000000..6c70e12
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-388.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:49:43 -0700 (PST)"}, "boards": ["link"], "changes": [], "time": {"duration": "1:33:49.044957", "start": "Sun, 06 Jul 2014 07:15:54 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:49:43 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:21", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:48", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:43", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:12", "name": "UploadTestArtifacts"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:01:24", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17440/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17440, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-389.json b/cidb/test_data/series_0/metadata-389.json
new file mode 100644
index 0000000..547015a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-389.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 07:45:06 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [], "time": {"duration": "0:33:24.168830", "start": "Sun, 06 Jul 2014 07:11:42 -0700 (PST)", "finish": "Sun, 06 Jul 2014 07:45:06 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:19", "name": "SetupBoard"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:43", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:48", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:47", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:22", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:42", "name": 
"Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:09:59", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2150/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2150, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-39.json b/cidb/test_data/series_0/metadata-39.json
new file mode 100644
index 0000000..b03a974
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-39.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:51:11 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:16:24.896685", "start": "Fri, 04 Jul 2014 18:34:46 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:51:11 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:15", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:11:08", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/ChromeSDK/logs/stdio", "summary": "Stage was 
successful", "board": "peach_pit", "duration": "0:05:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:56", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": 
"peach_pit", "duration": "0:03:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:39", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:16", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:48:18", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4839/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4839, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524078, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 
1404524078, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524078, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524078, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524078, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524078, ""]], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-390.json b/cidb/test_data/series_0/metadata-390.json
new file mode 100644
index 0000000..e9509c1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-390.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:11:34 -0700 (PST)"}, "boards": ["falco"], "changes": [], "time": {"duration": "0:57:13.936702", "start": "Sun, 06 Jul 2014 07:14:21 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:11:34 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:41", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:32:18", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:53", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:10:47", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:29", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:09", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:48", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:09:41", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4292/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4292, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-391.json b/cidb/test_data/series_0/metadata-391.json
new file mode 100644
index 0000000..6ba05b2
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-391.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 07:35:48 -0700 (PST)"}, "boards": ["gizmo"], "changes": [], "time": {"duration": "0:21:52.691317", "start": "Sun, 06 Jul 2014 07:13:55 -0700 (PST)", "finish": "Sun, 06 Jul 2014 07:35:48 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:57", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/UnitTest/logs/stdio", "summary": 
"Stage was successful", "board": "gizmo", "duration": "0:07:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:32", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:00", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:07", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:06", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/833/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], 
"build-number": 833, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-392.json b/cidb/test_data/series_0/metadata-392.json
new file mode 100644
index 0000000..b7ca87f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-392.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 07:51:59 -0700 (PST)"}, "boards": ["rambi"], "changes": [], "time": {"duration": "0:35:12.936804", "start": "Sun, 06 Jul 2014 07:16:46 -0700 (PST)", "finish": "Sun, 06 Jul 2014 07:51:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:25", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:59", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:11:31", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:12:18", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:21", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:48", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:31", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2330/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2330, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-393.json b/cidb/test_data/series_0/metadata-393.json
new file mode 100644
index 0000000..e3fe256
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-393.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 07:31:01 -0700 (PST)"}, "boards": ["lumpy"], "changes": [], "time": {"duration": "0:03:09.614677", "start": "Sun, 06 Jul 2014 07:27:52 -0700 (PST)", "finish": "Sun, 06 Jul 2014 07:31:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1257/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1257/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:38", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1257/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1257/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1257/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1257/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:40", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1257/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1257/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1257/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1257/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1257/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1257, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build168-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-394.json b/cidb/test_data/series_0/metadata-394.json
new file mode 100644
index 0000000..26a9e7b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-394.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 07:32:32 -0700 (PST)"}, "boards": ["duck"], "changes": [], "time": {"duration": "0:20:03.564959", "start": "Sun, 06 Jul 2014 07:12:28 -0700 (PST)", "finish": "Sun, 06 Jul 2014 07:32:32 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:34", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:11", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/UnitTest/logs/stdio", "summary": "Stage 
was successful", "board": "duck", "duration": "0:05:27", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:32", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:00", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:01", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:16", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:58", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1380/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1380, 
"child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-395.json b/cidb/test_data/series_0/metadata-395.json
new file mode 100644
index 0000000..6812f09
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-395.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 07:49:37 -0700 (PST)"}, "boards": ["peppy"], "changes": [], "time": {"duration": "0:36:39.266537", "start": "Sun, 06 Jul 2014 07:12:57 -0700 (PST)", "finish": "Sun, 06 Jul 2014 07:49:37 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:12:33", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:38", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:12:32", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:35", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:22", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:32", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:15", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:36", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4279/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 4279, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-396.json b/cidb/test_data/series_0/metadata-396.json
new file mode 100644
index 0000000..cff7b2c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-396.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 08:11:01 -0700 (PST)"}, "boards": ["butterfly"], "changes": [], "time": {"duration": "0:58:09.776935", "start": "Sun, 06 Jul 2014 07:12:52 -0700 (PST)", "finish": "Sun, 06 Jul 2014 08:11:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:47", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:32:09", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:11:41", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:53", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:00", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:06", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:02", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:10:23", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11460/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 11460, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-397.json b/cidb/test_data/series_0/metadata-397.json
new file mode 100644
index 0000000..3065bbb
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-397.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 07:46:31 -0700 (PST)"}, "boards": ["monroe"], "changes": [], "time": {"duration": "0:33:47.928364", "start": "Sun, 06 Jul 2014 07:12:43 -0700 (PST)", "finish": "Sun, 06 Jul 2014 07:46:31 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:14", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:07", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:45", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:55", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:00:58", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:51", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:43", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:45", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2334/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2334, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-398.json b/cidb/test_data/series_0/metadata-398.json
new file mode 100644
index 0000000..be0fb53
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-398.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 07:54:44 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [], "time": {"duration": "0:38:41.073529", "start": "Sun, 06 Jul 2014 07:16:03 -0700 (PST)", "finish": "Sun, 06 Jul 2014 07:54:44 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:00", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:09", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:14", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:55", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:22", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:10:02", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:24", "name": "Archive"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:41", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1379/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1379, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2083.1", "platform": "6024.0.0-rc2", "full": "R38-6024.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.014406.tar.xz", "cl_actions": [], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.06.014406", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-399.json b/cidb/test_data/series_0/metadata-399.json
new file mode 100644
index 0000000..318368f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-399.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 10:06:56 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "1:07:32.413349", "start": "Sun, 06 Jul 2014 08:59:24 -0700 (PST)", "finish": "Sun, 06 Jul 2014 10:06:56 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": 
"0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:31", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:29", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:38", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:44:38", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:45", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:07", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:37", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:27", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:03", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2651/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2651, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662357, ""]], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-4.json b/cidb/test_data/series_0/metadata-4.json
new file mode 100644
index 0000000..84059f2
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-4.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 08:36:26 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2657", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2477", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3454", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2892", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17405", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12124", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17449", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18727", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16888", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18523", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11470", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4393", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1901", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1897", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15140", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2891", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2336", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4855", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17447", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2157", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4299", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1386", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2337", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1264", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1387", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4286", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11467", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2341", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/840", "reason": "None"}}, "boards": [], "changes": [{"patch_number": "2", "total_pass": 1, "gerrit_number": "167452", "total_fail": 1, "pass": 1, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "167453", "total_fail": 1, "pass": 1, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206786", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206787", "total_fail": 0, "pass": 1, "fail": 0, "internal": 
false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "205535", "total_fail": 1, "pass": 1, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "205534", "total_fail": 1, "pass": 1, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206648", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "3", "total_pass": 1, "gerrit_number": "206764", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206647", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206649", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206780", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}], "metadata-version": "2", "child-configs": [], "build-number": 1926, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [[{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "picked_up", 1404741080, ""], [{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "picked_up", 1404741080, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "picked_up", 1404741080, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "picked_up", 1404741080, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "picked_up", 1404741080, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "picked_up", 1404741080, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "picked_up", 1404741080, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404741080, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404741080, ""], [{"gerrit_number": "206649", "patch_number": "1", 
"internal": false}, "picked_up", 1404741080, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "picked_up", 1404741080, ""], [{"gerrit_number": "167452", "patch_number": "2", "internal": true}, "submitted", 1404747155, ""], [{"gerrit_number": "167453", "patch_number": "2", "internal": true}, "submitted", 1404747159, ""], [{"gerrit_number": "206786", "patch_number": "1", "internal": false}, "submitted", 1404747164, ""], [{"gerrit_number": "206787", "patch_number": "1", "internal": false}, "submitted", 1404747278, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "submitted", 1404747282, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "submitted", 1404747288, ""], [{"gerrit_number": "206648", "patch_number": "1", "internal": false}, "submitted", 1404747293, ""], [{"gerrit_number": "206764", "patch_number": "3", "internal": false}, "submitted", 1404747297, ""], [{"gerrit_number": "206647", "patch_number": "1", "internal": false}, "submitted", 1404747306, ""], [{"gerrit_number": "206649", "patch_number": "1", "internal": false}, "submitted", 1404747310, ""], [{"gerrit_number": "206780", "patch_number": "1", "internal": false}, "submitted", 1404747314, ""]], "builder-name": "CQ master", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc2", "full": "R38-6028.0.0-rc2", "milestone": "38"}, "time": {"duration": "1:43:34.434958", "start": "Mon, 07 Jul 2014 06:52:51 -0700 (PST)", "finish": "Mon, 07 Jul 2014 08:36:26 -0700 (PST)"}, "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1926/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:05", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1926/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "2:21:38", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1926/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1926/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:00", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1926/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1926/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1926/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1926/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:38:46", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1926/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": 
"", "duration": "0:00:26", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-40.json b/cidb/test_data/series_0/metadata-40.json
new file mode 100644
index 0000000..0e7a5b1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-40.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:10:58 -0700 (PST)"}, "boards": ["link"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:36:46.247741", "start": "Fri, 04 Jul 2014 18:34:12 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:10:58 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:28", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:08", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": 
"0:11:56", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:10", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:51", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:14", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:53", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:05", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:03:02", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17431/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:35", "name": "CommitQueueCompletion"}], "build-number": 17431, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524044, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524044, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524044, ""], 
[{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524044, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524044, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524044, ""]], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-400.json b/cidb/test_data/series_0/metadata-400.json
new file mode 100644
index 0000000..87fc85e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-400.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:14:48 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:14:54.219352", "start": "Sun, 06 Jul 2014 08:59:54 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:14:48 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", 
"duration": "0:01:13", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:54", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:38", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:21", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:03", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:32", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:04:38", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:27", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2471/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2471, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662387, ""]], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-401.json b/cidb/test_data/series_0/metadata-401.json
new file mode 100644
index 0000000..b5a50cb
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-401.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 10:28:49 -0700 (PST)"}, "boards": ["wolf"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "1:29:49.720979", "start": "Sun, 06 Jul 2014 08:58:59 -0700 (PST)", "finish": "Sun, 06 Jul 2014 10:28:49 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:19", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:35", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:39", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:46", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:47", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:58", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:46", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "wolf", "duration": "0:09:49", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:57:44", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3448/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 3448, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662333, ""]], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-402.json b/cidb/test_data/series_0/metadata-402.json
new file mode 100644
index 0000000..3f5ecf8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-402.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:37:23 -0700 (PST)"}, "boards": ["leon"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:33:49.754579", "start": "Sun, 06 Jul 2014 09:03:33 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:37:23 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:25", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:17", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:43", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:10:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:08:04", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:47", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "leon", "duration": "0:09:51", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2886/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2886, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662606, ""]], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-403.json b/cidb/test_data/series_0/metadata-403.json
new file mode 100644
index 0000000..50396f7
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-403.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 10:29:03 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "1:27:15.171815", "start": "Sun, 06 Jul 2014 09:01:47 -0700 (PST)", "finish": "Sun, 06 Jul 2014 10:29:03 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:22", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:19", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:25", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:12:29", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:18", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:24", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:42", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:10:36", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:54:17", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17399/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:32", "name": "CommitQueueCompletion"}], "build-number": 17399, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662501, ""]], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-404.json b/cidb/test_data/series_0/metadata-404.json
new file mode 100644
index 0000000..c96b053
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-404.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 10:31:33 -0700 (PST)"}, "boards": ["parrot"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "1:27:24.688886", "start": "Sun, 06 Jul 2014 09:04:08 -0700 (PST)", "finish": "Sun, 06 Jul 2014 10:31:33 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:04:05", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:21", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:13", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:13:16", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:34", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:21", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:42", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:36", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:53:25", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12118/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 12118, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662641, ""]], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-405.json b/cidb/test_data/series_0/metadata-405.json
new file mode 100644
index 0000000..525c54f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-405.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 10:38:12 -0700 (PST)"}, "boards": ["stumpy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "1:39:45.129605", "start": "Sun, 06 Jul 2014 08:58:26 -0700 (PST)", "finish": "Sun, 06 Jul 2014 10:38:12 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:56", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:23", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:34", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:12:29", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:10", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:49", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:10:31", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:06:51", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17443/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 17443, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662300, ""]], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-406.json b/cidb/test_data/series_0/metadata-406.json
new file mode 100644
index 0000000..f4ed72c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-406.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:57:12 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:59:35.136666", "start": "Sun, 06 Jul 2014 08:57:37 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:57:12 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:37", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:31", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:19", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:16", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:39:15", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:14", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:00:58", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:02", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:37", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:51", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:33", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18721/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CommitQueueCompletion"}], "build-number": 18721, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662251, ""]], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-407.json b/cidb/test_data/series_0/metadata-407.json
new file mode 100644
index 0000000..43248b7
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-407.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 10:36:34 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "1:36:42.408994", "start": "Sun, 06 Jul 2014 08:59:51 -0700 (PST)", "finish": "Sun, 06 Jul 2014 10:36:34 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": 
"0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:15", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:52", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:26", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:30", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:10", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:12", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:10:51", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:03:54", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16882/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 16882, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662385, ""]], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-408.json b/cidb/test_data/series_0/metadata-408.json
new file mode 100644
index 0000000..ad63487
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-408.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:57:21 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:59:12.763400", "start": "Sun, 06 Jul 2014 08:58:08 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:57:21 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:29", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:20", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:21", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:26", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:43", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:38:37", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:00:55", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:02", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:06:10", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:51", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18517/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 18517, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662283, ""]], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-409.json b/cidb/test_data/series_0/metadata-409.json
new file mode 100644
index 0000000..b103b84
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-409.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:36:05 -0700 (PST)"}, "boards": ["stout"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:34:52.075341", "start": "Sun, 06 Jul 2014 09:01:13 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:36:05 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:17", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:35", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:12:09", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:44", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:20", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:19", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:23", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:22", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:09", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11464/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11464, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662467, ""]], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-41.json b/cidb/test_data/series_0/metadata-41.json
new file mode 100644
index 0000000..05bca1d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-41.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:10:37 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:33:39.058399", "start": "Fri, 04 Jul 2014 18:36:58 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:10:37 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:01", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:50", "name": "BuildImage"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:01", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:49", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:24", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:52", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:09:42", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2141/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 2141, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524210, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, 
"picked_up", 1404524210, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524210, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524210, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524210, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524210, ""]], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-410.json b/cidb/test_data/series_0/metadata-410.json
new file mode 100644
index 0000000..5087fa6
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-410.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 10:39:19 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "1:40:05.218362", "start": "Sun, 06 Jul 2014 08:59:14 -0700 (PST)", "finish": "Sun, 06 Jul 2014 10:39:19 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:57", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": 
"daisy_spring", "duration": "0:04:45", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:31:42", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:55", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": 
"SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:50", "name": "Archive"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:15", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:47:48", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4387/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 4387, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662348, ""]], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-411.json b/cidb/test_data/series_0/metadata-411.json
new file mode 100644
index 0000000..dce9bea
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-411.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 10:34:09 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:33:18.371615", "start": "Sun, 06 Jul 2014 09:00:51 -0700 (PST)", "finish": "Sun, 06 Jul 2014 10:34:09 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:12:07", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:11", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:46", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:55", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:01:11", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1895/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1895, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-412.json b/cidb/test_data/series_0/metadata-412.json
new file mode 100644
index 0000000..da9d45e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-412.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:32:15 -0700 (PST)"}, "boards": ["nyan"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:30:18.332241", "start": "Sun, 06 Jul 2014 09:01:57 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:32:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:08", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:33", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:15", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:12:08", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:45", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:43", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:32", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:05", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:32", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "nyan", "duration": "0:07:03", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1891/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:33", "name": "CommitQueueCompletion"}], "build-number": 1891, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662510, ""]], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-413.json b/cidb/test_data/series_0/metadata-413.json
new file mode 100644
index 0000000..bc03fb3
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-413.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 10:22:23 -0700 (PST)"}, "boards": ["daisy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "1:22:12.593326", "start": "Sun, 06 Jul 2014 09:00:11 -0700 (PST)", "finish": "Sun, 06 Jul 2014 10:22:23 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:21", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:11:13", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:12", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:41", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:42", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:57", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:43", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:14", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:52:36", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15134/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:24", "name": "CommitQueueCompletion"}], "build-number": 15134, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662404, ""]], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-414.json b/cidb/test_data/series_0/metadata-414.json
new file mode 100644
index 0000000..4367d6b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-414.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:36:50 -0700 (PST)"}, "boards": ["samus"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:33:51.447709", "start": "Sun, 06 Jul 2014 09:02:58 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:36:50 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:29", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:17", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:47", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:10:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:04", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:50", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:10:14", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2885/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2885, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662571, ""]], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-415.json b/cidb/test_data/series_0/metadata-415.json
new file mode 100644
index 0000000..a766546
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-415.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:33:54 -0700 (PST)"}, "boards": ["panther"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:34:06.763117", "start": "Sun, 06 Jul 2014 08:59:47 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:33:54 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:22", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:29", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:47", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:10", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:42", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:11", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:44", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:03", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2330/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2330, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662381, ""]], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-416.json b/cidb/test_data/series_0/metadata-416.json
new file mode 100644
index 0000000..1a711e4
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-416.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 10:17:56 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "1:16:04.334364", "start": "Sun, 06 Jul 2014 09:01:52 -0700 (PST)", "finish": "Sun, 06 Jul 2014 10:17:56 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": 
"0:01:16", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:11:08", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:53", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:39", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:35", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:35", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:14", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:47:52", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4849/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4849, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662505, ""]], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-417.json b/cidb/test_data/series_0/metadata-417.json
new file mode 100644
index 0000000..2e26a21
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-417.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 10:35:06 -0700 (PST)"}, "boards": ["link"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "1:34:51.791134", "start": "Sun, 06 Jul 2014 09:00:14 -0700 (PST)", "finish": "Sun, 06 Jul 2014 10:35:06 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:57", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:49", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:50", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:01:43", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17441/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 17441, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662408, ""]], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-418.json b/cidb/test_data/series_0/metadata-418.json
new file mode 100644
index 0000000..0eb1b5c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-418.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:32:00 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:33:25.563904", "start": "Sun, 06 Jul 2014 08:58:35 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:32:00 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:13", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:30", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:47", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:57", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:51", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:06", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:24", "name": "CPEExport"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:09", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:09:59", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2151/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2151, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662308, ""]], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-419.json b/cidb/test_data/series_0/metadata-419.json
new file mode 100644
index 0000000..e46028b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-419.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:56:07 -0700 (PST)"}, "boards": ["falco"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:57:34.906571", "start": "Sun, 06 Jul 2014 08:58:32 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:56:07 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:46", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:32:23", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:10:56", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:28", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:08", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:38", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:09:30", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4293/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 4293, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662304, ""]], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-42.json b/cidb/test_data/series_0/metadata-42.json
new file mode 100644
index 0000000..0a9f796
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-42.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:29:51 -0700 (PST)"}, "boards": ["falco"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:57:05.434776", "start": "Fri, 04 Jul 2014 18:32:46 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:29:51 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:43", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:32:10", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": 
"0:10:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:34", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:12", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:30", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:48", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:09:22", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4283/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 4283, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404523958, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404523958, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404523958, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404523958, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404523958, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404523958, ""]], 
"bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-420.json b/cidb/test_data/series_0/metadata-420.json
new file mode 100644
index 0000000..b0bc5bc
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-420.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:22:35 -0700 (PST)"}, "boards": ["gizmo"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:21:45.479953", "start": "Sun, 06 Jul 2014 09:00:49 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:22:35 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:55", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:07:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:32", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:55", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:11", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:17", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:10", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/834/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 834, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662443, ""]], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-421.json b/cidb/test_data/series_0/metadata-421.json
new file mode 100644
index 0000000..d5667cb
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-421.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:35:14 -0700 (PST)"}, "boards": ["rambi"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:35:39.873932", "start": "Sun, 06 Jul 2014 08:59:34 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:35:14 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:23", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:12:08", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:12:12", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:09:56", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:21", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:46", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:23", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2331/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2331, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662366, ""]], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-422.json b/cidb/test_data/series_0/metadata-422.json
new file mode 100644
index 0000000..34847f5
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-422.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:20:05 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:03:13.176920", "start": "Sun, 06 Jul 2014 09:16:52 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:20:05 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1258/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1258/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:28", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1258/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1258/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1258/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1258/steps/SetupBoard/logs/stdio", "summary": "Stage was 
successful", "board": "lumpy", "duration": "0:00:42", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1258/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1258/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1258/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1258/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1258/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 1258, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404663406, ""]], "bot-hostname": "build168-m2.golo.chromium.org", 
"sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-423.json b/cidb/test_data/series_0/metadata-423.json
new file mode 100644
index 0000000..49a9c59
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-423.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:21:32 -0700 (PST)"}, "boards": ["duck"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:20:04.557876", "start": "Sun, 06 Jul 2014 09:01:28 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:21:32 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:37", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:29", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:30", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:03", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:01", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:14", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:59", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1381/steps/CommitQueueCompletion/logs/stdio", 
"summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1381, "child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662481, ""]], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-424.json b/cidb/test_data/series_0/metadata-424.json
new file mode 100644
index 0000000..0f5b6c6
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-424.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:35:31 -0700 (PST)"}, "boards": ["peppy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:35:37.108975", "start": "Sun, 06 Jul 2014 08:59:54 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:35:31 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:19", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:54", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:35", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:12:18", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:42", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:22", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:06", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:34", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:09:00", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:34", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4280/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4280, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662387, ""]], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-425.json b/cidb/test_data/series_0/metadata-425.json
new file mode 100644
index 0000000..fb2556a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-425.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 10:00:33 -0700 (PST)"}, "boards": ["butterfly"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:58:17.802503", "start": "Sun, 06 Jul 2014 09:02:15 -0700 (PST)", "finish": "Sun, 06 Jul 2014 10:00:33 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:21", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", 
"duration": "0:04:47", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:32:20", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:11:40", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:43", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:07", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:06", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:09", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:10:14", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11461/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 11461, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662528, ""]], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-426.json b/cidb/test_data/series_0/metadata-426.json
new file mode 100644
index 0000000..575e441
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-426.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:39:03 -0700 (PST)"}, "boards": ["monroe"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:33:48.292455", "start": "Sun, 06 Jul 2014 09:05:14 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:39:03 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:22", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:19", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:16", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:46", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:35", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:47", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:37", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2335/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2335, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662708, ""]], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-427.json b/cidb/test_data/series_0/metadata-427.json
new file mode 100644
index 0000000..41a606d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-427.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 09:39:17 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "205441", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "0:39:05.071715", "start": "Sun, 06 Jul 2014 09:00:12 -0700 (PST)", "finish": "Sun, 06 Jul 2014 09:39:17 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", 
"board": "stumpy_moblab", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:23", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:16", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": 
"0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:30", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:17", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:10:00", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", 
"duration": "0:07:19", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:36", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1380/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:30", "name": "CommitQueueCompletion"}], "build-number": 1380, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2083.1", "platform": "6025.0.0-rc1", "full": "R38-6025.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "cl_actions": [[{"gerrit_number": "205441", "patch_number": "1", "internal": false}, "picked_up", 1404662406, ""]], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-428.json b/cidb/test_data/series_0/metadata-428.json
new file mode 100644
index 0000000..316182b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-428.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:47:21 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:02:18.332909", "start": "Sat, 05 Jul 2014 09:45:03 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:47:21 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:24", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:45", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": 
"0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:13", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:29", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:37", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:39:54", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:00:46", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:01", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:25", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:42", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2645/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2645, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578697, ""]], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-429.json b/cidb/test_data/series_0/metadata-429.json
new file mode 100644
index 0000000..91a5540
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-429.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 09:57:54 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:13:43.698061", "start": "Sat, 05 Jul 2014 09:44:10 -0700 (PST)", "finish": "Sat, 05 Jul 2014 09:57:54 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:05", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:45", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", 
"duration": "0:01:12", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:59", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:42", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:22", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:03:29", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:24", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2465/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2465, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578646, ""]], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-43.json b/cidb/test_data/series_0/metadata-43.json
new file mode 100644
index 0000000..211501d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-43.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 18:55:16 -0700 (PST)"}, "boards": ["gizmo"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:21:43.158699", "start": "Fri, 04 Jul 2014 18:33:33 -0700 (PST)", "finish": "Fri, 04 Jul 2014 18:55:16 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:53", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": 
"0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:07:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:39", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:56", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:45", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:10", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/824/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 824, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524005, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524005, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524005, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524005, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524005, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524005, ""]], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-430.json b/cidb/test_data/series_0/metadata-430.json
new file mode 100644
index 0000000..dc2802e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-430.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 11:06:02 -0700 (PST)"}, "boards": ["wolf"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:23:18.975342", "start": "Sat, 05 Jul 2014 09:42:43 -0700 (PST)", "finish": "Sat, 05 Jul 2014 11:06:02 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:19", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:26", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:52", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:51", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:05:15", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:44", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "wolf", "duration": "0:07:18", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:53:54", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3442/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 3442, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578559, ""]], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-431.json b/cidb/test_data/series_0/metadata-431.json
new file mode 100644
index 0000000..0f1dceb
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-431.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:12:20 -0700 (PST)"}, "boards": ["leon"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:30:38.171486", "start": "Sat, 05 Jul 2014 09:41:42 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:12:20 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:19", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:08:00", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:22", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:49", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "leon", "duration": "0:07:21", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2880/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueCompletion"}], "build-number": 2880, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578497, ""]], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-432.json b/cidb/test_data/series_0/metadata-432.json
new file mode 100644
index 0000000..629b5e9
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-432.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 11:00:51 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:20:53.780450", "start": "Sat, 05 Jul 2014 09:39:58 -0700 (PST)", "finish": "Sat, 05 Jul 2014 11:00:51 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:21", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:15", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:07", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:36", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:07", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:07:22", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:51:29", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17393/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17393, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578393, ""]], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-433.json b/cidb/test_data/series_0/metadata-433.json
new file mode 100644
index 0000000..7f0ed31
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-433.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 11:05:46 -0700 (PST)"}, "boards": ["parrot"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:24:54.633568", "start": "Sat, 05 Jul 2014 09:40:51 -0700 (PST)", "finish": "Sat, 05 Jul 2014 11:05:46 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:01", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:20", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:56", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:38", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:00:55", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:21", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:47", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:46", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:51:54", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12112/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 12112, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578446, ""]], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-434.json b/cidb/test_data/series_0/metadata-434.json
new file mode 100644
index 0000000..6ad7031
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-434.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 11:15:37 -0700 (PST)"}, "boards": ["stumpy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:33:53.071001", "start": "Sat, 05 Jul 2014 09:41:44 -0700 (PST)", "finish": "Sat, 05 Jul 2014 11:15:37 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:21", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:37", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:54", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:18", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:55", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:32", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:55", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:07:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:04:01", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17437/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17437, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578499, ""]], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-435.json b/cidb/test_data/series_0/metadata-435.json
new file mode 100644
index 0000000..a16b56b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-435.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:37:06 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:58:23.266082", "start": "Sat, 05 Jul 2014 09:38:43 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:37:06 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:36", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:04", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:18", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:10", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:11", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:31", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:38:46", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:08", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:56", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:33", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:05", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:36", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18715/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CommitQueueCompletion"}], "build-number": 18715, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578319, ""]], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-436.json b/cidb/test_data/series_0/metadata-436.json
new file mode 100644
index 0000000..90ca9f1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-436.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 11:11:43 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:32:40.633877", "start": "Sat, 05 Jul 2014 09:39:02 -0700 (PST)", "finish": "Sat, 05 Jul 2014 11:11:43 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": 
"0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:09", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:48", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:32", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:57", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:29", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:03:15", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16876/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:34", "name": "CommitQueueCompletion"}], "build-number": 16876, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578338, ""]], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-437.json b/cidb/test_data/series_0/metadata-437.json
new file mode 100644
index 0000000..e5df9c0
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-437.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:40:10 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:58:47.560089", "start": "Sat, 05 Jul 2014 09:41:23 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:40:10 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:17", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:04", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:18", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:24", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:43", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:38:54", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:26", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:06:02", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:57", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18511/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CommitQueueCompletion"}], "build-number": 18511, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578479, ""]], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-438.json b/cidb/test_data/series_0/metadata-438.json
new file mode 100644
index 0000000..5eefacb
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-438.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:17:16 -0700 (PST)"}, "boards": ["stout"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:30:57.501060", "start": "Sat, 05 Jul 2014 09:46:19 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:17:16 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:27", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:19", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:30", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:38", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:06", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:07", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:04:54", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:36", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:08", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:37", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11458/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11458, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578774, ""]], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-439.json b/cidb/test_data/series_0/metadata-439.json
new file mode 100644
index 0000000..87892fe
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-439.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:56:36 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:14:09.833242", "start": "Sat, 05 Jul 2014 09:42:26 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:56:36 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": 
"daisy_spring", "duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:20:55", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:55", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": 
"SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:53", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:49", "name": "Archive"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:10", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:32:49", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4381/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 4381, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578542, ""]], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-44.json b/cidb/test_data/series_0/metadata-44.json
new file mode 100644
index 0000000..1d00f1c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-44.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:09:24 -0700 (PST)"}, "boards": ["rambi"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:35:16.104458", "start": "Fri, 04 Jul 2014 18:34:08 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:09:24 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:30", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:12:00", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": 
"0:12:04", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:13", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:26", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:51", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:29", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2321/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2321, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524039, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524039, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524039, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524039, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524039, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524039, ""]], 
"bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-440.json b/cidb/test_data/series_0/metadata-440.json
new file mode 100644
index 0000000..9ea9b44
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-440.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 11:09:53 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:27:56.253090", "start": "Sat, 05 Jul 2014 09:41:56 -0700 (PST)", "finish": "Sat, 05 Jul 2014 11:09:53 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:49", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:59", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:30", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:46", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:18", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:58:04", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1889/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1889, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-441.json b/cidb/test_data/series_0/metadata-441.json
new file mode 100644
index 0000000..18ddd1f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-441.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:11:59 -0700 (PST)"}, "boards": ["nyan"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:28:46.135193", "start": "Sat, 05 Jul 2014 09:43:12 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:11:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:19", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:10:59", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:41", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:41", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:34", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:03", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:34", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "nyan", "duration": "0:06:50", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1885/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:31", "name": "CommitQueueCompletion"}], "build-number": 1885, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578586, ""]], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-442.json b/cidb/test_data/series_0/metadata-442.json
new file mode 100644
index 0000000..2e25c2a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-442.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:59:05 -0700 (PST)"}, "boards": ["daisy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:17:58.857248", "start": "Sat, 05 Jul 2014 09:41:06 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:59:05 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:11:17", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:46", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:04:19", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:56", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:04:19", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:09", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:49:42", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15128/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 15128, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578462, ""]], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-443.json b/cidb/test_data/series_0/metadata-443.json
new file mode 100644
index 0000000..cd4705e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-443.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:11:11 -0700 (PST)"}, "boards": ["samus"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:30:06.563096", "start": "Sat, 05 Jul 2014 09:41:05 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:11:11 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:21", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:19", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:00", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:01", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:25", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:01", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:07:23", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2879/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2879, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578460, ""]], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-444.json b/cidb/test_data/series_0/metadata-444.json
new file mode 100644
index 0000000..bd215ad
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-444.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:17:08 -0700 (PST)"}, "boards": ["panther"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:35:00.334439", "start": "Sat, 05 Jul 2014 09:42:08 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:17:08 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:19", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:15", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:02", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:10", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:48", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:06", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2324/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2324, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578522, ""]], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-445.json b/cidb/test_data/series_0/metadata-445.json
new file mode 100644
index 0000000..f6c7474
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-445.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:55:35 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:14:12.637831", "start": "Sat, 05 Jul 2014 09:41:22 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:55:35 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:43", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": 
"0:01:16", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:10:55", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:56", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:39", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:19", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:46:11", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4843/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4843, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578478, ""]], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-446.json b/cidb/test_data/series_0/metadata-446.json
new file mode 100644
index 0000000..41a5137
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-446.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 11:11:27 -0700 (PST)"}, "boards": ["link"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:28:01.648804", "start": "Sat, 05 Jul 2014 09:43:25 -0700 (PST)", "finish": "Sat, 05 Jul 2014 11:11:27 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:23", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:09", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:55", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:49", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:33", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:58:12", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17435/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17435, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578600, ""]], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-447.json b/cidb/test_data/series_0/metadata-447.json
new file mode 100644
index 0000000..1ad8024
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-447.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:12:07 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:29:31.965531", "start": "Sat, 05 Jul 2014 09:42:35 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:12:07 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:20", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:47", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:35", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:02", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:05", "name": "CPEExport"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:02", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:42", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2145/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2145, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578550, ""]], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-448.json b/cidb/test_data/series_0/metadata-448.json
new file mode 100644
index 0000000..6e0a0e6
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-448.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:24:50 -0700 (PST)"}, "boards": ["falco"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:43:50.195000", "start": "Sat, 05 Jul 2014 09:41:00 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:24:50 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:42", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:21:07", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:36", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:08", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:36", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:15", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:53", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:15", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:49", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4287/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4287, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578455, ""]], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-449.json b/cidb/test_data/series_0/metadata-449.json
new file mode 100644
index 0000000..234047b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-449.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:04:58 -0700 (PST)"}, "boards": ["gizmo"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:21:43.760526", "start": "Sat, 05 Jul 2014 09:43:14 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:04:58 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:21", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:57", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:06:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:32", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:07", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:22", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:02", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/828/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 828, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578587, ""]], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-45.json b/cidb/test_data/series_0/metadata-45.json
new file mode 100644
index 0000000..87ab88b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-45.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 18:52:48 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:03:10.063718", "start": "Fri, 04 Jul 2014 18:49:38 -0700 (PST)", "finish": "Fri, 04 Jul 2014 18:52:48 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1248/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:03", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1248/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:33", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1248/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1248/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1248/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1248/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:40", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1248/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1248/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1248/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1248/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1248/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1248, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524970, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524970, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524970, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524970, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524970, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524970, ""]], "bot-hostname": "build168-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-450.json b/cidb/test_data/series_0/metadata-450.json
new file mode 100644
index 0000000..2fb6f4b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-450.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:10:58 -0700 (PST)"}, "boards": ["rambi"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:30:53.185691", "start": "Sat, 05 Jul 2014 09:40:05 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:10:58 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:58", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:30", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:11:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:14", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:00:53", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:41", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:45", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:00", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2325/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 2325, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578399, ""]], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-451.json b/cidb/test_data/series_0/metadata-451.json
new file mode 100644
index 0000000..ad7879a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-451.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 09:58:59 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:03:11.287076", "start": "Sat, 05 Jul 2014 09:55:48 -0700 (PST)", "finish": "Sat, 05 Jul 2014 09:58:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1252/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1252/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1252/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1252/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1252/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1252/steps/SetupBoard/logs/stdio", "summary": "Stage was 
successful", "board": "lumpy", "duration": "0:00:41", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1252/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1252/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1252/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:49", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1252/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1252/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 1252, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404579344, ""]], "bot-hostname": "build168-m2.golo.chromium.org", 
"sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-452.json b/cidb/test_data/series_0/metadata-452.json
new file mode 100644
index 0000000..5f1006b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-452.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:00:02 -0700 (PST)"}, "boards": ["duck"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:17:50.290330", "start": "Sat, 05 Jul 2014 09:42:11 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:00:02 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:18", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:34", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:08", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:25", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:32", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:56", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:02", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:10", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1375/steps/CommitQueueCompletion/logs/stdio", 
"summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1375, "child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578527, ""]], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-453.json b/cidb/test_data/series_0/metadata-453.json
new file mode 100644
index 0000000..acd4e58
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-453.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:17:32 -0700 (PST)"}, "boards": ["peppy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:31:30.676842", "start": "Sat, 05 Jul 2014 09:46:01 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:17:32 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:18", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:37", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:28", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:04:54", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:09", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:04:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:45", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:43", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:59", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4274/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:36", "name": "CommitQueueCompletion"}], "build-number": 4274, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578756, ""]], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-454.json b/cidb/test_data/series_0/metadata-454.json
new file mode 100644
index 0000000..adbe28d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-454.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:28:42 -0700 (PST)"}, "boards": ["butterfly"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:45:35.157375", "start": "Sat, 05 Jul 2014 09:43:07 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:28:42 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:33", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", 
"duration": "0:04:51", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:21:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:54", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:33", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:34", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:23", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:17", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:43", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:20", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11455/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11455, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578582, ""]], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-455.json b/cidb/test_data/series_0/metadata-455.json
new file mode 100644
index 0000000..bf6001b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-455.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:11:47 -0700 (PST)"}, "boards": ["monroe"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:30:40.173244", "start": "Sat, 05 Jul 2014 09:41:07 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:11:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:12:07", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:54", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:39", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:08", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2329/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2329, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578462, ""]], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-456.json b/cidb/test_data/series_0/metadata-456.json
new file mode 100644
index 0000000..d792a8a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-456.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 10:21:05 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "206684", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:39:13.533079", "start": "Sat, 05 Jul 2014 09:41:52 -0700 (PST)", "finish": "Sat, 05 Jul 2014 10:21:05 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", 
"board": "stumpy_moblab", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:34", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:00", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": 
"0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:22", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:00:47", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:28", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:10:01", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", 
"duration": "0:06:30", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:49", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1374/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1374, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2083.1", "platform": "6022.0.0-rc1", "full": "R38-6022.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [[{"gerrit_number": "206684", "patch_number": "1", "internal": false}, "picked_up", 1404578507, ""]], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-457.json b/cidb/test_data/series_0/metadata-457.json
new file mode 100644
index 0000000..9ccf8b6
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-457.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 23:00:37 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:02:39.447793", "start": "Sat, 05 Jul 2014 21:57:58 -0700 (PST)", "finish": "Sat, 05 Jul 2014 23:00:37 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": 
"0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:14", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:30", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:47", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:40:07", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:00", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:09", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:33", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2648/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2648, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622671, ""]], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-458.json b/cidb/test_data/series_0/metadata-458.json
new file mode 100644
index 0000000..72d5293
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-458.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:11:29 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:13:26.176173", "start": "Sat, 05 Jul 2014 21:58:02 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:11:29 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:45", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", 
"duration": "0:01:12", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:53", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:39", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:20", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:03:19", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2468/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2468, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622676, ""]], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-459.json b/cidb/test_data/series_0/metadata-459.json
new file mode 100644
index 0000000..7e31441
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-459.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 23:26:04 -0700 (PST)"}, "boards": ["wolf"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:32:08.942153", "start": "Sat, 05 Jul 2014 21:53:55 -0700 (PST)", "finish": "Sat, 05 Jul 2014 23:26:04 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:56", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:22", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:06", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:54", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:56", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:55", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:05:21", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:44", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "wolf", "duration": "0:07:09", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "1:03:13", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3445/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 3445, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622427, ""]], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-46.json b/cidb/test_data/series_0/metadata-46.json
new file mode 100644
index 0000000..5013728
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-46.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 18:57:06 -0700 (PST)"}, "boards": ["duck"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:20:14.159961", "start": "Fri, 04 Jul 2014 18:36:51 -0700 (PST)", "finish": "Fri, 04 Jul 2014 18:57:06 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:44", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:11", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": 
"0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:30", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:36", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:03", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:02", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:12", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:57", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1371/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1371, "child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524204, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524204, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524204, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524204, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524204, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524204, ""]], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-460.json b/cidb/test_data/series_0/metadata-460.json
new file mode 100644
index 0000000..4d2562b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-460.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:29:47 -0700 (PST)"}, "boards": ["leon"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:29:48.732027", "start": "Sat, 05 Jul 2014 21:59:58 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:29:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:28", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:08", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:07:56", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:25", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:48", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "leon", "duration": "0:07:13", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2883/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2883, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622792, ""]], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-461.json b/cidb/test_data/series_0/metadata-461.json
new file mode 100644
index 0000000..45049c9
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-461.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 23:26:12 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:28:11.004656", "start": "Sat, 05 Jul 2014 21:58:01 -0700 (PST)", "finish": "Sat, 05 Jul 2014 23:26:12 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:19", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:21", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:21", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:05", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:40", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:05", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:07:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:58:43", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17396/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17396, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622675, ""]], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-462.json b/cidb/test_data/series_0/metadata-462.json
new file mode 100644
index 0000000..228c559
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-462.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 23:27:30 -0700 (PST)"}, "boards": ["parrot"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:30:24.810324", "start": "Sat, 05 Jul 2014 21:57:05 -0700 (PST)", "finish": "Sat, 05 Jul 2014 23:27:30 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:19", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:30", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:21", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:59", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:30", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:36", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:22", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:37", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:48", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:57:17", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12115/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 12115, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622617, ""]], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-463.json b/cidb/test_data/series_0/metadata-463.json
new file mode 100644
index 0000000..a92c0a8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-463.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 23:44:31 -0700 (PST)"}, "boards": ["stumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:44:03.971993", "start": "Sat, 05 Jul 2014 22:00:27 -0700 (PST)", "finish": "Sat, 05 Jul 2014 23:44:31 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:17", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:20", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:33", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:16", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:01", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:33", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:01", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:07:27", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:14:22", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17440/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 17440, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622820, ""]], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-464.json b/cidb/test_data/series_0/metadata-464.json
new file mode 100644
index 0000000..aa66668
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-464.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:53:19 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:00:08.863865", "start": "Sat, 05 Jul 2014 21:53:10 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:53:19 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:39", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:29", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:19", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:11:04", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:15", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:39", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:39:06", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:13", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:14", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:35", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:03", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:50", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18718/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CommitQueueCompletion"}], "build-number": 18718, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622385, ""]], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-465.json b/cidb/test_data/series_0/metadata-465.json
new file mode 100644
index 0000000..0235b97
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-465.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 23:33:02 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:37:23.401611", "start": "Sat, 05 Jul 2014 21:55:38 -0700 (PST)", "finish": "Sat, 05 Jul 2014 23:33:02 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": 
"0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:51", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:38", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:53", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:06", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:06", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:36", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:08:00", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16879/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 16879, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622532, ""]], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-466.json b/cidb/test_data/series_0/metadata-466.json
new file mode 100644
index 0000000..36aacdd
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-466.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:52:22 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:59:09.315185", "start": "Sat, 05 Jul 2014 21:53:12 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:52:22 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:40", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:30", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:18", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:24", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:38:10", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:32", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:06:00", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:40", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18514/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:16", "name": "CommitQueueCompletion"}], "build-number": 18514, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622386, ""]], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-467.json b/cidb/test_data/series_0/metadata-467.json
new file mode 100644
index 0000000..786dee5
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-467.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:27:52 -0700 (PST)"}, "boards": ["stout"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:31:52.397866", "start": "Sat, 05 Jul 2014 21:55:59 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:27:52 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:21", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:38", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:07", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:09:01", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:04:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:28", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:17", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:19", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11461/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:32", "name": "CommitQueueCompletion"}], "build-number": 11461, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622550, ""]], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-468.json b/cidb/test_data/series_0/metadata-468.json
new file mode 100644
index 0000000..b200b89
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-468.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 23:19:35 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:24:50.511491", "start": "Sat, 05 Jul 2014 21:54:44 -0700 (PST)", "finish": "Sat, 05 Jul 2014 23:19:35 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": 
"daisy_spring", "duration": "0:04:43", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:21:42", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:55", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": 
"SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:11", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:39", "name": "Archive"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:10", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:42:43", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4384/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:23", "name": "CommitQueueCompletion"}], "build-number": 4384, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622477, ""]], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-469.json b/cidb/test_data/series_0/metadata-469.json
new file mode 100644
index 0000000..b4c9b25
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-469.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 23:35:08 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:35:59.715888", "start": "Sat, 05 Jul 2014 21:59:09 -0700 (PST)", "finish": "Sat, 05 Jul 2014 23:35:08 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:19", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:01", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:33", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:50", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:49", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:06:00", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1892/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1892, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-47.json b/cidb/test_data/series_0/metadata-47.json
new file mode 100644
index 0000000..7d32f95
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-47.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:12:01 -0700 (PST)"}, "boards": ["peppy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:36:45.584035", "start": "Fri, 04 Jul 2014 18:35:16 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:12:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:12:48", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:34", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": 
"0:12:33", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:37", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:24", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:09", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:28", "name": "DebugSymbols"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:35", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:09:06", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4270/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4270, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524107, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524107, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524107, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524107, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524107, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 
1404524107, ""]], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-470.json b/cidb/test_data/series_0/metadata-470.json
new file mode 100644
index 0000000..278697e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-470.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:25:56 -0700 (PST)"}, "boards": ["nyan"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:29:12.916566", "start": "Sat, 05 Jul 2014 21:56:43 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:25:56 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:17", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:11:31", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:32", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:39", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:01", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:05", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:01", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "nyan", "duration": "0:06:52", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1888/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1888, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622597, ""]], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-471.json b/cidb/test_data/series_0/metadata-471.json
new file mode 100644
index 0000000..6423f94
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-471.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 23:21:37 -0700 (PST)"}, "boards": ["daisy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:24:58.492279", "start": "Sat, 05 Jul 2014 21:56:38 -0700 (PST)", "finish": "Sat, 05 Jul 2014 23:21:37 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:08", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:19", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:11:17", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:38", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:43", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:37", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:57", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:37", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:05", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:56:44", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15131/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 15131, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622590, ""]], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-472.json b/cidb/test_data/series_0/metadata-472.json
new file mode 100644
index 0000000..b2e6984
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-472.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:28:10 -0700 (PST)"}, "boards": ["samus"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:30:10.736029", "start": "Sat, 05 Jul 2014 21:57:59 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:28:10 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:21", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:17", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:07", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:05", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:05", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:07:18", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2882/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2882, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622673, ""]], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-473.json b/cidb/test_data/series_0/metadata-473.json
new file mode 100644
index 0000000..ff3189e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-473.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:34:56 -0700 (PST)"}, "boards": ["panther"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:36:45.729547", "start": "Sat, 05 Jul 2014 21:58:10 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:34:56 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:19", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:13:48", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:12", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:41", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:10", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2327/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2327, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622683, ""]], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-474.json b/cidb/test_data/series_0/metadata-474.json
new file mode 100644
index 0000000..b7410fc
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-474.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 23:16:54 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:21:38.783978", "start": "Sat, 05 Jul 2014 21:55:16 -0700 (PST)", "finish": "Sat, 05 Jul 2014 23:16:54 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": 
"0:01:16", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:10:58", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:36", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:36", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:21", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:53:33", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4846/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4846, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622509, ""]], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-475.json b/cidb/test_data/series_0/metadata-475.json
new file mode 100644
index 0000000..bfd237c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-475.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 23:29:32 -0700 (PST)"}, "boards": ["link"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:36:02.031546", "start": "Sat, 05 Jul 2014 21:53:30 -0700 (PST)", "finish": "Sat, 05 Jul 2014 23:29:32 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:18", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:57", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:28", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:40", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:15", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:06:43", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17438/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17438, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622402, ""]], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-476.json b/cidb/test_data/series_0/metadata-476.json
new file mode 100644
index 0000000..3a45616
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-476.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:24:24 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:29:07.613543", "start": "Sat, 05 Jul 2014 21:55:16 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:24:24 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:05", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:47", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:28", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:12", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:07", "name": "CPEExport"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:12", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:26", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2148/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2148, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622510, ""]], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-477.json b/cidb/test_data/series_0/metadata-477.json
new file mode 100644
index 0000000..55a59b6
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-477.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:40:50 -0700 (PST)"}, "boards": ["falco"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:44:52.519430", "start": "Sat, 05 Jul 2014 21:55:58 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:40:50 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:56", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:43", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:21:42", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:55", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:36", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:13", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:07", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:13", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:46", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4290/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "CommitQueueCompletion"}], "build-number": 4290, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622550, ""]], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-478.json b/cidb/test_data/series_0/metadata-478.json
new file mode 100644
index 0000000..dc163ea
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-478.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:16:14 -0700 (PST)"}, "boards": ["gizmo"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:21:41.018575", "start": "Sat, 05 Jul 2014 21:54:33 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:16:14 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:56", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:56", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:06:57", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:31", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:54", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:12", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:10", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/831/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 831, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622466, ""]], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-479.json b/cidb/test_data/series_0/metadata-479.json
new file mode 100644
index 0000000..768ac76
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-479.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:24:48 -0700 (PST)"}, "boards": ["rambi"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:30:41.429828", "start": "Sat, 05 Jul 2014 21:54:06 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:24:48 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:02", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:30", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:21", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:11:16", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:16", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:44", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:59", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:07:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2328/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2328, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622439, ""]], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-48.json b/cidb/test_data/series_0/metadata-48.json
new file mode 100644
index 0000000..cb52553
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-48.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:36:24 -0700 (PST)"}, "boards": ["butterfly"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:57:59.584341", "start": "Fri, 04 Jul 2014 18:38:24 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:36:24 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:34", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:43", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:32:08", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/ChromeSDK/logs/stdio", "summary": "Stage 
was successful", "board": "butterfly", "duration": "0:11:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:45", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:32", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", 
"board": "butterfly", "duration": "0:07:08", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:03", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:09", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:10:15", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11451/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11451, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524294, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524294, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524294, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524294, ""], [{"gerrit_number": "206548", 
"patch_number": "1", "internal": false}, "picked_up", 1404524294, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524294, ""]], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-480.json b/cidb/test_data/series_0/metadata-480.json
new file mode 100644
index 0000000..db9505e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-480.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:12:01 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:03:12.869888", "start": "Sat, 05 Jul 2014 22:08:48 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:12:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1255/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1255/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:26", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1255/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1255/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1255/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1255/steps/SetupBoard/logs/stdio", "summary": "Stage was 
successful", "board": "lumpy", "duration": "0:00:40", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1255/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1255/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1255/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1255/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1255/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 1255, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404623322, ""]], "bot-hostname": "build168-m2.golo.chromium.org", 
"sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-481.json b/cidb/test_data/series_0/metadata-481.json
new file mode 100644
index 0000000..55ae8c6
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-481.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:14:17 -0700 (PST)"}, "boards": ["duck"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:18:06.089105", "start": "Sat, 05 Jul 2014 21:56:11 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:14:17 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:20", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:46", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:09", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:25", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:30", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:56", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:14", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:07", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:08", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1378/steps/CommitQueueCompletion/logs/stdio", 
"summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1378, "child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622565, ""]], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-482.json b/cidb/test_data/series_0/metadata-482.json
new file mode 100644
index 0000000..2c0f6ef
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-482.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:25:15 -0700 (PST)"}, "boards": ["peppy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:31:33.855644", "start": "Sat, 05 Jul 2014 21:53:41 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:25:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:20", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:36", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:23", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:04:50", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:09", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:07", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:51", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:51", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:46", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4277/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4277, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622413, ""]], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-483.json b/cidb/test_data/series_0/metadata-483.json
new file mode 100644
index 0000000..a258000
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-483.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:43:37 -0700 (PST)"}, "boards": ["butterfly"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:45:20.227714", "start": "Sat, 05 Jul 2014 21:58:17 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:43:37 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", 
"duration": "0:04:45", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:21:59", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:08", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:22", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:32", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:32", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:08", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11458/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 11458, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622691, ""]], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-484.json b/cidb/test_data/series_0/metadata-484.json
new file mode 100644
index 0000000..9aa99c6
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-484.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:25:31 -0700 (PST)"}, "boards": ["monroe"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:29:50.448697", "start": "Sat, 05 Jul 2014 21:55:41 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:25:31 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:18", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:19", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:52", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:52", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:44", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2332/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2332, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622534, ""]], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-485.json b/cidb/test_data/series_0/metadata-485.json
new file mode 100644
index 0000000..c1ff8ba
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-485.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 22:32:59 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206692", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:39:11.205208", "start": "Sat, 05 Jul 2014 21:53:48 -0700 (PST)", "finish": "Sat, 05 Jul 2014 22:32:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", 
"board": "stumpy_moblab", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:45", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:19", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": 
"0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:24", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:25", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:10:01", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", 
"duration": "0:06:27", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:37", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1377/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1377, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2083.1", "platform": "6023.0.0-rc2", "full": "R38-6023.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.185337.tar.xz", "cl_actions": [[{"gerrit_number": "206692", "patch_number": "2", "internal": false}, "picked_up", 1404622420, ""]], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.05.185337", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-486.json b/cidb/test_data/series_0/metadata-486.json
new file mode 100644
index 0000000..60c768b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-486.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:34:50 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:06:08.432732", "start": "Mon, 07 Jul 2014 10:28:41 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:34:50 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:18", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:03:03", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", 
"board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:53", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:33", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:09:11", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:41:09", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:09", "name": 
"UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:41", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:02", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:21", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:17", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:57", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2659/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2659, 
"child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754101, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754101, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754101, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754101, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754101, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754101, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754101, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754101, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754101, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754101, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754101, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754101, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754101, ""]], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-487.json b/cidb/test_data/series_0/metadata-487.json
new file mode 100644
index 0000000..6738da4
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-487.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 10:45:53 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:18:41.251457", "start": "Mon, 07 Jul 2014 10:27:12 -0700 (PST)", "finish": "Mon, 07 Jul 2014 10:45:53 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:58", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/PatchChrome/logs/stdio", "summary": "Stage was 
skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:04:33", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:40", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": 
"0:00:27", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:11", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:04:00", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:32", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2479/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 2479, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": 
["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754011, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754011, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754011, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754011, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754011, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754011, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754011, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754011, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754011, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754011, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754011, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754011, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754011, ""]], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-488.json b/cidb/test_data/series_0/metadata-488.json
new file mode 100644
index 0000000..fcaa393
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-488.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 12:09:36 -0700 (PST)"}, "boards": ["wolf"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:39:38.797011", "start": "Mon, 07 Jul 2014 10:29:57 -0700 (PST)", "finish": "Mon, 07 Jul 2014 12:09:36 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:15", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:02:59", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": 
"PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:12:18", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:17", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:11", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:35", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:05:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:20", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:05:39", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:08:18", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "1:06:31", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3456/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 3456, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754171, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754171, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754171, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754171, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754171, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754171, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754171, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754171, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754171, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754171, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754171, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754171, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754171, ""]], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": 
"paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-489.json b/cidb/test_data/series_0/metadata-489.json
new file mode 100644
index 0000000..9d675c7
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-489.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:05:37 -0700 (PST)"}, "boards": ["leon"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:34:41.784702", "start": "Mon, 07 Jul 2014 10:30:55 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:05:37 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:15", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:03:01", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": 
"PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:12:52", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:09:21", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:10", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:37", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:58", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:08:58", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2894/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2894, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2084.1", 
"platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754233, ""]], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-49.json b/cidb/test_data/series_0/metadata-49.json
new file mode 100644
index 0000000..4faa43a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-49.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:07:31 -0700 (PST)"}, "boards": ["monroe"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:33:53.909014", "start": "Fri, 04 Jul 2014 18:33:37 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:07:31 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:30", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:52", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", 
"duration": "0:11:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:54", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:12", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:37", "name": "DebugSymbols"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:51", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:39", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:54", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2325/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2325, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524009, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524009, ""], [{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524009, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524009, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524009, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": 
false}, "picked_up", 1404524009, ""]], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-490.json b/cidb/test_data/series_0/metadata-490.json
new file mode 100644
index 0000000..8e1ea8e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-490.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 12:04:33 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:35:07.875355", "start": "Mon, 07 Jul 2014 10:29:25 -0700 (PST)", "finish": "Mon, 07 Jul 2014 12:04:33 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:11", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:03:06", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": 
"0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:13:00", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:09:50", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:31", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:39", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:42", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:59", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:39", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "1:00:53", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17407/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17407, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754138, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754138, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754138, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754138, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754138, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754138, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754138, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754138, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754138, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754138, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754138, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754138, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754138, ""]], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", 
"build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-491.json b/cidb/test_data/series_0/metadata-491.json
new file mode 100644
index 0000000..8c57c67
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-491.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 12:10:40 -0700 (PST)"}, "boards": ["parrot"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:37:36.960969", "start": "Mon, 07 Jul 2014 10:33:03 -0700 (PST)", "finish": "Mon, 07 Jul 2014 12:10:40 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:04:01", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:35", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:03:09", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", 
"duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:39", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:43", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:43", "name": "UnitTest"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:41", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:51", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:21", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:53", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "1:01:19", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12126/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 12126, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754351, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754351, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754351, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754351, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754351, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754351, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754351, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754351, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754351, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754351, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754351, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754351, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754351, ""]], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", 
"build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-492.json b/cidb/test_data/series_0/metadata-492.json
new file mode 100644
index 0000000..3040888
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-492.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 12:20:47 -0700 (PST)"}, "boards": ["stumpy"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 1, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "202971", "total_fail": 5, "pass": 1, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206588", "total_fail": 1, "pass": 1, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206171", "total_fail": 6, "pass": 1, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206142", "total_fail": 6, "pass": 1, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206840", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 1, "gerrit_number": "206082", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206410", "total_fail": 4, "pass": 1, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206182", "total_fail": 3, "pass": 1, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206055", "total_fail": 4, "pass": 1, "fail": 4, "internal": false}], "time": {"duration": "1:49:25.418829", "start": "Mon, 07 Jul 2014 10:31:22 -0700 (PST)", "finish": "Mon, 07 Jul 2014 12:20:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:03:08", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", 
"duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:12:23", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:09:36", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:34", "name": "UnitTest"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:31", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:57", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:35", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:15:53", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17451/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17451, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754261, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754261, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754261, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754261, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754261, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754261, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754261, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754261, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754261, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754261, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754261, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754261, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754261, ""]], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", 
"build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-493.json b/cidb/test_data/series_0/metadata-493.json
new file mode 100644
index 0000000..1b7b2b3
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-493.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:29:20 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:02:34.462806", "start": "Mon, 07 Jul 2014 10:26:45 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:29:20 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:14", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:30", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:03:05", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": 
"PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:06", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:20", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:11:32", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:38:50", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:45", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:14", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:56", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:37", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:40", "name": "Archive"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:08", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18729/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:16", "name": "CommitQueueCompletion"}], "build-number": 18729, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic 
paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404753983, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404753983, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404753983, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404753983, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404753983, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404753983, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404753983, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404753983, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404753983, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404753983, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404753983, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404753983, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404753983, ""]], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-494.json b/cidb/test_data/series_0/metadata-494.json
new file mode 100644
index 0000000..101290c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-494.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 12:10:51 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:42:55.190691", "start": "Mon, 07 Jul 2014 10:27:56 -0700 (PST)", "finish": "Mon, 07 Jul 2014 12:10:51 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:03:02", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", 
"board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:12", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:08:57", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:09", "name": "UnitTest"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:39", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:04", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:04", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:09:42", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:08:38", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16890/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 16890, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754055, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754055, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754055, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754055, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754055, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754055, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754055, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754055, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754055, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754055, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754055, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754055, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754055, ""]], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": 
"paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-495.json b/cidb/test_data/series_0/metadata-495.json
new file mode 100644
index 0000000..f884109
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-495.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:30:07 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:03:22.200306", "start": "Mon, 07 Jul 2014 10:26:45 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:30:07 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:15", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:31", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:03:00", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": 
"0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:32", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:26", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:19", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:40:10", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:18", "name": "UnitTest"}, {"status": 
"passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:13", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:58", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:06:16", "name": "Archive"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:33", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18525/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CommitQueueCompletion"}], "build-number": 18525, "child-configs": [], 
"bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404753984, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404753984, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404753984, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404753984, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404753984, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404753984, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404753984, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404753984, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404753984, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404753984, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404753984, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404753984, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404753984, ""]], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-496.json b/cidb/test_data/series_0/metadata-496.json
new file mode 100644
index 0000000..a8d2274
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-496.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:05:53 -0700 (PST)"}, "boards": ["stout"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:35:20.391701", "start": "Mon, 07 Jul 2014 10:30:32 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:05:53 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:24", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:03:00", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": 
"0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:12:26", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:42", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:09:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:28", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:27", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:41", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:36", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:41", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:52", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:36", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11472/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11472, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": 
"38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754200, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754200, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754200, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754200, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754200, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754200, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754200, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754200, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754200, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754200, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754200, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754200, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754200, ""]], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-497.json b/cidb/test_data/series_0/metadata-497.json
new file mode 100644
index 0000000..c91088e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-497.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 12:04:53 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:33:43.579021", "start": "Mon, 07 Jul 2014 10:31:10 -0700 (PST)", "finish": "Mon, 07 Jul 2014 12:04:53 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:24", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:28", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/PatchChrome/logs/stdio", 
"summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:22:02", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/UnitTest/logs/stdio", "summary": "Stage was 
skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:04", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:17", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:50", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:18", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was 
successful", "board": "daisy_spring", "duration": "0:49:06", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4395/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:39", "name": "CommitQueueCompletion"}], "build-number": 4395, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754240, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754240, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754240, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754240, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754240, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754240, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754240, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754240, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754240, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754240, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754240, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754240, ""], [{"gerrit_number": "206055", "patch_number": "2", 
"internal": false}, "picked_up", 1404754240, ""]], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-498.json b/cidb/test_data/series_0/metadata-498.json
new file mode 100644
index 0000000..4c238cc
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-498.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 12:08:29 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:40:00.001820", "start": "Mon, 07 Jul 2014 10:28:29 -0700 (PST)", "finish": "Mon, 07 Jul 2014 12:08:29 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:11", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:12:26", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:08", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:48", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:00", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:32", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:00", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:09:01", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1903/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1903, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-499.json b/cidb/test_data/series_0/metadata-499.json
new file mode 100644
index 0000000..ab4b6e7
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-499.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:03:17 -0700 (PST)"}, "boards": ["nyan"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:32:03.880913", "start": "Mon, 07 Jul 2014 10:31:13 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:03:17 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:12", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:02:58", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": 
"PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:12:20", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:45", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:18", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:45", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:07:10", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1899/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1899, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2084.1", 
"platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754252, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754252, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754252, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754252, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754252, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754252, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754252, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754252, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754252, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754252, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754252, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754252, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754252, ""]], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-5.json b/cidb/test_data/series_0/metadata-5.json
new file mode 100644
index 0000000..af11cdb
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-5.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 04:30:35 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1263", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839", "reason": "None"}}, "boards": [], "changes": [], "metadata-version": "2", "child-configs": [], "build-number": 1925, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [], "builder-name": "CQ master", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "time": {"duration": "1:42:08.335799", "start": "Mon, 07 Jul 2014 02:48:27 -0700 (PST)", "finish": "Mon, 07 Jul 2014 04:30:35 -0700 (PST)"}, "toolchain-url": 
"2014/07/%(target)s-2014.07.06.221428.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1925/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1925/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "4:01:30", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1925/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1925/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1925/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1925/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1925/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1925/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:37:30", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1925/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-50.json b/cidb/test_data/series_0/metadata-50.json
new file mode 100644
index 0000000..7a10919
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-50.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 19:13:55 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "168017", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206364", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168150", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206592", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206548", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206581", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:39:03.420420", "start": "Fri, 04 Jul 2014 18:34:51 -0700 (PST)", "finish": "Fri, 04 Jul 2014 19:13:55 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:26", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:13", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:39", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:24", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:10:01", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:26", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:49", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1370/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1370, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc1", "full": "R38-6020.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168017", "patch_number": "2", "internal": true}, "picked_up", 1404524082, ""], [{"gerrit_number": "206364", "patch_number": "2", "internal": false}, "picked_up", 1404524082, ""], 
[{"gerrit_number": "168150", "patch_number": "1", "internal": true}, "picked_up", 1404524082, ""], [{"gerrit_number": "206592", "patch_number": "1", "internal": false}, "picked_up", 1404524082, ""], [{"gerrit_number": "206548", "patch_number": "1", "internal": false}, "picked_up", 1404524082, ""], [{"gerrit_number": "206581", "patch_number": "1", "internal": false}, "picked_up", 1404524082, ""]], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-500.json b/cidb/test_data/series_0/metadata-500.json
new file mode 100644
index 0000000..38ba5bf
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-500.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 12:00:05 -0700 (PST)"}, "boards": ["daisy"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:29:23.854785", "start": "Mon, 07 Jul 2014 10:30:41 -0700 (PST)", "finish": "Mon, 07 Jul 2014 12:00:05 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:12", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:02:58", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": 
"0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:14:05", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:11", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:46", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:22", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:58", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:52", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:08", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:56:30", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15142/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:22", "name": "CommitQueueCompletion"}], "build-number": 15142, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754214, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754214, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754214, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754214, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754214, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754214, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754214, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754214, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754214, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754214, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754214, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754214, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754214, ""]], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-501.json b/cidb/test_data/series_0/metadata-501.json
new file mode 100644
index 0000000..43830bb
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-501.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:06:07 -0700 (PST)"}, "boards": ["samus"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:34:56.196512", "start": "Mon, 07 Jul 2014 10:31:11 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:06:07 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:17", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:03:05", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", 
"name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:12:31", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:09:30", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:20", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:15", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:28", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:16", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:23", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2893/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:33", "name": "CommitQueueCompletion"}], "build-number": 2893, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": 
"38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754244, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754244, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754244, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754244, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754244, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754244, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754244, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754244, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754244, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754244, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754244, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754244, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754244, ""]], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-502.json b/cidb/test_data/series_0/metadata-502.json
new file mode 100644
index 0000000..eacdb1a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-502.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:06:53 -0700 (PST)"}, "boards": ["panther"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:37:09.786800", "start": "Mon, 07 Jul 2014 10:29:44 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:06:53 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:19", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:03:00", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", 
"duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:56", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:43", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:09", "name": "UnitTest"}, {"status": "passed", "description": 
null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:37", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:07:50", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:54", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:09:37", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2338/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2338, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther 
paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754153, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754153, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754153, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754153, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754153, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754153, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754153, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754153, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754153, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754153, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754153, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754153, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754153, ""]], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-503.json b/cidb/test_data/series_0/metadata-503.json
new file mode 100644
index 0000000..7de7b2e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-503.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:57:27 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:26:29.870395", "start": "Mon, 07 Jul 2014 10:30:57 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:57:27 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:17", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:01", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", 
"board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:13:17", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:12", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:14", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:55", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:53:58", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": 
null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4857/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 4857, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754234, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754234, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754234, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754234, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754234, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754234, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754234, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754234, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754234, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754234, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754234, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754234, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754234, ""]], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", 
"build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-504.json b/cidb/test_data/series_0/metadata-504.json
new file mode 100644
index 0000000..b82b8dc
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-504.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 12:12:24 -0700 (PST)"}, "boards": ["link"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "1:44:55.791679", "start": "Mon, 07 Jul 2014 10:27:28 -0700 (PST)", "finish": "Mon, 07 Jul 2014 12:12:24 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:02:59", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", 
"name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:12:46", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:11", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:37", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:15", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:39", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:52", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:11:04", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17449/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:25", "name": "CommitQueueCompletion"}], "build-number": 17449, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754023, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754023, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754023, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754023, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754023, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754023, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754023, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754023, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754023, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754023, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754023, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754023, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754023, ""]], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": 
"2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-505.json b/cidb/test_data/series_0/metadata-505.json
new file mode 100644
index 0000000..8f7ef75
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-505.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:03:57 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:33:07.732213", "start": "Mon, 07 Jul 2014 10:30:49 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:03:57 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:13", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:03:03", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:49", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:53", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:55", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:06", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:06", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:41", "name": 
"UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2159/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2159, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754223, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754223, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754223, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754223, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754223, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754223, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754223, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754223, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754223, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754223, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754223, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754223, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754223, ""]], 
"bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-506.json b/cidb/test_data/series_0/metadata-506.json
new file mode 100644
index 0000000..a44287b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-506.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:18:32 -0700 (PST)"}, "boards": ["falco"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:47:31.397066", "start": "Mon, 07 Jul 2014 10:31:00 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:18:32 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:21", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:30", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", 
"name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:21:35", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:09:59", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:05", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:04", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:47", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:04", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:59", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4301/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4301, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": 
"38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754233, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754233, ""]], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-507.json b/cidb/test_data/series_0/metadata-507.json
new file mode 100644
index 0000000..fb0794f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-507.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 10:53:11 -0700 (PST)"}, "boards": ["gizmo"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:23:59.953259", "start": "Mon, 07 Jul 2014 10:29:11 -0700 (PST)", "finish": "Mon, 07 Jul 2014 10:53:11 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:23", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:58", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": 
"PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:07:32", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:51", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:01", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:06", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:14", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:08", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/842/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "CommitQueueCompletion"}], "build-number": 842, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", 
"internal": false}, "picked_up", 1404754123, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754123, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754123, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754123, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754123, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754123, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754123, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754123, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754123, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754123, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754123, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754123, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754123, ""]], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-508.json b/cidb/test_data/series_0/metadata-508.json
new file mode 100644
index 0000000..e1d1923
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-508.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:04:30 -0700 (PST)"}, "boards": ["rambi"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:35:14.136331", "start": "Mon, 07 Jul 2014 10:29:16 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:04:30 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:25", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:34", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:03:06", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", 
"name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:12:26", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:09:41", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:16", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:42", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:58", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:40", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:58", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:45", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2339/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2339, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": 
"38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754127, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754127, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754127, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754127, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754127, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754127, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754127, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754127, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754127, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754127, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754127, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754127, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754127, ""]], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-509.json b/cidb/test_data/series_0/metadata-509.json
new file mode 100644
index 0000000..6add67e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-509.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 10:59:15 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:14:18.299267", "start": "Mon, 07 Jul 2014 10:44:57 -0700 (PST)", "finish": "Mon, 07 Jul 2014 10:59:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1266/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1266/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:17:12", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1266/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1266/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1266/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1266/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:02:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1266/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1266/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1266/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:10:14", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1266/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1266/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1266, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404755074, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404755074, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404755074, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404755074, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404755074, ""], [{"gerrit_number": "206588", 
"patch_number": "1", "internal": false}, "picked_up", 1404755074, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404755074, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404755074, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404755074, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404755074, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404755074, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404755074, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404755074, ""]], "bot-hostname": "build168-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-51.json b/cidb/test_data/series_0/metadata-51.json
new file mode 100644
index 0000000..b1cbb49
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-51.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 18:19:14 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [], "time": {"duration": "1:05:50.336380", "start": "Sun, 06 Jul 2014 17:13:23 -0700 (PST)", "finish": "Sun, 06 Jul 2014 18:19:14 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:24", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:29", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:37", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:43:03", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:45", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:58", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:34", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:29", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:15", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:24", "name": "CommitQueueCompletion"}], "build-number": 2653, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-510.json b/cidb/test_data/series_0/metadata-510.json
new file mode 100644
index 0000000..1c94576
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-510.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 10:49:55 -0700 (PST)"}, "boards": ["duck"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:22:01.307329", "start": "Mon, 07 Jul 2014 10:27:54 -0700 (PST)", "finish": "Mon, 07 Jul 2014 10:49:55 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:05", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": 
"PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:07:38", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:11", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:38", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:08", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:18", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:10", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:20", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1389/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1389, "child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", 
"internal": false}, "picked_up", 1404754051, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754051, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754051, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754051, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754051, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754051, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754051, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754051, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754051, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754051, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754051, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754051, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754051, ""]], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-511.json b/cidb/test_data/series_0/metadata-511.json
new file mode 100644
index 0000000..229596a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-511.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:07:28 -0700 (PST)"}, "boards": ["peppy"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:35:37.555881", "start": "Mon, 07 Jul 2014 10:31:50 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:07:28 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:11", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:03:02", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", 
"name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:12:09", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:40", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:13", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:54", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:21", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:44", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:51", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:00", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:44", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:09:14", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4288/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4288, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": 
"38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754288, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754288, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754288, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754288, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754288, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754288, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754288, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754288, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754288, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754288, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754288, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754288, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754288, ""]], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-512.json b/cidb/test_data/series_0/metadata-512.json
new file mode 100644
index 0000000..8ae2f57
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-512.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:18:19 -0700 (PST)"}, "boards": ["butterfly"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:50:54.251903", "start": "Mon, 07 Jul 2014 10:27:25 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:18:19 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:11", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:30", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/PatchChrome/logs/stdio", "summary": "Stage was 
skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:24:13", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:10:36", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:08", 
"name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:27", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:43", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:33", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11469/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], 
"build-number": 11469, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754022, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754022, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754022, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754022, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754022, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754022, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754022, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754022, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754022, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754022, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754022, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754022, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754022, ""]], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-513.json b/cidb/test_data/series_0/metadata-513.json
new file mode 100644
index 0000000..fa02572
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-513.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:02:52 -0700 (PST)"}, "boards": ["monroe"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:34:32.156967", "start": "Mon, 07 Jul 2014 10:28:20 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:02:52 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:17", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:02:58", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": 
"0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:12:44", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:19", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:37", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:52", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:08:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2343/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2343, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": 
{"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754077, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754077, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754077, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754077, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754077, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754077, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754077, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754077, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754077, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754077, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754077, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754077, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754077, ""]], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-514.json b/cidb/test_data/series_0/metadata-514.json
new file mode 100644
index 0000000..87aaf00
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-514.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 11:11:40 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "200037", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "3", "total_pass": 0, "gerrit_number": "201937", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "202162", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "202208", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "202971", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206588", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206171", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206142", "total_fail": 6, "pass": 0, "fail": 6, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206840", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "6", "total_pass": 0, "gerrit_number": "206082", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206410", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206182", "total_fail": 3, "pass": 0, "fail": 3, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206055", "total_fail": 4, "pass": 0, "fail": 4, "internal": false}], "time": {"duration": "0:41:49.208832", "start": "Mon, 07 Jul 2014 10:29:51 -0700 (PST)", "finish": "Mon, 07 Jul 2014 11:11:40 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:17", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:03:04", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:12:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:12", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:10", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:32", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:31", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:09:41", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:33", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:14", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1388/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1388, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc1", "full": "R38-6029.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.050345.tar.xz", "cl_actions": [[{"gerrit_number": "200037", "patch_number": "4", "internal": false}, "picked_up", 1404754160, ""], [{"gerrit_number": "201937", "patch_number": "3", "internal": false}, "picked_up", 1404754160, ""], [{"gerrit_number": "202162", "patch_number": "5", "internal": false}, "picked_up", 1404754160, ""], [{"gerrit_number": "202208", "patch_number": "6", "internal": false}, "picked_up", 1404754160, ""], [{"gerrit_number": "202971", "patch_number": "2", "internal": false}, "picked_up", 1404754160, ""], [{"gerrit_number": "206588", "patch_number": "1", "internal": false}, "picked_up", 1404754160, ""], [{"gerrit_number": "206171", "patch_number": "1", "internal": false}, "picked_up", 1404754160, ""], [{"gerrit_number": "206142", "patch_number": "2", "internal": false}, "picked_up", 1404754160, ""], [{"gerrit_number": "206840", "patch_number": "1", "internal": false}, "picked_up", 1404754160, ""], [{"gerrit_number": "206082", "patch_number": "6", "internal": false}, "picked_up", 1404754160, ""], [{"gerrit_number": "206410", "patch_number": "2", "internal": false}, "picked_up", 1404754160, ""], [{"gerrit_number": "206182", "patch_number": "2", "internal": false}, "picked_up", 1404754160, ""], [{"gerrit_number": "206055", "patch_number": "2", "internal": false}, "picked_up", 1404754160, ""]], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": 
"2014.07.07.050345", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-515.json b/cidb/test_data/series_0/metadata-515.json
new file mode 100644
index 0000000..499c13c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-515.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:58:32 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:03:38.287658", "start": "Sun, 06 Jul 2014 20:54:54 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:58:32 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": 
"InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:29", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:33", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:40:45", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:04", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:04", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:02", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:14", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:04", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 2655, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705286, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705286, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705286, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705286, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705286, ""]], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", 
"build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-516.json b/cidb/test_data/series_0/metadata-516.json
new file mode 100644
index 0000000..263c845
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-516.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:13:12 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:14:52.424319", "start": "Sun, 06 Jul 2014 20:58:20 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:13:12 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", 
"name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:14", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:03:02", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:43", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:25", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:03:29", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:23", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2475, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705492, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705492, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705492, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705492, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705492, ""]], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-517.json b/cidb/test_data/series_0/metadata-517.json
new file mode 100644
index 0000000..f5ab8d2
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-517.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 22:32:44 -0700 (PST)"}, "boards": ["wolf"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:33:43.383421", "start": "Sun, 06 Jul 2014 20:59:00 -0700 (PST)", "finish": "Sun, 06 Jul 2014 22:32:44 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:53", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage 
was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:53", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:51", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:05:22", "name": "CPEExport"}, {"status": "passed", "description": 
null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:51", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:23", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "1:04:13", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 3452, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705533, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705533, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705533, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705533, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705533, ""]], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": 
"2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-518.json b/cidb/test_data/series_0/metadata-518.json
new file mode 100644
index 0000000..b6bc79a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-518.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:28:18 -0700 (PST)"}, "boards": ["leon"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:32:45.357806", "start": "Sun, 06 Jul 2014 20:55:32 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:28:18 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:31", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:10:28", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage 
was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:09:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:53", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:52", "name": "CPEExport"}, {"status": "passed", "description": 
null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:46", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:09:43", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 2890, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705325, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705325, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705325, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705325, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705325, ""]], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-519.json b/cidb/test_data/series_0/metadata-519.json
new file mode 100644
index 0000000..666f039
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-519.json
@@ -0,0 +1 @@
+{"status": {"status": "failed", "summary": "", "current-time": "Sun, 06 Jul 2014 22:28:33 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:29:56.444395", "start": "Sun, 06 Jul 2014 20:58:37 -0700 (PST)", "finish": "Sun, 06 Jul 2014 22:28:33 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:43", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:24", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:55", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:01", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:41", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:02", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:07:38", "name": "UploadTestArtifacts"}, {"status": "failed", "description": "** HWTest failed (code 1) **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "** HWTest failed (code 1) **", "board": "lumpy", "duration": "1:00:07", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": "", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage failed but was optional", "board": "", "duration": "0:00:05", "name": "CommitQueueCompletion"}], "build-number": 17403, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705508, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705508, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, 
"picked_up", 1404705508, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705508, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705508, ""]], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-52.json b/cidb/test_data/series_0/metadata-52.json
new file mode 100644
index 0000000..6e800f1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-52.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 17:28:07 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [], "time": {"duration": "0:14:58.729439", "start": "Sun, 06 Jul 2014 17:13:09 -0700 (PST)", "finish": "Sun, 06 Jul 2014 17:28:07 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:13", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:39", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:22", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:24", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:36", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:04:50", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:24", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 2473, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-520.json b/cidb/test_data/series_0/metadata-520.json
new file mode 100644
index 0000000..9340504
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-520.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 22:31:23 -0700 (PST)"}, "boards": ["parrot"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:34:39.140422", "start": "Sun, 06 Jul 2014 20:56:43 -0700 (PST)", "finish": "Sun, 06 Jul 2014 22:31:23 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:32", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:57", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:05:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:53", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:17", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:10", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:37", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:21", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:39", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:42", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "1:00:41", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 12122, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705395, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705395, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705395, ""], 
[{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705395, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705395, ""]], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-521.json b/cidb/test_data/series_0/metadata-521.json
new file mode 100644
index 0000000..4258e3e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-521.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 22:43:53 -0700 (PST)"}, "boards": ["stumpy"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 2, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "time": {"duration": "1:44:20.735543", "start": "Sun, 06 Jul 2014 20:59:33 -0700 (PST)", "finish": "Sun, 06 Jul 2014 22:43:53 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:25", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:54", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:19", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:01", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:32", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:01", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:07:39", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:14:35", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17447, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705564, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705564, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705564, ""], 
[{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705564, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705564, ""]], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-522.json b/cidb/test_data/series_0/metadata-522.json
new file mode 100644
index 0000000..40db468
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-522.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:55:19 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:00:49.072304", "start": "Sun, 06 Jul 2014 20:54:30 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:55:19 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:39", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:19", "name": "InitSDK"}, {"status": 
"passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:11:16", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:17", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:26", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:39:43", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:11:52", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:34", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:13", "name": "Archive"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:24", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CommitQueueCompletion"}], "build-number": 18725, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705262, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705262, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705262, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705262, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705262, ""]], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": 
"2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-523.json b/cidb/test_data/series_0/metadata-523.json
new file mode 100644
index 0000000..3a2719e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-523.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 22:32:58 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:37:32.366179", "start": "Sun, 06 Jul 2014 20:55:25 -0700 (PST)", "finish": "Sun, 06 Jul 2014 22:32:58 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": 
"InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:26", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:51", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:41", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:04", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:03", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:03", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:32", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:07:49", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:26", "name": "CommitQueueCompletion"}], "build-number": 16886, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705318, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705318, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705318, ""], 
[{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705318, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705318, ""]], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-524.json b/cidb/test_data/series_0/metadata-524.json
new file mode 100644
index 0000000..69504ff
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-524.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:55:07 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:00:38.701944", "start": "Sun, 06 Jul 2014 20:54:28 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:55:07 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:38", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:20", "name": "InitSDK"}, 
{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:38", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:27", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:38:55", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:57", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:00:56", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:32", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:06:21", "name": "Archive"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:47", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 18521, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705261, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705261, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705261, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705261, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705261, ""]], "bot-hostname": "build141-m2.golo.chromium.org", 
"sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-525.json b/cidb/test_data/series_0/metadata-525.json
new file mode 100644
index 0000000..679c67b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-525.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:32:01 -0700 (PST)"}, "boards": ["stout"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:32:41.347840", "start": "Sun, 06 Jul 2014 20:59:20 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:32:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:56", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:23", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:12:53", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:43", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:14", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:06", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:07", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:03", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:01", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:33", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:04", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:28", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11468, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705552, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705552, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705552, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705552, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705552, ""]], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", 
"build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-526.json b/cidb/test_data/series_0/metadata-526.json
new file mode 100644
index 0000000..79e9ba5
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-526.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 22:24:16 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:26:58.249639", "start": "Sun, 06 Jul 2014 20:57:18 -0700 (PST)", "finish": "Sun, 06 Jul 2014 22:24:16 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:56", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": 
"0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:21:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:08", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:52", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:51", "name": "DebugSymbols"}, {"status": "passed", "description": 
null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:51", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:09", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:45:02", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 4391, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705430, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705430, ""], [{"gerrit_number": "204678", "patch_number": 
"4", "internal": false}, "picked_up", 1404705430, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705430, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705430, ""]], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-527.json b/cidb/test_data/series_0/metadata-527.json
new file mode 100644
index 0000000..9ccc173
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-527.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 22:39:29 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:37:41.816182", "start": "Sun, 06 Jul 2014 21:01:48 -0700 (PST)", "finish": "Sun, 06 Jul 2014 22:39:29 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:39", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:24", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:02", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:31", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:59", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:21", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:08:11", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1899, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-528.json b/cidb/test_data/series_0/metadata-528.json
new file mode 100644
index 0000000..a845899
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-528.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:26:25 -0700 (PST)"}, "boards": ["nyan"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:29:05.610039", "start": "Sun, 06 Jul 2014 20:57:20 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:26:25 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:17", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:11:41", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:55", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage 
was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:08", "name": "CPEExport"}, {"status": "passed", "description": 
null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:41", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:06:35", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1895, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705432, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705432, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705432, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705432, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705432, ""]], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-529.json b/cidb/test_data/series_0/metadata-529.json
new file mode 100644
index 0000000..f7dffbf
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-529.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 22:26:35 -0700 (PST)"}, "boards": ["daisy"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:30:08.944333", "start": "Sun, 06 Jul 2014 20:56:26 -0700 (PST)", "finish": "Sun, 06 Jul 2014 22:26:35 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:12:00", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:12", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:41", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:47", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:04:01", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:41", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:11", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "1:01:00", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 15138, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705379, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705379, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705379, ""], [{"gerrit_number": "205535", 
"patch_number": "1", "internal": false}, "picked_up", 1404705379, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705379, ""]], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-53.json b/cidb/test_data/series_0/metadata-53.json
new file mode 100644
index 0000000..6bfe714
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-53.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 18:44:33 -0700 (PST)"}, "boards": ["wolf"], "changes": [], "time": {"duration": "1:31:57.016019", "start": "Sun, 06 Jul 2014 17:12:36 -0700 (PST)", "finish": "Sun, 06 Jul 2014 18:44:33 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:40", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:51", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:46", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:58", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:45", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:50", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:59:43", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 3450, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-530.json b/cidb/test_data/series_0/metadata-530.json
new file mode 100644
index 0000000..7759bc8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-530.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:28:53 -0700 (PST)"}, "boards": ["samus"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:31:29.534778", "start": "Sun, 06 Jul 2014 20:57:23 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:28:53 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:12:28", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:08", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/VMTest%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:00:57", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:26", "name": "CPEExport"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:53", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:03", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2889, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705436, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705436, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705436, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705436, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705436, ""]], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-531.json b/cidb/test_data/series_0/metadata-531.json
new file mode 100644
index 0000000..b1c8612
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-531.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:34:05 -0700 (PST)"}, "boards": ["panther"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:34:32.698754", "start": "Sun, 06 Jul 2014 20:59:32 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:34:05 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:56", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:52", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:37", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:05", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:39", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:15", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2334, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705564, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705564, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705564, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705564, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705564, ""]], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": 
"2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-532.json b/cidb/test_data/series_0/metadata-532.json
new file mode 100644
index 0000000..50b4c66
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-532.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 22:20:13 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:25:06.134143", "start": "Sun, 06 Jul 2014 20:55:07 -0700 (PST)", "finish": "Sun, 06 Jul 2014 22:20:13 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": 
"InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:16", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:11:54", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:45", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:46", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:49", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:55:36", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4853, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705297, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705297, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 
1404705297, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705297, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705297, ""]], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-533.json b/cidb/test_data/series_0/metadata-533.json
new file mode 100644
index 0000000..7487965
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-533.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 22:41:12 -0700 (PST)"}, "boards": ["link"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:42:11.257689", "start": "Sun, 06 Jul 2014 20:59:00 -0700 (PST)", "finish": "Sun, 06 Jul 2014 22:41:12 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:13:03", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:36", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:00:53", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:05", "name": "CPEExport"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:51", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:54", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:08:18", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 17445, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705533, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705533, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705533, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705533, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705533, ""]], "bot-hostname": 
"build127-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-534.json b/cidb/test_data/series_0/metadata-534.json
new file mode 100644
index 0000000..2faff74
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-534.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:27:05 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:30:11.282145", "start": "Sun, 06 Jul 2014 20:56:54 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:27:05 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:58", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/InitSDK/logs/stdio", "summary": "Stage was 
successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:54", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:51", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:36", 
"name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/DebugSymbols/logs/stdio", "summary": "Stage was 
successful", "board": "x86-mario", "duration": "0:05:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:05", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:46", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:26", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 2155, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705407, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705407, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705407, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 
1404705407, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705407, ""]], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-535.json b/cidb/test_data/series_0/metadata-535.json
new file mode 100644
index 0000000..08e1691
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-535.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:38:14 -0700 (PST)"}, "boards": ["falco"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:43:08.157604", "start": "Sun, 06 Jul 2014 20:55:05 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:38:14 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:20:34", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:27", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/VMTest%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:45", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:23", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:04", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:42", "name": "CPEExport"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:04", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:34", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4297, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705294, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705294, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705294, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705294, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705294, ""]], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-536.json b/cidb/test_data/series_0/metadata-536.json
new file mode 100644
index 0000000..479020a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-536.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:20:41 -0700 (PST)"}, "boards": ["gizmo"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:21:29.165935", "start": "Sun, 06 Jul 2014 20:59:12 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:20:41 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:23", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:06:05", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/SignerTest/logs/stdio", 
"summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:06:35", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:32", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:18", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:20", "name": "Archive"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:07", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 838, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705544, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705544, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705544, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705544, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705544, ""]], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-537.json b/cidb/test_data/series_0/metadata-537.json
new file mode 100644
index 0000000..3a0eaa8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-537.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:31:27 -0700 (PST)"}, "boards": ["rambi"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:33:00.916543", "start": "Sun, 06 Jul 2014 20:58:26 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:31:27 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:57", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:31", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:23", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:12:06", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:29", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/VMTest%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:04", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:01", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:51", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:48", "name": "CPEExport"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:51", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:14", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:32", "name": "CommitQueueCompletion"}], "build-number": 2335, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705497, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705497, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705497, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705497, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705497, ""]], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-538.json b/cidb/test_data/series_0/metadata-538.json
new file mode 100644
index 0000000..268d0fc
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-538.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:27:24 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:08:40.177128", "start": "Sun, 06 Jul 2014 21:18:44 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:27:24 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1262/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1262/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:20:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1262/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1262/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", 
"duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1262/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1262/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:53", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1262/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1262/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1262/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:05", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1262/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1262/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 1262, 
"child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404706716, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404706716, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404706716, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404706716, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404706716, ""]], "bot-hostname": "build168-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-539.json b/cidb/test_data/series_0/metadata-539.json
new file mode 100644
index 0000000..26ef00f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-539.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:15:49 -0700 (PST)"}, "boards": ["duck"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:18:04.512995", "start": "Sun, 06 Jul 2014 20:57:45 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:15:49 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:40", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:12", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/SignerTest/logs/stdio", "summary": 
"Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:24", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:32", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:11", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:06", "name": "Archive"}, {"status": "passed", "description": 
null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:12", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1385, "child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705457, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705457, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705457, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705457, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705457, ""]], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-54.json b/cidb/test_data/series_0/metadata-54.json
new file mode 100644
index 0000000..66d7bff
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-54.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 17:49:21 -0700 (PST)"}, "boards": ["leon"], "changes": [], "time": {"duration": "0:35:20.021342", "start": "Sun, 06 Jul 2014 17:14:01 -0700 (PST)", "finish": "Sun, 06 Jul 2014 17:49:21 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:12:28", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:47", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:09:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:08:02", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:43", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:10:11", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:33", "name": "CommitQueueCompletion"}], "build-number": 2888, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-540.json b/cidb/test_data/series_0/metadata-540.json
new file mode 100644
index 0000000..d805ad7
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-540.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:30:52 -0700 (PST)"}, "boards": ["peppy"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:31:33.391417", "start": "Sun, 06 Jul 2014 20:59:19 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:30:52 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:23", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:30", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:41", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:32", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/VMTest%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was successful", "board": "peppy", "duration": "0:07:12", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:57", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:15", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:41", "name": "CPEExport"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:20", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:04", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4284, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705551, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705551, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705551, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705551, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705551, ""]], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-541.json b/cidb/test_data/series_0/metadata-541.json
new file mode 100644
index 0000000..c6f5aae
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-541.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:41:16 -0700 (PST)"}, "boards": ["butterfly"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:45:53.557678", "start": "Sun, 06 Jul 2014 20:55:23 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:41:16 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", 
"name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:46", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:22:16", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:26", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:45", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:44", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:17", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:38", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:17", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 11465, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705312, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705312, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705312, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705312, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705312, ""]], "bot-hostname": "build129-m2.golo.chromium.org", 
"sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-542.json b/cidb/test_data/series_0/metadata-542.json
new file mode 100644
index 0000000..3577f6b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-542.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:30:17 -0700 (PST)"}, "boards": ["monroe"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:30:53.814698", "start": "Sun, 06 Jul 2014 20:59:24 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:30:17 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:01", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:24", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:12:11", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:52", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:00:55", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:38", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:06", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2339, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705556, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705556, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705556, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705556, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705556, ""]], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", 
"build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-543.json b/cidb/test_data/series_0/metadata-543.json
new file mode 100644
index 0000000..5d80891
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-543.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 21:39:50 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:40:45.055799", "start": "Sun, 06 Jul 2014 20:59:05 -0700 (PST)", "finish": "Sun, 06 Jul 2014 21:39:50 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": 
"0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:25", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:12:40", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:08", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:06", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:42", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:37", "name": "DebugSymbols"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:10:00", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:39", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:15:05", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1384, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [[{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705537, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705537, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705537, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705537, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705537, 
""]], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-544.json b/cidb/test_data/series_0/metadata-544.json
new file mode 100644
index 0000000..31f1c49
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-544.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 21:27:17 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:03:04.380205", "start": "Fri, 04 Jul 2014 20:24:12 -0700 (PST)", "finish": "Fri, 04 Jul 2014 21:27:17 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": 
"Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:31", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:37", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:40:28", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:14", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:03", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:07", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:42", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2642/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2642, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530645, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530645, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530645, ""]], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-545.json b/cidb/test_data/series_0/metadata-545.json
new file mode 100644
index 0000000..80a2283
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-545.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:39:35 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:13:27.813248", "start": "Fri, 04 Jul 2014 20:26:07 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:39:35 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", 
"name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:12", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:42", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:27", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:03:21", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:20", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2462/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2462, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530758, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530758, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530758, ""]], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-546.json b/cidb/test_data/series_0/metadata-546.json
new file mode 100644
index 0000000..39f93d9
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-546.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 21:52:43 -0700 (PST)"}, "boards": ["wolf"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:28:10.105513", "start": "Fri, 04 Jul 2014 20:24:33 -0700 (PST)", "finish": "Fri, 04 Jul 2014 21:52:43 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:55", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:10", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:53", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/SignerTest/logs/stdio", 
"summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:54", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:16", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:05:20", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:45", "name": "Archive"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:10", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:59:12", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3439/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 3439, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530666, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530666, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530666, ""]], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-547.json b/cidb/test_data/series_0/metadata-547.json
new file mode 100644
index 0000000..d60617f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-547.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:56:49 -0700 (PST)"}, "boards": ["leon"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:29:54.015915", "start": "Fri, 04 Jul 2014 20:26:55 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:56:49 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:09", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:07:57", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/SignerTest/logs/stdio", 
"summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:11", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:24", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:47", "name": "Archive"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:07:21", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2877/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2877, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530807, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530807, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530807, ""]], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-548.json b/cidb/test_data/series_0/metadata-548.json
new file mode 100644
index 0000000..9f6edb4
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-548.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 21:52:18 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:23:42.032431", "start": "Fri, 04 Jul 2014 20:28:36 -0700 (PST)", "finish": "Fri, 04 Jul 2014 21:52:18 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:42", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:30", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:08", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:14", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:12", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:44", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:12", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:07:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:53:43", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17390/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17390, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530907, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530907, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530907, ""]], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-549.json b/cidb/test_data/series_0/metadata-549.json
new file mode 100644
index 0000000..c69bd38
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-549.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 21:55:11 -0700 (PST)"}, "boards": ["parrot"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:26:29.329054", "start": "Fri, 04 Jul 2014 20:28:42 -0700 (PST)", "finish": "Fri, 04 Jul 2014 21:55:11 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:04:07", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:30", "name": "Uprev"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:13", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:13:01", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:34", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:11", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:21", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:40", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:46", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:53:24", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12109/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 12109, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530913, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530913, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530913, ""]], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-55.json b/cidb/test_data/series_0/metadata-55.json
new file mode 100644
index 0000000..f754ca8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-55.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 18:38:56 -0700 (PST)"}, "boards": ["lumpy"], "changes": [], "time": {"duration": "1:27:53.885600", "start": "Sun, 06 Jul 2014 17:11:02 -0700 (PST)", "finish": "Sun, 06 Jul 2014 18:38:56 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:17", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:54", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:12:45", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:18", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:21", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:49", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:10:48", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:55:05", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 17401, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-550.json b/cidb/test_data/series_0/metadata-550.json
new file mode 100644
index 0000000..b04f171
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-550.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 22:02:47 -0700 (PST)"}, "boards": ["stumpy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:36:55.703727", "start": "Fri, 04 Jul 2014 20:25:52 -0700 (PST)", "finish": "Fri, 04 Jul 2014 22:02:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:29", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:53", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:17", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:14", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:03", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:39", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:03", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:07:37", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:07:18", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17434/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17434, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530744, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530744, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530744, ""]], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-551.json b/cidb/test_data/series_0/metadata-551.json
new file mode 100644
index 0000000..1ac9b10
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-551.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 21:23:15 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:59:30.173690", "start": "Fri, 04 Jul 2014 20:23:45 -0700 (PST)", "finish": "Fri, 04 Jul 2014 21:23:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:08", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:35", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:03", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:19", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:09", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:15", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:39:49", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:02:02", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:41", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:53", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:23", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18712/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:16", "name": "CommitQueueCompletion"}], "build-number": 18712, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530620, ""]], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-552.json b/cidb/test_data/series_0/metadata-552.json
new file mode 100644
index 0000000..f643e36
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-552.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 22:02:38 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:33:56.226940", "start": "Fri, 04 Jul 2014 20:28:42 -0700 (PST)", "finish": "Fri, 04 Jul 2014 22:02:38 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:43", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": 
"Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:15", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:52", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:40", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:15", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:06", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:11", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:06", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:46", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:04:17", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16873/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 16873, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530914, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530914, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530914, ""]], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-553.json b/cidb/test_data/series_0/metadata-553.json
new file mode 100644
index 0000000..81d6cd0
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-553.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 21:22:41 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:58:51.472641", "start": "Fri, 04 Jul 2014 20:23:50 -0700 (PST)", "finish": "Fri, 04 Jul 2014 21:22:41 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:40", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:03", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:19", 
"name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:39", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:18", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:49", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:38:35", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:08", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:02:04", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:01", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:32", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:06:04", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18508/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:16", "name": "CommitQueueCompletion"}], "build-number": 18508, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530624, ""]], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-554.json b/cidb/test_data/series_0/metadata-554.json
new file mode 100644
index 0000000..cd29a83
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-554.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 21:00:50 -0700 (PST)"}, "boards": ["stout"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:31:56.324714", "start": "Fri, 04 Jul 2014 20:28:54 -0700 (PST)", "finish": "Fri, 04 Jul 2014 21:00:50 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:19", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:38", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:06", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:01", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:14", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:04:58", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:33", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:48", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:39", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11455/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:28", "name": "CommitQueueCompletion"}], "build-number": 11455, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530925, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530925, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530925, ""]], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-555.json b/cidb/test_data/series_0/metadata-555.json
new file mode 100644
index 0000000..c06a478
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-555.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 21:44:16 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:18:15.467874", "start": "Fri, 04 Jul 2014 20:26:00 -0700 (PST)", "finish": "Fri, 04 Jul 2014 21:44:16 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": 
"0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:43", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:21:29", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:03", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:10", "name": "CPEExport"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:50", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:17", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:36:24", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4378/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4378, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530752, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530752, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530752, ""]], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-556.json b/cidb/test_data/series_0/metadata-556.json
new file mode 100644
index 0000000..ccd4ddf
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-556.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 21:54:28 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:28:32.058485", "start": "Fri, 04 Jul 2014 20:25:56 -0700 (PST)", "finish": "Fri, 04 Jul 2014 21:54:28 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:19", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:58", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:43", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:14", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:59:14", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1886/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1886, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-557.json b/cidb/test_data/series_0/metadata-557.json
new file mode 100644
index 0000000..02f9029
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-557.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:55:59 -0700 (PST)"}, "boards": ["nyan"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:29:21.957955", "start": "Fri, 04 Jul 2014 20:26:37 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:55:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:14", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:11:47", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:37", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/SignerTest/logs/stdio", 
"summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:58", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:43", "name": "Archive"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:06:51", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1882/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1882, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530788, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530788, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530788, ""]], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-558.json b/cidb/test_data/series_0/metadata-558.json
new file mode 100644
index 0000000..d186819
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-558.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 21:46:58 -0700 (PST)"}, "boards": ["daisy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:21:22.479556", "start": "Fri, 04 Jul 2014 20:25:36 -0700 (PST)", "finish": "Fri, 04 Jul 2014 21:46:58 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:11:38", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:08", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:43", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:02", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:58", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:49", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:02", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:52:53", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15125/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 15125, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530727, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530727, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530727, ""]], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-559.json b/cidb/test_data/series_0/metadata-559.json
new file mode 100644
index 0000000..8f6d9bd
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-559.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:54:31 -0700 (PST)"}, "boards": ["samus"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:30:00.955193", "start": "Fri, 04 Jul 2014 20:24:30 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:54:31 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:46", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:15", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:03", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:06", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:25", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:52", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:07:19", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2876/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2876, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530662, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530662, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530662, ""]], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-56.json b/cidb/test_data/series_0/metadata-56.json
new file mode 100644
index 0000000..b62c3a9
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-56.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 18:41:57 -0700 (PST)"}, "boards": ["parrot"], "changes": [], "time": {"duration": "1:28:58.980556", "start": "Sun, 06 Jul 2014 17:12:58 -0700 (PST)", "finish": "Sun, 06 Jul 2014 18:41:57 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:19", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:23", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:36", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:13:12", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:32", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:00:59", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:07:08", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:22", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:07:10", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:48", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:54:26", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 12120, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-560.json b/cidb/test_data/series_0/metadata-560.json
new file mode 100644
index 0000000..48a3fda
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-560.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:57:56 -0700 (PST)"}, "boards": ["panther"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:33:45.737344", "start": "Fri, 04 Jul 2014 20:24:10 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:57:56 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:13", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:52", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:55", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:15", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:15", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:19", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:47", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:10", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2321/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2321, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530643, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530643, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530643, ""]], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-561.json b/cidb/test_data/series_0/metadata-561.json
new file mode 100644
index 0000000..3c4d91c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-561.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 21:43:11 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:17:59.482586", "start": "Fri, 04 Jul 2014 20:25:11 -0700 (PST)", "finish": "Fri, 04 Jul 2014 21:43:11 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": 
"Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:14", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:11:06", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:53", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:58", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:38", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:44", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:47:17", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4840/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:31", "name": "CommitQueueCompletion"}], "build-number": 4840, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530703, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530703, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530703, ""]], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-562.json b/cidb/test_data/series_0/metadata-562.json
new file mode 100644
index 0000000..660a3cf
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-562.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 21:56:20 -0700 (PST)"}, "boards": ["link"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:31:10.714914", "start": "Fri, 04 Jul 2014 20:25:09 -0700 (PST)", "finish": "Fri, 04 Jul 2014 21:56:20 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:12:27", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:02", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:12", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:35", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/Archive/logs/stdio", 
"summary": "Stage was successful", "board": "link", "duration": "0:04:52", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:21", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:00:36", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17432/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17432, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530701, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530701, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530701, ""]], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-563.json b/cidb/test_data/series_0/metadata-563.json
new file mode 100644
index 0000000..aacf063
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-563.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:58:16 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:29:14.375604", "start": "Fri, 04 Jul 2014 20:29:02 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:58:16 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:37", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/Uprev/logs/stdio", "summary": "Stage 
was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:10", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:49", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:32", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": 
"x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/CPEExport/logs/stdio", 
"summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:58", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:34", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2142/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 2142, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530934, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530934, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530934, ""]], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-564.json b/cidb/test_data/series_0/metadata-564.json
new file mode 100644
index 0000000..8780b79
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-564.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 21:08:36 -0700 (PST)"}, "boards": ["falco"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:43:06.306902", "start": "Fri, 04 Jul 2014 20:25:30 -0700 (PST)", "finish": "Fri, 04 Jul 2014 21:08:36 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:43", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:20:55", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:54", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:08", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:09", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:26", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:04", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:38", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:05:04", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:31", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4284/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4284, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530722, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530722, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530722, ""]], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-565.json b/cidb/test_data/series_0/metadata-565.json
new file mode 100644
index 0000000..91b0a51
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-565.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:46:06 -0700 (PST)"}, "boards": ["gizmo"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:21:38.271974", "start": "Fri, 04 Jul 2014 20:24:28 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:46:06 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:58", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:06:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:39", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:00", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:06", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:06", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/825/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 825, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530660, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530660, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530660, ""]], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-566.json b/cidb/test_data/series_0/metadata-566.json
new file mode 100644
index 0000000..8fa062f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-566.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:59:03 -0700 (PST)"}, "boards": ["rambi"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:31:59.181312", "start": "Fri, 04 Jul 2014 20:27:04 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:59:03 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:30", "name": "Uprev"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:12:28", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:16", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:19", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:00", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:42", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:00", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:07:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2322/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "CommitQueueCompletion"}], "build-number": 2322, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530816, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530816, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530816, ""]], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-567.json b/cidb/test_data/series_0/metadata-567.json
new file mode 100644
index 0000000..ab626e1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-567.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:48:01 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:03:23.306472", "start": "Fri, 04 Jul 2014 20:44:38 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:48:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1249/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:03", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1249/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1249/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1249/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1249/steps/Uprev/logs/stdio", "summary": "Stage was successful", 
"board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1249/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:40", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1249/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1249/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1249/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:03", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1249/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1249/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1249, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": 
["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404531868, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404531868, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404531868, ""]], "bot-hostname": "build168-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-568.json b/cidb/test_data/series_0/metadata-568.json
new file mode 100644
index 0000000..7d0a28c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-568.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:43:21 -0700 (PST)"}, "boards": ["duck"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:18:08.084020", "start": "Fri, 04 Jul 2014 20:25:13 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:43:21 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:33", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:43", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:37", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:08", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/UploadTestArtifacts/logs/stdio", 
"summary": "Stage was successful", "board": "duck", "duration": "0:02:08", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1372/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1372, "child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530705, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530705, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530705, ""]], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-569.json b/cidb/test_data/series_0/metadata-569.json
new file mode 100644
index 0000000..940b8a8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-569.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:56:28 -0700 (PST)"}, "boards": ["peppy"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:31:06.218878", "start": "Fri, 04 Jul 2014 20:25:22 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:56:28 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:47", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:16", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:37", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:29", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:04:51", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:12", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:13", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:04:51", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:50", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:46", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:05", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4271/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4271, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530715, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530715, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530715, ""]], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-57.json b/cidb/test_data/series_0/metadata-57.json
new file mode 100644
index 0000000..7a7ed9c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-57.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 18:59:17 -0700 (PST)"}, "boards": ["stumpy"], "changes": [], "time": {"duration": "1:47:06.284377", "start": "Sun, 06 Jul 2014 17:12:11 -0700 (PST)", "finish": "Sun, 06 Jul 2014 18:59:17 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:23", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:52", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:12:46", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:46", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:49", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:12:07", "name": "UploadTestArtifacts"}, {"status": "passed", "description": "** Suite passed with a warning code **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "stumpy", "duration": "1:12:55", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 17445, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-570.json b/cidb/test_data/series_0/metadata-570.json
new file mode 100644
index 0000000..19244ab
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-570.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 21:10:47 -0700 (PST)"}, "boards": ["butterfly"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:46:10.874280", "start": "Fri, 04 Jul 2014 20:24:36 -0700 (PST)", "finish": "Fri, 04 Jul 2014 21:10:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", 
"name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:43", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:23:23", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:53", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:45", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:37", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:43", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:22", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:07", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:05:23", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:57", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11452/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 11452, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530667, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530667, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530667, ""]], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-571.json b/cidb/test_data/series_0/metadata-571.json
new file mode 100644
index 0000000..52dde61
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-571.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 20:58:52 -0700 (PST)"}, "boards": ["monroe"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:29:52.525619", "start": "Fri, 04 Jul 2014 20:28:59 -0700 (PST)", "finish": "Fri, 04 Jul 2014 20:58:52 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:23", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:54", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:50", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:55", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:11", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:05:14", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:40", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:11", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2326/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 2326, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530931, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530931, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530931, ""]], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-572.json b/cidb/test_data/series_0/metadata-572.json
new file mode 100644
index 0000000..719e694
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-572.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Fri, 04 Jul 2014 21:06:38 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [{"patch_number": "1", "total_pass": 0, "gerrit_number": "168144", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "168141", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206678", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:39:15.838685", "start": "Fri, 04 Jul 2014 20:27:22 -0700 (PST)", "finish": "Fri, 04 Jul 2014 21:06:38 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", 
"duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:47", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:33", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:05", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:21", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:10:02", "name": 
"CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:23", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:37", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1371/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1371, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2082.1", "platform": "6020.0.0-rc2", "full": "R38-6020.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.04.174410.tar.xz", "cl_actions": [[{"gerrit_number": "168144", "patch_number": "1", "internal": true}, "picked_up", 1404530833, ""], [{"gerrit_number": "168141", "patch_number": "1", "internal": true}, "picked_up", 1404530833, ""], [{"gerrit_number": "206678", "patch_number": "2", "internal": false}, "picked_up", 1404530833, ""]], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.04.174410", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-573.json b/cidb/test_data/series_0/metadata-573.json
new file mode 100644
index 0000000..e5d8d0b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-573.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:57:03 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [], "time": {"duration": "1:07:05.230199", "start": "Mon, 07 Jul 2014 02:49:57 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:57:03 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:28", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:33", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:36", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:43:17", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:40", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:40", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:01", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:17", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2656/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2656, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-574.json b/cidb/test_data/series_0/metadata-574.json
new file mode 100644
index 0000000..4ebf56e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-574.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:08:50 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [], "time": {"duration": "0:15:05.776827", "start": "Mon, 07 Jul 2014 02:53:44 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:08:50 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:13", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:03", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:38", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:21", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:34", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:04:46", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:28", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2476/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 2476, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-575.json b/cidb/test_data/series_0/metadata-575.json
new file mode 100644
index 0000000..ea4b092
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-575.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 04:25:16 -0700 (PST)"}, "boards": ["wolf"], "changes": [], "time": {"duration": "1:33:50.396498", "start": "Mon, 07 Jul 2014 02:51:25 -0700 (PST)", "finish": "Mon, 07 Jul 2014 04:25:16 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:16", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:46", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:47", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:58", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:42", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": "** Suite 
passed with a warning code **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "wolf", "duration": "1:01:53", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3453/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 3453, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-576.json b/cidb/test_data/series_0/metadata-576.json
new file mode 100644
index 0000000..4d3cc7a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-576.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:28:22 -0700 (PST)"}, "boards": ["leon"], "changes": [], "time": {"duration": "0:34:04.553717", "start": "Mon, 07 Jul 2014 02:54:17 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:28:22 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:37", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:43", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:09:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:46", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:08:03", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:46", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:42", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2891/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 2891, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-577.json b/cidb/test_data/series_0/metadata-577.json
new file mode 100644
index 0000000..15b6516
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-577.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 04:18:22 -0700 (PST)"}, "boards": ["lumpy"], "changes": [], "time": {"duration": "1:25:46.681656", "start": "Mon, 07 Jul 2014 02:52:35 -0700 (PST)", "finish": "Mon, 07 Jul 2014 04:18:22 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:12:49", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:16", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:44", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:37", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:22", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:39", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:10:38", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:52:57", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17404/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17404, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-578.json b/cidb/test_data/series_0/metadata-578.json
new file mode 100644
index 0000000..74ce9b1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-578.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 04:20:47 -0700 (PST)"}, "boards": ["parrot"], "changes": [], "time": {"duration": "1:27:24.549684", "start": "Mon, 07 Jul 2014 02:53:23 -0700 (PST)", "finish": "Mon, 07 Jul 2014 04:20:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:04:54", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:13:03", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:40", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:24", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:42", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:44", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:53:13", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12123/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 12123, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-579.json b/cidb/test_data/series_0/metadata-579.json
new file mode 100644
index 0000000..2ae2be9
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-579.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 04:29:47 -0700 (PST)"}, "boards": ["stumpy"], "changes": [], "time": {"duration": "1:37:27.846868", "start": "Mon, 07 Jul 2014 02:52:19 -0700 (PST)", "finish": "Mon, 07 Jul 2014 04:29:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:29", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:12:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:11", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:47", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:37", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:15", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:39", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:10:24", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:04:47", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17448/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 17448, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-58.json b/cidb/test_data/series_0/metadata-58.json
new file mode 100644
index 0000000..bcc8a57
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-58.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 18:09:31 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [], "time": {"duration": "0:59:00.463116", "start": "Sun, 06 Jul 2014 17:10:30 -0700 (PST)", "finish": "Sun, 06 Jul 2014 18:09:31 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:37", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:10", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:17", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:11", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:13:08", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:39:10", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:08", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:00:53", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:35", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:05", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:36", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CommitQueueCompletion"}], "build-number": 18723, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-580.json b/cidb/test_data/series_0/metadata-580.json
new file mode 100644
index 0000000..d98d727
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-580.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:48:27 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [], "time": {"duration": "0:58:49.443666", "start": "Mon, 07 Jul 2014 02:49:37 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:48:27 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:10", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:25", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:13", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:41", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:38:49", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:04", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:36", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:19", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:09:06", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18726/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CommitQueueCompletion"}], "build-number": 18726, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-581.json b/cidb/test_data/series_0/metadata-581.json
new file mode 100644
index 0000000..9616e6d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-581.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 04:27:46 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [], "time": {"duration": "1:34:26.193923", "start": "Mon, 07 Jul 2014 02:53:20 -0700 (PST)", "finish": "Mon, 07 Jul 2014 04:27:46 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:13", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:44", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:06", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:39", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:00:57", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:26", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:47", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:02", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:01:41", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16887/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 16887, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-582.json b/cidb/test_data/series_0/metadata-582.json
new file mode 100644
index 0000000..5b10450
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-582.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:48:08 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [], "time": {"duration": "0:58:38.354886", "start": "Mon, 07 Jul 2014 02:49:30 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:48:08 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:36", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:10", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:21", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:40", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:38:30", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:59", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:59", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:41", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18522/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:16", "name": "CommitQueueCompletion"}], "build-number": 18522, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-583.json b/cidb/test_data/series_0/metadata-583.json
new file mode 100644
index 0000000..b2b8d4c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-583.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:27:58 -0700 (PST)"}, "boards": ["stout"], "changes": [], "time": {"duration": "0:35:19.489715", "start": "Mon, 07 Jul 2014 02:52:38 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:27:58 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:18", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:38", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:12:24", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:45", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:21", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:22", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:13", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:15", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11469/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:26", "name": "CommitQueueCompletion"}], "build-number": 11469, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-584.json b/cidb/test_data/series_0/metadata-584.json
new file mode 100644
index 0000000..c785988
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-584.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 04:22:12 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [], "time": {"duration": "1:29:32.566604", "start": "Mon, 07 Jul 2014 02:52:39 -0700 (PST)", "finish": "Mon, 07 Jul 2014 04:22:12 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:49", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:31:37", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:07:02", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:56", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:43", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:43", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:09", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:37:35", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4392/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4392, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-585.json b/cidb/test_data/series_0/metadata-585.json
new file mode 100644
index 0000000..76b078b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-585.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 04:23:52 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:31:14.454579", "start": "Mon, 07 Jul 2014 02:52:38 -0700 (PST)", "finish": "Mon, 07 Jul 2014 04:23:52 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:26", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:55", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:04", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:52", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:52", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:59:01", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1900/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1900, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-586.json b/cidb/test_data/series_0/metadata-586.json
new file mode 100644
index 0000000..3ebfbbf
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-586.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:21:28 -0700 (PST)"}, "boards": ["nyan"], "changes": [], "time": {"duration": "0:28:49.070894", "start": "Mon, 07 Jul 2014 02:52:39 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:21:28 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:17", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:11:10", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:36", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:37", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:06:47", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:06:37", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1896/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1896, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-587.json b/cidb/test_data/series_0/metadata-587.json
new file mode 100644
index 0000000..84206c8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-587.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 04:08:31 -0700 (PST)"}, "boards": ["daisy"], "changes": [], "time": {"duration": "1:17:52.091332", "start": "Mon, 07 Jul 2014 02:50:39 -0700 (PST)", "finish": "Mon, 07 Jul 2014 04:08:31 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:11:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:59", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:39", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:18", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:49:25", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15139/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 15139, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-588.json b/cidb/test_data/series_0/metadata-588.json
new file mode 100644
index 0000000..b41c607
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-588.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:23:48 -0700 (PST)"}, "boards": ["samus"], "changes": [], "time": {"duration": "0:33:59.906223", "start": "Mon, 07 Jul 2014 02:49:48 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:23:48 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:57", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:23", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:46", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:10:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:51", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:02", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:53", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:10:21", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2890/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2890, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-589.json b/cidb/test_data/series_0/metadata-589.json
new file mode 100644
index 0000000..88243ff
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-589.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:28:09 -0700 (PST)"}, "boards": ["panther"], "changes": [], "time": {"duration": "0:34:33.335837", "start": "Mon, 07 Jul 2014 02:53:36 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:28:09 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:12", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:28", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:04", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:11", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:08", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:07:05", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:14", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:07:07", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:09:58", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2335/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:34", "name": "CommitQueueCompletion"}], "build-number": 2335, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-59.json b/cidb/test_data/series_0/metadata-59.json
new file mode 100644
index 0000000..77d666b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-59.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 18:53:23 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [], "time": {"duration": "1:38:07.520004", "start": "Sun, 06 Jul 2014 17:15:15 -0700 (PST)", "finish": "Sun, 06 Jul 2014 18:53:23 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:45", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:51", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:13:27", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:15:33", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:26", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:40", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:29", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:04:11", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 16884, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-590.json b/cidb/test_data/series_0/metadata-590.json
new file mode 100644
index 0000000..949e07c
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-590.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 04:09:59 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [], "time": {"duration": "1:17:14.563740", "start": "Mon, 07 Jul 2014 02:52:44 -0700 (PST)", "finish": "Mon, 07 Jul 2014 04:09:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:16", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:11:21", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:12", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:40", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:21", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:22", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:09:23", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:45:46", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4854/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4854, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-591.json b/cidb/test_data/series_0/metadata-591.json
new file mode 100644
index 0000000..778fe36
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-591.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 04:28:02 -0700 (PST)"}, "boards": ["link"], "changes": [], "time": {"duration": "1:31:01.991729", "start": "Mon, 07 Jul 2014 02:57:00 -0700 (PST)", "finish": "Mon, 07 Jul 2014 04:28:02 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:34", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:39", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:46", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:41", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:57", "name": "UploadTestArtifacts"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:58:25", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17446/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:26", "name": "CommitQueueCompletion"}], "build-number": 17446, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-592.json b/cidb/test_data/series_0/metadata-592.json
new file mode 100644
index 0000000..724ce16
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-592.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:25:45 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [], "time": {"duration": "0:33:35.456313", "start": "Mon, 07 Jul 2014 02:52:09 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:25:45 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:43", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:20", "name": "SetupBoard"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:02", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:48", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:57", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:55", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:23", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:25", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:08:23", "name": 
"Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:42", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2156/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2156, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-593.json b/cidb/test_data/series_0/metadata-593.json
new file mode 100644
index 0000000..cb52236
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-593.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:48:46 -0700 (PST)"}, "boards": ["falco"], "changes": [], "time": {"duration": "0:57:20.690219", "start": "Mon, 07 Jul 2014 02:51:26 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:48:46 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:32:24", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:51", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:10:53", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:33", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:26", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:28", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:39", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:09:36", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4298/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 4298, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-594.json b/cidb/test_data/series_0/metadata-594.json
new file mode 100644
index 0000000..fd22920
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-594.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:14:04 -0700 (PST)"}, "boards": ["gizmo"], "changes": [], "time": {"duration": "0:21:47.801451", "start": "Mon, 07 Jul 2014 02:52:16 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:14:04 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:56", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/UnitTest/logs/stdio", "summary": 
"Stage was successful", "board": "gizmo", "duration": "0:07:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:32", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:05", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:10", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:07", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:07", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/839/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], 
"build-number": 839, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-595.json b/cidb/test_data/series_0/metadata-595.json
new file mode 100644
index 0000000..313c343
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-595.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:29:19 -0700 (PST)"}, "boards": ["rambi"], "changes": [], "time": {"duration": "0:35:13.364622", "start": "Mon, 07 Jul 2014 02:54:06 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:29:19 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:30", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:11:45", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:12:10", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:25", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:51", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:39", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2336/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2336, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-596.json b/cidb/test_data/series_0/metadata-596.json
new file mode 100644
index 0000000..ccca040
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-596.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:11:57 -0700 (PST)"}, "boards": ["lumpy"], "changes": [], "time": {"duration": "0:03:07.792518", "start": "Mon, 07 Jul 2014 03:08:49 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:11:57 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1263/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1263/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:31", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1263/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1263/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1263/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1263/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:41", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1263/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1263/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1263/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:49", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1263/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1263/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1263, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build168-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-597.json b/cidb/test_data/series_0/metadata-597.json
new file mode 100644
index 0000000..7e13ccd
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-597.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:14:32 -0700 (PST)"}, "boards": ["duck"], "changes": [], "time": {"duration": "0:20:08.757679", "start": "Mon, 07 Jul 2014 02:54:23 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:14:32 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:36", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:11", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/UnitTest/logs/stdio", "summary": "Stage 
was successful", "board": "duck", "duration": "0:05:32", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:28", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:01", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:00", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:10", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:58", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1386/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1386, 
"child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-598.json b/cidb/test_data/series_0/metadata-598.json
new file mode 100644
index 0000000..0daade3
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-598.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:28:35 -0700 (PST)"}, "boards": ["peppy"], "changes": [], "time": {"duration": "0:36:21.385824", "start": "Mon, 07 Jul 2014 02:52:14 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:28:35 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:12:21", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:36", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:12:34", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:50", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:19", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:32", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:07:53", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:31", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4285/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 4285, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-599.json b/cidb/test_data/series_0/metadata-599.json
new file mode 100644
index 0000000..7020607
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-599.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:50:15 -0700 (PST)"}, "boards": ["butterfly"], "changes": [], "time": {"duration": "0:57:49.449768", "start": "Mon, 07 Jul 2014 02:52:25 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:50:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:45", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:32:07", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:53", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:11:34", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:47", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:44", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:06", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:47", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:10:32", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11466/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11466, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-6.json b/cidb/test_data/series_0/metadata-6.json
new file mode 100644
index 0000000..7cd6da8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-6.json
@@ -0,0 +1 @@
+{"status": {"status": "failed", "summary": "", "current-time": "Sun, 06 Jul 2014 22:45:50 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2655", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2475", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3452", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2890", "reason": "None"}, "lumpy-paladin": {"status": "fail", "message": "lumpy-paladin: The HWTest [bvt_cq] stage failed: ** HWTest failed (code 1) ** in https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17403", "reason": "The HWTest [bvt_cq] stage failed: ** HWTest failed (code 1) **"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12122", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17447", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18725", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16886", "reason": "None"}, 
"amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18521", "reason": "None"}, "stout-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11468", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4391", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1899", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1895", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15138", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2889", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2334", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4853", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17445", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2155", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4297", "reason": "None"}, "stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1384", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2335", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1262", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1385", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4284", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11465", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2339", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/838", "reason": "None"}}, "boards": [], "changes": [{"patch_number": "4", "total_pass": 0, "gerrit_number": "204676", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204677", 
"total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "4", "total_pass": 0, "gerrit_number": "204678", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205535", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205534", "total_fail": 1, "pass": 0, "fail": 1, "internal": false}], "metadata-version": "2", "child-configs": [], "build-number": 1924, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [[{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "picked_up", 1404705132, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "picked_up", 1404705132, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "picked_up", 1404705132, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "picked_up", 1404705132, ""], [{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "picked_up", 1404705132, ""], [{"gerrit_number": "204676", "patch_number": "4", "internal": false}, "kicked_out", 1404711828, ""], [{"gerrit_number": "205534", "patch_number": "1", "internal": false}, "kicked_out", 1404711828, ""], [{"gerrit_number": "204678", "patch_number": "4", "internal": false}, "kicked_out", 1404711829, ""], [{"gerrit_number": "204677", "patch_number": "4", "internal": false}, "kicked_out", 1404711829, ""], [{"gerrit_number": "205535", "patch_number": "1", "internal": false}, "kicked_out", 1404711829, ""]], "builder-name": "CQ master", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc3", "full": "R38-6027.0.0-rc3", "milestone": "38"}, "time": {"duration": "1:52:20.999759", "start": "Sun, 06 Jul 2014 20:53:29 -0700 (PST)", "finish": "Sun, 06 Jul 2014 22:45:50 -0700 (PST)"}, "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "results": [{"status": "passed", "description": 
null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1924/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1924/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1924/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1924/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1924/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1924/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1924/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "failed", "description": "", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1924/steps/CommitQueueCompletion/logs/stdio", "summary": "", "board": "", "duration": "1:46:18", "name": 
"CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1924/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:45", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-60.json b/cidb/test_data/series_0/metadata-60.json
new file mode 100644
index 0000000..7975b28
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-60.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 18:09:25 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [], "time": {"duration": "0:58:54.748251", "start": "Sun, 06 Jul 2014 17:10:30 -0700 (PST)", "finish": "Sun, 06 Jul 2014 18:09:25 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:36", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:08", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:20", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:20", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:38:50", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:08", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:30", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:06:04", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:50", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:14", "name": "CommitQueueCompletion"}], "build-number": 18519, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-600.json b/cidb/test_data/series_0/metadata-600.json
new file mode 100644
index 0000000..979a2ad
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-600.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:27:47 -0700 (PST)"}, "boards": ["monroe"], "changes": [], "time": {"duration": "0:34:02.426815", "start": "Mon, 07 Jul 2014 02:53:45 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:27:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:33", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:41", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:52", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:49", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:51", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:43", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2340/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2340, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-601.json b/cidb/test_data/series_0/metadata-601.json
new file mode 100644
index 0000000..fcadc84
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-601.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 03:30:03 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [], "time": {"duration": "0:39:23.501282", "start": "Mon, 07 Jul 2014 02:50:40 -0700 (PST)", "finish": "Mon, 07 Jul 2014 03:30:03 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:42", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:08", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:21", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:53", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:22", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:10:01", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:24", "name": "Archive"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:45", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1385/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1385, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2084.1", "platform": "6028.0.0-rc1", "full": "R38-6028.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.221428.tar.xz", "cl_actions": [], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.06.221428", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-602.json b/cidb/test_data/series_0/metadata-602.json
new file mode 100644
index 0000000..f3684ec
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-602.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:21:40 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [], "time": {"duration": "1:07:14.424045", "start": "Sat, 05 Jul 2014 02:14:26 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:21:40 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:14", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:34", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:44:39", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:47", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:36", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:01", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:05", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2643/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2643, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-603.json b/cidb/test_data/series_0/metadata-603.json
new file mode 100644
index 0000000..5ad2b3e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-603.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 02:28:29 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [], "time": {"duration": "0:14:57.348335", "start": "Sat, 05 Jul 2014 02:13:32 -0700 (PST)", "finish": "Sat, 05 Jul 2014 02:28:29 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:12", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:01", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:41", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:22", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:30", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:04:36", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:28", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2463/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2463, "child-configs": [], "bot-config": "beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-604.json b/cidb/test_data/series_0/metadata-604.json
new file mode 100644
index 0000000..2e1d000
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-604.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:45:47 -0700 (PST)"}, "boards": ["wolf"], "changes": [], "time": {"duration": "1:31:17.053610", "start": "Sat, 05 Jul 2014 02:14:30 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:45:47 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:12:29", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:54", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:35", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:53", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:53", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:39", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:57", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:41", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:50", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:58:17", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3440/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 3440, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-605.json b/cidb/test_data/series_0/metadata-605.json
new file mode 100644
index 0000000..d64afb1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-605.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 02:47:59 -0700 (PST)"}, "boards": ["leon"], "changes": [], "time": {"duration": "0:33:53.148562", "start": "Sat, 05 Jul 2014 02:14:05 -0700 (PST)", "finish": "Sat, 05 Jul 2014 02:47:59 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:15", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:09", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:53", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:09:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:59", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:36", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:08:01", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:38", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:09:44", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2878/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2878, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-606.json b/cidb/test_data/series_0/metadata-606.json
new file mode 100644
index 0000000..63ded15
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-606.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:39:45 -0700 (PST)"}, "boards": ["lumpy"], "changes": [], "time": {"duration": "1:23:54.254519", "start": "Sat, 05 Jul 2014 02:15:51 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:39:45 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:31", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:12:03", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:12:40", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:25", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:58", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:22", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:06:40", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:10:27", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:50:32", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17391/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 17391, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-607.json b/cidb/test_data/series_0/metadata-607.json
new file mode 100644
index 0000000..5d2ef5b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-607.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:39:06 -0700 (PST)"}, "boards": ["parrot"], "changes": [], "time": {"duration": "1:21:55.453734", "start": "Sat, 05 Jul 2014 02:17:10 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:39:06 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:11", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:59", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:29", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:00:59", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:21", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:06:40", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:34", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:48:58", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12110/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 12110, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-608.json b/cidb/test_data/series_0/metadata-608.json
new file mode 100644
index 0000000..7284a68
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-608.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:49:35 -0700 (PST)"}, "boards": ["stumpy"], "changes": [], "time": {"duration": "1:36:15.214318", "start": "Sat, 05 Jul 2014 02:13:20 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:49:35 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:15", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:12:35", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:11:12", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:58", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:15", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:06:40", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:10:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:03:53", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17435/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 17435, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-609.json b/cidb/test_data/series_0/metadata-609.json
new file mode 100644
index 0000000..37f16ea
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-609.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:11:43 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [], "time": {"duration": "1:00:22.156892", "start": "Sat, 05 Jul 2014 02:11:21 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:11:43 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:08", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:38", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:11", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:10:52", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:19", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:34", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:39:46", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:12", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:35", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:55", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:08:30", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18713/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:16", "name": "CommitQueueCompletion"}], "build-number": 18713, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-61.json b/cidb/test_data/series_0/metadata-61.json
new file mode 100644
index 0000000..8d52547
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-61.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 17:49:08 -0700 (PST)"}, "boards": ["stout"], "changes": [], "time": {"duration": "0:35:10.249292", "start": "Sun, 06 Jul 2014 17:13:58 -0700 (PST)", "finish": "Sun, 06 Jul 2014 17:49:08 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:23", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:34", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:12:26", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:40", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:21", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:00:47", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:21", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:25", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:24", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11466, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-610.json b/cidb/test_data/series_0/metadata-610.json
new file mode 100644
index 0000000..9aae7dc
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-610.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:52:06 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [], "time": {"duration": "1:36:08.401605", "start": "Sat, 05 Jul 2014 02:15:57 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:52:06 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:17", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:52", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:58", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:37", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:00:59", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:23", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:46", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:10:37", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:03:36", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16874/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 16874, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-611.json b/cidb/test_data/series_0/metadata-611.json
new file mode 100644
index 0000000..cfac4a8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-611.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:13:29 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [], "time": {"duration": "0:59:17.324783", "start": "Sat, 05 Jul 2014 02:14:12 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:13:29 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:27", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:10", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:10:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:20", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:39:10", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:09", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:06:04", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:39", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18509/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CommitQueueCompletion"}], "build-number": 18509, "child-configs": [], "bot-config": "amd64-generic-paladin", "builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-612.json b/cidb/test_data/series_0/metadata-612.json
new file mode 100644
index 0000000..e47124b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-612.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 02:50:45 -0700 (PST)"}, "boards": ["stout"], "changes": [], "time": {"duration": "0:35:01.828014", "start": "Sat, 05 Jul 2014 02:15:43 -0700 (PST)", "finish": "Sat, 05 Jul 2014 02:50:45 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:24", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:37", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:12:13", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:49", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:16", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:00:59", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:26", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:03", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:22", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11456/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], "build-number": 11456, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-613.json b/cidb/test_data/series_0/metadata-613.json
new file mode 100644
index 0000000..bcd9951
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-613.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:39:31 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [], "time": {"duration": "1:24:19.117960", "start": "Sat, 05 Jul 2014 02:15:12 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:39:31 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:43", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:32:03", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:53", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:44", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:42", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:42", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:10", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:31:53", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4379/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4379, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-614.json b/cidb/test_data/series_0/metadata-614.json
new file mode 100644
index 0000000..93a4d85
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-614.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:43:57 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:29:44.686099", "start": "Sat, 05 Jul 2014 02:14:12 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:43:57 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:43", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:32", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:06", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:34", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:36", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:49", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:57:31", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1887/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1887, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-615.json b/cidb/test_data/series_0/metadata-615.json
new file mode 100644
index 0000000..c483a84
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-615.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 02:44:48 -0700 (PST)"}, "boards": ["nyan"], "changes": [], "time": {"duration": "0:28:26.603515", "start": "Sat, 05 Jul 2014 02:16:21 -0700 (PST)", "finish": "Sat, 05 Jul 2014 02:44:48 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:11:10", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:48", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:33", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:33", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:06:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1883/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1883, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-616.json b/cidb/test_data/series_0/metadata-616.json
new file mode 100644
index 0000000..323322e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-616.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:33:57 -0700 (PST)"}, "boards": ["daisy"], "changes": [], "time": {"duration": "1:18:08.361285", "start": "Sat, 05 Jul 2014 02:15:48 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:33:57 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:11:21", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:40", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:51", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:04:02", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:38", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:05", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:49:50", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15126/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 15126, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-617.json b/cidb/test_data/series_0/metadata-617.json
new file mode 100644
index 0000000..308afb2
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-617.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 02:50:03 -0700 (PST)"}, "boards": ["samus"], "changes": [], "time": {"duration": "0:33:44.657737", "start": "Sat, 05 Jul 2014 02:16:19 -0700 (PST)", "finish": "Sat, 05 Jul 2014 02:50:03 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:50", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:23", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:56", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:39", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:10:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:00:59", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:07:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:02", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:07:47", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:09:55", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2877/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2877, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-618.json b/cidb/test_data/series_0/metadata-618.json
new file mode 100644
index 0000000..13b1ff3
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-618.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 02:47:02 -0700 (PST)"}, "boards": ["panther"], "changes": [], "time": {"duration": "0:34:13.405485", "start": "Sat, 05 Jul 2014 02:12:49 -0700 (PST)", "finish": "Sat, 05 Jul 2014 02:47:02 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:28", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:54", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:00", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:10", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:35", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:15", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:37", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:09:59", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2322/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2322, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-619.json b/cidb/test_data/series_0/metadata-619.json
new file mode 100644
index 0000000..72f90a6
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-619.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:29:18 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [], "time": {"duration": "1:13:27.717891", "start": "Sat, 05 Jul 2014 02:15:50 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:29:18 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:15", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:11:04", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:47", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:35", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:35", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:09", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:45:27", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4841/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4841, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-62.json b/cidb/test_data/series_0/metadata-62.json
new file mode 100644
index 0000000..84c9b36
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-62.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 18:42:43 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [], "time": {"duration": "1:29:13.737055", "start": "Sun, 06 Jul 2014 17:13:29 -0700 (PST)", "finish": "Sun, 06 Jul 2014 18:42:43 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:42", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:31:56", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:13", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:03", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:18", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:03:48", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:20", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:36:44", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 4389, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-620.json b/cidb/test_data/series_0/metadata-620.json
new file mode 100644
index 0000000..f614479
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-620.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:41:15 -0700 (PST)"}, "boards": ["link"], "changes": [], "time": {"duration": "1:28:27.452410", "start": "Sat, 05 Jul 2014 02:12:48 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:41:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:28", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:52", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:00:58", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:50", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:54", "name": "UploadTestArtifacts"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:56:11", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17433/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17433, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-621.json b/cidb/test_data/series_0/metadata-621.json
new file mode 100644
index 0000000..fb7fbdf
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-621.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 02:49:07 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [], "time": {"duration": "0:34:27.565904", "start": "Sat, 05 Jul 2014 02:14:40 -0700 (PST)", "finish": "Sat, 05 Jul 2014 02:49:07 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:44", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:18", "name": "SetupBoard"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:53", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:48", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:50", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:22", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:50", "name": 
"Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:09:44", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2143/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2143, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-622.json b/cidb/test_data/series_0/metadata-622.json
new file mode 100644
index 0000000..231b358
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-622.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:12:00 -0700 (PST)"}, "boards": ["falco"], "changes": [], "time": {"duration": "0:57:16.807398", "start": "Sat, 05 Jul 2014 02:14:43 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:12:00 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:32:20", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:53", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:10:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:37", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:05", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:32", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:29", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:06:32", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:09:28", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4285/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4285, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-623.json b/cidb/test_data/series_0/metadata-623.json
new file mode 100644
index 0000000..afbd272
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-623.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 02:37:37 -0700 (PST)"}, "boards": ["gizmo"], "changes": [], "time": {"duration": "0:21:45.685069", "start": "Sat, 05 Jul 2014 02:15:51 -0700 (PST)", "finish": "Sat, 05 Jul 2014 02:37:37 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:58", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/UnitTest/logs/stdio", "summary": 
"Stage was successful", "board": "gizmo", "duration": "0:06:59", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:33", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:58", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:09", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:04", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/826/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:17", "name": "CommitQueueCompletion"}], 
"build-number": 826, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-624.json b/cidb/test_data/series_0/metadata-624.json
new file mode 100644
index 0000000..83eba5f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-624.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 02:48:11 -0700 (PST)"}, "boards": ["rambi"], "changes": [], "time": {"duration": "0:34:41.533256", "start": "Sat, 05 Jul 2014 02:13:29 -0700 (PST)", "finish": "Sat, 05 Jul 2014 02:48:11 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:11:07", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:12:23", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:00:57", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:23", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:49", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:19", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2323/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2323, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-625.json b/cidb/test_data/series_0/metadata-625.json
new file mode 100644
index 0000000..a3c77f9
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-625.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 02:34:06 -0700 (PST)"}, "boards": ["lumpy"], "changes": [], "time": {"duration": "0:03:19.263613", "start": "Sat, 05 Jul 2014 02:30:46 -0700 (PST)", "finish": "Sat, 05 Jul 2014 02:34:06 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1250/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1250/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:35", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1250/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1250/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1250/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1250/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:41", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1250/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1250/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1250/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:49", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1250/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1250/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "CommitQueueCompletion"}], "build-number": 1250, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build168-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-626.json b/cidb/test_data/series_0/metadata-626.json
new file mode 100644
index 0000000..65e3541
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-626.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 02:33:54 -0700 (PST)"}, "boards": ["duck"], "changes": [], "time": {"duration": "0:19:59.986617", "start": "Sat, 05 Jul 2014 02:13:54 -0700 (PST)", "finish": "Sat, 05 Jul 2014 02:33:54 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:42", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:35", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/UnitTest/logs/stdio", "summary": "Stage 
was successful", "board": "duck", "duration": "0:05:29", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:31", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:59", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:10", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1373/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 1373, 
"child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-627.json b/cidb/test_data/series_0/metadata-627.json
new file mode 100644
index 0000000..691b2a8
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-627.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 02:50:18 -0700 (PST)"}, "boards": ["peppy"], "changes": [], "time": {"duration": "0:35:37.469356", "start": "Sat, 05 Jul 2014 02:14:40 -0700 (PST)", "finish": "Sat, 05 Jul 2014 02:50:18 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:43", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:33", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:36", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:12:25", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:41", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:24", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:57", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:36", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:12", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:25", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4272/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:31", "name": "CommitQueueCompletion"}], "build-number": 4272, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-628.json b/cidb/test_data/series_0/metadata-628.json
new file mode 100644
index 0000000..b50fc2f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-628.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 03:13:25 -0700 (PST)"}, "boards": ["butterfly"], "changes": [], "time": {"duration": "0:57:49.462043", "start": "Sat, 05 Jul 2014 02:15:35 -0700 (PST)", "finish": "Sat, 05 Jul 2014 03:13:25 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:32:01", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:59", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:11:36", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:46", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:19", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:03", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:04", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:06", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:10:22", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11453/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11453, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-629.json b/cidb/test_data/series_0/metadata-629.json
new file mode 100644
index 0000000..6bdd49e
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-629.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 02:45:43 -0700 (PST)"}, "boards": ["monroe"], "changes": [], "time": {"duration": "0:33:41.617718", "start": "Sat, 05 Jul 2014 02:12:01 -0700 (PST)", "finish": "Sat, 05 Jul 2014 02:45:43 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:15", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:39", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:46", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:00:56", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:41", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:49", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:06:43", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:44", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2327/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 2327, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-63.json b/cidb/test_data/series_0/metadata-63.json
new file mode 100644
index 0000000..61ce165
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-63.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 18:49:13 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "1:36:09.634396", "start": "Sun, 06 Jul 2014 17:13:04 -0700 (PST)", "finish": "Sun, 06 Jul 2014 18:49:13 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:24", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:57", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:45", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:47", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:59", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:03:52", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 1897, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-630.json b/cidb/test_data/series_0/metadata-630.json
new file mode 100644
index 0000000..5fcf42b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-630.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sat, 05 Jul 2014 02:52:37 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [], "time": {"duration": "0:38:56.045898", "start": "Sat, 05 Jul 2014 02:13:41 -0700 (PST)", "finish": "Sat, 05 Jul 2014 02:52:37 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:11:25", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:03", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:17", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:38", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:00:52", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:10:01", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:40", "name": "Archive"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:40", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1372/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1372, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2083.1", "platform": "6021.0.0-rc1", "full": "R38-6021.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.05.002952.tar.xz", "cl_actions": [], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.05.002952", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-64.json b/cidb/test_data/series_0/metadata-64.json
new file mode 100644
index 0000000..0a2221d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-64.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 17:42:00 -0700 (PST)"}, "boards": ["nyan"], "changes": [], "time": {"duration": "0:29:54.637522", "start": "Sun, 06 Jul 2014 17:12:05 -0700 (PST)", "finish": "Sun, 06 Jul 2014 17:42:00 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:16", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:12:03", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:37", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was 
skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:00:40", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:42", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:09", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:42", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:07:01", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1893, "child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-65.json b/cidb/test_data/series_0/metadata-65.json
new file mode 100644
index 0000000..ef37719
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-65.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 18:35:34 -0700 (PST)"}, "boards": ["daisy"], "changes": [], "time": {"duration": "1:21:52.439101", "start": "Sun, 06 Jul 2014 17:13:41 -0700 (PST)", "finish": "Sun, 06 Jul 2014 18:35:34 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:11:23", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:08", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:43", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:00:44", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:59", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:40", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:59", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:53:34", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 15136, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-66.json b/cidb/test_data/series_0/metadata-66.json
new file mode 100644
index 0000000..fb6ff2f
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-66.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 17:48:16 -0700 (PST)"}, "boards": ["samus"], "changes": [], "time": {"duration": "0:34:29.869556", "start": "Sun, 06 Jul 2014 17:13:46 -0700 (PST)", "finish": "Sun, 06 Jul 2014 17:48:16 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:38", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:12:02", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:10:03", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:00:57", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:08:03", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:48", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:10:22", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2887, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-67.json b/cidb/test_data/series_0/metadata-67.json
new file mode 100644
index 0000000..68bec03
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-67.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 17:47:52 -0700 (PST)"}, "boards": ["panther"], "changes": [], "time": {"duration": "0:35:15.249676", "start": "Sun, 06 Jul 2014 17:12:37 -0700 (PST)", "finish": "Sun, 06 Jul 2014 17:47:52 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:52", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:06", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:11:12", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:03", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:44", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:16", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:46", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:09", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2332, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-68.json b/cidb/test_data/series_0/metadata-68.json
new file mode 100644
index 0000000..069bc3b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-68.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 18:32:24 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [], "time": {"duration": "1:19:19.025673", "start": "Sun, 06 Jul 2014 17:13:05 -0700 (PST)", "finish": "Sun, 06 Jul 2014 18:32:24 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:16", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:11:57", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:56", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:42", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:15", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:03:40", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:07:18", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:49:14", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4851, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-69.json b/cidb/test_data/series_0/metadata-69.json
new file mode 100644
index 0000000..b30c062
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-69.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 18:50:00 -0700 (PST)"}, "boards": ["link"], "changes": [], "time": {"duration": "1:35:28.526608", "start": "Sun, 06 Jul 2014 17:14:32 -0700 (PST)", "finish": "Sun, 06 Jul 2014 18:50:00 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:45", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:23", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:47", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:00:53", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:35", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:37", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:29", "name": "UploadTestArtifacts"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:02:45", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 17443, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-7.json b/cidb/test_data/series_0/metadata-7.json
new file mode 100644
index 0000000..4c3d9c4
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-7.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 20:51:01 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2654", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2474", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3451", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2889", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17402", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12121", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17446", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18724", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16885", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18520", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11467", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4390", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1898", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1894", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15137", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2888", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2333", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4852", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17444", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2154", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4296", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1383", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2334", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1261", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1384", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4283", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11464", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2338", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/837", "reason": "None"}}, "boards": [], "changes": [{"patch_number": "3", "total_pass": 1, "gerrit_number": "205753", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}], "metadata-version": "2", "child-configs": [], "build-number": 1923, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [[{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "picked_up", 1404698495, ""], [{"gerrit_number": "205753", "patch_number": "3", "internal": false}, "submitted", 1404704993, ""]], 
"builder-name": "CQ master", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc2", "full": "R38-6027.0.0-rc2", "milestone": "38"}, "time": {"duration": "1:48:19.792521", "start": "Sun, 06 Jul 2014 19:02:41 -0700 (PST)", "finish": "Sun, 06 Jul 2014 20:51:01 -0700 (PST)"}, "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1923/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1923/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:26", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1923/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1923/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:55", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1923/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1923/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1923/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:05", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1923/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:43:44", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1923/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-70.json b/cidb/test_data/series_0/metadata-70.json
new file mode 100644
index 0000000..70127ba
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-70.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 17:48:06 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [], "time": {"duration": "0:33:49.763185", "start": "Sun, 06 Jul 2014 17:14:16 -0700 (PST)", "finish": "Sun, 06 Jul 2014 17:48:06 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:19", "name": "SetupBoard"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:00", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:49", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:10", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:51", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:19", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:06:48", "name": 
"Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:09:47", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2153, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-71.json b/cidb/test_data/series_0/metadata-71.json
new file mode 100644
index 0000000..090f2ed
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-71.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 18:08:17 -0700 (PST)"}, "boards": ["falco"], "changes": [], "time": {"duration": "0:57:17.078360", "start": "Sun, 06 Jul 2014 17:11:00 -0700 (PST)", "finish": "Sun, 06 Jul 2014 18:08:17 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:42", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:32:15", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:10:52", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:08:31", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:12", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:15", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:31", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:07:15", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:09:33", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 4295, "child-configs": [], "bot-config": "falco-paladin", "builder-name": "falco paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build148-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-72.json b/cidb/test_data/series_0/metadata-72.json
new file mode 100644
index 0000000..c477471
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-72.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 17:33:52 -0700 (PST)"}, "boards": ["gizmo"], "changes": [], "time": {"duration": "0:22:00.008460", "start": "Sun, 06 Jul 2014 17:11:52 -0700 (PST)", "finish": "Sun, 06 Jul 2014 17:33:52 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:08", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:05:57", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:00", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/UnitTest/logs/stdio", "summary": 
"Stage was successful", "board": "gizmo", "duration": "0:07:05", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:00:32", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "gizmo", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:25", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:03:08", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:01:27", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "gizmo", "duration": "0:02:06", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "CommitQueueCompletion"}], 
"build-number": 836, "child-configs": [], "bot-config": "gizmo-paladin", "builder-name": "gizmo paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build118-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-73.json b/cidb/test_data/series_0/metadata-73.json
new file mode 100644
index 0000000..e9b169a
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-73.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 17:46:50 -0700 (PST)"}, "boards": ["rambi"], "changes": [], "time": {"duration": "0:35:03.964721", "start": "Sun, 06 Jul 2014 17:11:46 -0700 (PST)", "finish": "Sun, 06 Jul 2014 17:46:50 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:59", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:30", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:11:30", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:12:21", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": 
"Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:00:54", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:40", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:08:23", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:06:42", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:11:21", "name": "UploadTestArtifacts"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2333, "child-configs": [], "bot-config": "rambi-paladin", "builder-name": "rambi paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build144-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-74.json b/cidb/test_data/series_0/metadata-74.json
new file mode 100644
index 0000000..c002676
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-74.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 17:33:38 -0700 (PST)"}, "boards": ["lumpy"], "changes": [], "time": {"duration": "0:03:10.535557", "start": "Sun, 06 Jul 2014 17:30:28 -0700 (PST)", "finish": "Sun, 06 Jul 2014 17:33:38 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1260/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1260/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:16:37", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1260/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1260/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1260/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1260/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:40", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1260/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1260/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1260/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1260/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1260/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1260, "child-configs": [], "bot-config": "lumpy-incremental-paladin", "builder-name": "lumpy incremental-paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build168-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-75.json b/cidb/test_data/series_0/metadata-75.json
new file mode 100644
index 0000000..abe6e34
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-75.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 17:32:20 -0700 (PST)"}, "boards": ["duck"], "changes": [], "time": {"duration": "0:20:07.339253", "start": "Sun, 06 Jul 2014 17:12:12 -0700 (PST)", "finish": "Sun, 06 Jul 2014 17:32:20 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:48", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:39", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:08", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/UnitTest/logs/stdio", "summary": "Stage 
was successful", "board": "duck", "duration": "0:05:28", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:00:30", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:00", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:59", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:01:22", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:02:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 1383, 
"child-configs": [], "bot-config": "duck-paladin", "builder-name": "duck paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build169-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-76.json b/cidb/test_data/series_0/metadata-76.json
new file mode 100644
index 0000000..c77b979
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-76.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 17:48:33 -0700 (PST)"}, "boards": ["peppy"], "changes": [], "time": {"duration": "0:35:35.227984", "start": "Sun, 06 Jul 2014 17:12:57 -0700 (PST)", "finish": "Sun, 06 Jul 2014 17:48:33 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:47", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:52", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:19", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:05:41", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:12:34", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:44", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/Paygen%20%28attempt%201%29/logs/stdio", 
"summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:10:49", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:46", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:06:46", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:34", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:26", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:11:46", "name": "UploadTestArtifacts"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:28", "name": "CommitQueueCompletion"}], "build-number": 4282, "child-configs": [], "bot-config": "peppy-paladin", "builder-name": "peppy paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build146-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-77.json b/cidb/test_data/series_0/metadata-77.json
new file mode 100644
index 0000000..3a7e833
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-77.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 18:10:31 -0700 (PST)"}, "boards": ["butterfly"], "changes": [], "time": {"duration": "0:58:00.806378", "start": "Sun, 06 Jul 2014 17:12:30 -0700 (PST)", "finish": "Sun, 06 Jul 2014 18:10:31 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:46", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:53", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:51", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:32:04", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:04:55", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:11:37", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:41", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:07:46", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:08:02", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:51", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:10:30", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 11463, "child-configs": [], "bot-config": "butterfly-paladin", "builder-name": "butterfly paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build129-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-78.json b/cidb/test_data/series_0/metadata-78.json
new file mode 100644
index 0000000..54abceb
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-78.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 17:48:45 -0700 (PST)"}, "boards": ["monroe"], "changes": [], "time": {"duration": "0:33:55.516545", "start": "Sun, 06 Jul 2014 17:14:50 -0700 (PST)", "finish": "Sun, 06 Jul 2014 17:48:45 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:49", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:54", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:21", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:11:44", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:49", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:00:50", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:13", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:50", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:07:15", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:09:48", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2337, "child-configs": [], "bot-config": "monroe-paladin", "builder-name": "monroe paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build142-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-79.json b/cidb/test_data/series_0/metadata-79.json
new file mode 100644
index 0000000..48d6178
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-79.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 17:54:09 -0700 (PST)"}, "boards": ["stumpy_moblab"], "changes": [], "time": {"duration": "0:39:44.629400", "start": "Sun, 06 Jul 2014 17:14:25 -0700 (PST)", "finish": "Sun, 06 Jul 2014 17:54:09 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:51", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:12:00", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:07:06", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:17", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:13:49", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:00:49", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:29", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:09:59", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:06:31", "name": "Archive"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:14:50", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1382, "child-configs": [], "bot-config": "stumpy_moblab-paladin", "builder-name": "stumpy_moblab paladin", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.06.151853.tar.xz", "cl_actions": [], "bot-hostname": "build140-m2.golo.chromium.org", "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-8.json b/cidb/test_data/series_0/metadata-8.json
new file mode 100644
index 0000000..3136b40
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-8.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 19:00:36 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2653", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2473", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3450", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2888", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17401", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12120", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17445", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18723", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16884", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18519", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11466", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4389", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1897", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1893", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15136", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2887", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2332", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4851", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17443", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2153", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4295", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1382", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2333", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1260", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1383", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4282", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11463", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2337", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/836", "reason": "None"}}, "boards": [], "changes": [], "metadata-version": "2", "child-configs": [], "build-number": 1922, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [], "builder-name": "CQ master", "version": {"chrome": "38.0.2083.1", "platform": "6027.0.0-rc1", "full": "R38-6027.0.0-rc1", "milestone": "38"}, "time": {"duration": "1:51:02.658595", "start": "Sun, 06 Jul 2014 17:09:33 -0700 (PST)", "finish": "Sun, 06 Jul 2014 19:00:36 -0700 (PST)"}, "toolchain-url": 
"2014/07/%(target)s-2014.07.06.151853.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1922/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:03", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1922/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "4:04:24", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1922/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1922/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1922/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1922/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1922/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1922/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:46:24", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1922/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.06.151853", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-80.json b/cidb/test_data/series_0/metadata-80.json
new file mode 100644
index 0000000..7b9de44
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-80.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:45:38 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:09:36.909213", "start": "Mon, 07 Jul 2014 12:36:02 -0700 (PST)", "finish": "Mon, 07 
Jul 2014 13:45:38 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:54", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:05:34", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:09", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:44:37", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:12:33", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:25", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:07", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:38", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:58", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:55", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2660/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:25", "name": "CommitQueueCompletion"}], "build-number": 2660, "child-configs": [], "bot-config": "x86-mario-paladin", "builder-name": "x86-mario paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761736, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761736, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761736, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761736, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761736, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761736, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761736, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761736, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761736, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761736, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761736, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761736, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761736, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761736, ""]], 
"bot-hostname": "build143-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-81.json b/cidb/test_data/series_0/metadata-81.json
new file mode 100644
index 0000000..7f5cdc6
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-81.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 12:48:16 -0700 (PST)"}, "boards": ["beaglebone"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:16:18.148230", "start": "Mon, 07 Jul 2014 12:31:57 -0700 (PST)", "finish": "Mon, 
07 Jul 2014 12:48:16 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:05", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:14", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:13", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:03:02", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:44", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:27", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:00:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:33", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:04:41", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:31", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2480/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 2480, "child-configs": [], "bot-config": 
"beaglebone-paladin", "builder-name": "beaglebone paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761491, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761491, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761491, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761491, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761491, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761491, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761491, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761491, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761491, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761491, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761491, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761491, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761491, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761491, ""]], "bot-hostname": "build163-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-82.json b/cidb/test_data/series_0/metadata-82.json
new file mode 100644
index 0000000..bc003bc
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-82.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 14:36:41 -0700 (PST)"}, "boards": ["wolf"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "2:02:38.383424", "start": "Mon, 07 Jul 2014 12:34:02 -0700 (PST)", "finish": "Mon, 07 Jul 
2014 14:36:41 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:13", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:59", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:12:24", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:04:57", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:11:20", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/UnitTest/logs/stdio", "summary": 
"Stage was successful", "board": "wolf", "duration": "0:09:28", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:28", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:48", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:37", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:06:48", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:09:34", "name": "UploadTestArtifacts"}, {"status": "passed", "description": "** Suite passed with a warning code **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "wolf", "duration": "1:29:44", 
"name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3457/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 3457, "child-configs": [], "bot-config": "wolf-paladin", "builder-name": "wolf paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761617, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761617, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761617, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761617, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761617, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761617, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761617, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761617, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761617, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761617, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761617, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761617, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761617, ""], [{"gerrit_number": "206792", 
"patch_number": "2", "internal": false}, "picked_up", 1404761617, ""]], "bot-hostname": "build145-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-83.json b/cidb/test_data/series_0/metadata-83.json
new file mode 100644
index 0000000..4541814
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-83.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:09:14 -0700 (PST)"}, "boards": ["leon"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:35:56.779488", "start": "Mon, 07 Jul 2014 12:33:17 -0700 (PST)", "finish": "Mon, 07 Jul 
2014 13:09:14 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:01", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:13:08", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:05:02", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:11:28", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/UnitTest/logs/stdio", "summary": 
"Stage was successful", "board": "leon", "duration": "0:09:41", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:18", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:57", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:07:41", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:06:57", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:09:50", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2895/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:34", "name": "CommitQueueCompletion"}], "build-number": 
2895, "child-configs": [], "bot-config": "leon-paladin", "builder-name": "leon paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761572, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761572, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761572, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761572, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761572, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761572, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761572, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761572, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761572, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761572, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761572, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761572, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761572, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761572, ""]], "bot-hostname": "build115-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-84.json b/cidb/test_data/series_0/metadata-84.json
new file mode 100644
index 0000000..d9235c0
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-84.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 14:24:15 -0700 (PST)"}, "boards": ["lumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:50:47.196317", "start": "Mon, 07 Jul 2014 12:33:28 -0700 (PST)", "finish": "Mon, 07 Jul 
2014 14:24:15 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:11", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:59", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:12:39", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:05:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:12:10", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:11:09", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:01:27", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:07:20", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:08:24", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:07:22", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:10:19", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "1:16:46", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17408/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17408, "child-configs": [], "bot-config": "lumpy-paladin", "builder-name": "lumpy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761580, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761580, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761580, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761580, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761580, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761580, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761580, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761580, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761580, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761580, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761580, ""], [{"gerrit_number": 
"205561", "patch_number": "5", "internal": false}, "picked_up", 1404761580, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761580, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761580, ""]], "bot-hostname": "build134-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-85.json b/cidb/test_data/series_0/metadata-85.json
new file mode 100644
index 0000000..338e9d1
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-85.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 14:22:09 -0700 (PST)"}, "boards": ["parrot"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:50:44.251715", "start": "Mon, 07 Jul 2014 12:31:25 -0700 (PST)", "finish": "Mon, 07 
Jul 2014 14:22:09 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:52", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:33", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:32", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:05:04", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:37", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:12:23", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:23", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:07:11", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:08:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:07:13", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:10:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "1:15:50", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12127/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 12127, "child-configs": [], "bot-config": "parrot-paladin", "builder-name": "parrot paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761455, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761455, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761455, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761455, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761455, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761455, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761455, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761455, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761455, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761455, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761455, ""], 
[{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761455, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761455, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761455, ""]], "bot-hostname": "build137-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-86.json b/cidb/test_data/series_0/metadata-86.json
new file mode 100644
index 0000000..e524d74
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-86.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 14:42:12 -0700 (PST)"}, "boards": ["stumpy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "2:07:30.704551", "start": "Mon, 07 Jul 2014 12:34:41 -0700 (PST)", "finish": "Mon, 07 
Jul 2014 14:42:12 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:12", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:13", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:00", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:21", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:12:22", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:04:58", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:12:19", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:10:58", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:28", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:07:01", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:08:17", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:07:04", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:10:39", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:33:32", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17452/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17452, "child-configs": [], "bot-config": "stumpy-paladin", "builder-name": "stumpy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761652, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761652, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761652, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761652, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761652, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761652, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761652, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761652, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761652, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761652, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761652, ""], 
[{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761652, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761652, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761652, ""]], "bot-hostname": "build124-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-87.json b/cidb/test_data/series_0/metadata-87.json
new file mode 100644
index 0000000..9e816ef
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-87.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:31:02 -0700 (PST)"}, "boards": ["x86-generic"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:00:56.632777", "start": "Mon, 07 Jul 2014 12:30:05 -0700 (PST)", "finish": "Mon, 07 Jul 2014 13:31:02 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/CleanUp/logs/stdio", "summary": 
"Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:32", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:18", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:11:38", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:05:19", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:12:30", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:39:13", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:11:46", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:01:06", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:04:54", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:37", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:06:22", "name": "Archive"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-generic", "duration": "0:09:29", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18730/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:15", "name": "CommitQueueCompletion"}], "build-number": 18730, "child-configs": [], "bot-config": "x86-generic-paladin", "builder-name": "x86-generic 
paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761376, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761376, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761376, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761376, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761376, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761376, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761376, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761376, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761376, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761376, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761376, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761376, ""]], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-88.json b/cidb/test_data/series_0/metadata-88.json
new file mode 100644
index 0000000..19550bf
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-88.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 14:45:30 -0700 (PST)"}, "boards": ["x86-alex"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206593", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206363", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "168185", "total_fail": 0, "pass": 1, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "167757", "total_fail": 0, "pass": 1, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206504", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206800", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "206546", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 1, "gerrit_number": "205980", "total_fail": 2, "pass": 1, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 1, "gerrit_number": "205561", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 1, "gerrit_number": "205472", "total_fail": 5, "pass": 1, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 1, "gerrit_number": "206792", "total_fail": 0, "pass": 1, "fail": 0, "internal": false}], "time": {"duration": "2:10:26.725721", "start": "Mon, 07 Jul 2014 12:35:03 -0700 (PST)", "finish": "Mon, 07 
Jul 2014 14:45:30 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:12", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:57", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:10", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:04:54", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:46", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:12:33", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:22", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:50", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:07:27", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:06:52", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:10:36", "name": "UploadTestArtifacts"}, {"status": "passed", "description": "** Suite passed with a warning code **", "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "x86-alex", "duration": "1:36:52", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16891/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 16891, "child-configs": [], "bot-config": "x86-alex-paladin", "builder-name": "x86-alex paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761676, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761676, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761676, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761676, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761676, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761676, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761676, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761676, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761676, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761676, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761676, ""], [{"gerrit_number": 
"205561", "patch_number": "5", "internal": false}, "picked_up", 1404761676, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761676, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761676, ""]], "bot-hostname": "build133-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-89.json b/cidb/test_data/series_0/metadata-89.json
new file mode 100644
index 0000000..92a43c6
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-89.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:31:11 -0700 (PST)"}, "boards": ["amd64-generic"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:01:10.232066", "start": "Mon, 07 Jul 2014 12:30:00 -0700 (PST)", "finish": "Mon, 07 Jul 2014 13:31:11 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/CleanUp/logs/stdio", "summary": 
"Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:01", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:34", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:29", "name": "Uprev"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:39", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:28", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:13:42", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:39:07", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:11:02", "name": "UnitTest"}, {"status": "passed", "description": null, "log": 
"http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:01:09", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "amd64-generic", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:04:52", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:07:32", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:05:56", "name": "Archive"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "amd64-generic", "duration": "0:08:40", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18526/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 18526, "child-configs": [], "bot-config": "amd64-generic-paladin", 
"builder-name": "amd64-generic paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761375, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761375, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761375, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761375, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761375, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761375, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761375, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761375, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761375, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761375, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761375, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761375, ""]], "bot-hostname": "build141-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-9.json b/cidb/test_data/series_0/metadata-9.json
new file mode 100644
index 0000000..c0ac05b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-9.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Sun, 06 Jul 2014 13:04:28 -0700 (PST)"}, "slave_targets": {"x86-mario-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20paladin/builds/2652", "reason": "None"}, "beaglebone-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20paladin/builds/2472", "reason": "None"}, "wolf-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/wolf%20paladin/builds/3449", "reason": "None"}, "leon-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/leon%20paladin/builds/2887", "reason": "None"}, "lumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20paladin/builds/17400", "reason": "None"}, "parrot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/parrot%20paladin/builds/12119", "reason": "None"}, "stumpy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy%20paladin/builds/17444", "reason": "None"}, "x86-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/x86-generic%20paladin/builds/18722", "reason": "None"}, "x86-alex-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-alex%20paladin/builds/16883", "reason": "None"}, "amd64-generic-paladin": {"status": "pass", "message": "None", "dashboard_url": "http://build.chromium.org/p/chromiumos/builders/amd64-generic%20paladin/builds/18518", "reason": "None"}, "stout-paladin": {"status": "pass", 
"message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11465", "reason": "None"}, "daisy_spring-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4388", "reason": "None"}, "link-tot-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1896", "reason": "None"}, "nyan-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1892", "reason": "None"}, "daisy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15135", "reason": "None"}, "samus-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2886", "reason": "None"}, "panther-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2331", "reason": "None"}, "peach_pit-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4850", "reason": "None"}, "link-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17442", "reason": "None"}, "x86-mario-nowithdebug-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2152", "reason": "None"}, "falco-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/falco%20paladin/builds/4294", "reason": "None"}, 
"stumpy_moblab-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20paladin/builds/1381", "reason": "None"}, "rambi-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi%20paladin/builds/2332", "reason": "None"}, "lumpy-incremental-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/lumpy%20incremental-paladin/builds/1259", "reason": "None"}, "duck-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20paladin/builds/1382", "reason": "None"}, "peppy-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peppy%20paladin/builds/4281", "reason": "None"}, "butterfly-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/butterfly%20paladin/builds/11462", "reason": "None"}, "monroe-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/monroe%20paladin/builds/2336", "reason": "None"}, "gizmo-paladin": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/gizmo%20paladin/builds/835", "reason": "None"}}, "boards": [], "changes": [{"patch_number": "2", "total_pass": 1, "gerrit_number": "206113", "total_fail": 2, "pass": 1, "fail": 2, "internal": false}], "metadata-version": "2", "child-configs": [], "build-number": 1921, "bot-hostname": "build170-m2.golo.chromium.org", "bot-config": "master-paladin", "cl_actions": [[{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "picked_up", 1404670724, ""], [{"gerrit_number": "206113", "patch_number": "2", "internal": false}, "submitted", 1404677003, ""]], 
"builder-name": "CQ master", "version": {"chrome": "38.0.2083.1", "platform": "6026.0.0-rc1", "full": "R38-6026.0.0-rc1", "milestone": "38"}, "time": {"duration": "1:44:26.969773", "start": "Sun, 06 Jul 2014 11:20:01 -0700 (PST)", "finish": "Sun, 06 Jul 2014 13:04:28 -0700 (PST)"}, "toolchain-url": "2014/07/%(target)s-2014.07.06.083123.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1921/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1921/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:38:28", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1921/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1921/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:56", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1921/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1921/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1921/steps/MasterUploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "MasterUploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1921/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "1:39:48", "name": "CommitQueueCompletion"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/CQ%20master/builds/1921/steps/PublishUprevChanges/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "PublishUprevChanges"}], "sdk-version": "2014.07.06.083123", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-90.json b/cidb/test_data/series_0/metadata-90.json
new file mode 100644
index 0000000..48cadf7
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-90.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:10:06 -0700 (PST)"}, "boards": ["stout"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:35:57.086360", "start": "Mon, 07 Jul 2014 12:34:09 -0700 (PST)", "finish": "Mon, 07 Jul 
2014 13:10:06 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:14", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:12:20", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:05:48", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:11:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:24", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:10:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:23", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:17", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:02", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:07:36", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:09:53", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/stout%20paladin/builds/11473/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": "CommitQueueCompletion"}], "build-number": 11473, "child-configs": [], "bot-config": "stout-paladin", "builder-name": "stout paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761620, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761620, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761620, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761620, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761620, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761620, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761620, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761620, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761620, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761620, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761620, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761620, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761620, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761620, ""]], 
"bot-hostname": "build139-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-91.json b/cidb/test_data/series_0/metadata-91.json
new file mode 100644
index 0000000..015d20d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-91.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 14:29:23 -0700 (PST)"}, "boards": ["daisy_spring"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:54:29.683575", "start": "Mon, 07 Jul 2014 12:34:54 -0700 (PST)", "finish": "Mon, 
07 Jul 2014 14:29:23 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:11", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:00", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:44", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:31:52", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:08", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:05:57", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:09", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy_spring", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:05", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:15", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:04:05", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "0:06:29", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy_spring", "duration": "1:01:37", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy_spring%20paladin/builds/4396/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CommitQueueCompletion"}], "build-number": 4396, "child-configs": [], "bot-config": "daisy_spring-paladin", "builder-name": "daisy_spring paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761667, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761667, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761667, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761667, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761667, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761667, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761667, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761667, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761667, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761667, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761667, ""], 
[{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761667, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761667, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761667, ""]], "bot-hostname": "build131-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-92.json b/cidb/test_data/series_0/metadata-92.json
new file mode 100644
index 0000000..4b6994b
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-92.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 14:34:12 -0700 (PST)"}, "boards": ["link"], "time": {"duration": "2:00:06.505407", "start": "Mon, 07 Jul 2014 12:34:06 -0700 (PST)", "finish": "Mon, 07 Jul 2014 14:34:12 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/MasterSlaveSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:45", "name": "MasterSlaveSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:06", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:39", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:07", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:12:01", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:08", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:55", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:08:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:57", "name": "Archive"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:10:08", "name": "UploadTestArtifacts"}, {"status": "passed", "description": "** Suite passed with a warning code **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "link", "duration": "1:27:20", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20tot-paladin/builds/1904/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 1904, "child-configs": [], "bot-config": "link-tot-paladin", "builder-name": "link tot-paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [], "bot-hostname": "build135-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-93.json b/cidb/test_data/series_0/metadata-93.json
new file mode 100644
index 0000000..b5b9660
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-93.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:02:49 -0700 (PST)"}, "boards": ["nyan"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:30:31.847315", "start": "Mon, 07 Jul 2014 12:32:18 -0700 (PST)", "finish": "Mon, 07 Jul 
2014 13:02:49 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:14", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:00", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:17", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:12:00", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:10", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:05:51", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/UnitTest/logs/stdio", "summary": 
"Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:01:20", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:00", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:13", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:00", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:07:26", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20paladin/builds/1900/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CommitQueueCompletion"}], "build-number": 1900, 
"child-configs": [], "bot-config": "nyan-paladin", "builder-name": "nyan paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761509, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761509, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761509, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761509, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761509, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761509, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761509, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761509, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761509, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761509, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761509, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761509, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761509, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761509, ""]], "bot-hostname": "build125-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-94.json b/cidb/test_data/series_0/metadata-94.json
new file mode 100644
index 0000000..29d5d5d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-94.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 14:20:12 -0700 (PST)"}, "boards": ["daisy"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:47:28.064349", "start": "Mon, 07 Jul 2014 12:32:44 -0700 (PST)", "finish": "Mon, 07 Jul 
2014 14:20:12 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:10", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:12:37", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:12", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:05:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:01:09", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "daisy", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:37", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:54", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:03:37", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "0:06:38", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "daisy", "duration": "1:16:59", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20paladin/builds/15143/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:24", "name": "CommitQueueCompletion"}], "build-number": 15143, "child-configs": [], "bot-config": "daisy-paladin", "builder-name": "daisy paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761535, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761535, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "205561", 
"patch_number": "5", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761535, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761535, ""]], "bot-hostname": "build132-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-95.json b/cidb/test_data/series_0/metadata-95.json
new file mode 100644
index 0000000..b1faf29
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-95.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:09:41 -0700 (PST)"}, "boards": ["samus"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:34:47.949708", "start": "Mon, 07 Jul 2014 12:34:53 -0700 (PST)", "finish": "Mon, 07 Jul 
2014 13:09:41 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:12", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:12:11", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:05", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:11:33", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/UnitTest/logs/stdio", 
"summary": "Stage was successful", "board": "samus", "duration": "0:09:50", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:25", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:37", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:07:46", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:39", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:09:49", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20paladin/builds/2894/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:18", "name": 
"CommitQueueCompletion"}], "build-number": 2894, "child-configs": [], "bot-config": "samus-paladin", "builder-name": "samus paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761665, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761665, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761665, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761665, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761665, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761665, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761665, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761665, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761665, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761665, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761665, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761665, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761665, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761665, ""]], "bot-hostname": "build164-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-96.json b/cidb/test_data/series_0/metadata-96.json
new file mode 100644
index 0000000..c4468fa
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-96.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:07:36 -0700 (PST)"}, "boards": ["panther"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:35:12.691033", "start": "Mon, 07 Jul 2014 12:32:23 -0700 (PST)", "finish": "Mon, 07 
Jul 2014 13:07:36 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:11", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:01", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:05", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:05:01", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:12:04", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:54", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:01:25", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:49", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:07", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:51", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:10:07", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/panther%20paladin/builds/2339/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2339, "child-configs": [], "bot-config": "panther-paladin", "builder-name": "panther paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761517, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761517, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761517, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761517, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761517, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761517, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761517, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761517, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761517, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761517, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761517, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761517, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761517, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761517, 
""]], "bot-hostname": "build138-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-97.json b/cidb/test_data/series_0/metadata-97.json
new file mode 100644
index 0000000..ddf15b0
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-97.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 14:14:40 -0700 (PST)"}, "boards": ["peach_pit"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:40:05.668988", "start": "Mon, 07 Jul 2014 12:34:34 -0700 (PST)", "finish": "Mon, 07 
Jul 2014 14:14:40 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:09", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:15", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:59", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:17", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:12:02", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:10", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:04", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:05", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:04", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:12", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:04", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:06:24", "name": "UploadTestArtifacts"}, {"status": "passed", "description": "** Suite passed with a warning code **", "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "peach_pit", "duration": "1:10:33", "name": "HWTest [bvt_cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach_pit%20paladin/builds/4858/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:22", "name": "CommitQueueCompletion"}], "build-number": 4858, "child-configs": [], "bot-config": "peach_pit-paladin", "builder-name": "peach_pit paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761649, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761649, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761649, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761649, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761649, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761649, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761649, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761649, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761649, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761649, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761649, ""], 
[{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761649, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761649, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761649, ""]], "bot-hostname": "build147-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-98.json b/cidb/test_data/series_0/metadata-98.json
new file mode 100644
index 0000000..875757d
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-98.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 14:32:25 -0700 (PST)"}, "boards": ["link"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "1:59:05.928804", "start": "Mon, 07 Jul 2014 12:33:19 -0700 (PST)", "finish": "Mon, 07 Jul 
2014 14:32:25 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:11", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:11", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:02", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:07", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:20", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:12:29", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:10", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:36", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/UnitTest/logs/stdio", 
"summary": "Stage was successful", "board": "link", "duration": "0:09:44", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:26", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:47", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:07:51", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:49", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:09:46", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/HWTest%20%5Bbvt_cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:25:42", "name": "HWTest [bvt_cq]"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20paladin/builds/17450/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 17450, "child-configs": [], "bot-config": "link-paladin", "builder-name": "link paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761574, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761574, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761574, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761574, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761574, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761574, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761574, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761574, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761574, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761574, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761574, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761574, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": false}, "picked_up", 1404761574, ""], [{"gerrit_number": "206792", 
"patch_number": "2", "internal": false}, "picked_up", 1404761574, ""]], "bot-hostname": "build127-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_0/metadata-99.json b/cidb/test_data/series_0/metadata-99.json
new file mode 100644
index 0000000..2ecd029
--- /dev/null
+++ b/cidb/test_data/series_0/metadata-99.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 07 Jul 2014 13:08:01 -0700 (PST)"}, "boards": ["x86-mario"], "changes": [{"patch_number": "2", "total_pass": 0, "gerrit_number": "206186", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206187", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206188", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206593", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206363", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "168185", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "167757", "total_fail": 0, "pass": 0, "fail": 0, "internal": true}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206504", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206800", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "206546", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205980", "total_fail": 2, "pass": 0, "fail": 2, "internal": false}, {"patch_number": "5", "total_pass": 0, "gerrit_number": "205561", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}, {"patch_number": "1", "total_pass": 0, "gerrit_number": "205472", "total_fail": 5, "pass": 0, "fail": 5, "internal": false}, {"patch_number": "2", "total_pass": 0, "gerrit_number": "206792", "total_fail": 0, "pass": 0, "fail": 0, "internal": false}], "time": {"duration": "0:35:03.707210", "start": "Mon, 07 Jul 2014 12:32:58 -0700 (PST)", "finish": "Mon, 07 
Jul 2014 13:08:01 -0700 (PST)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:10", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/CommitQueueSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "CommitQueueSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:59", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:09", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:01:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": 
"SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:13:08", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:53", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:10:54", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": 
"0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:01", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:08", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:25", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:07:10", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": 
"x86-mario", "duration": "0:09:56", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/x86-mario%20nowithdebug-paladin/builds/2160/steps/CommitQueueCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:19", "name": "CommitQueueCompletion"}], "build-number": 2160, "child-configs": [], "bot-config": "x86-mario-nowithdebug-paladin", "builder-name": "x86-mario nowithdebug-paladin", "version": {"chrome": "38.0.2084.1", "platform": "6029.0.0-rc2", "full": "R38-6029.0.0-rc2", "milestone": "38"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/07/%(target)s-2014.07.07.120822.tar.xz", "cl_actions": [[{"gerrit_number": "206186", "patch_number": "2", "internal": false}, "picked_up", 1404761551, ""], [{"gerrit_number": "206187", "patch_number": "2", "internal": false}, "picked_up", 1404761551, ""], [{"gerrit_number": "206188", "patch_number": "2", "internal": false}, "picked_up", 1404761551, ""], [{"gerrit_number": "206593", "patch_number": "1", "internal": false}, "picked_up", 1404761551, ""], [{"gerrit_number": "206363", "patch_number": "2", "internal": false}, "picked_up", 1404761551, ""], [{"gerrit_number": "168185", "patch_number": "2", "internal": true}, "picked_up", 1404761551, ""], [{"gerrit_number": "167757", "patch_number": "1", "internal": true}, "picked_up", 1404761551, ""], [{"gerrit_number": "206504", "patch_number": "1", "internal": false}, "picked_up", 1404761551, ""], [{"gerrit_number": "206800", "patch_number": "2", "internal": false}, "picked_up", 1404761551, ""], [{"gerrit_number": "206546", "patch_number": "1", "internal": false}, "picked_up", 1404761551, ""], [{"gerrit_number": "205980", "patch_number": "5", "internal": false}, "picked_up", 1404761551, ""], [{"gerrit_number": "205561", "patch_number": "5", "internal": false}, "picked_up", 1404761551, ""], [{"gerrit_number": "205472", "patch_number": "1", "internal": 
false}, "picked_up", 1404761551, ""], [{"gerrit_number": "206792", "patch_number": "2", "internal": false}, "picked_up", 1404761551, ""]], "bot-hostname": "build114-m2.golo.chromium.org", "sdk-version": "2014.07.07.120822", "build_type": "paladin"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/README b/cidb/test_data/series_1/README
new file mode 100644
index 0000000..7d0d9f9
--- /dev/null
+++ b/cidb/test_data/series_1/README
@@ -0,0 +1,5 @@
+This test data is taken from master-canary build number 70, as well as the
+corresponding slave builds for which a metadata file was available at
+gs://chromeos-image-archive/[config]/[version]/metadata.json
+
+Note: A few slave builds were missing this file, so they are not represented.
diff --git a/cidb/test_data/series_1/beaglebone-release-group-metadata.json b/cidb/test_data/series_1/beaglebone-release-group-metadata.json
new file mode 100644
index 0000000..25ab874
--- /dev/null
+++ b/cidb/test_data/series_1/beaglebone-release-group-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 01:56:36 -0700 (PDT)"}, "board-metadata": {"beaglebone_servo": {"main-firmware-version": null, "ec-firmware-version": null}, "beaglebone": {"main-firmware-version": null, "ec-firmware-version": null}}, "boards": ["beaglebone"], "time": {"duration": "0:53:23.615099", "start": "Mon, 18 Aug 2014 01:03:13 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 01:56:36 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:58", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:29", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:15", "name": "Uprev"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/SetupBoard%20%5Bbeaglebone%5D/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:04:19", "name": "SetupBoard [beaglebone]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/SetupBoard%20%5Bbeaglebone_servo%5D/logs/stdio", "summary": "Stage was successful", "board": "beaglebone_servo", "duration": "0:00:46", "name": "SetupBoard [beaglebone_servo]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/BuildPackages%20%5Bbeaglebone%5D/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:17:31", "name": "BuildPackages [beaglebone]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/BuildImage%20%5Bbeaglebone%5D/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:53", "name": "BuildImage [beaglebone]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/VMTest%20%28attempt%201%29%20%5Bbeaglebone%5D/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", 
"name": "VMTest (attempt 1) [beaglebone]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/SignerTest%20%5Bbeaglebone%5D/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "SignerTest [beaglebone]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/Paygen%20%28attempt%201%29%20%5Bbeaglebone%5D/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "Paygen (attempt 1) [beaglebone]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/ImageTest%20%5Bbeaglebone%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "beaglebone", "duration": "0:03:31", "name": "ImageTest [beaglebone]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/UnitTest%20%5Bbeaglebone%5D/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UnitTest [beaglebone]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/UploadPrebuilts%20%5Bbeaglebone%5D/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "UploadPrebuilts [beaglebone]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/DevInstallerPrebuilts%20%5Bbeaglebone%5D/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone", "duration": "0:00:00", "name": "DevInstallerPrebuilts [beaglebone]"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/DebugSymbols%20%5Bbeaglebone%5D/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:18:56", "name": "DebugSymbols [beaglebone]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/CPEExport%20%5Bbeaglebone%5D/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:02:04", "name": "CPEExport [beaglebone]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/Archive%20%5Bbeaglebone%5D/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:03:43", "name": "Archive [beaglebone]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/UploadTestArtifacts%20%5Bbeaglebone%5D/logs/stdio", "summary": "Stage was successful", "board": "beaglebone", "duration": "0:01:27", "name": "UploadTestArtifacts [beaglebone]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/BuildPackages%20%5Bbeaglebone_servo%5D/logs/stdio", "summary": "Stage was successful", "board": "beaglebone_servo", "duration": "0:05:52", "name": "BuildPackages [beaglebone_servo]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/BuildImage%20%5Bbeaglebone_servo%5D/logs/stdio", "summary": "Stage was successful", "board": "beaglebone_servo", "duration": "0:02:34", "name": "BuildImage [beaglebone_servo]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/VMTest%20%28attempt%201%29%20%5Bbeaglebone_servo%5D/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone_servo", "duration": "0:00:00", "name": "VMTest (attempt 1) [beaglebone_servo]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/SignerTest%20%5Bbeaglebone_servo%5D/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone_servo", "duration": "0:00:00", "name": "SignerTest [beaglebone_servo]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/Paygen%20%28attempt%201%29%20%5Bbeaglebone_servo%5D/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone_servo", "duration": "0:00:00", "name": "Paygen (attempt 1) [beaglebone_servo]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/ImageTest%20%5Bbeaglebone_servo%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "beaglebone_servo", "duration": "0:03:33", "name": "ImageTest [beaglebone_servo]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/UnitTest%20%5Bbeaglebone_servo%5D/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone_servo", "duration": "0:00:00", "name": "UnitTest [beaglebone_servo]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/UploadPrebuilts%20%5Bbeaglebone_servo%5D/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone_servo", "duration": "0:00:00", "name": "UploadPrebuilts [beaglebone_servo]"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/DevInstallerPrebuilts%20%5Bbeaglebone_servo%5D/logs/stdio", "summary": "Stage was skipped", "board": "beaglebone_servo", "duration": "0:00:00", "name": "DevInstallerPrebuilts [beaglebone_servo]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/DebugSymbols%20%5Bbeaglebone_servo%5D/logs/stdio", "summary": "Stage was successful", "board": "beaglebone_servo", "duration": "0:16:48", "name": "DebugSymbols [beaglebone_servo]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/CPEExport%20%5Bbeaglebone_servo%5D/logs/stdio", "summary": "Stage was successful", "board": "beaglebone_servo", "duration": "0:02:46", "name": "CPEExport [beaglebone_servo]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/Archive%20%5Bbeaglebone_servo%5D/logs/stdio", "summary": "Stage was successful", "board": "beaglebone_servo", "duration": "0:04:00", "name": "Archive [beaglebone_servo]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/UploadTestArtifacts%20%5Bbeaglebone_servo%5D/logs/stdio", "summary": "Stage was successful", "board": "beaglebone_servo", "duration": "0:01:51", "name": "UploadTestArtifacts [beaglebone_servo]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102/steps/CanaryCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:37", "name": "CanaryCompletion"}], "build-number": 1102, "child-configs": [{"name": "beaglebone-release", 
"boards": ["beaglebone"]}, {"name": "beaglebone_servo-release", "boards": ["beaglebone_servo"]}], "bot-config": "beaglebone-release-group", "builder-name": "beaglebone canary", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build22-b2.chrome.corp.google.com", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/beltino-a-release-group-metadata.json b/cidb/test_data/series_1/beltino-a-release-group-metadata.json
new file mode 100644
index 0000000..00ee0e1
--- /dev/null
+++ b/cidb/test_data/series_1/beltino-a-release-group-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 06:07:22 -0700 (PDT)"}, "board-metadata": {"mccloud": {"main-firmware-version": "Google_Mccloud.5827.11.0", "ec-firmware-version": null}, "panther": {"main-firmware-version": "Google_Panther.4920.24.25", "ec-firmware-version": null}}, "boards": ["panther"], "time": {"duration": "5:00:25.664232", "start": "Mon, 18 Aug 2014 01:06:56 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 06:07:22 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:49", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:25", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:33", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:22", "name": "Uprev"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/SetupBoard%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:04:23", "name": "SetupBoard [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/SetupBoard%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was successful", "board": "mccloud", "duration": "0:04:21", "name": "SetupBoard [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/SyncChrome/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:09:40", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/BuildPackages%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "1:08:17", "name": "BuildPackages [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/BuildImage%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:08:09", "name": "BuildImage [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/ChromeSDK%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "1:26:34", "name": "ChromeSDK [panther]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/VMTest%20%28attempt%201%29%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:36:18", "name": "VMTest (attempt 1) [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/SignerTest%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:06:17", "name": "SignerTest [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/Paygen%20%28attempt%201%29%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "1:48:28", "name": "Paygen (attempt 1) [panther]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/ImageTest%20%5Bpanther%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "panther", "duration": "0:24:18", "name": "ImageTest [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/UnitTest%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:23:55", "name": "UnitTest [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/UploadPrebuilts%20%5Bpanther%5D/logs/stdio", "summary": "Stage was skipped", "board": "panther", "duration": "0:00:00", "name": "UploadPrebuilts [panther]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/DevInstallerPrebuilts%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:02:00", "name": "DevInstallerPrebuilts [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/DebugSymbols%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "1:17:29", "name": "DebugSymbols [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/CPEExport%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:13:19", "name": "CPEExport [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/Archive%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:33:49", "name": "Archive [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/UploadTestArtifacts%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:18:23", "name": "UploadTestArtifacts [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/HWTest%20%5Bbvt-inline%5D%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:26:06", "name": "HWTest [bvt-inline] [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/AUTest%20%5Bau%5D%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", 
"board": "panther", "duration": "0:15:10", "name": "AUTest [au] [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:00:03", "name": "ASyncHWTest [bvt-cq] [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bpanther%5D/logs/stdio", "summary": "Stage was successful", "board": "panther", "duration": "0:00:03", "name": "ASyncHWTest [bvt-perbuild] [panther]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/BuildPackages%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was successful", "board": "mccloud", "duration": "1:48:26", "name": "BuildPackages [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/BuildImage%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was successful", "board": "mccloud", "duration": "0:05:59", "name": "BuildImage [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/ChromeSDK%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was successful", "board": "mccloud", "duration": "0:08:52", "name": "ChromeSDK [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/VMTest%20%28attempt%201%29%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was skipped", "board": "mccloud", "duration": "0:00:00", "name": "VMTest (attempt 1) [mccloud]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/SignerTest%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was successful", "board": "mccloud", "duration": "0:02:55", "name": "SignerTest [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/Paygen%20%28attempt%201%29%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was successful", "board": "mccloud", "duration": "1:33:18", "name": "Paygen (attempt 1) [mccloud]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/ImageTest%20%5Bmccloud%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "mccloud", "duration": "0:11:33", "name": "ImageTest [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/UnitTest%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was skipped", "board": "mccloud", "duration": "0:00:00", "name": "UnitTest [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/UploadPrebuilts%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was skipped", "board": "mccloud", "duration": "0:00:00", "name": "UploadPrebuilts [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/DevInstallerPrebuilts%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was successful", "board": "mccloud", "duration": "0:01:20", "name": "DevInstallerPrebuilts [mccloud]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/DebugSymbols%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was successful", "board": "mccloud", "duration": "1:20:08", "name": "DebugSymbols [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/CPEExport%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was successful", "board": "mccloud", "duration": "0:06:07", "name": "CPEExport [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/Archive%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was successful", "board": "mccloud", "duration": "0:10:15", "name": "Archive [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/UploadTestArtifacts%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was successful", "board": "mccloud", "duration": "0:08:25", "name": "UploadTestArtifacts [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/HWTest%20%5Bbvt-inline%5D%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was successful", "board": "mccloud", "duration": "0:51:12", "name": "HWTest [bvt-inline] [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/AUTest%20%5Bau%5D%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was successful", "board": "mccloud", "duration": "0:22:05", "name": "AUTest [au] [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was successful", 
"board": "mccloud", "duration": "0:00:03", "name": "ASyncHWTest [bvt-cq] [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bmccloud%5D/logs/stdio", "summary": "Stage was successful", "board": "mccloud", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [mccloud]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237/steps/CanaryCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:56", "name": "CanaryCompletion"}], "build-number": 237, "child-configs": [{"name": "panther-release", "boards": ["panther"]}, {"name": "mccloud-release", "boards": ["mccloud"]}], "bot-config": "beltino-a-release-group", "builder-name": "beltino-a canary", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build18-b2.chrome.corp.google.com", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/beltino-b-release-group-metadata.json b/cidb/test_data/series_1/beltino-b-release-group-metadata.json
new file mode 100644
index 0000000..bf48e9a
--- /dev/null
+++ b/cidb/test_data/series_1/beltino-b-release-group-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 06:00:28 -0700 (PDT)"}, "board-metadata": {"tricky": {"main-firmware-version": "Google_Tricky.5829.12.0", "ec-firmware-version": null}, "zako": {"main-firmware-version": "Google_Zako.5219.27.0", "ec-firmware-version": null}, "monroe": {"main-firmware-version": "Google_Monroe.4921.17.0", "ec-firmware-version": null}}, "boards": ["monroe"], "time": {"duration": "4:52:43.084720", "start": "Mon, 18 Aug 2014 01:07:45 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 06:00:28 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:15", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:31", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:41", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:24", "name": "Uprev"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/SetupBoard%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:24", "name": "SetupBoard [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/SetupBoard%20%5Btricky%5D/logs/stdio", "summary": "Stage was successful", "board": "tricky", "duration": "0:00:47", "name": "SetupBoard [tricky]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/SetupBoard%20%5Bzako%5D/logs/stdio", "summary": "Stage was successful", "board": "zako", "duration": "0:00:47", "name": "SetupBoard [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/SyncChrome/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:10:08", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/BuildPackages%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "1:08:08", "name": "BuildPackages [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/BuildImage%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:10:56", "name": "BuildImage [monroe]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/ChromeSDK%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "1:12:12", "name": "ChromeSDK [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/VMTest%20%28attempt%201%29%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:38:02", "name": "VMTest (attempt 1) [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/SignerTest%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:08:04", "name": "SignerTest [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/Paygen%20%28attempt%201%29%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "2:28:12", "name": "Paygen (attempt 1) [monroe]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/ImageTest%20%5Bmonroe%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "monroe", "duration": "0:25:13", "name": "ImageTest [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/UnitTest%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:21:16", "name": "UnitTest [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/UploadPrebuilts%20%5Bmonroe%5D/logs/stdio", 
"summary": "Stage was skipped", "board": "monroe", "duration": "0:00:00", "name": "UploadPrebuilts [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/DevInstallerPrebuilts%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:04:05", "name": "DevInstallerPrebuilts [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/DebugSymbols%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "1:14:23", "name": "DebugSymbols [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/CPEExport%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:13:00", "name": "CPEExport [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/Archive%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:29:06", "name": "Archive [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/UploadTestArtifacts%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:18:13", "name": "UploadTestArtifacts [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/HWTest%20%5Bbvt-inline%5D%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:53:42", "name": "HWTest [bvt-inline] [monroe]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/AUTest%20%5Bau%5D%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:31:23", "name": "AUTest [au] [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:00:03", "name": "ASyncHWTest [bvt-cq] [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bmonroe%5D/logs/stdio", "summary": "Stage was successful", "board": "monroe", "duration": "0:00:03", "name": "ASyncHWTest [bvt-perbuild] [monroe]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/BuildPackages%20%5Btricky%5D/logs/stdio", "summary": "Stage was successful", "board": "tricky", "duration": "0:20:15", "name": "BuildPackages [tricky]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/BuildImage%20%5Btricky%5D/logs/stdio", "summary": "Stage was successful", "board": "tricky", "duration": "0:08:13", "name": "BuildImage [tricky]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/ChromeSDK%20%5Btricky%5D/logs/stdio", "summary": "Stage was successful", "board": "tricky", "duration": "0:17:13", "name": "ChromeSDK [tricky]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/VMTest%20%28attempt%201%29%20%5Btricky%5D/logs/stdio", "summary": "Stage was 
skipped", "board": "tricky", "duration": "0:00:00", "name": "VMTest (attempt 1) [tricky]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/SignerTest%20%5Btricky%5D/logs/stdio", "summary": "Stage was successful", "board": "tricky", "duration": "0:04:10", "name": "SignerTest [tricky]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/Paygen%20%28attempt%201%29%20%5Btricky%5D/logs/stdio", "summary": "Stage was successful", "board": "tricky", "duration": "2:53:21", "name": "Paygen (attempt 1) [tricky]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/ImageTest%20%5Btricky%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "tricky", "duration": "0:17:03", "name": "ImageTest [tricky]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/UnitTest%20%5Btricky%5D/logs/stdio", "summary": "Stage was skipped", "board": "tricky", "duration": "0:00:00", "name": "UnitTest [tricky]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/UploadPrebuilts%20%5Btricky%5D/logs/stdio", "summary": "Stage was skipped", "board": "tricky", "duration": "0:00:00", "name": "UploadPrebuilts [tricky]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/DevInstallerPrebuilts%20%5Btricky%5D/logs/stdio", "summary": "Stage was successful", "board": "tricky", "duration": "0:01:43", "name": "DevInstallerPrebuilts [tricky]"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/DebugSymbols%20%5Btricky%5D/logs/stdio", "summary": "Stage was successful", "board": "tricky", "duration": "1:10:55", "name": "DebugSymbols [tricky]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/CPEExport%20%5Btricky%5D/logs/stdio", "summary": "Stage was successful", "board": "tricky", "duration": "0:09:52", "name": "CPEExport [tricky]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/Archive%20%5Btricky%5D/logs/stdio", "summary": "Stage was successful", "board": "tricky", "duration": "0:25:34", "name": "Archive [tricky]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/UploadTestArtifacts%20%5Btricky%5D/logs/stdio", "summary": "Stage was successful", "board": "tricky", "duration": "0:19:17", "name": "UploadTestArtifacts [tricky]"}, {"status": "passed", "description": "** HWTest did not complete due to infrastructure issues (code 3) **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/HWTest%20%5Bbvt-inline%5D%20%5Btricky%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "tricky", "duration": "0:00:02", "name": "HWTest [bvt-inline] [tricky]"}, {"status": "passed", "description": "** HWTest did not complete due to infrastructure issues (code 3) **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/AUTest%20%5Bau%5D%20%5Btricky%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "tricky", "duration": "0:00:37", "name": "AUTest [au] [tricky]"}, {"status": "passed", "description": "** HWTest did not complete due to infrastructure issues (code 3) 
**", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Btricky%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "tricky", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [tricky]"}, {"status": "passed", "description": "** HWTest did not complete due to infrastructure issues (code 3) **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Btricky%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "tricky", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [tricky]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/BuildPackages%20%5Bzako%5D/logs/stdio", "summary": "Stage was successful", "board": "zako", "duration": "0:20:20", "name": "BuildPackages [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/BuildImage%20%5Bzako%5D/logs/stdio", "summary": "Stage was successful", "board": "zako", "duration": "0:09:57", "name": "BuildImage [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/ChromeSDK%20%5Bzako%5D/logs/stdio", "summary": "Stage was successful", "board": "zako", "duration": "0:14:51", "name": "ChromeSDK [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/VMTest%20%28attempt%201%29%20%5Bzako%5D/logs/stdio", "summary": "Stage was skipped", "board": "zako", "duration": "0:00:00", "name": "VMTest (attempt 1) [zako]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/SignerTest%20%5Bzako%5D/logs/stdio", "summary": "Stage was successful", "board": "zako", "duration": "0:06:32", "name": "SignerTest [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/Paygen%20%28attempt%201%29%20%5Bzako%5D/logs/stdio", "summary": "Stage was successful", "board": "zako", "duration": "1:28:39", "name": "Paygen (attempt 1) [zako]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/ImageTest%20%5Bzako%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "zako", "duration": "0:17:44", "name": "ImageTest [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/UnitTest%20%5Bzako%5D/logs/stdio", "summary": "Stage was skipped", "board": "zako", "duration": "0:00:00", "name": "UnitTest [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/UploadPrebuilts%20%5Bzako%5D/logs/stdio", "summary": "Stage was skipped", "board": "zako", "duration": "0:00:00", "name": "UploadPrebuilts [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/DevInstallerPrebuilts%20%5Bzako%5D/logs/stdio", "summary": "Stage was successful", "board": "zako", "duration": "0:02:03", "name": "DevInstallerPrebuilts [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/DebugSymbols%20%5Bzako%5D/logs/stdio", "summary": "Stage was successful", 
"board": "zako", "duration": "1:05:10", "name": "DebugSymbols [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/CPEExport%20%5Bzako%5D/logs/stdio", "summary": "Stage was successful", "board": "zako", "duration": "0:11:09", "name": "CPEExport [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/Archive%20%5Bzako%5D/logs/stdio", "summary": "Stage was successful", "board": "zako", "duration": "0:20:38", "name": "Archive [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/UploadTestArtifacts%20%5Bzako%5D/logs/stdio", "summary": "Stage was successful", "board": "zako", "duration": "0:14:27", "name": "UploadTestArtifacts [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/HWTest%20%5Bbvt-inline%5D%20%5Bzako%5D/logs/stdio", "summary": "Stage was successful", "board": "zako", "duration": "0:42:15", "name": "HWTest [bvt-inline] [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/AUTest%20%5Bau%5D%20%5Bzako%5D/logs/stdio", "summary": "Stage was successful", "board": "zako", "duration": "0:25:20", "name": "AUTest [au] [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bzako%5D/logs/stdio", "summary": "Stage was successful", "board": "zako", "duration": "0:00:03", "name": "ASyncHWTest [bvt-cq] [zako]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bzako%5D/logs/stdio", "summary": "Stage was successful", "board": "zako", "duration": "0:00:03", "name": "ASyncHWTest [bvt-perbuild] [zako]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239/steps/CanaryCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:20", "name": "CanaryCompletion"}], "build-number": 239, "child-configs": [{"name": "monroe-release", "boards": ["monroe"]}, {"name": "tricky-release", "boards": ["tricky"]}, {"name": "zako-release", "boards": ["zako"]}], "bot-config": "beltino-b-release-group", "builder-name": "beltino-b canary", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build19-b2.chrome.corp.google.com", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/canary-master-metadata.json b/cidb/test_data/series_1/canary-master-metadata.json
new file mode 100644
index 0000000..b6f68d5
--- /dev/null
+++ b/cidb/test_data/series_1/canary-master-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "failed", "summary": "", "current-time": "Mon, 18 Aug 2014 08:46:38 -0700 (PDT)"}, "board-metadata": {}, "slave_targets": {"rambi-c-release-group": {"status": "inflight", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-c%20canary/builds/340", "reason": "None"}, "stumpy_moblab-release": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679", "reason": "None"}, "beltino-b-release-group": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-b%20canary/builds/239", "reason": "None"}, "beltino-a-release-group": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beltino-a%20canary/builds/237", "reason": "None"}, "rambi-b-release-group": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451", "reason": "None"}, "daisy-release-group": {"status": "inflight", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/daisy%20canary/builds/3033", "reason": "None"}, "nyan-release-group": {"status": "fail", "message": "nyan-release-group: The BuildPackages [nyan_big] stage failed: Packages failed in ./build_packages: chromeos-base/chromeos-chrome The BuildPackages [nyan_blaze] stage failed: Packages failed in ./build_packages: chromeos-base/chromeos-chrome in https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888", "reason": "The BuildPackages [nyan_big] stage failed: Packages failed in ./build_packages: chromeos-base/chromeos-chrome The BuildPackages [nyan_blaze] stage failed: Packages failed in ./build_packages: 
chromeos-base/chromeos-chrome"}, "peach-release-group": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448", "reason": "None"}, "samus-release": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268", "reason": "None"}, "slippy-release-group": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769", "reason": "None"}, "rambi-a-release-group": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449", "reason": "None"}, "pineview-release-group": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449", "reason": "None"}, "storm-release": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182", "reason": "None"}, "duck-release": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683", "reason": "None"}, "link-release": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973", "reason": "None"}, "nyan_blaze-release": {"status": "fail", "message": "nyan-release-group: The BuildPackages [nyan_big] stage failed: Packages failed in ./build_packages: chromeos-base/chromeos-chrome The BuildPackages [nyan_blaze] stage failed: Packages failed in ./build_packages: chromeos-base/chromeos-chrome in https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888", "reason": "The BuildPackages 
[nyan_big] stage failed: Packages failed in ./build_packages: chromeos-base/chromeos-chrome The BuildPackages [nyan_blaze] stage failed: Packages failed in ./build_packages: chromeos-base/chromeos-chrome"}, "sandybridge-release-group": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495", "reason": "None"}, "ivybridge-release-group": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505", "reason": "None"}, "link_freon-release": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182", "reason": "None"}, "beaglebone-release-group": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/beaglebone%20canary/builds/1102", "reason": "None"}, "peach_pi-release": {"status": "pass", "message": "None", "dashboard_url": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448", "reason": "None"}}, "boards": [], "metadata-version": "2", "child-configs": [], "build-number": 70, "bot-hostname": "build117-m2.golo.chromium.org", "bot-config": "master-release", "cl_actions": [], "builder-name": "Canary master", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "time": {"duration": "7:45:06.726478", "start": "Mon, 18 Aug 2014 01:01:31 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 08:46:38 -0700 (PDT)"}, "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/Canary%20master/builds/70/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:04", "name": "CleanUp"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/Canary%20master/builds/70/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:17", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/Canary%20master/builds/70/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/Canary%20master/builds/70/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:22", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/Canary%20master/builds/70/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:13", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/Canary%20master/builds/70/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/Canary%20master/builds/70/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "failed", "description": "", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/Canary%20master/builds/70/steps/CanaryCompletion/logs/stdio", "summary": "", "board": "", "duration": "7:40:19", "name": "CanaryCompletion"}], "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/duck-release-metadata.json b/cidb/test_data/series_1/duck-release-metadata.json
new file mode 100644
index 0000000..70490ff
--- /dev/null
+++ b/cidb/test_data/series_1/duck-release-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 02:04:36 -0700 (PDT)"}, "board-metadata": {"duck": {"main-firmware-version": null, "ec-firmware-version": null}}, "boards": ["duck"], "time": {"duration": "1:01:38.335482", "start": "Mon, 18 Aug 2014 01:02:58 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 02:04:36 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:08", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:50", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:27", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:14", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:04:19", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:16:53", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:20", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:31:30", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/ImageTest/logs/stdio", "summary": "Stage failed but was optional", "board": "duck", "duration": "0:06:53", "name": "ImageTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:05:25", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "duck", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:31:53", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:03:02", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "duck", "duration": "0:12:54", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was 
successful", "board": "duck", "duration": "0:02:54", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/duck%20canary/builds/683/steps/CanaryCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CanaryCompletion"}], "build-number": 683, "child-configs": [], "bot-config": "duck-release", "builder-name": "duck canary", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build23-b2.chrome.corp.google.com", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/ivybridge-release-group-metadata.json b/cidb/test_data/series_1/ivybridge-release-group-metadata.json
new file mode 100644
index 0000000..6ce14d2
--- /dev/null
+++ b/cidb/test_data/series_1/ivybridge-release-group-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 04:24:23 -0700 (PDT)"}, "board-metadata": {"parrot_ivb": {"main-firmware-version": "Google_Parrot.2685.54.0", "ec-firmware-version": "00BE107A00"}, "stout": {"main-firmware-version": "Google_Stout.2817.52.0", "ec-firmware-version": "3.08"}}, "boards": ["stout"], "time": {"duration": "3:18:58.134266", "start": "Mon, 18 Aug 2014 01:05:25 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 04:24:23 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:53", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:14", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:34", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:21", "name": "Uprev"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/SetupBoard%20%5Bstout%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:04:21", "name": "SetupBoard [stout]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/SetupBoard%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot_ivb", "duration": "0:00:46", "name": "SetupBoard [parrot_ivb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/SyncChrome/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:21", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/BuildPackages%20%5Bstout%5D%20%5Bafdo_use%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "1:12:40", "name": "BuildPackages [stout] [afdo_use]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/BuildImage%20%5Bstout%5D%20%5Bafdo_use%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:08:07", "name": "BuildImage [stout] [afdo_use]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/ChromeSDK%20%5Bstout%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:56:46", "name": "ChromeSDK [stout]"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/VMTest%20%28attempt%201%29%20%5Bstout%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:31:12", "name": "VMTest (attempt 1) [stout]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/SignerTest%20%5Bstout%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:06:50", "name": "SignerTest [stout]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/Paygen%20%28attempt%201%29%20%5Bstout%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "1:40:41", "name": "Paygen (attempt 1) [stout]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/ImageTest%20%5Bstout%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "stout", "duration": "0:18:28", "name": "ImageTest [stout]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/UnitTest%20%5Bstout%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:15:56", "name": "UnitTest [stout]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/UploadPrebuilts%20%5Bstout%5D/logs/stdio", "summary": "Stage was skipped", "board": "stout", "duration": "0:00:00", "name": "UploadPrebuilts [stout]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/DevInstallerPrebuilts%20%5Bstout%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:01:48", "name": "DevInstallerPrebuilts [stout]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/DebugSymbols%20%5Bstout%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "1:14:35", "name": "DebugSymbols [stout]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/CPEExport%20%5Bstout%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:15:34", "name": "CPEExport [stout]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/Archive%20%5Bstout%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:21:34", "name": "Archive [stout]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/UploadTestArtifacts%20%5Bstout%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:14:39", "name": "UploadTestArtifacts [stout]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/HWTest%20%5Bbvt-inline%5D%20%5Bstout%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:34:13", "name": "HWTest [bvt-inline] [stout]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/AUTest%20%5Bau%5D%20%5Bstout%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": 
"0:18:36", "name": "AUTest [au] [stout]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bstout%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [stout]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bstout%5D/logs/stdio", "summary": "Stage was successful", "board": "stout", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [stout]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/BuildPackages%20%5Bparrot_ivb%5D%20%5Bafdo_use%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot_ivb", "duration": "0:13:41", "name": "BuildPackages [parrot_ivb] [afdo_use]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/BuildImage%20%5Bparrot_ivb%5D%20%5Bafdo_use%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot_ivb", "duration": "0:08:39", "name": "BuildImage [parrot_ivb] [afdo_use]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/VMTest%20%28attempt%201%29%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage was skipped", "board": "parrot_ivb", "duration": "0:00:00", "name": "VMTest (attempt 1) [parrot_ivb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/SignerTest%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot_ivb", "duration": "0:02:39", "name": "SignerTest [parrot_ivb]"}, {"status": "passed", 
"description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/Paygen%20%28attempt%201%29%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot_ivb", "duration": "1:23:27", "name": "Paygen (attempt 1) [parrot_ivb]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/ImageTest%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "parrot_ivb", "duration": "0:11:21", "name": "ImageTest [parrot_ivb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/UnitTest%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage was skipped", "board": "parrot_ivb", "duration": "0:00:00", "name": "UnitTest [parrot_ivb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/UploadPrebuilts%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage was skipped", "board": "parrot_ivb", "duration": "0:00:00", "name": "UploadPrebuilts [parrot_ivb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/DevInstallerPrebuilts%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot_ivb", "duration": "0:01:11", "name": "DevInstallerPrebuilts [parrot_ivb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/DebugSymbols%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot_ivb", "duration": "1:03:17", "name": "DebugSymbols [parrot_ivb]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/CPEExport%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot_ivb", "duration": "0:11:57", "name": "CPEExport [parrot_ivb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/Archive%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot_ivb", "duration": "0:11:36", "name": "Archive [parrot_ivb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/UploadTestArtifacts%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot_ivb", "duration": "0:09:42", "name": "UploadTestArtifacts [parrot_ivb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/HWTest%20%5Bbvt-inline%5D%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot_ivb", "duration": "0:46:45", "name": "HWTest [bvt-inline] [parrot_ivb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/AUTest%20%5Bau%5D%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot_ivb", "duration": "0:25:18", "name": "AUTest [au] [parrot_ivb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot_ivb", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [parrot_ivb]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bparrot_ivb%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot_ivb", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [parrot_ivb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/ivybridge%20canary/builds/505/steps/CanaryCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:53", "name": "CanaryCompletion"}], "build-number": 505, "child-configs": [{"name": "stout-release", "boards": ["stout"]}, {"name": "parrot_ivb-release", "boards": ["parrot_ivb"]}], "bot-config": "ivybridge-release-group", "builder-name": "ivybridge canary", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build116-m2.golo.chromium.org", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/link-release-metadata.json b/cidb/test_data/series_1/link-release-metadata.json
new file mode 100644
index 0000000..1b5f27c
--- /dev/null
+++ b/cidb/test_data/series_1/link-release-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 04:11:51 -0700 (PDT)"}, "board-metadata": {"link": {"main-firmware-version": "Google_Link.2695.1.155", "ec-firmware-version": "link_v1.2.138-e4a9915"}}, "boards": ["link"], "time": {"duration": "3:07:13.235269", "start": "Mon, 18 Aug 2014 01:04:38 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 04:11:51 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:19", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:25", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:04:18", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:26", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:04:29", "name": "SetupBoard"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/SyncChrome/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:09:45", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:08:18", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:06:32", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:51:09", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:31:36", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/SignerTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:05:22", "name": "SignerTest"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:31:42", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/ImageTest/logs/stdio", "summary": "Stage failed but was optional", "board": "link", "duration": "0:23:41", "name": "ImageTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:16:23", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:01:39", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "1:16:18", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:11:11", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:23:54", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:14:57", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/HWTest%20%5Bbvt-inline%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:35:29", "name": "HWTest [bvt-inline]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/AUTest%20%5Bau%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:16:38", "name": "AUTest [au]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/ASyncHWTest%20%5Bbvt-cq%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/ASyncHWTest%20%5Bbvt-perbuild%5D/logs/stdio", "summary": "Stage was successful", "board": "link", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link%20canary/builds/3973/steps/CanaryCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:31", "name": "CanaryCompletion"}], "build-number": 3973, "child-configs": [], "bot-config": "link-release", "builder-name": "link canary", 
"version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build20-b2.chrome.corp.google.com", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/link_freon-release-metadata.json b/cidb/test_data/series_1/link_freon-release-metadata.json
new file mode 100644
index 0000000..a6a6551
--- /dev/null
+++ b/cidb/test_data/series_1/link_freon-release-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 03:41:35 -0700 (PDT)"}, "board-metadata": {"link_freon": {"main-firmware-version": "Google_Link.2695.1.155", "ec-firmware-version": "link_v1.2.138-e4a9915"}}, "boards": ["link_freon"], "time": {"duration": "2:37:24.692685", "start": "Mon, 18 Aug 2014 01:04:10 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 03:41:35 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:20", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:05", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:04:22", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:17", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "link_freon", 
"duration": "0:04:17", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/SyncChrome/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:58", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "link_freon", "duration": "1:07:08", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "link_freon", "duration": "0:05:49", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "link_freon", "duration": "0:45:32", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link_freon", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/SignerTest/logs/stdio", "summary": "Stage was successful", "board": "link_freon", "duration": "0:10:43", "name": "SignerTest"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "link_freon", "duration": "0:00:00", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/ImageTest/logs/stdio", "summary": "Stage failed but was optional", "board": "link_freon", "duration": "0:17:36", "name": "ImageTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "link_freon", "duration": "0:14:53", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "link_freon", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "link_freon", "duration": "0:01:28", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "link_freon", "duration": "1:07:01", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "link_freon", "duration": "0:09:48", "name": "CPEExport"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "link_freon", "duration": "0:27:58", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "link_freon", "duration": "0:12:40", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/link_freon%20canary/builds/182/steps/CanaryCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:22", "name": "CanaryCompletion"}], "build-number": 182, "child-configs": [], "bot-config": "link_freon-release", "builder-name": "link_freon canary", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu", "arm-none-eabi"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build25-b2.chrome.corp.google.com", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/nyan-release-group-metadata.json b/cidb/test_data/series_1/nyan-release-group-metadata.json
new file mode 100644
index 0000000..913bce8
--- /dev/null
+++ b/cidb/test_data/series_1/nyan-release-group-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "failed", "summary": "", "current-time": "Mon, 18 Aug 2014 03:53:45 -0700 (PDT)"}, "board-metadata": {"nyan": {"main-firmware-version": "Google_Nyan.5771.10.0", "ec-firmware-version": "nyan_v1.1.1782-23f1337"}}, "boards": ["nyan"], "time": {"duration": "2:47:28.591827", "start": "Mon, 18 Aug 2014 01:06:16 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 03:53:45 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:02:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:26", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:29", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:24", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/SetupBoard%20%5Bnyan%5D/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:04:22", "name": "SetupBoard 
[nyan]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/SetupBoard%20%5Bnyan_big%5D/logs/stdio", "summary": "Stage was successful", "board": "nyan_big", "duration": "0:01:35", "name": "SetupBoard [nyan_big]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/SetupBoard%20%5Bnyan_blaze%5D/logs/stdio", "summary": "Stage was successful", "board": "nyan_blaze", "duration": "0:00:46", "name": "SetupBoard [nyan_blaze]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/SyncChrome/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:09:37", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/BuildPackages%20%5Bnyan%5D/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "1:05:48", "name": "BuildPackages [nyan]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/BuildImage%20%5Bnyan%5D/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:13:08", "name": "BuildImage [nyan]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/ChromeSDK%20%5Bnyan%5D/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:38:26", "name": "ChromeSDK [nyan]"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/VMTest%20%28attempt%201%29%20%5Bnyan%5D/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "VMTest (attempt 1) [nyan]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/SignerTest%20%5Bnyan%5D/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:03:03", "name": "SignerTest [nyan]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/Paygen%20%28attempt%201%29%20%5Bnyan%5D/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "1:06:10", "name": "Paygen (attempt 1) [nyan]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/ImageTest%20%5Bnyan%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "nyan", "duration": "0:12:03", "name": "ImageTest [nyan]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/UnitTest%20%5Bnyan%5D/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UnitTest [nyan]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/UploadPrebuilts%20%5Bnyan%5D/logs/stdio", "summary": "Stage was skipped", "board": "nyan", "duration": "0:00:00", "name": "UploadPrebuilts [nyan]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/DevInstallerPrebuilts%20%5Bnyan%5D/logs/stdio", "summary": "Stage was successful", "board": "nyan", 
"duration": "0:01:38", "name": "DevInstallerPrebuilts [nyan]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/DebugSymbols%20%5Bnyan%5D/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:55:33", "name": "DebugSymbols [nyan]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/CPEExport%20%5Bnyan%5D/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:06:42", "name": "CPEExport [nyan]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/Archive%20%5Bnyan%5D/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:17:42", "name": "Archive [nyan]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/UploadTestArtifacts%20%5Bnyan%5D/logs/stdio", "summary": "Stage was successful", "board": "nyan", "duration": "0:11:45", "name": "UploadTestArtifacts [nyan]"}, {"status": "passed", "description": "** HWTest did not complete due to infrastructure issues (code 3) **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/HWTest%20%5Bbvt-inline%5D%20%5Bnyan%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "nyan", "duration": "0:00:02", "name": "HWTest [bvt-inline] [nyan]"}, {"status": "passed", "description": "** HWTest did not complete due to infrastructure issues (code 3) **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/AUTest%20%5Bau%5D%20%5Bnyan%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "nyan", "duration": "0:00:19", "name": "AUTest [au] [nyan]"}, {"status": "passed", "description": "** HWTest did not 
complete due to infrastructure issues (code 3) **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bnyan%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "nyan", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [nyan]"}, {"status": "passed", "description": "** HWTest did not complete due to infrastructure issues (code 3) **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bnyan%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "nyan", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [nyan]"}, {"status": "failed", "description": "Packages failed in ./build_packages: chromeos-base/chromeos-chrome", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/BuildPackages%20%5Bnyan_big%5D/logs/stdio", "summary": "Packages failed in ./build_packages: chromeos-base/chromeos-chrome", "board": "nyan_big", "duration": "0:19:49", "name": "BuildPackages [nyan_big]"}, {"status": "failed", "description": "Packages failed in ./build_packages: chromeos-base/chromeos-chrome", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/BuildPackages%20%5Bnyan_blaze%5D/logs/stdio", "summary": "Packages failed in ./build_packages: chromeos-base/chromeos-chrome", "board": "nyan_blaze", "duration": "0:19:51", "name": "BuildPackages [nyan_blaze]"}, {"status": "passed", "description": "", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/nyan%20canary/builds/888/steps/CanaryCompletion/logs/stdio", "summary": "Stage failed but was optional", "board": "", "duration": "0:00:57", "name": "CanaryCompletion"}], "build-number": 888, "child-configs": [{"name": "nyan-release", "boards": ["nyan"]}, {"name": "nyan_big-release", "boards": ["nyan_big"]}, {"name": "nyan_blaze-release", "boards": 
["nyan_blaze"]}], "bot-config": "nyan-release-group", "builder-name": "nyan canary", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build29-b2.chrome.corp.google.com", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/peach-release-group-metadata.json b/cidb/test_data/series_1/peach-release-group-metadata.json
new file mode 100644
index 0000000..804d783
--- /dev/null
+++ b/cidb/test_data/series_1/peach-release-group-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 05:18:06 -0700 (PDT)"}, "board-metadata": {"peach_pit": {"main-firmware-version": "Google_Peach_Pit.4482.95.0", "ec-firmware-version": "pit_v1.1.1198-1cd618e"}, "peach_pi": {"main-firmware-version": "Google_Peach_Pi.4482.94.0", "ec-firmware-version": "pit_v1.1.1196-658a380"}}, "boards": ["peach_pit"], "time": {"duration": "4:12:01.992325", "start": "Mon, 18 Aug 2014 01:06:04 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 05:18:06 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:45", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:27", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:30", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:22", "name": "Uprev"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/SetupBoard%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:04:21", "name": "SetupBoard [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/SetupBoard%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pi", "duration": "0:01:35", "name": "SetupBoard [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/SyncChrome/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:09:55", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/BuildPackages%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "1:06:07", "name": "BuildPackages [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/BuildImage%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:10:12", "name": "BuildImage [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/ChromeSDK%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "1:03:55", "name": "ChromeSDK [peach_pit]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/VMTest%20%28attempt%201%29%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "VMTest (attempt 1) [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/SignerTest%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:05:23", "name": "SignerTest [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/Paygen%20%28attempt%201%29%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "1:36:08", "name": "Paygen (attempt 1) [peach_pit]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/ImageTest%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "peach_pit", "duration": "0:17:46", "name": "ImageTest [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/UnitTest%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UnitTest [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/UploadPrebuilts%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was skipped", "board": "peach_pit", "duration": "0:00:00", "name": "UploadPrebuilts [peach_pit]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/DevInstallerPrebuilts%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:01:38", "name": "DevInstallerPrebuilts [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/DebugSymbols%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:54:55", "name": "DebugSymbols [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/CPEExport%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:10:41", "name": "CPEExport [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/Archive%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:22:15", "name": "Archive [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/UploadTestArtifacts%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:15:59", "name": "UploadTestArtifacts [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/HWTest%20%5Bbvt-inline%5D%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:38:19", "name": "HWTest [bvt-inline] [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/AUTest%20%5Bau%5D%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was 
successful", "board": "peach_pit", "duration": "0:24:03", "name": "AUTest [au] [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bpeach_pit%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pit", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [peach_pit]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/BuildPackages%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pi", "duration": "1:10:14", "name": "BuildPackages [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/BuildImage%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pi", "duration": "0:08:41", "name": "BuildImage [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/ChromeSDK%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pi", "duration": "0:10:18", "name": "ChromeSDK [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/VMTest%20%28attempt%201%29%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was skipped", "board": "peach_pi", "duration": "0:00:00", "name": "VMTest (attempt 1) [peach_pi]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/SignerTest%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pi", "duration": "0:02:42", "name": "SignerTest [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/Paygen%20%28attempt%201%29%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pi", "duration": "1:10:47", "name": "Paygen (attempt 1) [peach_pi]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/ImageTest%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "peach_pi", "duration": "0:11:29", "name": "ImageTest [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/UnitTest%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was skipped", "board": "peach_pi", "duration": "0:00:00", "name": "UnitTest [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/UploadPrebuilts%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was skipped", "board": "peach_pi", "duration": "0:00:00", "name": "UploadPrebuilts [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/DevInstallerPrebuilts%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pi", "duration": "0:01:19", "name": "DevInstallerPrebuilts [peach_pi]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/DebugSymbols%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pi", "duration": "0:43:03", "name": "DebugSymbols [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/CPEExport%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pi", "duration": "0:05:49", "name": "CPEExport [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/Archive%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pi", "duration": "0:15:33", "name": "Archive [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/UploadTestArtifacts%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pi", "duration": "0:08:26", "name": "UploadTestArtifacts [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/HWTest%20%5Bbvt-inline%5D%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pi", "duration": "0:43:30", "name": "HWTest [bvt-inline] [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/AUTest%20%5Bau%5D%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pi", "duration": "0:33:18", "name": "AUTest [au] [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was successful", "board": 
"peach_pi", "duration": "0:00:03", "name": "ASyncHWTest [bvt-cq] [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bpeach_pi%5D/logs/stdio", "summary": "Stage was successful", "board": "peach_pi", "duration": "0:00:03", "name": "ASyncHWTest [bvt-perbuild] [peach_pi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/peach%20canary/builds/448/steps/CanaryCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:51", "name": "CanaryCompletion"}], "build-number": 448, "child-configs": [{"name": "peach_pit-release", "boards": ["peach_pit"]}, {"name": "peach_pi-release", "boards": ["peach_pi"]}], "bot-config": "peach-release-group", "builder-name": "peach canary", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build16-b2.chrome.corp.google.com", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/pineview-release-group-metadata.json b/cidb/test_data/series_1/pineview-release-group-metadata.json
new file mode 100644
index 0000000..0849fd5
--- /dev/null
+++ b/cidb/test_data/series_1/pineview-release-group-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 05:54:50 -0700 (PDT)"}, "board-metadata": {"x86-mario": {"main-firmware-version": null, "ec-firmware-version": "0222"}, "x86-zgb_he": {"main-firmware-version": "ZGB.03.61.0740.0062G7.010", "ec-firmware-version": "1.00"}, "x86-zgb": {"main-firmware-version": "ZGB.03.61.0740.0062G7.010", "ec-firmware-version": "1.00"}, "x86-alex_he": {"main-firmware-version": "Alex.03.61.0735.0056G3.0021", "ec-firmware-version": "01VFA617 "}, "x86-alex": {"main-firmware-version": "Alex.03.61.0735.0056G3.0021", "ec-firmware-version": "01VFA617 "}}, "boards": ["x86-mario"], "time": {"duration": "4:45:05.888347", "start": "Mon, 18 Aug 2014 01:09:45 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 05:54:50 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:04:49", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:53", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:27", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:22", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/SetupBoard%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:04:16", "name": "SetupBoard [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/SetupBoard%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:00:45", "name": "SetupBoard [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/SetupBoard%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb", "duration": "0:00:46", "name": "SetupBoard [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/SetupBoard%20%5Bx86-alex_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex_he", "duration": "0:00:47", "name": "SetupBoard [x86-alex_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/SetupBoard%20%5Bx86-zgb_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb_he", "duration": "0:00:47", "name": "SetupBoard [x86-zgb_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/SyncChrome/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:32", "name": "SyncChrome"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/BuildPackages%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "1:08:36", "name": "BuildPackages [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/BuildImage%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:17:09", "name": "BuildImage [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ChromeSDK%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "1:18:49", "name": "ChromeSDK [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/VMTest%20%28attempt%201%29%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:41:35", "name": "VMTest (attempt 1) [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/SignerTest%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:18:27", "name": "SignerTest [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/Paygen%20%28attempt%201%29%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", 
"duration": "2:21:38", "name": "Paygen (attempt 1) [x86-mario]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ImageTest%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "x86-mario", "duration": "0:26:56", "name": "ImageTest [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/UnitTest%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:24:27", "name": "UnitTest [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/UploadPrebuilts%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was skipped", "board": "x86-mario", "duration": "0:00:00", "name": "UploadPrebuilts [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/DevInstallerPrebuilts%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:02:34", "name": "DevInstallerPrebuilts [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/DebugSymbols%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "1:22:23", "name": "DebugSymbols [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/CPEExport%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:11:34", "name": "CPEExport [x86-mario]"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/Archive%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:33:22", "name": "Archive [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/UploadTestArtifacts%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:16:50", "name": "UploadTestArtifacts [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/HWTest%20%5Bbvt-inline%5D%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:48:21", "name": "HWTest [bvt-inline] [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/AUTest%20%5Bau%5D%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:30:24", "name": "AUTest [au] [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [x86-mario]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bx86-mario%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-mario", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [x86-mario]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/BuildPackages%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:28:54", "name": "BuildPackages [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/BuildImage%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:10:52", "name": "BuildImage [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ChromeSDK%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:16:59", "name": "ChromeSDK [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/VMTest%20%28attempt%201%29%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "VMTest (attempt 1) [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/SignerTest%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:05:48", "name": "SignerTest [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/Paygen%20%28attempt%201%29%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "2:18:12", "name": "Paygen (attempt 1) [x86-alex]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ImageTest%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "x86-alex", "duration": "0:18:10", "name": "ImageTest [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/UnitTest%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "UnitTest [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/UploadPrebuilts%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex", "duration": "0:00:00", "name": "UploadPrebuilts [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/DevInstallerPrebuilts%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:01:31", "name": "DevInstallerPrebuilts [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/DebugSymbols%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:17:37", "name": "DebugSymbols [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/CPEExport%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:11:13", "name": "CPEExport [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/Archive%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", 
"duration": "0:22:11", "name": "Archive [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/UploadTestArtifacts%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:19:16", "name": "UploadTestArtifacts [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/HWTest%20%5Bbvt-inline%5D%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "1:11:56", "name": "HWTest [bvt-inline] [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/AUTest%20%5Bau%5D%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:35:41", "name": "AUTest [au] [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bx86-alex%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [x86-alex]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/BuildPackages%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb", "duration": "0:28:55", "name": "BuildPackages [x86-zgb]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/BuildImage%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb", "duration": "0:12:41", "name": "BuildImage [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ChromeSDK%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb", "duration": "0:15:46", "name": "ChromeSDK [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/VMTest%20%28attempt%201%29%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was skipped", "board": "x86-zgb", "duration": "0:00:00", "name": "VMTest (attempt 1) [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/SignerTest%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb", "duration": "0:06:43", "name": "SignerTest [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/Paygen%20%28attempt%201%29%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb", "duration": "1:27:25", "name": "Paygen (attempt 1) [x86-zgb]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ImageTest%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "x86-zgb", "duration": "0:18:00", "name": "ImageTest [x86-zgb]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/UnitTest%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was skipped", "board": "x86-zgb", "duration": "0:00:00", "name": "UnitTest [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/UploadPrebuilts%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was skipped", "board": "x86-zgb", "duration": "0:00:00", "name": "UploadPrebuilts [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/DevInstallerPrebuilts%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb", "duration": "0:01:37", "name": "DevInstallerPrebuilts [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/DebugSymbols%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb", "duration": "1:10:07", "name": "DebugSymbols [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/CPEExport%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb", "duration": "0:12:19", "name": "CPEExport [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/Archive%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb", "duration": "0:19:35", "name": "Archive [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/UploadTestArtifacts%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb", "duration": "0:17:29", 
"name": "UploadTestArtifacts [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/HWTest%20%5Bbvt-inline%5D%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb", "duration": "0:55:32", "name": "HWTest [bvt-inline] [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/AUTest%20%5Bau%5D%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb", "duration": "0:28:41", "name": "AUTest [au] [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bx86-zgb%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [x86-zgb]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/BuildPackages%20%5Bx86-alex_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex_he", "duration": "0:28:41", "name": "BuildPackages [x86-alex_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/BuildImage%20%5Bx86-alex_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex_he", "duration": "0:09:53", "name": "BuildImage [x86-alex_he]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/VMTest%20%28attempt%201%29%20%5Bx86-alex_he%5D/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex_he", "duration": "0:00:00", "name": "VMTest (attempt 1) [x86-alex_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/SignerTest%20%5Bx86-alex_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex_he", "duration": "0:05:00", "name": "SignerTest [x86-alex_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/Paygen%20%28attempt%201%29%20%5Bx86-alex_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex_he", "duration": "0:59:15", "name": "Paygen (attempt 1) [x86-alex_he]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ImageTest%20%5Bx86-alex_he%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "x86-alex_he", "duration": "0:12:45", "name": "ImageTest [x86-alex_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/UnitTest%20%5Bx86-alex_he%5D/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex_he", "duration": "0:00:00", "name": "UnitTest [x86-alex_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/UploadPrebuilts%20%5Bx86-alex_he%5D/logs/stdio", "summary": "Stage was skipped", "board": "x86-alex_he", "duration": "0:00:00", "name": "UploadPrebuilts [x86-alex_he]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/DevInstallerPrebuilts%20%5Bx86-alex_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex_he", "duration": "0:01:34", "name": "DevInstallerPrebuilts [x86-alex_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/DebugSymbols%20%5Bx86-alex_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex_he", "duration": "1:12:44", "name": "DebugSymbols [x86-alex_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/CPEExport%20%5Bx86-alex_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex_he", "duration": "0:10:24", "name": "CPEExport [x86-alex_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/Archive%20%5Bx86-alex_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex_he", "duration": "0:18:37", "name": "Archive [x86-alex_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/UploadTestArtifacts%20%5Bx86-alex_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-alex_he", "duration": "0:00:01", "name": "UploadTestArtifacts [x86-alex_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/BuildPackages%20%5Bx86-zgb_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb_he", "duration": "0:28:40", "name": "BuildPackages [x86-zgb_he]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/BuildImage%20%5Bx86-zgb_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb_he", "duration": "0:08:51", "name": "BuildImage [x86-zgb_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/VMTest%20%28attempt%201%29%20%5Bx86-zgb_he%5D/logs/stdio", "summary": "Stage was skipped", "board": "x86-zgb_he", "duration": "0:00:00", "name": "VMTest (attempt 1) [x86-zgb_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/SignerTest%20%5Bx86-zgb_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb_he", "duration": "0:03:11", "name": "SignerTest [x86-zgb_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/Paygen%20%28attempt%201%29%20%5Bx86-zgb_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb_he", "duration": "0:59:30", "name": "Paygen (attempt 1) [x86-zgb_he]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/ImageTest%20%5Bx86-zgb_he%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "x86-zgb_he", "duration": "0:11:59", "name": "ImageTest [x86-zgb_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/UnitTest%20%5Bx86-zgb_he%5D/logs/stdio", "summary": "Stage was skipped", "board": "x86-zgb_he", "duration": "0:00:00", "name": "UnitTest [x86-zgb_he]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/UploadPrebuilts%20%5Bx86-zgb_he%5D/logs/stdio", "summary": "Stage was skipped", "board": "x86-zgb_he", "duration": "0:00:00", "name": "UploadPrebuilts [x86-zgb_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/DevInstallerPrebuilts%20%5Bx86-zgb_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb_he", "duration": "0:01:23", "name": "DevInstallerPrebuilts [x86-zgb_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/DebugSymbols%20%5Bx86-zgb_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb_he", "duration": "1:11:08", "name": "DebugSymbols [x86-zgb_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/CPEExport%20%5Bx86-zgb_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb_he", "duration": "0:08:41", "name": "CPEExport [x86-zgb_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/Archive%20%5Bx86-zgb_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb_he", "duration": "0:15:44", "name": "Archive [x86-zgb_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/UploadTestArtifacts%20%5Bx86-zgb_he%5D/logs/stdio", "summary": "Stage was successful", "board": "x86-zgb_he", "duration": "0:00:03", "name": "UploadTestArtifacts [x86-zgb_he]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/pineview%20canary/builds/449/steps/CanaryCompletion/logs/stdio", "summary": "Stage was 
successful", "board": "", "duration": "0:01:07", "name": "CanaryCompletion"}], "build-number": 449, "child-configs": [{"name": "x86-mario-release", "boards": ["x86-mario"]}, {"name": "x86-alex-release", "boards": ["x86-alex"]}, {"name": "x86-zgb-release", "boards": ["x86-zgb"]}, {"name": "x86-alex_he-release", "boards": ["x86-alex_he"]}, {"name": "x86-zgb_he-release", "boards": ["x86-zgb_he"]}], "bot-config": "pineview-release-group", "builder-name": "pineview canary", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["i686-pc-linux-gnu"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build106-m2.golo.chromium.org", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/rambi-a-release-group-metadata.json b/cidb/test_data/series_1/rambi-a-release-group-metadata.json
new file mode 100644
index 0000000..ac2e38f
--- /dev/null
+++ b/cidb/test_data/series_1/rambi-a-release-group-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 05:34:41 -0700 (PDT)"}, "board-metadata": {"clapper": {"main-firmware-version": "Google_Clapper.5216.199.5", "ec-firmware-version": "clapper_v1.6.171-929cdb9"}, "rambi": {"main-firmware-version": "Google_Rambi.5216.239.0", "ec-firmware-version": "rambi_v1.6.198-15250dc"}, "expresso": {"main-firmware-version": "Google_Expresso.5216.223.2", "ec-firmware-version": "expresso_v1.6.187-bfe0c53"}, "enguarde": {"main-firmware-version": "Google_Enguarde.5216.201.4", "ec-firmware-version": "enguarde_v1.6.170-2279c44"}}, "boards": ["rambi"], "time": {"duration": "4:26:46.954629", "start": "Mon, 18 Aug 2014 01:07:54 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 05:34:41 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:56", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:53", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:58", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:31", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/SetupBoard%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:04:53", "name": "SetupBoard [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/SetupBoard%20%5Bclapper%5D/logs/stdio", "summary": "Stage was successful", "board": "clapper", "duration": "0:00:47", "name": "SetupBoard [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/SetupBoard%20%5Benguarde%5D/logs/stdio", "summary": "Stage was successful", "board": "enguarde", "duration": "0:00:48", "name": "SetupBoard [enguarde]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/SetupBoard%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was successful", "board": "expresso", "duration": "0:00:49", "name": "SetupBoard [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/SyncChrome/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:07:26", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/BuildPackages%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "1:20:37", "name": "BuildPackages [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/BuildImage%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:15:22", "name": "BuildImage [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ChromeSDK%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "1:34:33", "name": "ChromeSDK [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/VMTest%20%28attempt%201%29%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:44:02", "name": "VMTest (attempt 1) [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/SignerTest%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:10:19", "name": "SignerTest [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/Paygen%20%28attempt%201%29%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "2:11:06", "name": "Paygen (attempt 1) [rambi]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ImageTest%20%5Brambi%5D/logs/stdio", "summary": "Stage failed but was 
optional", "board": "rambi", "duration": "0:27:03", "name": "ImageTest [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/UnitTest%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:23:06", "name": "UnitTest [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/UploadPrebuilts%20%5Brambi%5D/logs/stdio", "summary": "Stage was skipped", "board": "rambi", "duration": "0:00:00", "name": "UploadPrebuilts [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/DevInstallerPrebuilts%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:02:23", "name": "DevInstallerPrebuilts [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/DebugSymbols%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "1:20:01", "name": "DebugSymbols [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/CPEExport%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:17:02", "name": "CPEExport [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/Archive%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:31:28", "name": "Archive [rambi]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/UploadTestArtifacts%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:21:02", "name": "UploadTestArtifacts [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/HWTest%20%5Bbvt-inline%5D%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:41:34", "name": "HWTest [bvt-inline] [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/AUTest%20%5Bau%5D%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:24:38", "name": "AUTest [au] [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Brambi%5D/logs/stdio", "summary": "Stage was successful", "board": "rambi", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [rambi]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/BuildPackages%20%5Bclapper%5D/logs/stdio", "summary": "Stage was successful", "board": "clapper", "duration": "0:24:06", "name": "BuildPackages [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/BuildImage%20%5Bclapper%5D/logs/stdio", "summary": "Stage was 
successful", "board": "clapper", "duration": "0:14:10", "name": "BuildImage [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ChromeSDK%20%5Bclapper%5D/logs/stdio", "summary": "Stage was successful", "board": "clapper", "duration": "0:22:38", "name": "ChromeSDK [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/VMTest%20%28attempt%201%29%20%5Bclapper%5D/logs/stdio", "summary": "Stage was skipped", "board": "clapper", "duration": "0:00:00", "name": "VMTest (attempt 1) [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/SignerTest%20%5Bclapper%5D/logs/stdio", "summary": "Stage was successful", "board": "clapper", "duration": "0:11:05", "name": "SignerTest [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/Paygen%20%28attempt%201%29%20%5Bclapper%5D/logs/stdio", "summary": "Stage was successful", "board": "clapper", "duration": "1:45:18", "name": "Paygen (attempt 1) [clapper]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ImageTest%20%5Bclapper%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "clapper", "duration": "0:24:38", "name": "ImageTest [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/UnitTest%20%5Bclapper%5D/logs/stdio", "summary": "Stage was skipped", "board": "clapper", "duration": "0:00:00", "name": "UnitTest [clapper]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/UploadPrebuilts%20%5Bclapper%5D/logs/stdio", "summary": "Stage was skipped", "board": "clapper", "duration": "0:00:00", "name": "UploadPrebuilts [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/DevInstallerPrebuilts%20%5Bclapper%5D/logs/stdio", "summary": "Stage was successful", "board": "clapper", "duration": "0:01:42", "name": "DevInstallerPrebuilts [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/DebugSymbols%20%5Bclapper%5D/logs/stdio", "summary": "Stage was successful", "board": "clapper", "duration": "1:13:53", "name": "DebugSymbols [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/CPEExport%20%5Bclapper%5D/logs/stdio", "summary": "Stage was successful", "board": "clapper", "duration": "0:15:54", "name": "CPEExport [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/Archive%20%5Bclapper%5D/logs/stdio", "summary": "Stage was successful", "board": "clapper", "duration": "0:29:33", "name": "Archive [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/UploadTestArtifacts%20%5Bclapper%5D/logs/stdio", "summary": "Stage was successful", "board": "clapper", "duration": "0:24:33", "name": "UploadTestArtifacts [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/HWTest%20%5Bbvt-inline%5D%20%5Bclapper%5D/logs/stdio", "summary": "Stage was successful", "board": "clapper", 
"duration": "0:46:21", "name": "HWTest [bvt-inline] [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/AUTest%20%5Bau%5D%20%5Bclapper%5D/logs/stdio", "summary": "Stage was successful", "board": "clapper", "duration": "0:29:06", "name": "AUTest [au] [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bclapper%5D/logs/stdio", "summary": "Stage was successful", "board": "clapper", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bclapper%5D/logs/stdio", "summary": "Stage was successful", "board": "clapper", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [clapper]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/BuildPackages%20%5Benguarde%5D/logs/stdio", "summary": "Stage was successful", "board": "enguarde", "duration": "0:24:01", "name": "BuildPackages [enguarde]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/BuildImage%20%5Benguarde%5D/logs/stdio", "summary": "Stage was successful", "board": "enguarde", "duration": "0:10:47", "name": "BuildImage [enguarde]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ChromeSDK%20%5Benguarde%5D/logs/stdio", "summary": "Stage was successful", "board": "enguarde", "duration": "0:17:46", "name": "ChromeSDK [enguarde]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/VMTest%20%28attempt%201%29%20%5Benguarde%5D/logs/stdio", "summary": "Stage was skipped", "board": "enguarde", "duration": "0:00:00", "name": "VMTest (attempt 1) [enguarde]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/SignerTest%20%5Benguarde%5D/logs/stdio", "summary": "Stage was successful", "board": "enguarde", "duration": "0:04:49", "name": "SignerTest [enguarde]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/Paygen%20%28attempt%201%29%20%5Benguarde%5D/logs/stdio", "summary": "Stage was successful", "board": "enguarde", "duration": "1:50:04", "name": "Paygen (attempt 1) [enguarde]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ImageTest%20%5Benguarde%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "enguarde", "duration": "0:17:21", "name": "ImageTest [enguarde]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/UnitTest%20%5Benguarde%5D/logs/stdio", "summary": "Stage was skipped", "board": "enguarde", "duration": "0:00:00", "name": "UnitTest [enguarde]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/UploadPrebuilts%20%5Benguarde%5D/logs/stdio", "summary": "Stage was skipped", "board": "enguarde", "duration": "0:00:00", "name": "UploadPrebuilts [enguarde]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/DevInstallerPrebuilts%20%5Benguarde%5D/logs/stdio", "summary": "Stage was successful", "board": "enguarde", "duration": "0:01:26", "name": "DevInstallerPrebuilts [enguarde]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/DebugSymbols%20%5Benguarde%5D/logs/stdio", "summary": "Stage was successful", "board": "enguarde", "duration": "1:07:00", "name": "DebugSymbols [enguarde]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/CPEExport%20%5Benguarde%5D/logs/stdio", "summary": "Stage was successful", "board": "enguarde", "duration": "0:09:25", "name": "CPEExport [enguarde]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/Archive%20%5Benguarde%5D/logs/stdio", "summary": "Stage was successful", "board": "enguarde", "duration": "0:21:42", "name": "Archive [enguarde]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/UploadTestArtifacts%20%5Benguarde%5D/logs/stdio", "summary": "Stage was successful", "board": "enguarde", "duration": "0:17:04", "name": "UploadTestArtifacts [enguarde]"}, {"status": "passed", "description": "** Suite passed with a warning code **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/HWTest%20%5Bbvt-inline%5D%20%5Benguarde%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "enguarde", "duration": "0:42:09", "name": "HWTest [bvt-inline] [enguarde]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/AUTest%20%5Bau%5D%20%5Benguarde%5D/logs/stdio", "summary": "Stage was successful", "board": "enguarde", "duration": "0:22:41", "name": "AUTest [au] [enguarde]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Benguarde%5D/logs/stdio", "summary": "Stage was successful", "board": "enguarde", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [enguarde]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Benguarde%5D/logs/stdio", "summary": "Stage was successful", "board": "enguarde", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [enguarde]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/BuildPackages%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was successful", "board": "expresso", "duration": "0:24:01", "name": "BuildPackages [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/BuildImage%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was successful", "board": "expresso", "duration": "0:10:02", "name": "BuildImage [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ChromeSDK%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was successful", "board": "expresso", "duration": "0:23:10", "name": "ChromeSDK [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/VMTest%20%28attempt%201%29%20%5Bexpresso%5D/logs/stdio", 
"summary": "Stage was skipped", "board": "expresso", "duration": "0:00:00", "name": "VMTest (attempt 1) [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/SignerTest%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was successful", "board": "expresso", "duration": "0:08:57", "name": "SignerTest [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/Paygen%20%28attempt%201%29%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was successful", "board": "expresso", "duration": "1:37:47", "name": "Paygen (attempt 1) [expresso]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ImageTest%20%5Bexpresso%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "expresso", "duration": "0:25:06", "name": "ImageTest [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/UnitTest%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was skipped", "board": "expresso", "duration": "0:00:00", "name": "UnitTest [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/UploadPrebuilts%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was skipped", "board": "expresso", "duration": "0:00:00", "name": "UploadPrebuilts [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/DevInstallerPrebuilts%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was successful", "board": "expresso", "duration": "0:01:42", "name": "DevInstallerPrebuilts 
[expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/DebugSymbols%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was successful", "board": "expresso", "duration": "1:08:44", "name": "DebugSymbols [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/CPEExport%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was successful", "board": "expresso", "duration": "0:16:39", "name": "CPEExport [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/Archive%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was successful", "board": "expresso", "duration": "0:28:49", "name": "Archive [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/UploadTestArtifacts%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was successful", "board": "expresso", "duration": "0:24:24", "name": "UploadTestArtifacts [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/HWTest%20%5Bbvt-inline%5D%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was successful", "board": "expresso", "duration": "0:41:09", "name": "HWTest [bvt-inline] [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/AUTest%20%5Bau%5D%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was successful", "board": "expresso", "duration": "0:23:12", "name": "AUTest [au] [expresso]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was successful", "board": "expresso", "duration": "0:00:08", "name": "ASyncHWTest [bvt-cq] [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bexpresso%5D/logs/stdio", "summary": "Stage was successful", "board": "expresso", "duration": "0:00:08", "name": "ASyncHWTest [bvt-perbuild] [expresso]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-a%20canary/builds/449/steps/CanaryCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:13", "name": "CanaryCompletion"}], "build-number": 449, "child-configs": [{"name": "rambi-release", "boards": ["rambi"]}, {"name": "clapper-release", "boards": ["clapper"]}, {"name": "enguarde-release", "boards": ["enguarde"]}, {"name": "expresso-release", "boards": ["expresso"]}], "bot-config": "rambi-a-release-group", "builder-name": "rambi-a canary", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build172-m2.golo.chromium.org", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/rambi-b-release-group-metadata.json b/cidb/test_data/series_1/rambi-b-release-group-metadata.json
new file mode 100644
index 0000000..ebe6fcb
--- /dev/null
+++ b/cidb/test_data/series_1/rambi-b-release-group-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 06:06:23 -0700 (PDT)"}, "board-metadata": {"kip": {"main-firmware-version": "Google_Kip.5216.227.3", "ec-firmware-version": "kip_v1.6.189-8ce7b3c"}, "gnawty": {"main-firmware-version": "Google_Gnawty.5216.239.2", "ec-firmware-version": "gnawty_v1.6.198-15250dc"}, "glimmer": {"main-firmware-version": "Google_Glimmer.5216.198.4", "ec-firmware-version": "glimmer_v1.6.170-d2503de"}, "quawks": {"main-firmware-version": "Google_Quawks.5216.204.5", "ec-firmware-version": "quawks_v1.6.172-6da63b5"}}, "boards": ["glimmer"], "time": {"duration": "4:56:53.158868", "start": "Mon, 18 Aug 2014 01:09:29 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 06:06:23 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:53", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:52", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:06", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:30", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/Uprev/logs/stdio", 
"summary": "Stage was successful", "board": "", "duration": "0:01:23", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/SetupBoard%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "0:04:24", "name": "SetupBoard [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/SetupBoard%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was successful", "board": "gnawty", "duration": "0:00:47", "name": "SetupBoard [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/SetupBoard%20%5Bkip%5D/logs/stdio", "summary": "Stage was successful", "board": "kip", "duration": "0:00:47", "name": "SetupBoard [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/SetupBoard%20%5Bquawks%5D/logs/stdio", "summary": "Stage was successful", "board": "quawks", "duration": "0:01:37", "name": "SetupBoard [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/SyncChrome/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:38", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/BuildPackages%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", 
"duration": "1:08:22", "name": "BuildPackages [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/BuildImage%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "0:13:16", "name": "BuildImage [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ChromeSDK%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "1:45:35", "name": "ChromeSDK [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/VMTest%20%28attempt%201%29%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "0:54:55", "name": "VMTest (attempt 1) [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/SignerTest%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "0:08:44", "name": "SignerTest [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/Paygen%20%28attempt%201%29%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "2:12:15", "name": "Paygen (attempt 1) [glimmer]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ImageTest%20%5Bglimmer%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "glimmer", "duration": "0:33:04", "name": "ImageTest [glimmer]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/UnitTest%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "0:27:17", "name": "UnitTest [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/UploadPrebuilts%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was skipped", "board": "glimmer", "duration": "0:00:00", "name": "UploadPrebuilts [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/DevInstallerPrebuilts%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "0:01:56", "name": "DevInstallerPrebuilts [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/DebugSymbols%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "1:15:46", "name": "DebugSymbols [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/CPEExport%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "0:16:39", "name": "CPEExport [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/Archive%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "0:43:55", "name": "Archive [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/UploadTestArtifacts%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "0:20:35", "name": 
"UploadTestArtifacts [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/HWTest%20%5Bbvt-inline%5D%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "0:44:43", "name": "HWTest [bvt-inline] [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/AUTest%20%5Bau%5D%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "0:23:22", "name": "AUTest [au] [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bglimmer%5D/logs/stdio", "summary": "Stage was successful", "board": "glimmer", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [glimmer]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/BuildPackages%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was successful", "board": "gnawty", "duration": "0:24:59", "name": "BuildPackages [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/BuildImage%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was successful", "board": "gnawty", "duration": "0:19:12", "name": "BuildImage [gnawty]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ChromeSDK%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was successful", "board": "gnawty", "duration": "0:28:01", "name": "ChromeSDK [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/VMTest%20%28attempt%201%29%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was skipped", "board": "gnawty", "duration": "0:00:00", "name": "VMTest (attempt 1) [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/SignerTest%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was successful", "board": "gnawty", "duration": "0:12:29", "name": "SignerTest [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/Paygen%20%28attempt%201%29%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was successful", "board": "gnawty", "duration": "1:52:00", "name": "Paygen (attempt 1) [gnawty]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ImageTest%20%5Bgnawty%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "gnawty", "duration": "0:34:15", "name": "ImageTest [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/UnitTest%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was skipped", "board": "gnawty", "duration": "0:00:00", "name": "UnitTest [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/UploadPrebuilts%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was 
skipped", "board": "gnawty", "duration": "0:00:00", "name": "UploadPrebuilts [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/DevInstallerPrebuilts%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was successful", "board": "gnawty", "duration": "0:01:52", "name": "DevInstallerPrebuilts [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/DebugSymbols%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was successful", "board": "gnawty", "duration": "1:22:08", "name": "DebugSymbols [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/CPEExport%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was successful", "board": "gnawty", "duration": "0:25:02", "name": "CPEExport [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/Archive%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was successful", "board": "gnawty", "duration": "0:30:32", "name": "Archive [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/UploadTestArtifacts%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was successful", "board": "gnawty", "duration": "0:29:28", "name": "UploadTestArtifacts [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/HWTest%20%5Bbvt-inline%5D%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was successful", "board": "gnawty", "duration": "0:47:01", "name": "HWTest [bvt-inline] [gnawty]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/AUTest%20%5Bau%5D%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was successful", "board": "gnawty", "duration": "0:34:47", "name": "AUTest [au] [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was successful", "board": "gnawty", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bgnawty%5D/logs/stdio", "summary": "Stage was successful", "board": "gnawty", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [gnawty]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/BuildPackages%20%5Bkip%5D/logs/stdio", "summary": "Stage was successful", "board": "kip", "duration": "0:24:16", "name": "BuildPackages [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/BuildImage%20%5Bkip%5D/logs/stdio", "summary": "Stage was successful", "board": "kip", "duration": "0:14:58", "name": "BuildImage [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ChromeSDK%20%5Bkip%5D/logs/stdio", "summary": "Stage was successful", "board": "kip", "duration": "0:35:57", "name": "ChromeSDK [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/VMTest%20%28attempt%201%29%20%5Bkip%5D/logs/stdio", "summary": "Stage was skipped", "board": "kip", "duration": "0:00:00", 
"name": "VMTest (attempt 1) [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/SignerTest%20%5Bkip%5D/logs/stdio", "summary": "Stage was successful", "board": "kip", "duration": "0:11:16", "name": "SignerTest [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/Paygen%20%28attempt%201%29%20%5Bkip%5D/logs/stdio", "summary": "Stage was successful", "board": "kip", "duration": "2:38:08", "name": "Paygen (attempt 1) [kip]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ImageTest%20%5Bkip%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "kip", "duration": "0:41:00", "name": "ImageTest [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/UnitTest%20%5Bkip%5D/logs/stdio", "summary": "Stage was skipped", "board": "kip", "duration": "0:00:00", "name": "UnitTest [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/UploadPrebuilts%20%5Bkip%5D/logs/stdio", "summary": "Stage was skipped", "board": "kip", "duration": "0:00:00", "name": "UploadPrebuilts [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/DevInstallerPrebuilts%20%5Bkip%5D/logs/stdio", "summary": "Stage was successful", "board": "kip", "duration": "0:01:37", "name": "DevInstallerPrebuilts [kip]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/DebugSymbols%20%5Bkip%5D/logs/stdio", "summary": "Stage was successful", "board": "kip", "duration": "1:16:14", "name": "DebugSymbols [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/CPEExport%20%5Bkip%5D/logs/stdio", "summary": "Stage was successful", "board": "kip", "duration": "0:26:50", "name": "CPEExport [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/Archive%20%5Bkip%5D/logs/stdio", "summary": "Stage was successful", "board": "kip", "duration": "0:36:37", "name": "Archive [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/UploadTestArtifacts%20%5Bkip%5D/logs/stdio", "summary": "Stage was successful", "board": "kip", "duration": "0:38:39", "name": "UploadTestArtifacts [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/HWTest%20%5Bbvt-inline%5D%20%5Bkip%5D/logs/stdio", "summary": "Stage was successful", "board": "kip", "duration": "1:09:51", "name": "HWTest [bvt-inline] [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/AUTest%20%5Bau%5D%20%5Bkip%5D/logs/stdio", "summary": "Stage was successful", "board": "kip", "duration": "0:28:44", "name": "AUTest [au] [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bkip%5D/logs/stdio", "summary": "Stage was successful", "board": "kip", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [kip]"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bkip%5D/logs/stdio", "summary": "Stage was successful", "board": "kip", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [kip]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/BuildPackages%20%5Bquawks%5D/logs/stdio", "summary": "Stage was successful", "board": "quawks", "duration": "1:41:17", "name": "BuildPackages [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/BuildImage%20%5Bquawks%5D/logs/stdio", "summary": "Stage was successful", "board": "quawks", "duration": "0:08:37", "name": "BuildImage [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ChromeSDK%20%5Bquawks%5D/logs/stdio", "summary": "Stage was successful", "board": "quawks", "duration": "0:09:48", "name": "ChromeSDK [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/VMTest%20%28attempt%201%29%20%5Bquawks%5D/logs/stdio", "summary": "Stage was skipped", "board": "quawks", "duration": "0:00:00", "name": "VMTest (attempt 1) [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/SignerTest%20%5Bquawks%5D/logs/stdio", "summary": "Stage was successful", "board": "quawks", "duration": "0:03:54", "name": "SignerTest [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/Paygen%20%28attempt%201%29%20%5Bquawks%5D/logs/stdio", "summary": "Stage was 
successful", "board": "quawks", "duration": "1:36:07", "name": "Paygen (attempt 1) [quawks]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ImageTest%20%5Bquawks%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "quawks", "duration": "0:11:54", "name": "ImageTest [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/UnitTest%20%5Bquawks%5D/logs/stdio", "summary": "Stage was skipped", "board": "quawks", "duration": "0:00:00", "name": "UnitTest [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/UploadPrebuilts%20%5Bquawks%5D/logs/stdio", "summary": "Stage was skipped", "board": "quawks", "duration": "0:00:00", "name": "UploadPrebuilts [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/DevInstallerPrebuilts%20%5Bquawks%5D/logs/stdio", "summary": "Stage was successful", "board": "quawks", "duration": "0:01:25", "name": "DevInstallerPrebuilts [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/DebugSymbols%20%5Bquawks%5D/logs/stdio", "summary": "Stage was successful", "board": "quawks", "duration": "1:15:07", "name": "DebugSymbols [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/CPEExport%20%5Bquawks%5D/logs/stdio", "summary": "Stage was successful", "board": "quawks", "duration": "0:07:05", "name": "CPEExport [quawks]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/Archive%20%5Bquawks%5D/logs/stdio", "summary": "Stage was successful", "board": "quawks", "duration": "0:11:06", "name": "Archive [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/UploadTestArtifacts%20%5Bquawks%5D/logs/stdio", "summary": "Stage was successful", "board": "quawks", "duration": "0:09:11", "name": "UploadTestArtifacts [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/HWTest%20%5Bbvt-inline%5D%20%5Bquawks%5D/logs/stdio", "summary": "Stage was successful", "board": "quawks", "duration": "0:49:57", "name": "HWTest [bvt-inline] [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/AUTest%20%5Bau%5D%20%5Bquawks%5D/logs/stdio", "summary": "Stage was successful", "board": "quawks", "duration": "0:39:11", "name": "AUTest [au] [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bquawks%5D/logs/stdio", "summary": "Stage was successful", "board": "quawks", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bquawks%5D/logs/stdio", "summary": "Stage was successful", "board": "quawks", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [quawks]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/rambi-b%20canary/builds/451/steps/CanaryCompletion/logs/stdio", "summary": "Stage was successful", 
"board": "", "duration": "0:01:04", "name": "CanaryCompletion"}], "build-number": 451, "child-configs": [{"name": "glimmer-release", "boards": ["glimmer"]}, {"name": "gnawty-release", "boards": ["gnawty"]}, {"name": "kip-release", "boards": ["kip"]}, {"name": "quawks-release", "boards": ["quawks"]}], "bot-config": "rambi-b-release-group", "builder-name": "rambi-b canary", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build173-m2.golo.chromium.org", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/samus-release-metadata.json b/cidb/test_data/series_1/samus-release-metadata.json
new file mode 100644
index 0000000..1f6afc5
--- /dev/null
+++ b/cidb/test_data/series_1/samus-release-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 03:47:24 -0700 (PDT)"}, "board-metadata": {"samus": {"main-firmware-version": "Google_Samus.6078.0.0", "ec-firmware-version": "samus_v1.1.2019-4eee9fe"}}, "boards": ["samus"], "time": {"duration": "2:43:25.511828", "start": "Mon, 18 Aug 2014 01:03:59 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 03:47:24 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:24", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:00", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:35", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:22", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:04:19", "name": 
"SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/SyncChrome/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:07:19", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "1:07:39", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:06:20", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:47:21", "name": "ChromeSDK"}, {"status": "passed", "description": "** VMTests failed with code 1 **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage failed but was optional", "board": "samus", "duration": "0:19:18", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/VMTest%20%28attempt%202%29/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:26:08", "name": "VMTest (attempt 2)"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/SignerTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:05:17", "name": "SignerTest"}, {"status": "passed", "description": "No release.conf entry was found for board samus. Get a TPM to fix.", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage failed but was optional", "board": "samus", "duration": "0:00:09", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/ImageTest/logs/stdio", "summary": "Stage failed but was optional", "board": "samus", "duration": "0:17:38", "name": "ImageTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:16:24", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "samus", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:01:35", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "1:12:14", "name": "DebugSymbols"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:10:50", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:22:37", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "samus", "duration": "0:13:25", "name": "UploadTestArtifacts"}, {"status": "passed", "description": "** HWTest did not complete due to infrastructure issues (code 3) **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/HWTest%20%5Bbvt-inline%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "samus", "duration": "0:00:02", "name": "HWTest [bvt-inline]"}, {"status": "passed", "description": "** HWTest did not complete due to infrastructure issues (code 3) **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/AUTest%20%5Bau%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "samus", "duration": "0:00:11", "name": "AUTest [au]"}, {"status": "passed", "description": "** HWTest did not complete due to infrastructure issues (code 3) **", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/ASyncHWTest%20%5Bbvt-cq%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "samus", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq]"}, {"status": "passed", "description": "** HWTest did not complete due to infrastructure issues (code 3) **", "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/ASyncHWTest%20%5Bbvt-perbuild%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "samus", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/samus%20canary/builds/1268/steps/CanaryCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:27", "name": "CanaryCompletion"}], "build-number": 1268, "child-configs": [], "bot-config": "samus-release", "builder-name": "samus canary", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build21-b2.chrome.corp.google.com", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/sandybridge-release-group-metadata.json b/cidb/test_data/series_1/sandybridge-release-group-metadata.json
new file mode 100644
index 0000000..32a95ba
--- /dev/null
+++ b/cidb/test_data/series_1/sandybridge-release-group-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 06:00:09 -0700 (PDT)"}, "board-metadata": {"butterfly": {"main-firmware-version": "Google_Butterfly.2788.39.0", "ec-firmware-version": "820DG1"}, "lumpy": {"main-firmware-version": "Google_Lumpy.2.111.0", "ec-firmware-version": "02WQA015"}, "stumpy": {"main-firmware-version": "Google_Stumpy.2.102.0", "ec-firmware-version": null}, "parrot": {"main-firmware-version": "Google_Parrot.2685.54.0", "ec-firmware-version": "00BE107A00"}}, "boards": ["lumpy"], "time": {"duration": "4:53:26.852612", "start": "Mon, 18 Aug 2014 01:06:42 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 06:00:09 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:13", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:28", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:48", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": 
"", "duration": "0:01:53", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/SetupBoard%20%5Blumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:04:58", "name": "SetupBoard [lumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/SetupBoard%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:00:47", "name": "SetupBoard [butterfly]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/SetupBoard%20%5Bparrot%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:00:51", "name": "SetupBoard [parrot]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/SetupBoard%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:49", "name": "SetupBoard [stumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/SyncChrome/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:06:55", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/BuildPackages%20%5Blumpy%5D%20%5Bafdo_use%5D/logs/stdio", "summary": "Stage was successful", "board": 
"lumpy", "duration": "1:25:20", "name": "BuildPackages [lumpy] [afdo_use]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/BuildImage%20%5Blumpy%5D%20%5Bafdo_use%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:16:39", "name": "BuildImage [lumpy] [afdo_use]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ChromeSDK%20%5Blumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "1:36:31", "name": "ChromeSDK [lumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/VMTest%20%28attempt%201%29%20%5Blumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:42:57", "name": "VMTest (attempt 1) [lumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/SignerTest%20%5Blumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:21:15", "name": "SignerTest [lumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/Paygen%20%28attempt%201%29%20%5Blumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "2:32:07", "name": "Paygen (attempt 1) [lumpy]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ImageTest%20%5Blumpy%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "lumpy", "duration": "0:32:13", "name": "ImageTest [lumpy]"}, {"status": "passed", "description": 
null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/UnitTest%20%5Blumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:25:52", "name": "UnitTest [lumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/UploadPrebuilts%20%5Blumpy%5D/logs/stdio", "summary": "Stage was skipped", "board": "lumpy", "duration": "0:00:00", "name": "UploadPrebuilts [lumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/DevInstallerPrebuilts%20%5Blumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:02:28", "name": "DevInstallerPrebuilts [lumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/DebugSymbols%20%5Blumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "1:26:57", "name": "DebugSymbols [lumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/CPEExport%20%5Blumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:18:43", "name": "CPEExport [lumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/Archive%20%5Blumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:37:56", "name": "Archive [lumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/UploadTestArtifacts%20%5Blumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:17:06", 
"name": "UploadTestArtifacts [lumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/HWTest%20%5Bbvt-inline%5D%20%5Blumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:40:59", "name": "HWTest [bvt-inline] [lumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/AUTest%20%5Bau%5D%20%5Blumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:25:54", "name": "AUTest [au] [lumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Blumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:04", "name": "ASyncHWTest [bvt-cq] [lumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Blumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "lumpy", "duration": "0:00:04", "name": "ASyncHWTest [bvt-perbuild] [lumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/BuildPackages%20%5Bbutterfly%5D%20%5Bafdo_use%5D/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:31:49", "name": "BuildPackages [butterfly] [afdo_use]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/BuildImage%20%5Bbutterfly%5D%20%5Bafdo_use%5D/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:09:54", "name": "BuildImage [butterfly] [afdo_use]"}, {"status": "passed", "description": 
null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ChromeSDK%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:20:57", "name": "ChromeSDK [butterfly]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/VMTest%20%28attempt%201%29%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "VMTest (attempt 1) [butterfly]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/SignerTest%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:06:24", "name": "SignerTest [butterfly]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/Paygen%20%28attempt%201%29%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "1:42:09", "name": "Paygen (attempt 1) [butterfly]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ImageTest%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "butterfly", "duration": "0:23:25", "name": "ImageTest [butterfly]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/UnitTest%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "UnitTest [butterfly]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/UploadPrebuilts%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was skipped", "board": "butterfly", "duration": "0:00:00", "name": "UploadPrebuilts [butterfly]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/DevInstallerPrebuilts%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:01:27", "name": "DevInstallerPrebuilts [butterfly]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/DebugSymbols%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "1:08:29", "name": "DebugSymbols [butterfly]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/CPEExport%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:22:05", "name": "CPEExport [butterfly]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/Archive%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:23:39", "name": "Archive [butterfly]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/UploadTestArtifacts%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:20:17", "name": "UploadTestArtifacts [butterfly]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/HWTest%20%5Bbvt-inline%5D%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:51:32", "name": "HWTest [bvt-inline] [butterfly]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/AUTest%20%5Bau%5D%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:34:09", "name": "AUTest [au] [butterfly]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [butterfly]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bbutterfly%5D/logs/stdio", "summary": "Stage was successful", "board": "butterfly", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [butterfly]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/BuildPackages%20%5Bparrot%5D%20%5Bafdo_use%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:32:07", "name": "BuildPackages [parrot] [afdo_use]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/BuildImage%20%5Bparrot%5D%20%5Bafdo_use%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:14:01", "name": "BuildImage [parrot] [afdo_use]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ChromeSDK%20%5Bparrot%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:23:19", "name": "ChromeSDK [parrot]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/VMTest%20%28attempt%201%29%20%5Bparrot%5D/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "VMTest (attempt 1) [parrot]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/SignerTest%20%5Bparrot%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:11:34", "name": "SignerTest [parrot]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/Paygen%20%28attempt%201%29%20%5Bparrot%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "1:40:50", "name": "Paygen (attempt 1) [parrot]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ImageTest%20%5Bparrot%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "parrot", "duration": "0:24:49", "name": "ImageTest [parrot]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/UnitTest%20%5Bparrot%5D/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "UnitTest [parrot]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/UploadPrebuilts%20%5Bparrot%5D/logs/stdio", "summary": "Stage was skipped", "board": "parrot", "duration": "0:00:00", "name": "UploadPrebuilts [parrot]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/DevInstallerPrebuilts%20%5Bparrot%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:01:46", "name": "DevInstallerPrebuilts [parrot]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/DebugSymbols%20%5Bparrot%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "1:16:29", "name": "DebugSymbols [parrot]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/CPEExport%20%5Bparrot%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:25:04", "name": "CPEExport [parrot]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/Archive%20%5Bparrot%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:28:24", "name": "Archive [parrot]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/UploadTestArtifacts%20%5Bparrot%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:24:13", "name": "UploadTestArtifacts [parrot]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/HWTest%20%5Bbvt-inline%5D%20%5Bparrot%5D/logs/stdio", "summary": "Stage was successful", "board": 
"parrot", "duration": "0:57:38", "name": "HWTest [bvt-inline] [parrot]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/AUTest%20%5Bau%5D%20%5Bparrot%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:35:51", "name": "AUTest [au] [parrot]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bparrot%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [parrot]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bparrot%5D/logs/stdio", "summary": "Stage was successful", "board": "parrot", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [parrot]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/BuildPackages%20%5Bstumpy%5D%20%5Bafdo_use%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:31:53", "name": "BuildPackages [stumpy] [afdo_use]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/BuildImage%20%5Bstumpy%5D%20%5Bafdo_use%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:13:14", "name": "BuildImage [stumpy] [afdo_use]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ChromeSDK%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:23:03", "name": "ChromeSDK [stumpy]"}, {"status": "passed", "description": 
null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/VMTest%20%28attempt%201%29%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "VMTest (attempt 1) [stumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/SignerTest%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:09:32", "name": "SignerTest [stumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/Paygen%20%28attempt%201%29%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:40:30", "name": "Paygen (attempt 1) [stumpy]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ImageTest%20%5Bstumpy%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "stumpy", "duration": "0:24:49", "name": "ImageTest [stumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/UnitTest%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "UnitTest [stumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/UploadPrebuilts%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was skipped", "board": "stumpy", "duration": "0:00:00", "name": "UploadPrebuilts [stumpy]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/DevInstallerPrebuilts%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:01:44", "name": "DevInstallerPrebuilts [stumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/DebugSymbols%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "1:10:12", "name": "DebugSymbols [stumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/CPEExport%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:24:56", "name": "CPEExport [stumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/Archive%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:29:52", "name": "Archive [stumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/UploadTestArtifacts%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:23:01", "name": "UploadTestArtifacts [stumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/HWTest%20%5Bbvt-inline%5D%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:47:56", "name": "HWTest [bvt-inline] [stumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/AUTest%20%5Bau%5D%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was successful", 
"board": "stumpy", "duration": "0:29:19", "name": "AUTest [au] [stumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [stumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bstumpy%5D/logs/stdio", "summary": "Stage was successful", "board": "stumpy", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [stumpy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/sandybridge%20canary/builds/495/steps/CanaryCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:11", "name": "CanaryCompletion"}], "build-number": 495, "child-configs": [{"name": "lumpy-release", "boards": ["lumpy"]}, {"name": "butterfly-release", "boards": ["butterfly"]}, {"name": "parrot-release", "boards": ["parrot"]}, {"name": "stumpy-release", "boards": ["stumpy"]}], "bot-config": "sandybridge-release-group", "builder-name": "sandybridge canary", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build119-m2.golo.chromium.org", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/slippy-release-group-metadata.json b/cidb/test_data/series_1/slippy-release-group-metadata.json
new file mode 100644
index 0000000..e3cabd2
--- /dev/null
+++ b/cidb/test_data/series_1/slippy-release-group-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 05:21:44 -0700 (PDT)"}, "board-metadata": {"falco_li": {"main-firmware-version": "Google_Falco.4389.78.0", "ec-firmware-version": "falco_v1.5.113-2d79820"}, "peppy": {"main-firmware-version": "Google_Peppy.4389.86.0", "ec-firmware-version": "peppy_v1.5.129-cd1a1e9"}, "wolf": {"main-firmware-version": "Google_Wolf.4389.24.53", "ec-firmware-version": "wolf_v1.5.126-6b10085"}, "leon": {"main-firmware-version": "Google_Leon.4389.61.43", "ec-firmware-version": "leon_v1.5.145-fcaa745"}, "falco": {"main-firmware-version": "Google_Falco.4389.78.0", "ec-firmware-version": "falco_v1.5.113-2d79820"}}, "boards": ["peppy"], "time": {"duration": "4:12:33.506487", "start": "Mon, 18 Aug 2014 01:09:10 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 05:21:44 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:05:14", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:43", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:34", "name": "InitSDK"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:26", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/SetupBoard%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:04:22", "name": "SetupBoard [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/SetupBoard%20%5Bfalco%5D/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:00:46", "name": "SetupBoard [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/SetupBoard%20%5Bleon%5D/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:46", "name": "SetupBoard [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/SetupBoard%20%5Bwolf%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:48", "name": "SetupBoard [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/SetupBoard%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was successful", "board": "falco_li", "duration": "0:00:48", "name": "SetupBoard [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/SyncChrome/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:10:46", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/BuildPackages%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "1:08:42", "name": "BuildPackages [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/BuildImage%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:18:23", "name": "BuildImage [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ChromeSDK%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "1:28:13", "name": "ChromeSDK [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/VMTest%20%28attempt%201%29%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:41:01", "name": "VMTest (attempt 1) [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/SignerTest%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:08:56", "name": "SignerTest [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/Paygen%20%28attempt%201%29%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "1:49:01", "name": "Paygen (attempt 1) [peppy]"}, {"status": "passed", 
"description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ImageTest%20%5Bpeppy%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "peppy", "duration": "0:26:26", "name": "ImageTest [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/UnitTest%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:20:37", "name": "UnitTest [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/UploadPrebuilts%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was skipped", "board": "peppy", "duration": "0:00:00", "name": "UploadPrebuilts [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/DevInstallerPrebuilts%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:02:24", "name": "DevInstallerPrebuilts [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/DebugSymbols%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "1:15:14", "name": "DebugSymbols [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/CPEExport%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:15:09", "name": "CPEExport [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/Archive%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was 
successful", "board": "peppy", "duration": "0:28:37", "name": "Archive [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/UploadTestArtifacts%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:19:04", "name": "UploadTestArtifacts [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/HWTest%20%5Bbvt-inline%5D%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:27:26", "name": "HWTest [bvt-inline] [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/AUTest%20%5Bau%5D%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:16:21", "name": "AUTest [au] [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:03", "name": "ASyncHWTest [bvt-cq] [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bpeppy%5D/logs/stdio", "summary": "Stage was successful", "board": "peppy", "duration": "0:00:03", "name": "ASyncHWTest [bvt-perbuild] [peppy]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/BuildPackages%20%5Bfalco%5D/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:26:57", "name": "BuildPackages [falco]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/BuildImage%20%5Bfalco%5D/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:11:59", "name": "BuildImage [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ChromeSDK%20%5Bfalco%5D/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:21:24", "name": "ChromeSDK [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/VMTest%20%28attempt%201%29%20%5Bfalco%5D/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "VMTest (attempt 1) [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/SignerTest%20%5Bfalco%5D/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:09:54", "name": "SignerTest [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/Paygen%20%28attempt%201%29%20%5Bfalco%5D/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "1:22:19", "name": "Paygen (attempt 1) [falco]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ImageTest%20%5Bfalco%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "falco", "duration": "0:24:32", "name": "ImageTest [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/UnitTest%20%5Bfalco%5D/logs/stdio", "summary": "Stage was skipped", "board": 
"falco", "duration": "0:00:00", "name": "UnitTest [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/UploadPrebuilts%20%5Bfalco%5D/logs/stdio", "summary": "Stage was skipped", "board": "falco", "duration": "0:00:00", "name": "UploadPrebuilts [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/DevInstallerPrebuilts%20%5Bfalco%5D/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:01:47", "name": "DevInstallerPrebuilts [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/DebugSymbols%20%5Bfalco%5D/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "1:17:08", "name": "DebugSymbols [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/CPEExport%20%5Bfalco%5D/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:14:33", "name": "CPEExport [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/Archive%20%5Bfalco%5D/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:28:59", "name": "Archive [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/UploadTestArtifacts%20%5Bfalco%5D/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:23:07", "name": "UploadTestArtifacts [falco]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/HWTest%20%5Bbvt-inline%5D%20%5Bfalco%5D/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:41:50", "name": "HWTest [bvt-inline] [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/AUTest%20%5Bau%5D%20%5Bfalco%5D/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:21:49", "name": "AUTest [au] [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bfalco%5D/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:00:03", "name": "ASyncHWTest [bvt-cq] [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bfalco%5D/logs/stdio", "summary": "Stage was successful", "board": "falco", "duration": "0:00:03", "name": "ASyncHWTest [bvt-perbuild] [falco]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/BuildPackages%20%5Bleon%5D/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:26:55", "name": "BuildPackages [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/BuildImage%20%5Bleon%5D/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:08:11", "name": "BuildImage [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ChromeSDK%20%5Bleon%5D/logs/stdio", "summary": "Stage was successful", "board": "leon", 
"duration": "0:18:56", "name": "ChromeSDK [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/VMTest%20%28attempt%201%29%20%5Bleon%5D/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "VMTest (attempt 1) [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/SignerTest%20%5Bleon%5D/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:07:07", "name": "SignerTest [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/Paygen%20%28attempt%201%29%20%5Bleon%5D/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "1:30:31", "name": "Paygen (attempt 1) [leon]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ImageTest%20%5Bleon%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "leon", "duration": "0:23:56", "name": "ImageTest [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/UnitTest%20%5Bleon%5D/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "UnitTest [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/UploadPrebuilts%20%5Bleon%5D/logs/stdio", "summary": "Stage was skipped", "board": "leon", "duration": "0:00:00", "name": "UploadPrebuilts [leon]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/DevInstallerPrebuilts%20%5Bleon%5D/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:01:33", "name": "DevInstallerPrebuilts [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/DebugSymbols%20%5Bleon%5D/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "1:14:11", "name": "DebugSymbols [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/CPEExport%20%5Bleon%5D/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:12:29", "name": "CPEExport [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/Archive%20%5Bleon%5D/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:24:48", "name": "Archive [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/UploadTestArtifacts%20%5Bleon%5D/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:19:14", "name": "UploadTestArtifacts [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/HWTest%20%5Bbvt-inline%5D%20%5Bleon%5D/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:24:56", "name": "HWTest [bvt-inline] [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/AUTest%20%5Bau%5D%20%5Bleon%5D/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:20:52", "name": "AUTest [au] [leon]"}, 
{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bleon%5D/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bleon%5D/logs/stdio", "summary": "Stage was successful", "board": "leon", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [leon]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/BuildPackages%20%5Bwolf%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:24:55", "name": "BuildPackages [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/BuildImage%20%5Bwolf%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:10:17", "name": "BuildImage [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ChromeSDK%20%5Bwolf%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:14:19", "name": "ChromeSDK [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/VMTest%20%28attempt%201%29%20%5Bwolf%5D/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "VMTest (attempt 1) [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/SignerTest%20%5Bwolf%5D/logs/stdio", "summary": "Stage was successful", 
"board": "wolf", "duration": "0:03:55", "name": "SignerTest [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/Paygen%20%28attempt%201%29%20%5Bwolf%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "1:50:51", "name": "Paygen (attempt 1) [wolf]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ImageTest%20%5Bwolf%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "wolf", "duration": "0:17:03", "name": "ImageTest [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/UnitTest%20%5Bwolf%5D/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "UnitTest [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/UploadPrebuilts%20%5Bwolf%5D/logs/stdio", "summary": "Stage was skipped", "board": "wolf", "duration": "0:00:00", "name": "UploadPrebuilts [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/DevInstallerPrebuilts%20%5Bwolf%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:01:25", "name": "DevInstallerPrebuilts [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/DebugSymbols%20%5Bwolf%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "1:11:59", "name": "DebugSymbols [wolf]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/CPEExport%20%5Bwolf%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:07:49", "name": "CPEExport [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/Archive%20%5Bwolf%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:16:48", "name": "Archive [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/UploadTestArtifacts%20%5Bwolf%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:12:46", "name": "UploadTestArtifacts [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/HWTest%20%5Bbvt-inline%5D%20%5Bwolf%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:30:52", "name": "HWTest [bvt-inline] [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/AUTest%20%5Bau%5D%20%5Bwolf%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:20:09", "name": "AUTest [au] [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bwolf%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:03", "name": "ASyncHWTest [bvt-cq] [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bwolf%5D/logs/stdio", "summary": "Stage was successful", "board": "wolf", "duration": "0:00:03", "name": 
"ASyncHWTest [bvt-perbuild] [wolf]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/BuildPackages%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was successful", "board": "falco_li", "duration": "0:26:57", "name": "BuildPackages [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/BuildImage%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was successful", "board": "falco_li", "duration": "0:14:03", "name": "BuildImage [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/VMTest%20%28attempt%201%29%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was skipped", "board": "falco_li", "duration": "0:00:00", "name": "VMTest (attempt 1) [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/SignerTest%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was successful", "board": "falco_li", "duration": "0:09:48", "name": "SignerTest [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/Paygen%20%28attempt%201%29%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was successful", "board": "falco_li", "duration": "1:18:35", "name": "Paygen (attempt 1) [falco_li]"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ImageTest%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage failed but was optional", "board": "falco_li", "duration": "0:19:27", "name": "ImageTest [falco_li]"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/UnitTest%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was skipped", "board": "falco_li", "duration": "0:00:00", "name": "UnitTest [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/UploadPrebuilts%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was skipped", "board": "falco_li", "duration": "0:00:00", "name": "UploadPrebuilts [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/DevInstallerPrebuilts%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was successful", "board": "falco_li", "duration": "0:01:46", "name": "DevInstallerPrebuilts [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/DebugSymbols%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was successful", "board": "falco_li", "duration": "1:14:57", "name": "DebugSymbols [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/CPEExport%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was successful", "board": "falco_li", "duration": "0:14:24", "name": "CPEExport [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/Archive%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was successful", "board": "falco_li", "duration": "0:25:50", "name": "Archive [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/UploadTestArtifacts%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was successful", "board": "falco_li", "duration": 
"0:21:22", "name": "UploadTestArtifacts [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/HWTest%20%5Bbvt-inline%5D%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was successful", "board": "falco_li", "duration": "0:40:09", "name": "HWTest [bvt-inline] [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/AUTest%20%5Bau%5D%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was successful", "board": "falco_li", "duration": "0:28:20", "name": "AUTest [au] [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ASyncHWTest%20%5Bbvt-cq%5D%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was successful", "board": "falco_li", "duration": "0:00:02", "name": "ASyncHWTest [bvt-cq] [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/ASyncHWTest%20%5Bbvt-perbuild%5D%20%5Bfalco_li%5D/logs/stdio", "summary": "Stage was successful", "board": "falco_li", "duration": "0:00:02", "name": "ASyncHWTest [bvt-perbuild] [falco_li]"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/slippy%20canary/builds/1769/steps/CanaryCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:08", "name": "CanaryCompletion"}], "build-number": 1769, "child-configs": [{"name": "peppy-release", "boards": ["peppy"]}, {"name": "falco-release", "boards": ["falco"]}, {"name": "leon-release", "boards": ["leon"]}, {"name": "wolf-release", "boards": ["wolf"]}, {"name": "falco_li-release", "boards": ["falco_li"]}], "bot-config": "slippy-release-group", "builder-name": "slippy canary", "version": {"chrome": "39.0.2126.0", 
"platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build17-b2.chrome.corp.google.com", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/storm-release-metadata.json b/cidb/test_data/series_1/storm-release-metadata.json
new file mode 100644
index 0000000..d95e2e0
--- /dev/null
+++ b/cidb/test_data/series_1/storm-release-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 02:02:12 -0700 (PDT)"}, "board-metadata": {"storm": {"main-firmware-version": null, "ec-firmware-version": null}}, "boards": ["storm"], "time": {"duration": "0:58:26.258841", "start": "Mon, 18 Aug 2014 01:03:46 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 02:02:12 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:55", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:26", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:14", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "storm", "duration": "0:04:17", "name": "SetupBoard"}, {"status": "passed", "description": null, 
"log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/SyncChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "storm", "duration": "0:16:50", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "storm", "duration": "0:03:25", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "storm", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "storm", "duration": "0:00:00", "name": "SignerTest"}, {"status": "passed", "description": "No release.conf entry was found for board storm. 
Get a TPM to fix.", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage failed but was optional", "board": "storm", "duration": "0:00:05", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/ImageTest/logs/stdio", "summary": "Stage failed but was optional", "board": "storm", "duration": "0:10:28", "name": "ImageTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/UnitTest/logs/stdio", "summary": "Stage was skipped", "board": "storm", "duration": "0:00:00", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "storm", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "storm", "duration": "0:00:00", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "storm", "duration": "0:28:38", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": "storm", "duration": "0:01:38", "name": "CPEExport"}, {"status": "passed", "description": null, "log": 
"https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "storm", "duration": "0:16:00", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "storm", "duration": "0:01:47", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/storm%20canary/builds/182/steps/CanaryCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:23", "name": "CanaryCompletion"}], "build-number": 182, "child-configs": [], "bot-config": "storm-release", "builder-name": "storm canary", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["armv7a-cros-linux-gnueabi"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build26-b2.chrome.corp.google.com", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cidb/test_data/series_1/stumpy_moblab-release-metadata.json b/cidb/test_data/series_1/stumpy_moblab-release-metadata.json
new file mode 100644
index 0000000..e505e32
--- /dev/null
+++ b/cidb/test_data/series_1/stumpy_moblab-release-metadata.json
@@ -0,0 +1 @@
+{"status": {"status": "passed", "summary": "", "current-time": "Mon, 18 Aug 2014 03:46:32 -0700 (PDT)"}, "board-metadata": {"stumpy_moblab": {"main-firmware-version": "Google_Stumpy.2.102.0", "ec-firmware-version": null}}, "boards": ["stumpy_moblab"], "time": {"duration": "2:41:53.074174", "start": "Mon, 18 Aug 2014 01:04:39 -0700 (PDT)", "finish": "Mon, 18 Aug 2014 03:46:32 -0700 (PDT)"}, "metadata-version": "2", "results": [{"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/CleanUp/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:00", "name": "CleanUp"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/ManifestVersionedSync/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:55", "name": "ManifestVersionedSync"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/ReportBuildStart/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:07", "name": "ReportBuildStart"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/InitSDK/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:03:29", "name": "InitSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/Uprev/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:01:16", "name": "Uprev"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/SetupBoard/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", 
"duration": "0:04:22", "name": "SetupBoard"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/SyncChrome/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:07:13", "name": "SyncChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/PatchChrome/logs/stdio", "summary": "Stage was skipped", "board": "", "duration": "0:00:00", "name": "PatchChrome"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/BuildPackages/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "1:06:49", "name": "BuildPackages"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/BuildImage/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:05:45", "name": "BuildImage"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/ChromeSDK/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:43:40", "name": "ChromeSDK"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/VMTest%20%28attempt%201%29/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "VMTest (attempt 1)"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/SignerTest/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "SignerTest"}, {"status": 
"passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/Paygen%20%28attempt%201%29/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:59:07", "name": "Paygen (attempt 1)"}, {"status": "passed", "description": "/b/cbuild/internal_master/chromite/bin/test_image failed (code=1)", "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/ImageTest/logs/stdio", "summary": "Stage failed but was optional", "board": "stumpy_moblab", "duration": "0:18:10", "name": "ImageTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/UnitTest/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:15:19", "name": "UnitTest"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/UploadPrebuilts/logs/stdio", "summary": "Stage was skipped", "board": "stumpy_moblab", "duration": "0:00:00", "name": "UploadPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/DevInstallerPrebuilts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:01:18", "name": "DevInstallerPrebuilts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/DebugSymbols/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "1:12:22", "name": "DebugSymbols"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/CPEExport/logs/stdio", "summary": "Stage was successful", "board": 
"stumpy_moblab", "duration": "0:10:14", "name": "CPEExport"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/Archive/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:25:10", "name": "Archive"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/UploadTestArtifacts/logs/stdio", "summary": "Stage was successful", "board": "stumpy_moblab", "duration": "0:17:49", "name": "UploadTestArtifacts"}, {"status": "passed", "description": null, "log": "https://uberchromegw.corp.google.com/i/chromeos/builders/stumpy_moblab%20canary/builds/679/steps/CanaryCompletion/logs/stdio", "summary": "Stage was successful", "board": "", "duration": "0:00:21", "name": "CanaryCompletion"}], "build-number": 679, "child-configs": [], "bot-config": "stumpy_moblab-release", "builder-name": "stumpy_moblab canary", "version": {"chrome": "39.0.2126.0", "platform": "6165.0.0", "full": "R39-6165.0.0", "milestone": "39"}, "toolchain-tuple": ["x86_64-cros-linux-gnu", "i686-pc-linux-gnu"], "toolchain-url": "2014/08/%(target)s-2014.08.17.185408.tar.xz", "cl_actions": [], "bot-hostname": "build24-b2.chrome.corp.google.com", "sdk-version": "2014.08.17.185408", "build_type": "canary"}
\ No newline at end of file
diff --git a/cli/__init__.py b/cli/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cli/__init__.py
diff --git a/cli/command.py b/cli/command.py
new file mode 100644
index 0000000..360a6ec
--- /dev/null
+++ b/cli/command.py
@@ -0,0 +1,193 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module that contains meta-logic related to CLI commands.
+
+This module contains two important definitions used by all commands:
+  CliCommand: The parent class of all CLI commands.
+  CommandDecorator: Decorator that must be used to ensure that the command shows
+    up in |_commands| and is discoverable.
+
+Commands can be either imported directly or looked up using this module's
+ListCommands() function.
+"""
+
+from __future__ import print_function
+
+import glob
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import brick_lib
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_import
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import workspace_lib
+
+
# Paths for finding and importing subcommand modules.
_SUBCOMMAND_MODULE_DIRECTORY = os.path.join(os.path.dirname(__file__), 'cros')
_SUBCOMMAND_MODULE_PREFIX = 'cros_'


# Global registry mapping command names (strings) to CliCommand subclasses.
# Populated as a side effect of CommandDecorator when subcommand modules are
# imported (see _ImportCommands); read via ListCommands().
_commands = dict()
+
+
def SetupFileLogger(filename='brillo.log', log_level=logging.DEBUG):
  """Mirror log messages into a file under the workspace logs directory.

  In case of an error, this file can be made visible to the user.
  No-op when there is no active workspace.
  """
  workspace_path = workspace_lib.WorkspacePath()
  if workspace_path is None:
    return
  log_path = os.path.join(workspace_path, workspace_lib.WORKSPACE_LOGS_DIR,
                          filename)
  # Ensure the logs directory and file exist before attaching the handler.
  osutils.Touch(log_path, makedirs=True)
  file_handler = logging.FileHandler(log_path, mode='w')
  file_handler.setLevel(log_level)
  file_handler.setFormatter(
      logging.Formatter(fmt=constants.LOGGER_FMT,
                        datefmt=constants.LOGGER_DATE_FMT))
  logging.getLogger().addHandler(file_handler)
+
+
def UseProgressBar():
  """Return True if the progress bar should be displayed.

  The progress bar is reserved for brillo commands running at logging level
  NOTICE; a user who wants the noisy output runs the command at INFO or
  DEBUG instead, which disables the bar.
  """
  effective_level = logging.getLogger().getEffectiveLevel()
  return effective_level == logging.NOTICE
+
+
def _FindModules(subdir_path):
  """Return the subcommand python modules found in |subdir_path|.

  Args:
    subdir_path: directory (string) to search for modules in.

  Returns:
    List of filenames (strings), excluding unittest files.
  """
  pattern = os.path.join(subdir_path, '%s*.py' % _SUBCOMMAND_MODULE_PREFIX)
  return [path for path in glob.glob(pattern)
          if not path.endswith('_unittest.py')]
+
+
def _ImportCommands():
  """Directly import every subcommand python module.

  Importing a subcommand module triggers its CommandDecorator applications,
  which register the declared commands in |_commands| as a side effect.
  """
  for file_path in _FindModules(_SUBCOMMAND_MODULE_DIRECTORY):
    module_path, _ = os.path.splitext(file_path)
    # Compute a path relative to the parent of the chromite checkout so the
    # import is rooted at the 'chromite' package.
    import_path = os.path.relpath(os.path.realpath(module_path),
                                  os.path.dirname(constants.CHROMITE_DIR))
    cros_import.ImportModule(import_path.split(os.path.sep))
+
+
def ListCommands():
  """Return a dictionary mapping command names to classes.

  Returns:
    A new dictionary mapping names (strings) to commands (classes); callers
    may mutate it without affecting the registry.
  """
  _ImportCommands()
  return dict(_commands)
+
+
class InvalidCommandError(Exception):
  """Raised when a command class fails the CommandDecorator sanity checks."""
+
+
def CommandDecorator(command_name):
  """Decorator that sanity checks and adds class to list of usable commands."""

  def InnerCommandDecorator(original_class):
    """Inner decorator that actually wraps the class."""
    # NOTE(review): hasattr(cls, '__doc__') is true for every class (the
    # attribute exists even when no docstring was written, as None), so this
    # check never fires as written — confirm whether getattr(...) truthiness
    # was intended before tightening it.
    if not hasattr(original_class, '__doc__'):
      raise InvalidCommandError('All handlers must have docstrings: %s' %
                                original_class)

    if not issubclass(original_class, CliCommand):
      raise InvalidCommandError('All Commands must derive from CliCommand: %s' %
                                original_class)

    # Register the command and stamp its name onto the class so instances can
    # report which subcommand they implement.
    _commands[command_name] = original_class
    original_class.command_name = command_name

    return original_class

  return InnerCommandDecorator
+
+
class CliCommand(object):
  """Abstract parent class of every CLI command.

  To create a new command, sub-class from this class and apply the
  CommandDecorator decorator. A class docstring is required, as it is used
  as the usage text for the sub-command.

  Subclasses should also implement AddParser, which receives a parser they
  can attach custom arguments to. See argparse for more information.
  """
  # Indicates whether command stats should be uploaded for this command.
  # Override to enable command stats uploading.
  upload_stats = False
  # We set the default timeout to 1 second, to prevent overly long waits for
  # commands to complete.  From manual tests, stat uploads usually take
  # between 0.35s-0.45s in MTV.
  upload_stats_timeout = 1

  # Indicates whether command uses cache related commandline options.
  use_caching_options = False

  def __init__(self, options):
    self.options = options
    current_brick = brick_lib.FindBrickInPath()
    if current_brick:
      self.curr_brick_locator = current_brick.brick_locator
    else:
      self.curr_brick_locator = None

  @classmethod
  def AddParser(cls, parser):
    """Add arguments for this command to the parser."""
    parser.set_defaults(command_class=cls)

  @classmethod
  def AddDeviceArgument(cls, parser, schemes=commandline.DEVICE_SCHEME_SSH):
    """Add a device argument to the parser.

    This standardizes the help message across all subcommands.

    Args:
      parser: The parser to add the device argument to.
      schemes: List of device schemes or single scheme to allow.
    """
    scheme_list = list(cros_build_lib.iflatten_instance(schemes))
    # Keep the help strings in a fixed, predictable order.
    scheme_messages = (
        (commandline.DEVICE_SCHEME_SSH,
         'Target a device with [user@]hostname[:port].'),
        (commandline.DEVICE_SCHEME_USB,
         'Target removable media with usb://[path].'),
        (commandline.DEVICE_SCHEME_FILE,
         'Target a local file with file://path.'),
    )
    help_strings = [message for scheme, message in scheme_messages
                    if scheme in scheme_list]
    parser.add_argument('device',
                        type=commandline.DeviceParser(scheme_list),
                        help=' '.join(help_strings))

  def Run(self):
    """The command to run."""
    raise NotImplementedError()
diff --git a/cli/command_unittest b/cli/command_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cli/command_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cli/command_unittest.py b/cli/command_unittest.py
new file mode 100644
index 0000000..33c0725
--- /dev/null
+++ b/cli/command_unittest.py
@@ -0,0 +1,179 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for the command module."""
+
+from __future__ import print_function
+
+import argparse
+import glob
+import os
+
+from chromite.cbuildbot import constants
+from chromite.cli import command
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_import
+from chromite.lib import cros_logging as logging
+from chromite.lib import cros_test_lib
+from chromite.lib import partial_mock
+from chromite.lib import workspace_lib
+
+
# pylint:disable=protected-access

# Name under which the fixture command below registers itself in
# command._commands; deliberately unlikely to collide with a real command.
_COMMAND_NAME = 'superAwesomeCommandOfFunness'
+
+
@command.CommandDecorator(_COMMAND_NAME)
class TestCommand(command.CliCommand):
  """A fake command used as a registration fixture."""

  def Run(self):
    print('Just testing')
+
+
class TestCommandTest(cros_test_lib.MockTestCase):
  """Tests for command registration and basic CliCommand plumbing."""

  def testParserSetsCommandClass(self):
    """Tests that our parser sets command_class correctly."""
    parser = argparse.ArgumentParser()
    command.CliCommand.AddParser(parser)
    options = parser.parse_args([])
    self.assertEqual(options.command_class, command.CliCommand)

  def testCommandDecorator(self):
    """Tests that our decorator correctly adds TestCommand to _commands."""
    # Note this exposes an implementation detail of _commands.
    self.assertEqual(command._commands[_COMMAND_NAME], TestCommand)

  def testBadUseOfCommandDecorator(self):
    """Tests that our decorator correctly rejects bad test commands."""
    with self.assertRaises(command.InvalidCommandError):
      # pylint: disable=W0612
      @command.CommandDecorator('bad')
      class BadTestCommand(object):
        """A command that wasn't implemented correctly."""

  def testAddDeviceArgument(self):
    """Tests CliCommand.AddDeviceArgument()."""
    parser = argparse.ArgumentParser()
    command.CliCommand.AddDeviceArgument(parser)
    # Device should be a positional argument.
    parser.parse_args(['device'])
+
+
class MockCommand(partial_mock.PartialMock):
  """Mock class for a generic CLI command.

  Subclasses set COMMAND/TARGET_CLASS; Run is intercepted so the real
  implementation executes under a RunCommand mock.
  """

  ATTRS = ('Run',)
  COMMAND = None
  TARGET_CLASS = None

  def __init__(self, args, base_args=None):
    partial_mock.PartialMock.__init__(self)
    self.args = args
    self.rc_mock = cros_build_lib_unittest.RunCommandMock()
    self.rc_mock.SetDefaultCmdResult()

    parser = commandline.ArgumentParser(caching=True)
    subparser = parser.add_subparsers().add_parser(self.COMMAND, caching=True)
    self.TARGET_CLASS.AddParser(subparser)

    full_args = base_args if base_args else []
    full_args += [self.COMMAND] + self.args
    options = parser.parse_args(full_args)
    self.inst = options.command_class(options)

  def Run(self, inst):
    with self.rc_mock:
      return self.backup['Run'](inst)
+
+
class CommandTest(cros_test_lib.MockTestCase):
  """This test class tests that we can load modules correctly."""

  # pylint: disable=W0212

  def testFindModules(self):
    """Tests that we can return modules correctly when mocking out glob."""
    command_file = 'cros_command_test.py'
    unittest_file = 'cros_command_unittest.py'

    self.PatchObject(glob, 'glob',
                     return_value=[command_file, unittest_file])

    self.assertEqual(command._FindModules('mydir'), [command_file])

  def testLoadCommands(self):
    """Tests import commands correctly."""
    fake_module = 'cros_command_test'
    fake_file = os.path.join(constants.CHROMITE_DIR, 'foo', fake_module)
    expected_path = ['chromite', 'foo', fake_module]

    self.PatchObject(command, '_FindModules', return_value=[fake_file])
    # The code doesn't use the return value, so stub it out lazy-like.
    import_mock = self.PatchObject(cros_import, 'ImportModule',
                                   return_value=None)

    command._ImportCommands()

    import_mock.assert_called_with(expected_path)

  def testListCrosCommands(self):
    """Tests we get a sane `cros` list back."""
    cros_commands = command.ListCommands()
    # Pick some commands that are likely to not go away.
    self.assertIn('chrome-sdk', cros_commands)
    self.assertIn('flash', cros_commands)
+
+
class FileLoggerSetupTest(cros_test_lib.WorkspaceTestCase):
  """Test that logging to file works correctly."""

  def setUp(self):
    self.CreateWorkspace()

  def testSetupFileLoggerFilename(self):
    """Test that the filename and path are correct."""
    patch_handler = self.PatchObject(logging, 'FileHandler',
                                     return_value=logging.StreamHandler())
    command.SetupFileLogger(filename='foo.log')

    # Test that the filename is correct.
    patch_handler.assert_called_with(
        os.path.join(self.workspace_path, workspace_lib.WORKSPACE_LOGS_DIR,
                     'foo.log'), mode='w')

  def testSetupFileLoggerNoFilename(self):
    """Test that the filename and path are correct with no arguments."""
    patch_handler = self.PatchObject(logging, 'FileHandler',
                                     return_value=logging.StreamHandler())
    command.SetupFileLogger()

    # Test that the filename is correct.
    patch_handler.assert_called_with(
        os.path.join(self.workspace_path, workspace_lib.WORKSPACE_LOGS_DIR,
                     'brillo.log'), mode='w')

  def testSetupFileLoggerLogLevels(self):
    """Test that the logger operates at the right level."""
    command.SetupFileLogger('foo.log', log_level=logging.INFO)
    logging.getLogger().setLevel(logging.DEBUG)
    logging.debug('debug')
    logging.info('info')
    logging.notice('notice')

    # Test that the logs are correct.  Use a context manager so the file
    # handle is closed deterministically instead of being leaked.
    log_path = os.path.join(self.workspace_path,
                            workspace_lib.WORKSPACE_LOGS_DIR, 'foo.log')
    with open(log_path, 'r') as log_file:
      logs = log_file.read()
    self.assertNotIn('debug', logs)
    self.assertIn('info', logs)
    self.assertIn('notice', logs)
diff --git a/cli/command_vm_test.py b/cli/command_vm_test.py
new file mode 100644
index 0000000..a021ae9
--- /dev/null
+++ b/cli/command_vm_test.py
@@ -0,0 +1,267 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module for integration VM tests for CLI commands.
+
+This module contains the basic functionalities for setting up a VM and testing
+the CLI commands.
+"""
+
+from __future__ import print_function
+
+from chromite.cli import deploy
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import remote_access
+from chromite.lib import vm
+
+
+class Error(Exception):
+  """Base exception for CLI command VM tests."""
+
+
+class SetupError(Error):
+  """Raised when error occurs during test environment setup."""
+
+
+class TestError(Error):
+  """Raised when a command test has failed."""
+
+
+class CommandError(Error):
+  """Raised when error occurs during a command test."""
+
+
+def _PrintCommandLog(command, content):
+  """Print out the log |content| for |command|."""
+  if content:
+    logging.info('\n----------- Start of %s log -----------\n%s\n'
+                 '-----------  End of %s log  -----------',
+                 command, content.rstrip(), command)
+
+
+def TestCommandDecorator(command_name):
+  """Decorator that runs the command test function."""
+
+  def Decorator(test_function):
+    """Inner decorator that actually wraps the function."""
+
+    def Wrapper(command_test):
+      """Wrapper for the test function."""
+      command = cros_build_lib.CmdToStr(command_test.BuildCommand(command_name))
+      logging.info('Running test for %s.', command)
+      try:
+        test_function(command_test)
+        logging.info('Test for %s passed.', command)
+      except CommandError as e:
+        _PrintCommandLog(command, str(e))
+        raise TestError('Test for %s failed.' % command)
+
+    return Wrapper
+
+  return Decorator
+
+
+class CommandVMTest(object):
+  """Base class for CLI command VM tests.
+
+  This class provides the abstract interface for testing CLI commands on a VM.
+  The sub-class must define the BuildCommand method in order to be usable. And
+  the test functions must use the TestCommandDecorator decorator.
+  """
+
+  def __init__(self, board, image_path):
+    """Initializes CommandVMTest.
+
+    Args:
+      board: Board for the VM to run tests.
+      image_path: Path to the image for the VM to run tests.
+    """
+    self.board = board
+    self.image_path = image_path
+    self.working_image_path = None
+    self.vm = None
+
+  def BuildCommand(self, command, device=None, pos_args=None, opt_args=None):
+    """Builds a CLI command.
+
+    Args:
+      command: The sub-command to build on (e.g. 'flash', 'deploy').
+      device: The device's address for the command.
+      pos_args: A list of positional arguments for the command.
+      opt_args: A list of optional arguments for the command.
+    """
+    raise NotImplementedError()
+
+  def SetUp(self):
+    """Creates and starts the VM instance for testing."""
+    try:
+      logging.info('Setting up the VM for testing.')
+      self.working_image_path = vm.CreateVMImage(
+          image=self.image_path, board=self.board, updatable=True)
+      self.vm = vm.VMInstance(self.working_image_path)
+      self.vm.Start()
+      logging.info('The VM has been successfully set up. Ready to run tests.')
+    except vm.VMError as e:
+      raise SetupError('Failed to set up the VM for testing: %s' % e)
+
+  def TearDown(self):
+    """Stops the VM instance after testing."""
+    try:
+      logging.info('Stopping the VM.')
+      if self.vm:
+        self.vm.Stop()
+      logging.info('The VM has been stopped.')
+    except vm.VMStopError as e:
+      logging.warning('Failed to stop the VM: %s', e)
+
+  @TestCommandDecorator('shell')
+  def TestShell(self):
+    """Tests the shell command."""
+    # The path and content of a temporary file for testing shell command.
+    path = '/tmp/shell-test'
+    content = 'shell command test file'
+
+    cmd = self.BuildCommand('shell', device=self.vm.device_addr,
+                            opt_args=['--no-known-hosts'])
+
+    logging.info('Test to use shell command to write a file to the VM device.')
+    write_cmd = cmd + ['--', 'echo "%s" > %s' % (content, path)]
+    result = cros_build_lib.RunCommand(write_cmd, capture_output=True,
+                                       error_code_ok=True)
+    if result.returncode:
+      logging.error('Failed to write the file to the VM device.')
+      raise CommandError(result.error)
+
+    logging.info('Test to use shell command to read a file on the VM device.')
+    read_cmd = cmd + ['--', 'cat %s' % path]
+    result = cros_build_lib.RunCommand(read_cmd, capture_output=True,
+                                       error_code_ok=True)
+    if result.returncode or result.output.rstrip() != content:
+      logging.error('Failed to read the file on the VM device.')
+      raise CommandError(result.error)
+
+    logging.info('Test to use shell command to remove a file on the VM device.')
+    remove_cmd = cmd + ['--', 'rm %s' % path]
+    result = cros_build_lib.RunCommand(remove_cmd, capture_output=True,
+                                       error_code_ok=True)
+    if result.returncode:
+      logging.error('Failed to remove the file on the VM device.')
+      raise CommandError(result.error)
+
+  @TestCommandDecorator('debug')
+  def TestDebug(self):
+    """Tests the debug command."""
+    logging.info('Test to start and debug a new process on the VM device.')
+    exe_path = '/bin/bash'
+    start_cmd = self.BuildCommand('debug', device=self.vm.device_addr,
+                                  opt_args=['--exe', exe_path])
+    result = cros_build_lib.RunCommand(start_cmd, capture_output=True,
+                                       error_code_ok=True, input='\n')
+    if result.returncode:
+      logging.error('Failed to start and debug a new process on the VM device.')
+      raise CommandError(result.error)
+
+    logging.info('Test to attach a running process on the VM device.')
+    with remote_access.ChromiumOSDeviceHandler(
+        remote_access.LOCALHOST, port=self.vm.port) as device:
+      exe = 'update_engine'
+      pids = device.GetRunningPids(exe, full_path=False)
+      if not pids:
+        logging.error('Failed to find any running process to debug.')
+        raise CommandError()
+      pid = pids[0]
+      attach_cmd = self.BuildCommand('debug', device=self.vm.device_addr,
+                                     opt_args=['--pid', str(pid)])
+      result = cros_build_lib.RunCommand(attach_cmd, capture_output=True,
+                                         error_code_ok=True, input='\n')
+      if result.returncode:
+        logging.error('Failed to attach a running process on the VM device.')
+        raise CommandError(result.error)
+
+  @TestCommandDecorator('flash')
+  def TestFlash(self):
+    """Tests the flash command."""
+    # We explicitly disable reboot after the update because VMs sometimes do
+    # not come back after reboot. The flash command does not need to verify
+    # the integrity of the updated image. We have AU tests for that.
+    cmd = self.BuildCommand('flash', device=self.vm.device_addr,
+                            pos_args=['latest'],
+                            opt_args=['--no-wipe', '--no-reboot'])
+
+    logging.info('Test to flash the VM device with the latest image.')
+    result = cros_build_lib.RunCommand(cmd, capture_output=True,
+                                       error_code_ok=True)
+    if result.returncode:
+      logging.error('Failed to flash the VM device.')
+      raise CommandError(result.error)
+
+  @TestCommandDecorator('deploy')
+  def TestDeploy(self):
+    """Tests the deploy command."""
+    packages = ['dev-python/cherrypy', 'app-portage/portage-utils']
+    # Set the installation root to /usr/local so that the command does not
+    # attempt to remount rootfs (which leads to VM reboot).
+    cmd = self.BuildCommand('deploy', device=self.vm.device_addr,
+                            pos_args=packages, opt_args=['--log-level=info',
+                                                         '--root=/usr/local'])
+
+    logging.info('Test to uninstall packages on the VM device.')
+    with cros_build_lib.OutputCapturer() as output:
+      result = cros_build_lib.RunCommand(cmd + ['--unmerge'],
+                                         error_code_ok=True)
+
+    if result.returncode:
+      logging.error('Failed to uninstall packages on the VM device.')
+      raise CommandError(result.error)
+
+    captured_output = output.GetStdout() + output.GetStderr()
+    for event in deploy.BrilloDeployOperation.UNMERGE_EVENTS:
+      if event not in captured_output:
+        logging.error('Strings used by deploy.BrilloDeployOperation to update '
+                      'the progress bar have been changed. Please update the '
+                      'strings in UNMERGE_EVENTS')
+        raise CommandError()
+
+    logging.info('Test to install packages on the VM device.')
+    with cros_build_lib.OutputCapturer() as output:
+      result = cros_build_lib.RunCommand(cmd, error_code_ok=True)
+
+    if result.returncode:
+      logging.error('Failed to install packages on the VM device.')
+      raise CommandError(result.error)
+
+    captured_output = output.GetStdout() + output.GetStderr()
+    for event in deploy.BrilloDeployOperation.MERGE_EVENTS:
+      if event not in captured_output:
+        logging.error('Strings used by deploy.BrilloDeployOperation to update '
+                      'the progress bar have been changed. Please update the '
+                      'strings in MERGE_EVENTS')
+        raise CommandError()
+
+    # Verify that the packages are installed.
+    with remote_access.ChromiumOSDeviceHandler(
+        remote_access.LOCALHOST, port=self.vm.port) as device:
+      try:
+        device.RunCommand(['python', '-c', '"import cherrypy"'])
+        device.RunCommand(['qmerge', '-h'])
+      except cros_build_lib.RunCommandError as e:
+        logging.error('Unable to verify packages installed on VM: %s', e)
+        raise CommandError()
+
+  def RunTests(self):
+    """Calls the test functions."""
+    self.TestShell()
+    self.TestDebug()
+    self.TestFlash()
+    self.TestDeploy()
+
+  def Run(self):
+    """Runs the tests."""
+    try:
+      self.SetUp()
+      self.RunTests()
+      logging.info('All tests completed successfully.')
+    finally:
+      self.TearDown()
diff --git a/cli/cros/__init__.py b/cli/cros/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cli/cros/__init__.py
diff --git a/cli/cros/cros_build.py b/cli/cros/cros_build.py
new file mode 100644
index 0000000..b868c8f
--- /dev/null
+++ b/cli/cros/cros_build.py
@@ -0,0 +1,213 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""cros build: Build the requested packages."""
+
+from __future__ import print_function
+
+from chromite.cli import command
+from chromite.lib import blueprint_lib
+from chromite.lib import brick_lib
+from chromite.lib import chroot_util
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import operation
+from chromite.lib import parallel
+from chromite.lib import toolchain
+from chromite.lib import workon_helper
+
+
+class BrilloBuildOperation(operation.ParallelEmergeOperation):
+  """Wrapper around operation.ParallelEmergeOperation.
+
+  Currently, this class is empty as the main component is just
+  operation.ParallelEmergeOperation. However, if self._CheckDependencies
+  also produces output, that output can be captured here.
+  """
+
+
+@command.CommandDecorator('build')
+class BuildCommand(command.CliCommand):
+  """Build the requested packages."""
+
+  _BAD_DEPEND_MSG = '\nemerge detected broken ebuilds. See error message above.'
+  EPILOG = """
+To update specified package and all dependencies:
+  cros build --board=lumpy power_manager
+  cros build --host cros-devutils
+
+To just build a single package:
+  cros build --board=lumpy --no-deps power_manager
+"""
+
+  def __init__(self, options):
+    super(BuildCommand, self).__init__(options)
+    self.chroot_update = options.chroot_update and options.deps
+    if options.chroot_update and not options.deps:
+      logging.debug('Skipping chroot update due to --nodeps')
+    self.build_pkgs = options.packages
+    self.host = False
+    self.board = None
+    self.brick = None
+    self.blueprint = None
+
+    if self.options.host:
+      self.host = True
+    elif self.options.board:
+      self.board = self.options.board
+    elif self.options.blueprint:
+      self.blueprint = blueprint_lib.Blueprint(self.options.blueprint)
+
+      if not self.build_pkgs:
+        self.build_pkgs = self.blueprint.GetPackages()
+    elif self.options.brick or self.curr_brick_locator:
+      self.brick = brick_lib.Brick(self.options.brick
+                                   or self.curr_brick_locator)
+      self.board = self.brick.FriendlyName()
+      if not self.build_pkgs:
+        self.build_pkgs = self.brick.MainPackages()
+    else:
+      # If nothing is explicitly set, use the default board.
+      self.board = cros_build_lib.GetDefaultBoard()
+
+    # Set sysroot and friendly name. The latter is None if building for host.
+    self.sysroot = cros_build_lib.GetSysroot(self.blueprint.FriendlyName()
+                                             if self.blueprint else self.board)
+
+  @classmethod
+  def AddParser(cls, parser):
+    super(cls, BuildCommand).AddParser(parser)
+    target = parser.add_mutually_exclusive_group()
+    target.add_argument('--board', help='The board to build packages for.')
+    target.add_argument('--brick', type='brick_path',
+                        help='The brick to build packages for.')
+    target.add_argument('--blueprint', type='blueprint_path',
+                        help='The blueprint to build packages for.')
+    target.add_argument('--host', help='Build packages for the chroot itself.',
+                        default=False, action='store_true')
+    parser.add_argument('--no-binary', help="Don't use binary packages.",
+                        default=True, dest='binary', action='store_false')
+    parser.add_argument('--init-only', action='store_true',
+                        help="Initialize build environment but don't build "
+                        "anything.")
+    deps = parser.add_mutually_exclusive_group()
+    deps.add_argument('--no-deps', help="Don't update dependencies.",
+                      default=True, dest='deps', action='store_false')
+    deps.add_argument('--rebuild-deps', default=False, action='store_true',
+                      help='Automatically rebuild dependencies.')
+    parser.add_argument('packages',
+                        help='Packages to build. If no packages listed, uses '
+                        'the current brick main package.',
+                        nargs='*')
+
+    # Advanced options.
+    advanced = parser.add_argument_group('Advanced options')
+    advanced.add_argument('--no-host-packages-update',
+                          dest='host_packages_update', default=True,
+                          action='store_false',
+                          help="Don't update host packages during chroot "
+                          "update.")
+    advanced.add_argument('--no-chroot-update', default=True,
+                          dest='chroot_update', action='store_false',
+                          help="Don't update chroot at all.")
+    advanced.add_argument('--no-enable-only-latest', default=True,
+                          dest='enable_only_latest', action='store_false',
+                          help="Don't enable packages with only live ebuilds.")
+    advanced.add_argument('--jobs', default=None, type=int,
+                          help='Maximum job count to run in parallel '
+                          '(uses all available cores by default).')
+
+    # Legacy options, for backward compatibility.
+    legacy = parser.add_argument_group('Options for backward compatibility')
+    legacy.add_argument('--norebuild', default=True, dest='rebuild_deps',
+                        action='store_false', help='Inverse of --rebuild-deps.')
+
+  def _CheckDependencies(self):
+    """Verify emerge dependencies.
+
+    Verify all board packages can be emerged from scratch, without any
+    backtracking. This ensures that no updates are skipped by Portage due to
+    the fallback behavior enabled by the backtrack option, and helps catch
+    cases where Portage skips an update due to a typo in the ebuild.
+
+    Only print the output if this step fails or if we're in debug mode.
+    """
+    if self.options.deps and not self.host and not self.blueprint:
+      cmd = chroot_util.GetEmergeCommand(sysroot=self.sysroot)
+      cmd += ['-pe', '--backtrack=0'] + self.build_pkgs
+      try:
+        cros_build_lib.RunCommand(cmd, combine_stdout_stderr=True,
+                                  debug_level=logging.DEBUG)
+      except cros_build_lib.RunCommandError as ex:
+        ex.msg += self._BAD_DEPEND_MSG
+        raise
+
+  def _Build(self):
+    """Update the chroot, then merge the requested packages."""
+    if self.chroot_update and self.host:
+      chroot_util.UpdateChroot()
+
+    chroot_util.Emerge(self.build_pkgs, self.sysroot,
+                       with_deps=self.options.deps,
+                       rebuild_deps=self.options.rebuild_deps,
+                       use_binary=self.options.binary, jobs=self.options.jobs,
+                       debug_output=(self.options.log_level.lower() == 'debug'))
+
+  def Run(self):
+    """Run cros build."""
+    self.options.Freeze()
+
+    if not self.host:
+      if not (self.board or self.brick or self.blueprint):
+        cros_build_lib.Die('You did not specify a board/brick to build for. '
+                           'You need to be in a brick directory or set '
+                           '--board/--brick/--host')
+
+      if self.brick and self.brick.legacy:
+        cros_build_lib.Die('--brick should not be used with board names. Use '
+                           '--board=%s instead.' % self.brick.config['name'])
+
+    if self.blueprint:
+      chroot_args = ['--toolchains',
+                     ','.join(toolchain.GetToolchainsForBrick(
+                         self.blueprint.GetBSP()).iterkeys())]
+    elif self.board:
+      chroot_args = ['--board', self.board]
+    else:
+      chroot_args = None
+
+    commandline.RunInsideChroot(self, chroot_args=chroot_args)
+
+    if not (self.build_pkgs or self.options.init_only):
+      cros_build_lib.Die('No packages found, nothing to build.')
+
+    # Set up the sysroots if not building for host.
+    if self.blueprint:
+      if self.chroot_update:
+        chroot_util.UpdateChroot(
+            update_host_packages=self.options.host_packages_update,
+            brick=brick_lib.Brick(self.blueprint.GetBSP()))
+      chroot_util.InitializeSysroots(self.blueprint)
+    elif self.brick or self.board:
+      chroot_util.SetupBoard(
+          brick=self.brick, board=self.board,
+          update_chroot=self.chroot_update,
+          update_host_packages=self.options.host_packages_update,
+          use_binary=self.options.binary)
+
+    if not self.options.init_only:
+      # Preliminary: enable all packages that only have a live ebuild.
+      if self.options.enable_only_latest:
+        workon = workon_helper.WorkonHelper(self.sysroot)
+        workon.StartWorkingOnPackages([], use_workon_only=True)
+
+      if command.UseProgressBar():
+        op = BrilloBuildOperation()
+        op.Run(
+            parallel.RunParallelSteps, [self._CheckDependencies, self._Build],
+            log_level=logging.DEBUG)
+      else:
+        parallel.RunParallelSteps([self._CheckDependencies, self._Build])
+      logging.notice('Build completed successfully.')
diff --git a/cli/cros/cros_build_unittest b/cli/cros/cros_build_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cli/cros/cros_build_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cli/cros/cros_build_unittest.py b/cli/cros/cros_build_unittest.py
new file mode 100644
index 0000000..7c44f2e
--- /dev/null
+++ b/cli/cros/cros_build_unittest.py
@@ -0,0 +1,116 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module tests the cros build command."""
+
+from __future__ import print_function
+
+from chromite.cli import command
+from chromite.cli import command_unittest
+from chromite.cli.cros import cros_build
+from chromite.lib import chroot_util
+from chromite.lib import cros_logging as logging
+from chromite.lib import cros_test_lib
+from chromite.lib import parallel_unittest
+from chromite.lib import partial_mock
+from chromite.lib import workon_helper
+
+
+class MockBuildCommand(command_unittest.MockCommand):
+  """Mock out the build command."""
+  TARGET = 'chromite.cli.cros.cros_build.BuildCommand'
+  TARGET_CLASS = cros_build.BuildCommand
+
+  def __init__(self, *args, **kwargs):
+    super(MockBuildCommand, self).__init__(*args, **kwargs)
+    self.chroot_update_called = 0
+
+  def OnChrootUpdate(self, *_args, **_kwargs):
+    self.chroot_update_called += 1
+
+  def Run(self, inst):
+    self.PatchObject(chroot_util, 'UpdateChroot',
+                     side_effect=self.OnChrootUpdate)
+    self.PatchObject(chroot_util, 'Emerge')
+    with parallel_unittest.ParallelMock():
+      command_unittest.MockCommand.Run(self, inst)
+
+
+class FakeWorkonHelper(object):
+  """Fake workon_helper.WorkonHelper."""
+
+  def __init__(self, *_args, **_kwargs):
+    self.start_called = 0
+    self.use_workon_only = None
+
+  def ListAtoms(self, *_args, **_kwargs):
+    pass
+
+  def StartWorkingOnPackages(self, *_args, **kwargs):
+    self.start_called += 1
+    self.use_workon_only = kwargs.get('use_workon_only')
+
+
+class BuildCommandTest(cros_test_lib.MockTempDirTestCase,
+                       cros_test_lib.OutputTestCase):
+  """Test class for our BuildCommand class."""
+
+  def testBrilloBuildOperationCalled(self):
+    """Test that BrilloBuildOperation is used when appropriate."""
+    cmd = ['--board=randonname', 'power_manager']
+    self.PatchObject(workon_helper, 'WorkonHelper')
+    self.PatchObject(command, 'UseProgressBar', return_value=True)
+    with MockBuildCommand(cmd) as build:
+      operation_run = self.PatchObject(cros_build.BrilloBuildOperation, 'Run')
+      build.inst.Run()
+      self.assertTrue(operation_run.called)
+
+  def testBrilloBuildOperationNotCalled(self):
+    """Test that BrilloBuildOperation is not used when it shouldn't be."""
+    cmd = ['--board=randonname', 'power_manager']
+    self.PatchObject(workon_helper, 'WorkonHelper')
+    self.PatchObject(command, 'UseProgressBar', return_value=False)
+    with MockBuildCommand(cmd) as build:
+      operation_run = self.PatchObject(cros_build.BrilloBuildOperation, 'Run')
+      build.inst.Run()
+      self.assertFalse(operation_run.called)
+
+  def testSuccess(self):
+    """Test that successful commands work."""
+    cmds = [['--host', 'power_manager'],
+            ['--board=randomname', 'power_manager'],
+            ['--board=randomname', '--debug', 'power_manager'],
+            ['--board=randomname', '--no-deps', 'power_manager'],
+            ['--board=randomname', '--no-chroot-update', 'power_manager'],
+            ['--board=randomname', '--no-enable-only-latest', 'power_manager']]
+    for cmd in cmds:
+      update_chroot = not ('--no-deps' in cmd or '--no-chroot-update' in cmd)
+      enable_only_latest = '--no-enable-only-latest' not in cmd
+      fake_workon_helper = FakeWorkonHelper()
+      self.PatchObject(workon_helper, 'WorkonHelper',
+                       return_value=fake_workon_helper)
+      with MockBuildCommand(cmd) as build:
+        build.inst.Run()
+        self.assertEquals(1 if update_chroot else 0, build.chroot_update_called)
+        self.assertEquals(1 if enable_only_latest else 0,
+                          fake_workon_helper.start_called)
+        self.assertEquals(True if enable_only_latest else None,
+                          fake_workon_helper.use_workon_only)
+
+  def testFailedDeps(self):
+    """Test that failures are detected correctly."""
+    # pylint: disable=protected-access
+    args = ['--board=randomname', 'power_manager']
+    self.PatchObject(workon_helper, 'WorkonHelper',
+                     return_value=FakeWorkonHelper())
+    with MockBuildCommand(args) as build:
+      cmd = partial_mock.In('--backtrack=0')
+      build.rc_mock.AddCmdResult(cmd=cmd, returncode=1, error='error\n')
+      with self.OutputCapturer():
+        try:
+          build.inst.Run()
+        except Exception as e:
+          logging.error(e)
+      self.AssertOutputContainsError(cros_build.BuildCommand._BAD_DEPEND_MSG,
+                                     check_stderr=True)
diff --git a/cli/cros/cros_chrome_sdk.py b/cli/cros/cros_chrome_sdk.py
new file mode 100644
index 0000000..73ac533
--- /dev/null
+++ b/cli/cros/cros_chrome_sdk.py
@@ -0,0 +1,907 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""The cros chrome-sdk command for the simple chrome workflow."""
+
+from __future__ import print_function
+
+import argparse
+import collections
+import contextlib
+import json
+import os
+import distutils.version
+
+from chromite.cli import command
+from chromite.lib import cache
+from chromite.lib import chrome_util
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import path_util
+from chromite.lib import stats
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+
+
+COMMAND_NAME = 'chrome-sdk'
+CUSTOM_VERSION = 'custom'
+
+
+def Log(*args, **kwargs):
+  """Conditional logging.
+
+  Args:
+    silent: If set to True, then logs with level DEBUG.  Logs with level INFO
+      otherwise.  Defaults to False.
+  """
+  silent = kwargs.pop('silent', False)
+  level = logging.DEBUG if silent else logging.INFO
+  logging.log(level, *args, **kwargs)
+
+
+class MissingSDK(Exception):
+  """Error thrown when we cannot find an SDK."""
+
+  def __init__(self, board, version=None):
+    msg = 'Cannot find SDK for %r' % (board,)
+    if version is not None:
+      msg += ' with version %s' % (version,)
+    Exception.__init__(self, msg)
+
+
+class SDKFetcher(object):
+  """Functionality for fetching an SDK environment.
+
+  For the version of ChromeOS specified, the class downloads and caches
+  SDK components.
+  """
+  SDK_BOARD_ENV = '%SDK_BOARD'
+  SDK_PATH_ENV = '%SDK_PATH'
+  SDK_VERSION_ENV = '%SDK_VERSION'
+
+  SDKContext = collections.namedtuple(
+      'SDKContext', ['version', 'target_tc', 'key_map'])
+
+  TARBALL_CACHE = 'tarballs'
+  MISC_CACHE = 'misc'
+
+  TARGET_TOOLCHAIN_KEY = 'target_toolchain'
+
+  def __init__(self, cache_dir, board, clear_cache=False, chrome_src=None,
+               sdk_path=None, toolchain_path=None, silent=False,
+               use_external_config=None):
+    """Initialize the class.
+
+    Args:
+      cache_dir: The toplevel cache dir to use.
+      board: The board to manage the SDK for.
+      clear_cache: Clears the sdk cache during __init__.
+      chrome_src: The location of the chrome checkout.  If unspecified, the
+        cwd is presumed to be within a chrome checkout.
+      sdk_path: The path (whether a local directory or a gs:// path) to fetch
+        SDK components from.
+      toolchain_path: The path (whether a local directory or a gs:// path) to
+        fetch toolchain components from.
+      silent: If set, the fetcher prints less output.
+      use_external_config: When identifying the configuration for a board,
+        force usage of the external configuration if both external and internal
+        are available.
+    """
+    site_config = config_lib.LoadConfigFromFile()
+
+    self.cache_base = os.path.join(cache_dir, COMMAND_NAME)
+    if clear_cache:
+      logging.warning('Clearing the SDK cache.')
+      osutils.RmDir(self.cache_base, ignore_missing=True)
+    self.tarball_cache = cache.TarballCache(
+        os.path.join(self.cache_base, self.TARBALL_CACHE))
+    self.misc_cache = cache.DiskCache(
+        os.path.join(self.cache_base, self.MISC_CACHE))
+    self.board = board
+    self.config = site_config.FindCanonicalConfigForBoard(
+        board, allow_internal=not use_external_config)
+    self.gs_base = '%s/%s' % (constants.DEFAULT_ARCHIVE_BUCKET,
+                              self.config['name'])
+    self.clear_cache = clear_cache
+    self.chrome_src = chrome_src
+    self.sdk_path = sdk_path
+    self.toolchain_path = toolchain_path
+    self.silent = silent
+
+    # For external configs, there is no need to run 'gsutil config', because
+    # the necessary files are all accessible to anonymous users.
+    internal = self.config['internal']
+    self.gs_ctx = gs.GSContext(cache_dir=cache_dir, init_boto=internal)
+
+    if self.sdk_path is None:
+      self.sdk_path = os.environ.get(self.SDK_PATH_ENV)
+
+    if self.toolchain_path is None:
+      self.toolchain_path = 'gs://%s' % constants.SDK_GS_BUCKET
+
+  def _UpdateTarball(self, url, ref):
+    """Worker function to fetch tarballs"""
+    with osutils.TempDir(base_dir=self.tarball_cache.staging_dir) as tempdir:
+      local_path = os.path.join(tempdir, os.path.basename(url))
+      Log('SDK: Fetching %s', url, silent=self.silent)
+      self.gs_ctx.Copy(url, tempdir, debug_level=logging.DEBUG)
+      ref.SetDefault(local_path, lock=True)
+
+  def _GetMetadata(self, version):
+    """Return metadata (in the form of a dict) for a given version."""
+    raw_json = None
+    version_base = self._GetVersionGSBase(version)
+    with self.misc_cache.Lookup(
+        self._GetCacheKeyForComponent(version, constants.METADATA_JSON)) as ref:
+      if ref.Exists(lock=True):
+        raw_json = osutils.ReadFile(ref.path)
+      else:
+        metadata_path = os.path.join(version_base, constants.METADATA_JSON)
+        partial_metadata_path = os.path.join(version_base,
+                                             constants.PARTIAL_METADATA_JSON)
+        try:
+          raw_json = self.gs_ctx.Cat(metadata_path,
+                                     debug_level=logging.DEBUG)
+        except gs.GSNoSuchKey:
+          logging.info('Could not read %s, falling back to %s',
+                       metadata_path, partial_metadata_path)
+          raw_json = self.gs_ctx.Cat(partial_metadata_path,
+                                     debug_level=logging.DEBUG)
+
+        ref.AssignText(raw_json)
+
+    return json.loads(raw_json)
+
+  def _GetChromeLKGM(self, chrome_src_dir):
+    """Get ChromeOS LKGM checked into the Chrome tree.
+
+    Returns:
+      Version number in format '3929.0.0'.
+    """
+    version = osutils.ReadFile(os.path.join(
+        chrome_src_dir, constants.PATH_TO_CHROME_LKGM))
+    return version
+
  def _GetRepoCheckoutVersion(self, repo_root):
    """Get the version specified in chromeos_version.sh.

    Args:
      repo_root: Path to the root of the ChromeOS repo checkout.

    Returns:
      Version number in format '3929.0.0'.
    """
    chromeos_version_sh = os.path.join(repo_root, constants.VERSION_FILE)
    # Source the script with CHROMEOS_OFFICIAL=1 — presumably so it emits the
    # official-style version string; TODO confirm against chromeos_version.sh.
    sourced_env = osutils.SourceEnvironment(
        chromeos_version_sh, ['CHROMEOS_VERSION_STRING'],
        env={'CHROMEOS_OFFICIAL': '1'})
    return sourced_env['CHROMEOS_VERSION_STRING']
+
  def _GetNewestFullVersion(self, version=None):
    """Gets the full version number of the latest build for the given |version|.

    Args:
      version: The version number or branch to look at. By default, look at
        builds on the current branch.

    Returns:
      Version number in the format 'R30-3929.0.0', or None if no LATEST
      file exists for |version|.
    """
    if version is None:
      version = git.GetChromiteTrackingBranch()
    version_file = '%s/LATEST-%s' % (self.gs_base, version)
    try:
      full_version = self.gs_ctx.Cat(version_file)
      # Sanity check: full versions carry the 'R' milestone prefix.
      assert full_version.startswith('R')
      return full_version
    except gs.GSNoSuchKey:
      # No build has been uploaded for this version/branch.
      return None
+
+  def _GetNewestManifestVersion(self):
+    """Gets the latest uploaded SDK version.
+
+    Returns:
+      Version number in the format '3929.0.0'.
+    """
+    full_version = self._GetNewestFullVersion()
+    return None if full_version is None else full_version.split('-')[1]
+
  def GetDefaultVersion(self):
    """Get the default SDK version to use.

    If we are in an existing SDK shell, the default version will just be
    the current version. Otherwise, we will try to calculate the
    appropriate version to use based on the checkout.

    Returns:
      Version string (e.g. '3929.0.0'), or None if no default has been
      cached yet.
    """
    if os.environ.get(self.SDK_BOARD_ENV) == self.board:
      # Already inside an SDK shell for this same board: keep its version.
      sdk_version = os.environ.get(self.SDK_VERSION_ENV)
      if sdk_version is not None:
        return sdk_version

    with self.misc_cache.Lookup((self.board, 'latest')) as ref:
      if ref.Exists(lock=True):
        version = osutils.ReadFile(ref.path).strip()
        # Deal with the old version format ('R30-3929.0.0' -> '3929.0.0').
        if version.startswith('R'):
          version = version.split('-')[1]
        return version
      else:
        return None
+
  def _SetDefaultVersion(self, version):
    """Set the new default version in the misc cache."""
    with self.misc_cache.Lookup((self.board, 'latest')) as ref:
      ref.AssignText(version)
+
  def UpdateDefaultVersion(self):
    """Update the version that we default to using.

    Returns:
      A tuple of the form (version, updated), where |version| is the
      version number in the format '3929.0.0', and |updated| indicates
      whether the version was indeed updated.

    Raises:
      MissingSDK: If no suitable SDK version could be determined.
    """
    checkout_dir = self.chrome_src if self.chrome_src else os.getcwd()
    checkout = path_util.DetermineCheckout(checkout_dir)
    current = self.GetDefaultVersion() or '0'
    if checkout.chrome_src_dir:
      # Chrome checkout: pin to the LKGM checked into the Chrome tree.
      target = self._GetChromeLKGM(checkout.chrome_src_dir)
    elif checkout.type == path_util.CHECKOUT_TYPE_REPO:
      # ChromeOS repo checkout: use the version from chromeos_version.sh.
      target = self._GetRepoCheckoutVersion(checkout.root)
      if target != current:
        lv_cls = distutils.version.LooseVersion
        if lv_cls(target) > lv_cls(current):
          # Hit the network for the newest uploaded version for the branch.
          newest = self._GetNewestManifestVersion()
          # The SDK for the version of the checkout has not been uploaded yet,
          # so fall back to the latest uploaded SDK.
          if newest is not None and lv_cls(target) > lv_cls(newest):
            target = newest
    else:
      # Not in a recognized checkout: just take the newest uploaded SDK.
      target = self._GetNewestManifestVersion()

    if target is None:
      raise MissingSDK(self.board)

    self._SetDefaultVersion(target)
    return target, target != current
+
  def GetFullVersion(self, version):
    """Add the release branch and build number to a ChromeOS platform version.

    This will specify where you can get the latest build for the given version
    for the current board.

    Args:
      version: A ChromeOS platform number of the form XXXX.XX.XX, i.e.,
        3918.0.0.

    Returns:
      The version with release branch and build number added, as needed. E.g.
      R28-3918.0.0-b1234.

    Raises:
      MissingSDK: If no uploaded build exists for |version|.
    """
    assert not version.startswith('R')

    with self.misc_cache.Lookup(('full-version', self.board, version)) as ref:
      if ref.Exists(lock=True):
        # Cache hit: the full version was resolved on an earlier run.
        return osutils.ReadFile(ref.path).strip()
      else:
        # Find out the newest version from the LATEST (or LATEST-%s) file.
        full_version = self._GetNewestFullVersion(version=version)

        if full_version is None:
          raise MissingSDK(self.board, version)

        ref.AssignText(full_version)
        return full_version
+
+  def _GetVersionGSBase(self, version):
+    """The base path of the SDK for a particular version."""
+    if self.sdk_path is not None:
+      return self.sdk_path
+
+    full_version = self.GetFullVersion(version)
+    return os.path.join(self.gs_base, full_version)
+
+  def _GetCacheKeyForComponent(self, version, component):
+    """Builds the cache key tuple for an SDK component."""
+    version_section = version
+    if self.sdk_path is not None:
+      version_section = self.sdk_path.replace('/', '__').replace(':', '__')
+    return (self.board, version_section, component)
+
  @contextlib.contextmanager
  def Prepare(self, components, version=None, target_tc=None,
              toolchain_url=None):
    """Ensures the components of an SDK exist and are read-locked.

    For a given SDK version, pulls down missing components, and provides a
    context where the components are read-locked, which prevents the cache from
    deleting them during its purge operations.

    If both target_tc and toolchain_url arguments are provided, then this
    does not download metadata.json for the given version. Otherwise, this
    function requires metadata.json for the given version to exist.

    Args:
      components: A list of specific components(tarballs) to prepare.
      version: The version to prepare.  If not set, uses the version returned by
        GetDefaultVersion().  If there is no default version set (this is the
        first time we are being executed), then we update the default version.
      target_tc: Target toolchain name to use, e.g. x86_64-cros-linux-gnu
      toolchain_url: Format pattern for path to fetch toolchain from,
        e.g. 2014/04/%(target)s-2014.04.23.220740.tar.xz

    Yields:
      An SDKFetcher.SDKContext namedtuple object.  The attributes of the
      object are:
        version: The version that was prepared.
        target_tc: Target toolchain name.
        key_map: Dictionary that contains CacheReference objects for the SDK
          artifacts, indexed by cache key.
    """
    if version is None and self.sdk_path is None:
      version = self.GetDefaultVersion()
      if version is None:
        version, _ = self.UpdateDefaultVersion()
    components = list(components)

    key_map = {}
    fetch_urls = {}

    if not target_tc or not toolchain_url:
      # Need metadata.json to fill in whichever override was not supplied.
      metadata = self._GetMetadata(version)
      target_tc = target_tc or metadata['toolchain-tuple'][0]
      toolchain_url = toolchain_url or metadata['toolchain-url']

    # Fetch toolchains from separate location.
    if self.TARGET_TOOLCHAIN_KEY in components:
      fetch_urls[self.TARGET_TOOLCHAIN_KEY] = os.path.join(
          self.toolchain_path, toolchain_url % {'target': target_tc})
      components.remove(self.TARGET_TOOLCHAIN_KEY)

    version_base = self._GetVersionGSBase(version)
    fetch_urls.update((t, os.path.join(version_base, t)) for t in components)
    try:
      for key, url in fetch_urls.iteritems():
        cache_key = self._GetCacheKeyForComponent(version, key)
        ref = self.tarball_cache.Lookup(cache_key)
        key_map[key] = ref
        ref.Acquire()
        if not ref.Exists(lock=True):
          # TODO(rcui): Parallelize this.  Requires acquiring locks *before*
          # generating worker processes; therefore the functionality needs to
          # be moved into the DiskCache class itself -
          # i.e.,DiskCache.ParallelSetDefault().
          self._UpdateTarball(url, ref)

      ctx_version = version
      if self.sdk_path is not None:
        # A custom SDK path has no meaningful version number.
        ctx_version = CUSTOM_VERSION
      yield self.SDKContext(ctx_version, target_tc, key_map)
    finally:
      # TODO(rcui): Move to using cros_build_lib.ContextManagerStack()
      cros_build_lib.SafeRun([ref.Release for ref in key_map.itervalues()])
+
+
class GomaError(Exception):
  """Indicates an error with fetching, installing, or starting Goma."""
+
+
@command.CommandDecorator(COMMAND_NAME)
class ChromeSDKCommand(command.CliCommand):
  """Set up an environment for building Chrome on Chrome OS.

  Pulls down SDK components for building and testing Chrome for Chrome OS,
  sets up the environment for building Chrome, and runs a command in the
  environment, starting a bash session if no command is specified.

  The bash session environment is set up by a user-configurable rc file located
  at ~/.chromite/chrome_sdk.bashrc.
  """

  # Note, this URL is not accessible outside of corp.
  _GOMA_URL = ('https://clients5.google.com/cxx-compiler-service/'
               'download/goma_ctl.py')

  # Location of the clang binaries, relative to the Chrome src/ directory.
  _CLANG_DIR = 'third_party/llvm-build/Release+Asserts/bin'

  # Variables sourced from the SDK's ebuild 'environment' file and exported
  # into the SDK shell.
  EBUILD_ENV = (
      # Compiler tools.
      'CXX',
      'CC',
      'AR',
      'AS',
      'LD',
      'RANLIB',

      # Compiler flags.
      'CFLAGS',
      'CXXFLAGS',
      'CPPFLAGS',
      'LDFLAGS',

      # Misc settings.
      'GOLD_SET',
      'GYP_DEFINES',
  )

  # Names of the Goma-related variables exported into the SDK shell.
  SDK_GOMA_PORT_ENV = 'SDK_GOMA_PORT'
  SDK_GOMA_DIR_ENV = 'SDK_GOMA_DIR'

  # Command (run from the goma dir) that prints the active Goma port.
  GOMACC_PORT_CMD = ['./gomacc', 'port']
  # Command used to download goma_ctl.py.
  FETCH_GOMA_CMD = ['wget', _GOMA_URL]

  # Override base class property to enable stats upload.
  upload_stats = True

  # Override base class property to use cache related commandline options.
  use_caching_options = True

  @property
  def upload_stats_timeout(self):
    # Give a longer timeout for interactive SDK shell invocations, since the
    # user will not notice a longer wait because it's happening in the
    # background.
    if self.options.cmd:
      return super(ChromeSDKCommand, self).upload_stats_timeout
    else:
      return stats.StatsUploader.UPLOAD_TIMEOUT
+
+  @staticmethod
+  def ValidateVersion(version):
+    if version.startswith('R') or len(version.split('.')) != 3:
+      raise argparse.ArgumentTypeError(
+          '--version should be in the format 3912.0.0')
+    return version
+
+  @classmethod
+  def AddParser(cls, parser):
+    super(ChromeSDKCommand, cls).AddParser(parser)
+    parser.add_argument(
+        '--board', required=True, help='The board SDK to use.')
+    parser.add_argument(
+        '--bashrc', type='path',
+        default=constants.CHROME_SDK_BASHRC,
+        help='A bashrc file used to set up the SDK shell environment. '
+             'Defaults to %s.' % constants.CHROME_SDK_BASHRC)
+    parser.add_argument(
+        '--chroot', type='path',
+        help='Path to a ChromeOS chroot to use.  If set, '
+             '<chroot>/build/<board> will be used as the sysroot that Chrome '
+             'is built against.  The version shown in the SDK shell prompt '
+             'will then have an asterisk prepended to it.')
+    parser.add_argument(
+        '--chrome-src', type='path',
+        help='Specifies the location of a Chrome src/ directory.  Required if '
+             'running with --clang if not running from a Chrome checkout.')
+    parser.add_argument(
+        '--clang', action='store_true', default=False,
+        help='Sets up the environment for building with clang.')
+    parser.add_argument(
+        '--cwd', type='path',
+        help='Specifies a directory to switch to after setting up the SDK '
+             'shell.  Defaults to the current directory.')
+    parser.add_argument(
+        '--internal', action='store_true', default=False,
+        help='Sets up SDK for building official (internal) Chrome '
+             'Chrome, rather than Chromium.')
+    parser.add_argument(
+        '--component', action='store_true', default=False,
+        help='Sets up SDK for building a componentized build of Chrome '
+             '(component=shared_library in GYP).')
+    parser.add_argument(
+        '--fastbuild', action='store_true', default=False,
+        help='Turn off debugging information for a faster build '
+             '(fastbuild=1 in GYP).')
+    parser.add_argument(
+        '--use-external-config', action='store_true', default=False,
+        help='Use the external configuration for the specified board, even if '
+             'an internal configuration is avalable.')
+    parser.add_argument(
+        '--sdk-path', type='local_or_gs_path',
+        help='Provides a path, whether a local directory or a gs:// path, to '
+             'pull SDK components from.')
+    parser.add_argument(
+        '--toolchain-path', type='local_or_gs_path',
+        help='Provides a path, whether a local directory or a gs:// path, to '
+             'pull toolchain components from.')
+    parser.add_argument(
+        '--nogoma', action='store_false', default=True, dest='goma',
+        help="Disables Goma in the shell by removing it from the PATH.")
+    parser.add_argument(
+        '--gomadir', type='path',
+        help="Use the goma installation at the specified PATH.")
+    parser.add_argument(
+        '--version', default=None, type=cls.ValidateVersion,
+        help="Specify version of SDK to use, in the format '3912.0.0'.  "
+             "Defaults to determining version based on the type of checkout "
+             "(Chrome or ChromeOS) you are executing from.")
+    parser.add_argument(
+        'cmd', nargs='*', default=None,
+        help='The command to execute in the SDK environment.  Defaults to '
+             'starting a bash shell.')
+
+    parser.add_option_to_group(
+        parser.caching_group, '--clear-sdk-cache', action='store_true',
+        default=False,
+        help='Removes everything in the SDK cache before starting.')
+
+    group = parser.add_option_group(
+        'Metadata Overrides (Advanced)',
+        description='Provide all of these overrides in order to remove '
+                    'dependencies on metadata.json existence.')
+    parser.add_option_to_group(
+        group, '--target-tc', action='store', default=None,
+        help='Override target toolchain name, e.g. x86_64-cros-linux-gnu')
+    parser.add_option_to_group(
+        group, '--toolchain-url', action='store', default=None,
+        help='Override toolchain url format pattern, e.g. '
+             '2014/04/%%(target)s-2014.04.23.220740.tar.xz')
+
  def __init__(self, options):
    """Initializes the command from parsed command-line |options|."""
    super(ChromeSDKCommand, self).__init__(options)
    self.board = options.board
    # Lazy initialized.
    self.sdk = None
    # Initialized later based on options passed in.
    self.silent = True
+
  @staticmethod
  def _CreatePS1(board, version, chroot=None):
    """Returns PS1 string that sets commandline and xterm window caption.

    If a chroot path is set, then indicate we are using the sysroot from there
    instead of the stock sysroot by prepending an asterisk to the version.

    Args:
      board: The SDK board.
      version: The SDK version.
      chroot: The path to the chroot, if set.

    Returns:
      PS1 string of the form '(sdk <board> <version>) <existing PS1>'.
    """
    custom = '*' if chroot else ''
    # Run a login shell so the user's own PS1 customizations are picked up.
    current_ps1 = cros_build_lib.RunCommand(
        ['bash', '-l', '-c', 'echo "$PS1"'], print_cmd=False,
        capture_output=True).output.splitlines()
    if current_ps1:
      # Take the last line; earlier lines may be noise emitted by rc files.
      current_ps1 = current_ps1[-1]
    if not current_ps1:
      # Something went wrong, so use a fallback value.
      current_ps1 = r'\u@\h \w $ '
    return '(sdk %s %s%s) %s' % (board, custom, version, current_ps1)
+
+  def _FixGoldPath(self, var_contents, toolchain_path):
+    """Point to the gold linker in the toolchain tarball.
+
+    Accepts an already set environment variable in the form of '<cmd>
+    -B<gold_path>', and overrides the gold_path to the correct path in the
+    extracted toolchain tarball.
+
+    Args:
+      var_contents: The contents of the environment variable.
+      toolchain_path: Path to the extracted toolchain tarball contents.
+
+    Returns:
+      Environment string that has correct gold path.
+    """
+    cmd, _, gold_path = var_contents.partition(' -B')
+    gold_path = os.path.join(toolchain_path, gold_path.lstrip('/'))
+    return '%s -B%s' % (cmd, gold_path)
+
  def _SetupTCEnvironment(self, sdk_ctx, options, env):
    """Sets up toolchain-related environment variables in |env| (in place)."""
    target_tc_path = sdk_ctx.key_map[self.sdk.TARGET_TOOLCHAIN_KEY].path
    tc_bin_path = os.path.join(target_tc_path, 'bin')
    # Put the cross-toolchain binaries first on the PATH.
    env['PATH'] = '%s:%s' % (tc_bin_path, os.environ['PATH'])

    # Rewrite each tool's -B<gold_path> flag to point into the extracted
    # toolchain tarball.
    for var in ('CXX', 'CC', 'LD'):
      env[var] = self._FixGoldPath(env[var], target_tc_path)

    clang_path = os.path.join(options.chrome_src, self._CLANG_DIR)
    if options.clang:
      # Tell clang where to find the gcc headers and libraries.
      flags = ['--gcc-toolchain=' + os.path.join(target_tc_path, 'usr'),
               '--target=' + sdk_ctx.target_tc]
      # TODO: It'd be nicer to inject these flags via some gyp variable.
      # Note: It's important they're only passed to target targets, not host
      # targets. They are intentionally added only to CC and not CC_host.
      clang_bin = os.path.join(clang_path, 'clang')
      # split()[-1] keeps only the trailing -B<path> token of the old setting.
      env['CC'] = ' '.join([clang_bin] + flags + [env['CC'].split()[-1]])
      clangxx_bin = os.path.join(clang_path, 'clang++')
      env['CXX'] = ' '.join([clangxx_bin] + flags + [env['CXX'].split()[-1]])

    # The host compiler intentionally doesn't use the libstdc++ from sdk_ctx,
    # so that host binaries link against the system libstdc++ and can run
    # without a special rpath.
    env['CC_host'] = os.path.join(clang_path, 'clang')
    env['CXX_host'] = os.path.join(clang_path, 'clang++')

    if not options.fastbuild:
      # Enable debug fission.
      env['CFLAGS'] = env.get('CFLAGS', '') +  ' -gsplit-dwarf'
      env['CXXFLAGS'] = env.get('CXXFLAGS', '') + ' -gsplit-dwarf'
+
  def _SetupEnvironment(self, board, sdk_ctx, options, goma_dir=None,
                        goma_port=None):
    """Sets environment variables to export to the SDK shell.

    Args:
      board: The SDK board name.
      sdk_ctx: SDKFetcher.SDKContext namedtuple for the prepared SDK.
      options: The parsed command-line options.
      goma_dir: Path to the Goma installation, if Goma is enabled.
      goma_port: Port of the running Goma instance, if Goma is enabled.

    Returns:
      Dict of environment variables for the SDK shell.  Also mutates
      os.environ (SDK board/version/path variables).
    """
    if options.chroot:
      # --chroot: use the locally-built sysroot instead of the SDK tarball.
      sysroot = os.path.join(options.chroot, 'build', board)
      if not os.path.isdir(sysroot) and not options.cmd:
        logging.warning("Because --chroot is set, expected a sysroot to be at "
                        "%s, but couldn't find one.", sysroot)
    else:
      sysroot = sdk_ctx.key_map[constants.CHROME_SYSROOT_TAR].path

    environment = os.path.join(sdk_ctx.key_map[constants.CHROME_ENV_TAR].path,
                               'environment')
    env = osutils.SourceEnvironment(environment, self.EBUILD_ENV)
    self._SetupTCEnvironment(sdk_ctx, options, env)

    # Add managed components to the PATH.
    env['PATH'] = '%s:%s' % (constants.CHROMITE_BIN_DIR, env['PATH'])
    env['PATH'] = '%s:%s' % (os.path.dirname(self.sdk.gs_ctx.gsutil_bin),
                             env['PATH'])

    # Export internally referenced variables.
    os.environ[self.sdk.SDK_BOARD_ENV] = board
    if self.options.sdk_path:
      os.environ[self.sdk.SDK_PATH_ENV] = self.options.sdk_path
    os.environ[self.sdk.SDK_VERSION_ENV] = sdk_ctx.version

    # Export the board/version info in a more accessible way, so developers can
    # reference them in their chrome_sdk.bashrc files, as well as within the
    # chrome-sdk shell.
    for var in [self.sdk.SDK_VERSION_ENV, self.sdk.SDK_BOARD_ENV]:
      env[var.lstrip('%')] = os.environ[var]

    # Export Goma information.
    if goma_dir:
      env[self.SDK_GOMA_DIR_ENV] = goma_dir
      env[self.SDK_GOMA_PORT_ENV] = goma_port

    # SYSROOT is necessary for Goma and the sysroot wrapper.
    env['SYSROOT'] = sysroot
    gyp_dict = chrome_util.ProcessGypDefines(env['GYP_DEFINES'])
    gyp_dict['sysroot'] = sysroot
    # Drop ebuild-only settings that don't apply to SDK builds.
    gyp_dict.pop('order_text_section', None)
    gyp_dict.pop('pkg-config', None)
    gyp_dict['host_clang'] = 1
    if options.clang:
      gyp_dict['clang'] = 1
    if options.internal:
      gyp_dict['branding'] = 'Chrome'
      gyp_dict['buildtype'] = 'Official'
    else:
      gyp_dict.pop('branding', None)
      gyp_dict.pop('buildtype', None)
      gyp_dict.pop('internal_gles2_conform_tests', None)
    if options.component:
      gyp_dict['component'] = 'shared_library'
    if options.fastbuild:
      gyp_dict['fastbuild'] = 1
      gyp_dict.pop('release_extra_cflags', None)

    # Enable goma if requested.
    if goma_dir:
      gyp_dict['use_goma'] = 1
      gyp_dict['gomadir'] = goma_dir

    if options.clang:
      # TODO(thakis): Remove once https://b/issue?id=16876457 is fixed.
      gyp_dict['use_goma'] = 0

    env['GYP_DEFINES'] = chrome_util.DictToGypDefines(gyp_dict)

    # PS1 sets the command line prompt and xterm window caption.
    full_version = sdk_ctx.version
    if full_version != CUSTOM_VERSION:
      full_version = self.sdk.GetFullVersion(sdk_ctx.version)
    env['PS1'] = self._CreatePS1(self.board, full_version,
                                 chroot=options.chroot)

    # Use a per-board output directory so boards don't clobber each other.
    out_dir = 'out_%s' % self.board
    env['builddir_name'] = out_dir
    env['GYP_GENERATOR_FLAGS'] = 'output_dir=%s' % out_dir
    env['GYP_CROSSCOMPILE'] = '1'
    return env
+
+  @staticmethod
+  def _VerifyGoma(user_rc):
+    """Verify that the user has no goma installations set up in user_rc.
+
+    If the user does have a goma installation set up, verify that it's for
+    ChromeOS.
+
+    Args:
+      user_rc: User-supplied rc file.
+    """
+    user_env = osutils.SourceEnvironment(user_rc, ['PATH'])
+    goma_ctl = osutils.Which('goma_ctl.py', user_env.get('PATH'))
+    if goma_ctl is not None:
+      logging.warning(
+          '%s is adding Goma to the PATH.  Using that Goma instead of the '
+          'managed Goma install.', user_rc)
+
+  @staticmethod
+  def _VerifyChromiteBin(user_rc):
+    """Verify that the user has not set a chromite bin/ dir in user_rc.
+
+    Args:
+      user_rc: User-supplied rc file.
+    """
+    user_env = osutils.SourceEnvironment(user_rc, ['PATH'])
+    chromite_bin = osutils.Which('parallel_emerge', user_env.get('PATH'))
+    if chromite_bin is not None:
+      logging.warning(
+          '%s is adding chromite/bin to the PATH.  Remove it from the PATH to '
+          'use the the default Chromite.', user_rc)
+
  @contextlib.contextmanager
  def _GetRCFile(self, env, user_rc):
    """Returns path to dynamically created bashrc file.

    The bashrc file sets the environment variables contained in |env|, as well
    as sources the user-editable chrome_sdk.bashrc file in the user's home
    directory.  That rc file is created if it doesn't already exist.

    Args:
      env: A dictionary of environment variables that will be set by the rc
        file.
      user_rc: User-supplied rc file.

    Yields:
      Path to the generated rc file; valid only within the context.
    """
    if not os.path.exists(user_rc):
      osutils.Touch(user_rc, makedirs=True)

    # Warn if the user's rc file would shadow the managed Goma/chromite.
    self._VerifyGoma(user_rc)
    self._VerifyChromiteBin(user_rc)

    # We need a temporary rc file to 'wrap' the user configuration file,
    # because running with '--rcfile' causes bash to ignore bash special
    # variables passed through subprocess.Popen, such as PS1.  So we set them
    # here.
    #
    # Having a wrapper rc file will also allow us to inject bash functions into
    # the environment, not just variables.
    with osutils.TempDir() as tempdir:
      # Only source the user's ~/.bashrc if running in interactive mode.
      contents = [
          '[[ -e ~/.bashrc && $- == *i* ]] && . ~/.bashrc\n',
      ]

      for key, value in env.iteritems():
        contents.append("export %s='%s'\n" % (key, value))
      contents.append('. "%s"\n' % user_rc)

      rc_file = os.path.join(tempdir, 'rcfile')
      osutils.WriteFile(rc_file, contents)
      yield rc_file
+
+  def _GomaPort(self, goma_dir):
+    """Returns current active Goma port."""
+    port = cros_build_lib.RunCommand(
+        self.GOMACC_PORT_CMD, cwd=goma_dir, debug_level=logging.DEBUG,
+        error_code_ok=True, capture_output=True).output.strip()
+    return port
+
  def _FetchGoma(self):
    """Fetch, install, and start Goma, using cached version if it exists.

    Returns:
      A tuple (dir, port) containing the path to the cached goma/ dir and the
      Goma port.

    Raises:
      GomaError: If Goma could not be fetched, or no port was detected.
    """
    common_path = os.path.join(self.options.cache_dir, constants.COMMON_CACHE)
    common_cache = cache.DiskCache(common_path)

    goma_dir = self.options.gomadir
    if not goma_dir:
      # Cache key version '2' — presumably bumped when the install layout
      # changed; verify before reusing elsewhere.
      ref = common_cache.Lookup(('goma', '2'))
      if not ref.Exists():
        Log('Installing Goma.', silent=self.silent)
        with osutils.TempDir() as tempdir:
          goma_dir = os.path.join(tempdir, 'goma')
          os.mkdir(goma_dir)
          result = cros_build_lib.DebugRunCommand(
              self.FETCH_GOMA_CMD, cwd=goma_dir, error_code_ok=True)
          if result.returncode:
            raise GomaError('Failed to fetch Goma')
          # Update to latest version of goma. We choose the outside-chroot
          # version ('goobuntu') over the chroot version ('chromeos') by
          # supplying input='1' to the following prompt:
          #
          # What is your platform?
          #  1. Goobuntu  2. Precise (32bit)  3. Lucid (32bit)  4. Debian
          #  5. Chrome OS  6. MacOS ? -->
          cros_build_lib.DebugRunCommand(
              ['python2', 'goma_ctl.py', 'update'], cwd=goma_dir, input='1\n')
          ref.SetDefault(goma_dir)
      goma_dir = ref.path

    Log('Starting Goma.', silent=self.silent)
    cros_build_lib.DebugRunCommand(
        ['python2', 'goma_ctl.py', 'ensure_start'], cwd=goma_dir)
    port = self._GomaPort(goma_dir)
    Log('Goma is started on port %s', port, silent=self.silent)
    if not port:
      raise GomaError('No Goma port detected')

    return goma_dir, port
+
  def Run(self):
    """Perform the command.

    Returns:
      The exit code of the executed command when --cmd was given; otherwise
      returns None after the interactive shell exits.
    """
    if os.environ.get(SDKFetcher.SDK_VERSION_ENV) is not None:
      cros_build_lib.Die('Already in an SDK shell.')

    src_path = self.options.chrome_src or os.getcwd()
    checkout = path_util.DetermineCheckout(src_path)
    if not checkout.chrome_src_dir:
      cros_build_lib.Die('Chrome checkout not found at %s', src_path)
    self.options.chrome_src = checkout.chrome_src_dir

    # NOTE(review): options.chrome_src was just set above (and we Die when no
    # Chrome checkout is found), so this check looks unreachable — confirm.
    if self.options.clang and not self.options.chrome_src:
      cros_build_lib.Die('--clang requires --chrome-src to be set.')

    if self.options.version and self.options.sdk_path:
      cros_build_lib.Die('Cannot specify both --version and --sdk-path.')

    # Suppress informational logging when running a one-off command.
    self.silent = bool(self.options.cmd)
    # Lazy initialize because SDKFetcher creates a GSContext() object in its
    # constructor, which may block on user input.
    self.sdk = SDKFetcher(self.options.cache_dir, self.options.board,
                          clear_cache=self.options.clear_sdk_cache,
                          chrome_src=self.options.chrome_src,
                          sdk_path=self.options.sdk_path,
                          toolchain_path=self.options.toolchain_path,
                          silent=self.silent,
                          use_external_config=self.options.use_external_config)

    prepare_version = self.options.version
    if not prepare_version and not self.options.sdk_path:
      prepare_version, _ = self.sdk.UpdateDefaultVersion()

    components = [self.sdk.TARGET_TOOLCHAIN_KEY, constants.CHROME_ENV_TAR]
    if not self.options.chroot:
      # Without --chroot, the sysroot comes from the SDK sysroot tarball.
      components.append(constants.CHROME_SYSROOT_TAR)

    goma_dir = None
    goma_port = None
    if self.options.goma:
      try:
        goma_dir, goma_port = self._FetchGoma()
      except GomaError as e:
        # Goma is best-effort: log and continue without it.
        logging.error('Goma: %s.  Bypass by running with --nogoma.', e)

    with self.sdk.Prepare(components, version=prepare_version,
                          target_tc=self.options.target_tc,
                          toolchain_url=self.options.toolchain_url) as ctx:
      env = self._SetupEnvironment(self.options.board, ctx, self.options,
                                   goma_dir=goma_dir, goma_port=goma_port)
      with self._GetRCFile(env, self.options.bashrc) as rcfile:
        bash_cmd = ['/bin/bash']

        extra_env = None
        if not self.options.cmd:
          bash_cmd.extend(['--rcfile', rcfile, '-i'])
        else:
          # The '"$@"' expands out to the properly quoted positional args
          # coming after the '--'.
          bash_cmd.extend(['-c', '"$@"', '--'])
          bash_cmd.extend(self.options.cmd)
          # When run in noninteractive mode, bash sources the rc file set in
          # BASH_ENV, and ignores the --rcfile flag.
          extra_env = {'BASH_ENV': rcfile}

        # Bash behaves differently when it detects that it's being launched by
        # sshd - it ignores the BASH_ENV variable.  So prevent ssh-related
        # environment variables from being passed through.
        os.environ.pop('SSH_CLIENT', None)
        os.environ.pop('SSH_CONNECTION', None)
        os.environ.pop('SSH_TTY', None)

        cmd_result = cros_build_lib.RunCommand(
            bash_cmd, print_cmd=False, debug_level=logging.CRITICAL,
            error_code_ok=True, extra_env=extra_env, cwd=self.options.cwd)
        if self.options.cmd:
          return cmd_result.returncode
diff --git a/cli/cros/cros_chrome_sdk_unittest b/cli/cros/cros_chrome_sdk_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cli/cros/cros_chrome_sdk_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cli/cros/cros_chrome_sdk_unittest.py b/cli/cros/cros_chrome_sdk_unittest.py
new file mode 100644
index 0000000..2fd26fd
--- /dev/null
+++ b/cli/cros/cros_chrome_sdk_unittest.py
@@ -0,0 +1,501 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module tests the cros image command."""
+
+from __future__ import print_function
+
+import copy
+import mock
+import os
+import shutil
+
+from chromite.cbuildbot import constants
+from chromite.cli import command_unittest
+from chromite.cli.cros import cros_chrome_sdk
+from chromite.lib import cache
+from chromite.lib import chrome_util
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import gs
+from chromite.lib import gs_unittest
+from chromite.lib import osutils
+from chromite.lib import partial_mock
+
+
+# pylint: disable=W0212
+
+
+class MockChromeSDKCommand(command_unittest.MockCommand):
+  """Mock out the chrome-sdk command."""
+  TARGET = 'chromite.cli.cros.cros_chrome_sdk.ChromeSDKCommand'
+  TARGET_CLASS = cros_chrome_sdk.ChromeSDKCommand
+  COMMAND = 'chrome-sdk'
+  # In addition to the attrs MockCommand already stubs, replace _GOMA_URL (so
+  # no real Goma download is ever attempted) and _SetupEnvironment (so tests
+  # can capture the environment the command computes).
+  ATTRS = (('_GOMA_URL', '_SetupEnvironment') +
+           command_unittest.MockCommand.ATTRS)
+
+  _GOMA_URL = 'Invalid URL'
+
+  def __init__(self, *args, **kwargs):
+    command_unittest.MockCommand.__init__(self, *args, **kwargs)
+    # Deep copy of the env dict produced by _SetupEnvironment; None until the
+    # command has run far enough to compute it.
+    self.env = None
+
+  def _SetupEnvironment(self, *args, **kwargs):
+    # Delegate to the real implementation, but snapshot the result so tests
+    # can assert on the environment the SDK shell would have been given.
+    env = self.backup['_SetupEnvironment'](*args, **kwargs)
+    self.env = copy.deepcopy(env)
+    return env
+
+
+class ParserTest(cros_test_lib.MockTempDirTestCase):
+  """Test the parser."""
+  def testNormal(self):
+    """Tests that our example parser works normally."""
+    # Parsing '--board' and the global '--cache-dir' should land on the
+    # command instance's options unchanged.
+    with MockChromeSDKCommand(
+        ['--board', SDKFetcherMock.BOARD],
+        base_args=['--cache-dir', self.tempdir]) as bootstrap:
+      self.assertEquals(bootstrap.inst.options.board, SDKFetcherMock.BOARD)
+      self.assertEquals(bootstrap.inst.options.cache_dir, self.tempdir)
+
+
+def _GSCopyMock(_self, path, dest, **_kwargs):
+  """Used to simulate a GS Copy operation.
+
+  Creates an empty file named after the basename of |path| and moves it to
+  |dest|, so callers see a plausible local artifact without touching GS.
+  """
+  with osutils.TempDir() as tempdir:
+    local_path = os.path.join(tempdir, os.path.basename(path))
+    osutils.Touch(local_path)
+    shutil.move(local_path, dest)
+
+
+def _DependencyMockCtx(f):
+  """Decorator that ensures dependency PartialMocks are started.
+
+  Since PartialMock does not support nested mocking, we need to first call
+  stop() on the outer level PartialMock (which is passed in to us).  We then
+  re-start() the outer level upon exiting the context.
+  """
+  def new_f(self, *args, **kwargs):
+    # self.entered guards against re-entrant mock juggling: only the
+    # outermost mocked call stops/starts the external mocks.
+    if not self.entered:
+      try:
+        self.entered = True
+        # Temporarily disable outer GSContext mock before starting our mock.
+        # TODO(rcui): Generalize this attribute and include in partial_mock.py.
+        for emock in self.external_mocks:
+          emock.stop()
+
+        with self.gs_mock:
+          return f(self, *args, **kwargs)
+      finally:
+        # Always restore the external mocks, even if f() raised.
+        self.entered = False
+        for emock in self.external_mocks:
+          emock.start()
+    else:
+      return f(self, *args, **kwargs)
+  return new_f
+
+
+class SDKFetcherMock(partial_mock.PartialMock):
+  """Provides mocking functionality for SDKFetcher."""
+
+  TARGET = 'chromite.cli.cros.cros_chrome_sdk.SDKFetcher'
+  ATTRS = ('__init__', 'GetFullVersion', '_GetMetadata', '_UpdateTarball',
+           'UpdateDefaultVersion')
+
+  # Canned metadata JSON returned in place of real GS-hosted build metadata.
+  FAKE_METADATA = """
+{
+  "boards": ["x86-alex"],
+  "cros-version": "25.3543.2",
+  "metadata-version": "1",
+  "bot-hostname": "build82-m2.golo.chromium.org",
+  "bot-config": "x86-alex-release",
+  "toolchain-tuple": ["i686-pc-linux-gnu"],
+  "toolchain-url": "2013/01/%(target)s-2013.01.23.003823.tar.xz",
+  "sdk-version": "2013.01.23.003823"
+}"""
+
+  BOARD = 'x86-alex'
+  VERSION = 'XXXX.X.X'
+
+  def __init__(self, external_mocks=None):
+    """Initializes the mock.
+
+    Args:
+      external_mocks: A list of already started PartialMock/patcher instances.
+        stop() will be called on each element every time execution enters one of
+        our the mocked out methods, and start() called on it once execution
+        leaves the mocked out method.
+    """
+    partial_mock.PartialMock.__init__(self)
+    self.external_mocks = external_mocks or []
+    # Re-entrancy flag consumed by _DependencyMockCtx.
+    self.entered = False
+    self.gs_mock = gs_unittest.GSContextMock()
+    self.gs_mock.SetDefaultCmdResult()
+    self.env = None
+
+  @_DependencyMockCtx
+  def _target__init__(self, inst, *args, **kwargs):
+    self.backup['__init__'](inst, *args, **kwargs)
+    # Safety check: a misconfigured cache dir could make tests clobber real
+    # state, so insist it lives under /tmp.
+    if not inst.cache_base.startswith('/tmp'):
+      raise AssertionError('For testing, SDKFetcher cache_dir needs to be a '
+                           'dir under /tmp')
+
+  @_DependencyMockCtx
+  def UpdateDefaultVersion(self, inst, *_args, **_kwargs):
+    # Pretend the default version was freshly updated to our canned VERSION.
+    inst._SetDefaultVersion(self.VERSION)
+    return self.VERSION, True
+
+  @_DependencyMockCtx
+  def _UpdateTarball(self, inst, *args, **kwargs):
+    # Run the real tarball update logic, but fake out the GS download and
+    # the tarball extraction so no network or archive work happens.
+    with mock.patch.object(gs.GSContext, 'Copy', autospec=True,
+                           side_effect=_GSCopyMock):
+      with mock.patch.object(cache, 'Untar'):
+        return self.backup['_UpdateTarball'](inst, *args, **kwargs)
+
+  @_DependencyMockCtx
+  def GetFullVersion(self, _inst, version):
+    # Canned milestone prefix; real code would resolve this via GS.
+    return 'R26-%s' % version
+
+  @_DependencyMockCtx
+  def _GetMetadata(self, inst, *args, **kwargs):
+    # Serve FAKE_METADATA for any 'gsutil cat .../metadata.json' invocation,
+    # then let the real parsing logic run.
+    self.gs_mock.SetDefaultCmdResult()
+    self.gs_mock.AddCmdResult(
+        partial_mock.ListRegex('cat .*/%s' % constants.METADATA_JSON),
+        output=self.FAKE_METADATA)
+    return self.backup['_GetMetadata'](inst, *args, **kwargs)
+
+
+class RunThroughTest(cros_test_lib.MockTempDirTestCase,
+                     cros_test_lib.LoggingTestCase):
+  """Run the script with most things mocked out."""
+
+  VERSION_KEY = (SDKFetcherMock.BOARD, SDKFetcherMock.VERSION,
+                 constants.CHROME_SYSROOT_TAR)
+
+  # Minimal environment returned by the mocked SourceEnvironment().
+  FAKE_ENV = {
+      'GYP_DEFINES': "sysroot='/path/to/sysroot'",
+      'CXX': 'x86_64-cros-linux-gnu-g++ -B /path/to/gold',
+      'CC': 'x86_64-cros-linux-gnu-gcc -B /path/to/gold',
+      'LD': 'x86_64-cros-linux-gnu-g++ -B /path/to/gold',
+  }
+
+  def SetupCommandMock(self, extra_args=None):
+    # 'true' is the command run inside the SDK shell — a harmless no-op.
+    cmd_args = ['--board', SDKFetcherMock.BOARD, '--chrome-src',
+                self.chrome_src_dir, 'true']
+    if extra_args:
+      cmd_args.extend(extra_args)
+
+    self.cmd_mock = MockChromeSDKCommand(
+        cmd_args, base_args=['--cache-dir', self.tempdir])
+    self.StartPatcher(self.cmd_mock)
+    # Let the real Run() execute; everything it touches is mocked below.
+    self.cmd_mock.UnMockAttr('Run')
+
+  def SourceEnvironmentMock(self, path, *_args, **_kwargs):
+    # Only the sysroot 'environment' file gets fake contents; anything else
+    # sources to an empty env.
+    if path.endswith('environment'):
+      return copy.deepcopy(self.FAKE_ENV)
+    return {}
+
+  def setUp(self):
+    self.rc_mock = cros_build_lib_unittest.RunCommandMock()
+    self.rc_mock.SetDefaultCmdResult()
+    self.StartPatcher(self.rc_mock)
+
+    self.sdk_mock = self.StartPatcher(SDKFetcherMock(
+        external_mocks=[self.rc_mock]))
+
+    # This needs to occur before initializing MockChromeSDKCommand.
+    self.bashrc = os.path.join(self.tempdir, 'bashrc')
+    self.PatchObject(constants, 'CHROME_SDK_BASHRC', new=self.bashrc)
+
+    self.PatchObject(osutils, 'SourceEnvironment',
+                     autospec=True, side_effect=self.SourceEnvironmentMock)
+    # Pretend gomacc reports a running compiler proxy on port 8088.
+    self.rc_mock.AddCmdResult(cros_chrome_sdk.ChromeSDKCommand.GOMACC_PORT_CMD,
+                              output='8088')
+
+    # Initialized by SetupCommandMock.
+    self.cmd_mock = None
+
+    # Set up a fake Chrome src/ directory
+    self.chrome_root = os.path.join(self.tempdir, 'chrome_root')
+    self.chrome_src_dir = os.path.join(self.chrome_root, 'src')
+    osutils.SafeMakedirs(self.chrome_src_dir)
+    osutils.Touch(os.path.join(self.chrome_root, '.gclient'))
+
+  @property
+  def cache(self):
+    # Convenience accessor for the SDK tarball cache of the command under test.
+    return self.cmd_mock.inst.sdk.tarball_cache
+
+  def testIt(self):
+    """Test a runthrough of the script."""
+    self.SetupCommandMock()
+    with cros_test_lib.LoggingCapturer() as logs:
+      self.cmd_mock.inst.Run()
+      # A successful run should not log any Goma-related warning.
+      self.AssertLogsContain(logs, 'Goma:', inverted=True)
+
+  def testErrorCodePassthrough(self):
+    """Test that error codes are passed through."""
+    self.SetupCommandMock()
+    with cros_test_lib.LoggingCapturer():
+      # Make the inner 'true' command fail with code 5; Run() must return it.
+      self.rc_mock.AddCmdResult(partial_mock.ListRegex('-- true'),
+                                returncode=5)
+      returncode = self.cmd_mock.inst.Run()
+      self.assertEquals(returncode, 5)
+
+  def testLocalSDKPath(self):
+    """Fetch components from a local --sdk-path."""
+    sdk_dir = os.path.join(self.tempdir, 'sdk_dir')
+    osutils.SafeMakedirs(sdk_dir)
+    osutils.WriteFile(os.path.join(sdk_dir, constants.METADATA_JSON),
+                      SDKFetcherMock.FAKE_METADATA)
+    self.SetupCommandMock(extra_args=['--sdk-path', sdk_dir])
+    with cros_test_lib.LoggingCapturer():
+      self.cmd_mock.inst.Run()
+
+  def testGomaError(self):
+    """We print an error message when GomaError is raised."""
+    self.SetupCommandMock()
+    with cros_test_lib.LoggingCapturer() as logs:
+      self.PatchObject(cros_chrome_sdk.ChromeSDKCommand, '_FetchGoma',
+                       side_effect=cros_chrome_sdk.GomaError())
+      self.cmd_mock.inst.Run()
+      self.AssertLogsContain(logs, 'Goma:')
+
+  def testSpecificComponent(self):
+    """Tests that SDKFetcher.Prepare() handles |components| param properly."""
+    sdk = cros_chrome_sdk.SDKFetcher(os.path.join(self.tempdir),
+                                     SDKFetcherMock.BOARD)
+    components = [constants.BASE_IMAGE_TAR, constants.CHROME_SYSROOT_TAR]
+    with sdk.Prepare(components=components) as ctx:
+      # Requested components are fetched; unrequested ones are absent.
+      for c in components:
+        self.assertTrue(os.path.exists(ctx.key_map[c].path))
+      for c in [constants.IMAGE_SCRIPTS_TAR, constants.CHROME_ENV_TAR]:
+        self.assertFalse(c in ctx.key_map)
+
+  @staticmethod
+  def FindInPath(paths, endswith):
+    # Return whether any PATH component ends with |endswith|.
+    for path in paths.split(':'):
+      if path.endswith(endswith):
+        return True
+    return False
+
+  def testGomaInPath(self, inverted=False):
+    """Verify that we do indeed add Goma to the PATH."""
+    # With inverted=True this doubles as the --nogoma negative test
+    # (see testNoGoma below).
+    extra_args = ['--nogoma'] if inverted else None
+    self.SetupCommandMock(extra_args)
+    self.cmd_mock.inst.Run()
+
+    assert_fn = self.assertNotIn if inverted else self.assertIn
+    gyp_defines_str = self.cmd_mock.env['GYP_DEFINES']
+    gyp_defines = chrome_util.ProcessGypDefines(gyp_defines_str)
+    assert_fn('gomadir', gyp_defines)
+    assert_fn('use_goma', gyp_defines)
+
+  def testNoGoma(self):
+    """Verify that we do not add Goma to the PATH."""
+    self.testGomaInPath(inverted=True)
+
+  def testClang(self):
+    """Verifies clang codepath."""
+    with cros_test_lib.LoggingCapturer():
+      self.SetupCommandMock(extra_args=['--clang'])
+      self.cmd_mock.inst.Run()
+
+
+class GomaTest(cros_test_lib.MockTempDirTestCase,
+               cros_test_lib.LoggingTestCase):
+  """Test Goma setup functionality."""
+
+  def setUp(self):
+    self.rc_mock = cros_build_lib_unittest.RunCommandMock()
+    self.rc_mock.SetDefaultCmdResult()
+    self.StartPatcher(self.rc_mock)
+
+    self.cmd_mock = MockChromeSDKCommand(
+        ['--board', SDKFetcherMock.BOARD, 'true'],
+        base_args=['--cache-dir', self.tempdir])
+    self.StartPatcher(self.cmd_mock)
+
+  def VerifyGomaError(self):
+    # Helper: the _FetchGoma call under the current mocks must raise.
+    self.assertRaises(cros_chrome_sdk.GomaError, self.cmd_mock.inst._FetchGoma)
+
+  def testNoGomaPort(self):
+    """We print an error when gomacc is not returning a port."""
+    # Default result: returncode 0 but empty output (no port).
+    self.rc_mock.AddCmdResult(
+        cros_chrome_sdk.ChromeSDKCommand.GOMACC_PORT_CMD)
+    self.VerifyGomaError()
+
+  def testGomaccError(self):
+    """We print an error when gomacc exits with nonzero returncode."""
+    self.rc_mock.AddCmdResult(
+        cros_chrome_sdk.ChromeSDKCommand.GOMACC_PORT_CMD, returncode=1)
+    self.VerifyGomaError()
+
+  def testFetchError(self):
+    """We print an error when we can't fetch Goma."""
+    # NOTE(review): this setup is identical to testGomaccError above —
+    # presumably it was meant to mock the download step instead; verify.
+    self.rc_mock.AddCmdResult(
+        cros_chrome_sdk.ChromeSDKCommand.GOMACC_PORT_CMD, returncode=1)
+    self.VerifyGomaError()
+
+  def testGomaStart(self):
+    """Test that we start Goma if it's not already started."""
+    # Duplicate return values.
+    self.PatchObject(cros_chrome_sdk.ChromeSDKCommand, '_GomaPort',
+                     side_effect=['XXXX', 'XXXX'])
+    # Run it twice to exercise caching.
+    for _ in range(2):
+      goma_dir, goma_port = self.cmd_mock.inst._FetchGoma()
+      self.assertEquals(goma_port, 'XXXX')
+      self.assertTrue(bool(goma_dir))
+
+
+class VersionTest(cros_test_lib.MockTempDirTestCase):
+  """Tests the determination of which SDK version to use."""
+
+  VERSION = '3543.2.0'
+  FULL_VERSION = 'R55-%s' % VERSION
+  BOARD = 'lumpy'
+
+  VERSION_BASE = ('gs://chromeos-image-archive/%s-release/LATEST-%s'
+                  % (BOARD, VERSION))
+
+  # Canned gsutil error strings for a missing LATEST file / empty ls result.
+  CAT_ERROR = 'CommandException: No URLs matched %s' % VERSION_BASE
+  LS_ERROR = 'CommandException: One or more URLs matched no objects.'
+
+  def setUp(self):
+    self.gs_mock = self.StartPatcher(gs_unittest.GSContextMock())
+    self.gs_mock.SetDefaultCmdResult()
+    self.sdk_mock = self.StartPatcher(SDKFetcherMock(
+        external_mocks=[self.gs_mock]))
+
+    # Make sure a stray env var from the outer shell can't leak in.
+    os.environ.pop(cros_chrome_sdk.SDKFetcher.SDK_VERSION_ENV, None)
+    self.sdk = cros_chrome_sdk.SDKFetcher(
+        os.path.join(self.tempdir, 'cache'), self.BOARD)
+
+  def SetUpDefaultVersion(self, current, target, newest):
+    # Patch the three version sources (cached default, repo checkout,
+    # newest uploaded manifest), then run the real update logic.
+    self.PatchObject(cros_chrome_sdk.SDKFetcher, 'GetDefaultVersion',
+                     return_value=current)
+    self.PatchObject(cros_chrome_sdk.SDKFetcher, '_GetRepoCheckoutVersion',
+                     return_value=target)
+    self.PatchObject(cros_chrome_sdk.SDKFetcher, '_GetNewestManifestVersion',
+                     return_value=newest)
+    return self.sdk.UpdateDefaultVersion()
+
+  def testUpdateDefaultVersionNormal(self):
+    """Updating default version with no cached default version."""
+    self.sdk_mock.UnMockAttr('UpdateDefaultVersion')
+    target, updated = self.SetUpDefaultVersion(None, self.VERSION, '3544.0.0')
+    self.assertEquals(target, self.VERSION)
+    self.assertEquals(updated, True)
+
+  def testUpdateDefaultVersionTooNew(self):
+    """Version in chromeos_version.sh isn't uploaded yet."""
+    self.sdk_mock.UnMockAttr('UpdateDefaultVersion')
+    # Falls back to the newest uploaded version rather than the checkout's.
+    target, updated = self.SetUpDefaultVersion(None, '3543.10.0', self.VERSION)
+    self.assertEquals(target, self.VERSION)
+    self.assertEquals(updated, True)
+
+  def testUpdateDefaultVersionNoUpdate(self):
+    """Nothing to update because the target version did not change."""
+    self.sdk_mock.UnMockAttr('UpdateDefaultVersion')
+    target, updated = self.SetUpDefaultVersion(self.VERSION, self.VERSION,
+                                               None)
+    self.assertEquals(target, self.VERSION)
+    self.assertEquals(updated, False)
+
+  def testUpdateDefaultChromeVersion(self):
+    """We pick up the right LKGM version from the Chrome tree."""
+    dir_struct = [
+        'gclient_root/.gclient'
+    ]
+    cros_test_lib.CreateOnDiskHierarchy(self.tempdir, dir_struct)
+    gclient_root = os.path.join(self.tempdir, 'gclient_root')
+    # Pretend we're running from inside the fake gclient checkout.
+    self.PatchObject(os, 'getcwd', return_value=gclient_root)
+
+    lkgm_file = os.path.join(gclient_root, 'src', constants.PATH_TO_CHROME_LKGM)
+    osutils.Touch(lkgm_file, makedirs=True)
+    osutils.WriteFile(lkgm_file, self.VERSION)
+    self.sdk_mock.UnMockAttr('UpdateDefaultVersion')
+    self.sdk.UpdateDefaultVersion()
+    self.assertEquals(self.sdk.GetDefaultVersion(),
+                      self.VERSION)
+
+  def testFullVersionCaching(self):
+    """Test full version calculation and caching."""
+    def RaiseException(*_args, **_kwargs):
+      raise Exception('boom')
+
+    self.sdk_mock.UnMockAttr('GetFullVersion')
+    self.gs_mock.AddCmdResult(
+        partial_mock.ListRegex('cat .*/LATEST-%s' % self.VERSION),
+        output=self.FULL_VERSION)
+    self.assertEquals(
+        self.FULL_VERSION,
+        self.sdk.GetFullVersion(self.VERSION))
+    # Test that we access the cache on the next call, rather than checking GS.
+    self.gs_mock.AddCmdResult(
+        partial_mock.ListRegex('cat .*/LATEST-%s' % self.VERSION),
+        side_effect=RaiseException)
+    self.assertEquals(
+        self.FULL_VERSION,
+        self.sdk.GetFullVersion(self.VERSION))
+    # Test that we access GS again if the board is changed.
+    self.sdk.board += '2'
+    self.gs_mock.AddCmdResult(
+        partial_mock.ListRegex('cat .*/LATEST-%s' % self.VERSION),
+        output=self.FULL_VERSION + '2')
+    self.assertEquals(
+        self.FULL_VERSION + '2',
+        self.sdk.GetFullVersion(self.VERSION))
+
+  def testBadVersion(self):
+    """We raise an exception for a bad version."""
+    self.sdk_mock.UnMockAttr('GetFullVersion')
+    # Both the 'cat LATEST-*' and fallback 'ls' lookups fail -> MissingSDK.
+    self.gs_mock.AddCmdResult(
+        partial_mock.ListRegex('cat .*/LATEST-%s' % self.VERSION),
+        output='', error=self.CAT_ERROR, returncode=1)
+    self.gs_mock.AddCmdResult(
+        partial_mock.ListRegex('ls .*%s' % self.VERSION),
+        output='', error=self.LS_ERROR, returncode=1)
+    self.assertRaises(cros_chrome_sdk.MissingSDK, self.sdk.GetFullVersion,
+                      self.VERSION)
+
+  def testDefaultEnvBadBoard(self):
+    """We don't use the version in the environment if board doesn't match."""
+    os.environ[cros_chrome_sdk.SDKFetcher.SDK_VERSION_ENV] = self.VERSION
+    self.assertNotEquals(self.VERSION, self.sdk_mock.VERSION)
+    self.assertEquals(self.sdk.GetDefaultVersion(), None)
+
+  def testDefaultEnvGoodBoard(self):
+    """We use the version in the environment if board matches."""
+    sdk_version_env = cros_chrome_sdk.SDKFetcher.SDK_VERSION_ENV
+    os.environ[sdk_version_env] = self.VERSION
+    os.environ[cros_chrome_sdk.SDKFetcher.SDK_BOARD_ENV] = self.BOARD
+    self.assertEquals(self.sdk.GetDefaultVersion(), self.VERSION)
+
+
+class PathVerifyTest(cros_test_lib.MockTempDirTestCase,
+                     cros_test_lib.LoggingTestCase):
+  """Tests user_rc PATH validation and warnings."""
+
+  def testPathVerifyWarnings(self):
+    """Test the user rc PATH verification codepath."""
+    def SourceEnvironmentMock(*_args, **_kwargs):
+      # PATH made of the parent dirs of each fake tool below, so the
+      # verifiers find 'goma_ctl.py' / 'parallel_emerge' in non-default spots.
+      return {
+          'PATH': ':'.join([os.path.dirname(p) for p in abs_paths]),
+      }
+
+    self.PatchObject(osutils, 'SourceEnvironment',
+                     side_effect=SourceEnvironmentMock)
+    file_list = (
+        'goma/goma_ctl.py',
+        'clang/clang',
+        'chromite/parallel_emerge',
+    )
+    abs_paths = [os.path.join(self.tempdir, relpath) for relpath in file_list]
+    for p in abs_paths:
+      osutils.Touch(p, makedirs=True, mode=0o755)
+
+    with cros_test_lib.LoggingCapturer() as logs:
+      # These verifiers only read the (mocked) environment, so passing None
+      # for self is sufficient here.
+      cros_chrome_sdk.ChromeSDKCommand._VerifyGoma(None)
+      cros_chrome_sdk.ChromeSDKCommand._VerifyChromiteBin(None)
+
+    # Each verifier should have warned about the non-default locations.
+    for msg in ['managed Goma', 'default Chromite']:
+      self.AssertLogsMatch(logs, msg)
diff --git a/cli/cros/cros_chroot.py b/cli/cros/cros_chroot.py
new file mode 100644
index 0000000..21abf93
--- /dev/null
+++ b/cli/cros/cros_chroot.py
@@ -0,0 +1,63 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""cros chroot: Enter the chroot for the current build environment."""
+
+from __future__ import print_function
+
+import argparse
+
+from chromite.cbuildbot import constants
+from chromite.cli import command
+from chromite.lib import cros_build_lib
+
+
+@command.CommandDecorator('chroot')
+class ChrootCommand(command.CliCommand):
+  """Enter the chroot."""
+
+  # Override base class property to enable stats upload.
+  upload_stats = True
+
+  def _RunChrootCommand(self, cmd):
+    """Run the specified command inside the chroot.
+
+    Args:
+      cmd: A list or tuple of strings to use as a command and its arguments.
+           If empty, run 'bash'.
+
+    Returns:
+      The command's result code.
+    """
+    # If there is no command, run bash.
+    if not cmd:
+      cmd = ['bash']
+
+    # Forward our log level to the cros_sdk invocation.
+    chroot_args = ['--log-level', self.options.log_level]
+
+    # error_code_ok: propagate the command's exit status to our caller
+    # rather than raising on failure.
+    result = cros_build_lib.RunCommand(cmd, print_cmd=False, error_code_ok=True,
+                                       cwd=constants.SOURCE_ROOT,
+                                       mute_output=False,
+                                       enter_chroot=True,
+                                       chroot_args=chroot_args)
+    return result.returncode
+
+  @classmethod
+  def AddParser(cls, parser):
+    """Adds a parser."""
+    super(cls, ChrootCommand).AddParser(parser)
+    # REMAINDER: swallow everything after the command name so flags like
+    # '--help' are passed to the chroot command, not parsed by us.
+    parser.add_argument(
+        'command', nargs=argparse.REMAINDER,
+        help='(optional) Command to execute inside the chroot.')
+
+  def Run(self):
+    """Runs `cros chroot`."""
+    self.options.Freeze()
+    cmd = self.options.command
+
+    # If -- was used to separate out the command from arguments, ignore it.
+    if cmd and cmd[0] == '--':
+      cmd = cmd[1:]
+
+    return self._RunChrootCommand(cmd)
diff --git a/cli/cros/cros_chroot_unittest b/cli/cros/cros_chroot_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cli/cros/cros_chroot_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cli/cros/cros_chroot_unittest.py b/cli/cros/cros_chroot_unittest.py
new file mode 100644
index 0000000..3edc8c6
--- /dev/null
+++ b/cli/cros/cros_chroot_unittest.py
@@ -0,0 +1,104 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests the `cros chroot` command."""
+
+from __future__ import print_function
+
+from chromite.cli import command_unittest
+from chromite.cli.cros import cros_chroot
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+
+
+class MockChrootCommand(command_unittest.MockCommand):
+  """Mock out the `cros chroot` command."""
+  TARGET = 'chromite.cli.cros.cros_chroot.ChrootCommand'
+  TARGET_CLASS = cros_chroot.ChrootCommand
+  COMMAND = 'chroot'
+
+  # NOTE: both overrides below are plain pass-throughs to MockCommand and
+  # exist only as explicit extension points.
+  def __init__(self, *args, **kwargs):
+    command_unittest.MockCommand.__init__(self, *args, **kwargs)
+
+  def Run(self, inst):
+    return command_unittest.MockCommand.Run(self, inst)
+
+
+class ChrootTest(cros_test_lib.MockTestCase):
+  """Test the ChrootCommand."""
+
+  def SetupCommandMock(self, cmd_args):
+    """Sets up the `cros chroot` command mock."""
+    self.cmd_mock = MockChrootCommand(cmd_args)
+    self.StartPatcher(self.cmd_mock)
+
+  def setUp(self):
+    """Patches objects."""
+    self.cmd_mock = None
+
+    # Pretend we are inside the chroot, so the command doesn't really enter.
+    self.mock_inside = self.PatchObject(cros_build_lib, 'IsInsideChroot',
+                                        return_value=True)
+
+  def testInteractive(self):
+    """Tests flow for an interactive session."""
+    self.SetupCommandMock([])
+    self.cmd_mock.inst.Run()
+
+    # Ensure we exec'd bash.
+    self.cmd_mock.rc_mock.assertCommandContains(['bash'], mute_output=False)
+
+  def testExplicitCmdNoArgs(self):
+    """Tests a non-interactive command as a single argument."""
+    self.SetupCommandMock(['ls'])
+    self.cmd_mock.inst.Run()
+
+    # Ensure we exec'd ls with arguments.
+    self.cmd_mock.rc_mock.assertCommandContains(['ls'])
+
+  def testLoggingLevelNotice(self):
+    """Tests that logging level is passed to cros_sdk script."""
+    self.SetupCommandMock(['ls'])
+    self.cmd_mock.inst.options.log_level = 'notice'
+    # Pretend that we are outside the chroot so the logging level gets passed as
+    # an argument to cros_sdk.
+    self.mock_inside.return_value = False
+    self.cmd_mock.inst.Run()
+
+    # Ensure that we exec'd with logging level notice.
+    self.cmd_mock.rc_mock.assertCommandContains(
+        ['ls'], chroot_args=['--log-level', 'notice'], enter_chroot=True)
+
+  def testExplicitCmd(self):
+    """Tests a non-interactive command with an argument."""
+    self.SetupCommandMock(['ls', '/tmp'])
+    self.cmd_mock.inst.Run()
+
+    # Ensure we exec'd ls with arguments.
+    self.cmd_mock.rc_mock.assertCommandContains(['ls', '/tmp'])
+
+  def testOverlappingArguments(self):
+    """Tests that arguments matching our own flags are passed through."""
+    self.SetupCommandMock(['ls', '--help'])
+    self.cmd_mock.inst.Run()
+
+    # Ensure we pass along "--help" instead of processing it directly.
+    self.cmd_mock.rc_mock.assertCommandContains(['ls', '--help'])
+
+  def testDashDashShell(self):
+    """Tests an interactive session specified with '--'."""
+    self.SetupCommandMock(['--'])
+    self.cmd_mock.inst.Run()
+
+    # Only -- implies we run bash.
+    self.cmd_mock.rc_mock.assertCommandContains(['bash'])
+
+  def testDashDashArgCommand(self):
+    """Tests a command name that matches a valid argument, after '--'."""
+    # Technically, this should try to run the command "--help".
+    self.SetupCommandMock(['--', '--help'])
+    self.cmd_mock.inst.Run()
+
+    # Ensure we pass along "--help" instead of processing it directly.
+    self.cmd_mock.rc_mock.assertCommandContains(['--help'])
diff --git a/cli/cros/cros_clean.py b/cli/cros/cros_clean.py
new file mode 100644
index 0000000..0930b43
--- /dev/null
+++ b/cli/cros/cros_clean.py
@@ -0,0 +1,181 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO: Support cleaning /tmp and /build/*/tmp.
+# TODO: Support running `eclean -q packages` on / and the sysroots.
+# TODO: Support cleaning sysroots as a destructive option.
+
+"""Clean up working files in a Chromium OS checkout.
+
+If unsure, just use the --safe flag to clean out various objects.
+"""
+
+from __future__ import print_function
+
+import glob
+import os
+
+from chromite.cbuildbot import constants
+from chromite.cli import command
+from chromite.cli import flash
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+
+
+@command.CommandDecorator('clean')
+class CleanCommand(command.CliCommand):
+  """Clean up working files from the build."""
+
+  # Override base class property to enable stats upload.
+  upload_stats = False
+
+  @classmethod
+  def AddParser(cls, parser):
+    """Add parser arguments."""
+    super(CleanCommand, cls).AddParser(parser)
+
+    parser.add_argument(
+        '--safe', default=False, action='store_true',
+        help='Clean up files that are automatically created.')
+    parser.add_argument(
+        '-n', '--dry-run', default=False, action='store_true',
+        help='Show which paths would be cleaned up.')
+
+    group = parser.add_option_group(
+        'Cache Selection (Advanced)',
+        description='Clean out specific caches (--safe does all of these).')
+    group.add_argument(
+        '--cache', default=False, action='store_true',
+        help='Clean up our shared cache dir.')
+    group.add_argument(
+        '--deploy', default=False, action='store_true',
+        help='Clean files cached by cros deploy.')
+    group.add_argument(
+        '--flash', default=False, action='store_true',
+        help='Clean files cached by cros flash.')
+    group.add_argument(
+        '--images', default=False, action='store_true',
+        help='Clean up locally generated images.')
+    group.add_argument(
+        '--incrementals', default=False, action='store_true',
+        help='Clean up incremental package objects.')
+    group.add_argument(
+        '--logs', default=False, action='store_true',
+        help='Clean up various build log files.')
+    group.add_argument(
+        '--workdirs', default=False, action='store_true',
+        help='Clean up build various package build directories.')
+
+    group = parser.add_option_group(
+        'Unrecoverable Options (Dangerous)',
+        description='Clean out objects that cannot be recovered easily.')
+    parser.add_argument(
+        '--clobber', default=False, action='store_true',
+        help='Delete all non-source objects.')
+    group.add_argument(
+        '--chroot', default=False, action='store_true',
+        help='Delete build chroot (affects all boards).')
+
+  def __init__(self, options):
+    """Initializes cros clean."""
+    command.CliCommand.__init__(self, options)
+
+  def Run(self):
+    """Perfrom the cros clean command."""
+
+    # If no option is set, default to "--safe"
+    if not (self.options.safe or
+            self.options.clobber or
+            self.options.chroot or
+            self.options.cache or
+            self.options.deploy or
+            self.options.flash or
+            self.options.images or
+            self.options.incrementals):
+      self.options.safe = True
+
+    if self.options.clobber:
+      self.options.chroot = True
+      self.options.safe = True
+
+    if self.options.safe:
+      self.options.cache = True
+      self.options.deploy = True
+      self.options.flash = True
+      self.options.images = True
+      self.options.incrementals = True
+      self.options.logs = True
+      self.options.workdirs = True
+
+    self.options.Freeze()
+
+    chroot_dir = os.path.join(constants.SOURCE_ROOT,
+                              constants.DEFAULT_CHROOT_DIR)
+
+    cros_build_lib.AssertOutsideChroot()
+
+    def Clean(path):
+      """Helper wrapper for the dry-run checks"""
+      if self.options.dry_run:
+        logging.notice('would have cleaned: %s', path)
+      else:
+        osutils.RmDir(path, ignore_missing=True, sudo=True)
+
+    # Delete this first since many of the caches below live in the chroot.
+    if self.options.chroot:
+      logging.debug('Remove the chroot.')
+      if self.options.dry_run:
+        logging.notice('would have cleaned: %s', chroot_dir)
+      else:
+        cros_build_lib.RunCommand(['cros_sdk', '--delete'])
+
+    if self.options.cache:
+      logging.debug('Clean the common cache')
+      # This test is a convenience for developers that bind mount in .cache.
+      if not os.path.ismount(self.options.cache_dir):
+        Clean(self.options.cache_dir)
+      else:
+        logging.debug('Ignoring bind mounted cache dir: %s',
+                      self.options.cache_dir)
+
+    if self.options.deploy:
+      logging.debug('Clean up the cros deploy cache.')
+      for subdir in ('custom-packages', 'gmerge-packages'):
+        for d in glob.glob(os.path.join(chroot_dir, 'build', '*', subdir)):
+          Clean(d)
+
+    if self.options.flash:
+      logging.debug('Clean up the cros flash cache.')
+      Clean(flash.DEVSERVER_STATIC_DIR)
+
+    if self.options.images:
+      logging.debug('Clean the images cache.')
+      cache_dir = os.path.join(constants.SOURCE_ROOT, 'build')
+      # This test is a convenience for developers that bind mount.
+      if not os.path.ismount(cache_dir):
+        Clean(cache_dir)
+      else:
+        logging.debug('Ignoring bind mounted cache dir: %s', cache_dir)
+
+    if self.options.incrementals:
+      logging.debug('Clean package incremental objects')
+      Clean(os.path.join(chroot_dir, 'var', 'cache', 'portage'))
+      for d in glob.glob(os.path.join(chroot_dir, 'build', '*', 'var', 'cache',
+                                      'portage')):
+        Clean(d)
+
+    if self.options.logs:
+      logging.debug('Clean log files')
+      Clean(os.path.join(chroot_dir, 'var', 'log'))
+      for d in glob.glob(os.path.join(chroot_dir, 'build', '*', 'tmp',
+                                      'portage', 'logs')):
+        Clean(d)
+
+    if self.options.workdirs:
+      logging.debug('Clean package workdirs')
+      Clean(os.path.join(chroot_dir, 'var', 'tmp', 'portage'))
+      for d in glob.glob(os.path.join(chroot_dir, 'build', '*', 'tmp',
+                                      'portage')):
+        Clean(d)
diff --git a/cli/cros/cros_debug.py b/cli/cros/cros_debug.py
new file mode 100644
index 0000000..16e4a76
--- /dev/null
+++ b/cli/cros/cros_debug.py
@@ -0,0 +1,184 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""cros debug: Debug the applications on the target device."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cli import command
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import remote_access
+
+
+@command.CommandDecorator('debug')
+class DebugCommand(command.CliCommand):
+  """Use GDB to debug a process running on the target device.
+
+  This command starts a GDB session to debug a remote process running on the
+  target device. The remote process can either be an existing process or newly
+  started by calling this command.
+
+  This command can also be used to find out information about all running
+  processes of an executable on the target device.
+  """
+
+  EPILOG = """
+To list all running processes of an executable:
+  cros debug device --list --exe=/path/to/executable
+
+To debug an executable:
+  cros debug device --exe=/path/to/executable
+
+To debug a process by its pid:
+  cros debug device --pid=1234
+"""
+
+  # Override base class property to enable stats upload.
+  upload_stats = True
+
+  def __init__(self, options):
+    """Initialize DebugCommand."""
+    super(DebugCommand, self).__init__(options)
+    # SSH connection settings.
+    self.ssh_hostname = None
+    self.ssh_port = None
+    self.ssh_username = None
+    self.ssh_private_key = None
+    # The board name of the target device.
+    self.board = None
+    # Settings of the process to debug.
+    self.list = False
+    self.exe = None
+    self.pid = None
+    # The command for starting gdb.
+    self.gdb_cmd = None
+
+  @classmethod
+  def AddParser(cls, parser):
+    """Add parser arguments."""
+    super(cls, DebugCommand).AddParser(parser)
+    cls.AddDeviceArgument(parser)
+    parser.add_argument(
+        '--board', default=None, help='The board to use. By default it is '
+        'automatically detected. You can override the detected board with '
+        'this option.')
+    parser.add_argument(
+        '--private-key', type='path', default=None,
+        help='SSH identity file (private key).')
+    parser.add_argument(
+        '-l', '--list', action='store_true', default=False,
+        help='List running processes of the executable on the target device.')
+    parser.add_argument(
+        '--exe', help='Full path of the executable on the target device.')
+    parser.add_argument(
+        '-p', '--pid', type=int,
+        help='The pid of the process on the target device.')
+
+  def _ListProcesses(self, device, pids):
+    """Provided with a list of pids, print out information of the processes."""
+    if not pids:
+      logging.info(
+          'No running process of %s on device %s', self.exe, self.ssh_hostname)
+      return
+
+    try:
+      result = device.BaseRunCommand(['ps', 'aux'])
+      lines = result.output.splitlines()
+      try:
+        header, procs = lines[0], lines[1:]
+        info = os.linesep.join([p for p in procs if int(p.split()[1]) in pids])
+      except ValueError:
+        cros_build_lib.Die('Parsing output failed:\n%s', result.output)
+
+      print('\nList running processes of %s on device %s:\n%s\n%s' %
+            (self.exe, self.ssh_hostname, header, info))
+    except cros_build_lib.RunCommandError:
+      cros_build_lib.Die(
+          'Failed to find any running process on device %s', self.ssh_hostname)
+
+  def _DebugNewProcess(self):
+    """Start a new process on the target device and attach gdb to it."""
+    logging.info(
+        'Ready to start and debug %s on device %s', self.exe, self.ssh_hostname)
+    cros_build_lib.RunCommand(self.gdb_cmd + ['--remote_file', self.exe])
+
+  def _DebugRunningProcess(self, pid):
+    """Start gdb and attach it to the remote running process with |pid|."""
+    logging.info(
+        'Ready to debug process %d on device %s', pid, self.ssh_hostname)
+    cros_build_lib.RunCommand(self.gdb_cmd + ['--remote_pid', str(pid)])
+
+  def _ReadOptions(self):
+    """Process options and set variables."""
+    if self.options.device:
+      self.ssh_hostname = self.options.device.hostname
+      self.ssh_username = self.options.device.username
+      self.ssh_port = self.options.device.port
+    self.ssh_private_key = self.options.private_key
+    self.list = self.options.list
+    self.exe = self.options.exe
+    self.pid = self.options.pid
+
+  def Run(self):
+    """Run cros debug."""
+    commandline.RunInsideChroot(self)
+    self.options.Freeze()
+    self._ReadOptions()
+    with remote_access.ChromiumOSDeviceHandler(
+        self.ssh_hostname, port=self.ssh_port, username=self.ssh_username,
+        private_key=self.ssh_private_key) as device:
+      self.board = cros_build_lib.GetBoard(device_board=device.board,
+                                           override_board=self.options.board)
+      logging.info('Board is %s', self.board)
+
+      self.gdb_cmd = [
+          'gdb_remote', '--ssh',
+          '--board', self.board,
+          '--remote', self.ssh_hostname,
+      ]
+      if self.ssh_port:
+        self.gdb_cmd.extend(['--ssh_port', str(self.ssh_port)])
+
+      if not (self.pid or self.exe):
+        cros_build_lib.Die(
+            'Must use --exe or --pid to specify the process to debug.')
+
+      if self.pid:
+        if self.list or self.exe:
+          cros_build_lib.Die(
+              '--list and --exe are disallowed when --pid is used.')
+        self._DebugRunningProcess(self.pid)
+        return
+
+      if not self.exe.startswith('/'):
+        cros_build_lib.Die('--exe must have a full pathname.')
+      logging.debug('Executable path is %s', self.exe)
+      if not device.IsFileExecutable(self.exe):
+        cros_build_lib.Die(
+            'File path "%s" does not exist or is not executable on device %s',
+            self.exe, self.ssh_hostname)
+
+      pids = device.GetRunningPids(self.exe)
+      self._ListProcesses(device, pids)
+
+      if self.list:
+        # If '--list' flag is on, do not launch GDB.
+        return
+
+      if pids:
+        choices = ['Start a new process under GDB']
+        choices.extend(pids)
+        idx = cros_build_lib.GetChoice(
+            'Please select the process pid to debug (select [0] to start a '
+            'new process):', choices)
+        if idx == 0:
+          self._DebugNewProcess()
+        else:
+          self._DebugRunningProcess(pids[idx - 1])
+      else:
+        self._DebugNewProcess()
diff --git a/cli/cros/cros_debug_unittest b/cli/cros/cros_debug_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cli/cros/cros_debug_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cli/cros/cros_debug_unittest.py b/cli/cros/cros_debug_unittest.py
new file mode 100644
index 0000000..921552b
--- /dev/null
+++ b/cli/cros/cros_debug_unittest.py
@@ -0,0 +1,123 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module tests the cros debug command."""
+
+from __future__ import print_function
+
+from chromite.cli import command_unittest
+from chromite.cli.cros import cros_debug
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import remote_access
+
+
+class MockDebugCommand(command_unittest.MockCommand):
+  """Mock out the debug command."""
+  TARGET = 'chromite.cli.cros.cros_debug.DebugCommand'
+  TARGET_CLASS = cros_debug.DebugCommand
+  COMMAND = 'debug'
+  ATTRS = ('_ListProcesses', '_DebugNewProcess', '_DebugRunningProcess')
+
+  def __init__(self, *args, **kwargs):
+    command_unittest.MockCommand.__init__(self, *args, **kwargs)
+
+  def _ListProcesses(self, _inst, *_args, **_kwargs):
+    """Mock out _ListProcesses."""
+
+  def _DebugNewProcess(self, _inst, *_args, **_kwargs):
+    """Mock out _DebugNewProcess."""
+
+  def _DebugRunningProcess(self, _inst, *_args, **_kwargs):
+    """Mock out _DebugRunningProcess."""
+
+  def Run(self, inst):
+    command_unittest.MockCommand.Run(self, inst)
+
+
+class DebugRunThroughTest(cros_test_lib.MockTempDirTestCase):
+  """Test the flow of DebugCommand.run with the debug methods mocked out."""
+
+  DEVICE = '1.1.1.1'
+  EXE = '/path/to/exe'
+  PID = '1'
+
+  def SetupCommandMock(self, cmd_args):
+    """Set up command mock."""
+    self.cmd_mock = MockDebugCommand(
+        cmd_args, base_args=['--cache-dir', self.tempdir])
+    self.StartPatcher(self.cmd_mock)
+
+  def setUp(self):
+    """Patches objects."""
+    self.cmd_mock = None
+    self.device_mock = self.PatchObject(remote_access,
+                                        'ChromiumOSDevice').return_value
+
+  def testMissingExeAndPid(self):
+    """Test that command fails when --exe and --pid are not provided."""
+    self.SetupCommandMock([self.DEVICE])
+    self.assertRaises(cros_build_lib.DieSystemExit, self.cmd_mock.inst.Run)
+
+  def testListDisallowedWithPid(self):
+    """Test that --list is disallowed when --pid is used."""
+    self.SetupCommandMock([self.DEVICE, '--list', '--pid', self.PID])
+    self.assertRaises(cros_build_lib.DieSystemExit, self.cmd_mock.inst.Run)
+
+  def testExeDisallowedWithPid(self):
+    """Test that --exe is disallowed when --pid is used."""
+    self.SetupCommandMock([self.DEVICE, '--exe', self.EXE, '--pid', self.PID])
+    self.assertRaises(cros_build_lib.DieSystemExit, self.cmd_mock.inst.Run)
+
+  def testExeMustBeFullPath(self):
+    """Test that --exe only takes full path as a valid argument."""
+    self.SetupCommandMock([self.DEVICE, '--exe', 'bash'])
+    self.assertRaises(cros_build_lib.DieSystemExit, self.cmd_mock.inst.Run)
+
+  def testDebugProcessWithPid(self):
+    """Test that methods are called correctly when pid is provided."""
+    self.SetupCommandMock([self.DEVICE, '--pid', self.PID])
+    self.cmd_mock.inst.Run()
+    self.assertFalse(self.cmd_mock.patched['_ListProcesses'].called)
+    self.assertFalse(self.cmd_mock.patched['_DebugNewProcess'].called)
+    self.assertTrue(self.cmd_mock.patched['_DebugRunningProcess'].called)
+
+  def testListProcesses(self):
+    """Test that methods are called correctly for listing processes."""
+    self.SetupCommandMock([self.DEVICE, '--exe', self.EXE, '--list'])
+    self.cmd_mock.inst.Run()
+    self.assertTrue(self.cmd_mock.patched['_ListProcesses'].called)
+    self.assertFalse(self.cmd_mock.patched['_DebugNewProcess'].called)
+    self.assertFalse(self.cmd_mock.patched['_DebugRunningProcess'].called)
+
+  def testNoRunningProcess(self):
+    """Test that command starts a new process to debug if none is running."""
+    self.SetupCommandMock([self.DEVICE, '--exe', self.EXE])
+    self.PatchObject(self.device_mock, 'GetRunningPids', return_value=[])
+    self.cmd_mock.inst.Run()
+    self.assertTrue(self.cmd_mock.patched['_ListProcesses'].called)
+    self.assertTrue(self.cmd_mock.patched['_DebugNewProcess'].called)
+    self.assertFalse(self.cmd_mock.patched['_DebugRunningProcess'].called)
+
+  def testDebugNewProcess(self):
+    """Test that user can select zero to start a new process to debug."""
+    self.SetupCommandMock([self.DEVICE, '--exe', self.EXE])
+    self.PatchObject(self.device_mock, 'GetRunningPids', return_value=['1'])
+    mock_prompt = self.PatchObject(cros_build_lib, 'GetChoice', return_value=0)
+    self.cmd_mock.inst.Run()
+    self.assertTrue(mock_prompt.called)
+    self.assertTrue(self.cmd_mock.patched['_ListProcesses'].called)
+    self.assertTrue(self.cmd_mock.patched['_DebugNewProcess'].called)
+    self.assertFalse(self.cmd_mock.patched['_DebugRunningProcess'].called)
+
+  def testDebugRunningProcess(self):
+    """Test that user can select non-zero to debug a running process."""
+    self.SetupCommandMock([self.DEVICE, '--exe', self.EXE])
+    self.PatchObject(self.device_mock, 'GetRunningPids', return_value=['1'])
+    mock_prompt = self.PatchObject(cros_build_lib, 'GetChoice', return_value=1)
+    self.cmd_mock.inst.Run()
+    self.assertTrue(mock_prompt.called)
+    self.assertTrue(self.cmd_mock.patched['_ListProcesses'].called)
+    self.assertFalse(self.cmd_mock.patched['_DebugNewProcess'].called)
+    self.assertTrue(self.cmd_mock.patched['_DebugRunningProcess'].called)
diff --git a/cli/cros/cros_deploy.py b/cli/cros/cros_deploy.py
new file mode 100644
index 0000000..c29929d
--- /dev/null
+++ b/cli/cros/cros_deploy.py
@@ -0,0 +1,121 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""cros deploy: Deploy the packages onto the target device."""
+
+from __future__ import print_function
+
+from chromite.cli import command
+from chromite.cli import deploy
+from chromite.lib import commandline
+from chromite.lib import cros_logging as logging
+
+
+@command.CommandDecorator('deploy')
+class DeployCommand(command.CliCommand):
+  """Deploy the requested packages to the target device.
+
+  This command assumes the requested packages are already built in the
+  chroot. This command needs to run inside the chroot for inspecting
+  the installed packages.
+
+  Note: If the rootfs on your device is read-only, this command
+  remounts it as read-write. If the rootfs verification is enabled on
+  your device, this command disables it.
+  """
+
+  EPILOG = """
+To deploy packages:
+  cros deploy device power_manager cherrypy
+  cros deploy device /path/to/package
+
+To uninstall packages:
+  cros deploy --unmerge cherrypy
+
+For more information of cros build usage:
+  cros build -h
+"""
+
+  # Override base class property to enable stats upload.
+  upload_stats = True
+
+  @classmethod
+  def AddParser(cls, parser):
+    """Add a parser."""
+    super(cls, DeployCommand).AddParser(parser)
+    cls.AddDeviceArgument(parser)
+    parser.add_argument(
+        'packages', help='Packages to install. You can specify '
+        '[category/]package[:slot] or the path to the binary package. '
+        'Use @installed to update all installed packages (requires --update).',
+        nargs='+')
+    parser.add_argument(
+        '--board',
+        help='The board to use. By default it is automatically detected. You '
+        'can override the detected board with this option.')
+    parser.add_argument(
+        '--no-strip', dest='strip', action='store_false', default=True,
+        help='Do not run strip_package to filter out preset paths in the '
+        'package. Stripping removes debug symbol files and reduces the size '
+        'of the package significantly. Defaults to always strip.')
+    parser.add_argument(
+        '--unmerge', dest='emerge', action='store_false', default=True,
+        help='Unmerge requested packages.')
+    parser.add_argument(
+        '--root', default='/',
+        help="Package installation root, e.g. '/' or '/usr/local'"
+        "(default: '/').")
+    parser.add_argument(
+        '--no-clean-binpkg', dest='clean_binpkg', action='store_false',
+        default=True, help='Do not clean outdated binary packages. '
+        ' Defaults to always clean.')
+    parser.add_argument(
+        '--emerge-args', default=None,
+        help='Extra arguments to pass to emerge.')
+    parser.add_argument(
+        '--private-key', type='path', default=None,
+        help='SSH identity file (private key).')
+    parser.add_argument(
+        '--no-ping', dest='ping', action='store_false', default=True,
+        help='Do not ping the device before attempting to connect to it.')
+    parser.add_argument(
+        '--dry-run', '-n', action='store_true',
+        help='Output deployment plan but do not deploy anything.')
+
+    advanced = parser.add_option_group('Advanced options')
+    advanced.add_argument(
+        '--force', action='store_true',
+        help='Ignore sanity checks, just do it.')
+    # TODO(garnold) Make deep and check installed the default behavior.
+    advanced.add_argument(
+        '--update', action='store_true',
+        help='Check installed versions on target (emerge only).')
+    advanced.add_argument(
+        '--deep', action='store_true',
+        help='Install dependencies. Implies --update.')
+    advanced.add_argument(
+        '--deep-rev', action='store_true',
+        help='Install reverse dependencies. Implies --deep.')
+
+  def Run(self):
+    """Run cros deploy."""
+    commandline.RunInsideChroot(self)
+    self.options.Freeze()
+    deploy.Deploy(
+        self.options.device,
+        self.options.packages,
+        board=self.options.board,
+        emerge=self.options.emerge,
+        update=self.options.update,
+        deep=self.options.deep,
+        deep_rev=self.options.deep_rev,
+        clean_binpkg=self.options.clean_binpkg,
+        root=self.options.root,
+        strip=self.options.strip,
+        emerge_args=self.options.emerge_args,
+        ssh_private_key=self.options.private_key,
+        ping=self.options.ping,
+        force=self.options.force,
+        dry_run=self.options.dry_run)
+    logging.info('cros deploy completed successfully.')
diff --git a/cli/cros/cros_deploy_unittest b/cli/cros/cros_deploy_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cli/cros/cros_deploy_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cli/cros/cros_deploy_unittest.py b/cli/cros/cros_deploy_unittest.py
new file mode 100644
index 0000000..b06cd7c
--- /dev/null
+++ b/cli/cros/cros_deploy_unittest.py
@@ -0,0 +1,105 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module tests the cros deploy command."""
+
+from __future__ import print_function
+
+from chromite.cli import command_unittest
+from chromite.cli import deploy
+from chromite.cli.cros import cros_deploy
+from chromite.lib import commandline
+from chromite.lib import cros_test_lib
+
+
+# pylint: disable=protected-access
+
+
+class MockDeployCommand(command_unittest.MockCommand):
+  """Mock out the deploy command."""
+  TARGET = 'chromite.cli.cros.cros_deploy.DeployCommand'
+  TARGET_CLASS = cros_deploy.DeployCommand
+  COMMAND = 'deploy'
+
+  def __init__(self, *args, **kwargs):
+    command_unittest.MockCommand.__init__(self, *args, **kwargs)
+
+  def Run(self, inst):
+    command_unittest.MockCommand.Run(self, inst)
+
+
+class CrosDeployTest(cros_test_lib.MockTempDirTestCase,
+                     cros_test_lib.OutputTestCase):
+  """Test calling `cros deploy` with various arguments.
+
+  These tests just check that arguments as specified on the command
+  line are properly passed through to deploy. Testing the
+  actual update flow should be done in the deploy unit tests.
+  """
+
+  DEVICE = '1.1.1.1'
+  PACKAGES = ['foo', 'bar']
+
+  def SetupCommandMock(self, cmd_args):
+    """Set up command mock."""
+    self.cmd_mock = MockDeployCommand(
+        cmd_args, base_args=['--cache-dir', self.tempdir])
+    self.StartPatcher(self.cmd_mock)
+
+  def setUp(self):
+    """Patches objects."""
+    self.cmd_mock = None
+    self.deploy_mock = self.PatchObject(deploy, 'Deploy', autospec=True)
+    self.run_inside_chroot_mock = self.PatchObject(
+        commandline, 'RunInsideChroot', autospec=True)
+
+  def VerifyDeployParameters(self, device, packages, **kwargs):
+    """Verifies the arguments passed to Deployer.Run().
+
+    This function helps verify that command line specifications are
+    parsed properly.
+
+    Args:
+      device: expected device hostname.
+      packages: expected packages list.
+      kwargs: keyword arguments expected in the call to Deployer.Run().
+          Arguments unspecified here are checked against their default
+          value for `cros deploy`.
+    """
+    deploy_args, deploy_kwargs = self.deploy_mock.call_args
+    self.assertEqual(device, deploy_args[0].hostname)
+    self.assertListEqual(packages, deploy_args[1])
+    # `cros deploy` default options. Must match AddParser().
+    expected_kwargs = {
+        'board': None,
+        'strip': True,
+        'emerge': True,
+        'root': '/',
+        'clean_binpkg': True,
+        'emerge_args': None,
+        'ssh_private_key': None,
+        'ping': True,
+        'dry_run': False,
+        'force': False,
+        'update': False,
+        'deep': False,
+        'deep_rev': False}
+    # Overwrite defaults with any variations in this test.
+    expected_kwargs.update(kwargs)
+    self.assertDictEqual(expected_kwargs, deploy_kwargs)
+
+  def testDefaults(self):
+    """Tests `cros deploy` default values."""
+    self.SetupCommandMock([self.DEVICE] + self.PACKAGES)
+    self.cmd_mock.inst.Run()
+    self.assertTrue(self.run_inside_chroot_mock.called)
+    self.VerifyDeployParameters(self.DEVICE, self.PACKAGES)
+
+  def testDeployError(self):
+    """Tests that DeployErrors are passed through."""
+    with self.OutputCapturer():
+      self.SetupCommandMock([self.DEVICE] + self.PACKAGES)
+      self.deploy_mock.side_effect = deploy.DeployError
+      with self.assertRaises(deploy.DeployError):
+        self.cmd_mock.inst.Run()
diff --git a/cli/cros/cros_flash.py b/cli/cros/cros_flash.py
new file mode 100644
index 0000000..6ef994e
--- /dev/null
+++ b/cli/cros/cros_flash.py
@@ -0,0 +1,155 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Install/copy the image to the device."""
+
+from __future__ import print_function
+
+from chromite.cli import command
+from chromite.cli import flash
+from chromite.lib import commandline
+from chromite.lib import cros_logging as logging
+from chromite.lib import dev_server_wrapper
+
+
+@command.CommandDecorator('flash')
+class FlashCommand(command.CliCommand):
+  """Update the device with an image.
+
+  This command updates the device with the image
+  (ssh://<hostname>:<port>), copies an image to a removable device
+  (usb://<device_path>), or copies an xbuddy path to a local
+  file path (file://<file_path>).
+
+  For device update, it assumes that device is able to accept ssh
+  connections.
+
+  For rootfs partition update, this command may launch a devserver to
+  generate payloads. As a side effect, it may create symlinks in
+  static_dir/others used by the devserver.
+  """
+
+  EPILOG = """
+To update/image the device with the latest locally built image:
+  cros flash device latest
+  cros flash device
+
+To update/image the device with an xbuddy path:
+  cros flash device xbuddy://{local, remote}/<board>/<version>
+
+  Common xbuddy version aliases are 'latest' (alias for 'latest-stable')
+  latest-{dev, beta, stable, canary}, and latest-official.
+
+To update/image the device with a local image path:
+  cros flash device /path/to/image.bin
+
+Examples:
+  cros flash 192.168.1.7 xbuddy://remote/x86-mario/latest-canary
+  cros flash 192.168.1.7 xbuddy://remote/x86-mario-paladin/R32-4830.0.0-rc1
+  cros flash usb:// xbuddy://remote/trybot-x86-mario-paladin/R32-5189.0.0-b100
+  cros flash usb:///dev/sde xbuddy://peppy/latest
+  cros flash file:///~/images xbuddy://peppy/latest
+
+  # For a recovery image
+  cros flash usb:// xbuddy://remote/link/latest-stable/recovery
+
+  For more information and known problems/fixes, please see:
+  http://dev.chromium.org/chromium-os/build/cros-flash
+"""
+
+  # Override base class property to enable stats upload.
+  upload_stats = True
+
+  @classmethod
+  def AddParser(cls, parser):
+    """Add parser arguments."""
+    super(FlashCommand, cls).AddParser(parser)
+    cls.AddDeviceArgument(parser, schemes=[commandline.DEVICE_SCHEME_FILE,
+                                           commandline.DEVICE_SCHEME_SSH,
+                                           commandline.DEVICE_SCHEME_USB])
+    parser.add_argument(
+        'image', nargs='?', default='latest', help="A local path or an xbuddy "
+        "path: xbuddy://{local|remote}/board/version/{image_type} image_type "
+        "can be: 'test', 'dev', 'base', or 'recovery'. Note any strings that "
+        "do not map to a real file path will be converted to an xbuddy path "
+        "i.e., latest, will map to xbuddy://latest.")
+    parser.add_argument(
+        '--clear-cache', default=False, action='store_true',
+        help='Clear the devserver static directory. This deletes all the '
+        'downloaded images and payloads, and also payloads generated by '
+        'the devserver. Default is not to clear.')
+
+    update = parser.add_argument_group('Advanced device update options')
+    update.add_argument(
+        '--board', help='The board to use. By default it is '
+        'automatically detected. You can override the detected board with '
+        'this option.')
+    update.add_argument(
+        '--yes', default=False, action='store_true',
+        help='Answer yes to any prompt. Use with caution.')
+    update.add_argument(
+        '--force', action='store_true',
+        help='Ignore sanity checks, just do it. Implies --yes.')
+    update.add_argument(
+        '--no-reboot', action='store_false', dest='reboot', default=True,
+        help='Do not reboot after update. Default is always reboot.')
+    update.add_argument(
+        '--no-wipe', action='store_false', dest='wipe', default=True,
+        help='Do not wipe the temporary working directory. Default '
+        'is always wipe.')
+    update.add_argument(
+        '--no-stateful-update', action='store_false', dest='stateful_update',
+        help='Do not update the stateful partition on the device. '
+        'Default is always update.')
+    update.add_argument(
+        '--no-rootfs-update', action='store_false', dest='rootfs_update',
+        help='Do not update the rootfs partition on the device. '
+        'Default is always update.')
+    update.add_argument(
+        '--src-image-to-delta', type='path',
+        help='Local path to an image to be used as the base to generate '
+        'delta payloads.')
+    update.add_argument(
+        '--clobber-stateful', action='store_true', default=False,
+        help='Clobber stateful partition when performing update.')
+    update.add_argument(
+        '--no-ping', dest='ping', action='store_false', default=True,
+        help='Do not ping the device before attempting to connect to it.')
+    update.add_argument(
+        '--disable-rootfs-verification', default=False, action='store_true',
+        help='Disable rootfs verification after update is completed.')
+
+    usb = parser.add_argument_group('USB specific options')
+    usb.add_argument(
+        '--install', default=False, action='store_true',
+        help='Install to the USB device using the base disk layout.')
+
+  def Run(self):
+    """Perform the cros flash command."""
+    self.options.Freeze()
+
+    try:
+      flash.Flash(
+          self.options.device,
+          self.options.image,
+          board=self.options.board,
+          install=self.options.install,
+          src_image_to_delta=self.options.src_image_to_delta,
+          rootfs_update=self.options.rootfs_update,
+          stateful_update=self.options.stateful_update,
+          clobber_stateful=self.options.clobber_stateful,
+          reboot=self.options.reboot,
+          wipe=self.options.wipe,
+          ping=self.options.ping,
+          disable_rootfs_verification=self.options.disable_rootfs_verification,
+          clear_cache=self.options.clear_cache,
+          yes=self.options.yes,
+          force=self.options.force,
+          debug=self.options.debug)
+      logging.notice('cros flash completed successfully.')
+    except dev_server_wrapper.ImagePathError:
+      logging.error('To get the latest remote image, please run:\n'
+                    'cros flash --board=%s %s remote/latest',
+                    self.options.board, self.options.device.raw)
+      raise
diff --git a/cli/cros/cros_flash_unittest b/cli/cros/cros_flash_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cli/cros/cros_flash_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cli/cros/cros_flash_unittest.py b/cli/cros/cros_flash_unittest.py
new file mode 100644
index 0000000..a8edf91
--- /dev/null
+++ b/cli/cros/cros_flash_unittest.py
@@ -0,0 +1,108 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module tests the cros flash command."""
+
+from __future__ import print_function
+
+from chromite.cli import command_unittest
+from chromite.cli import flash
+from chromite.cli.cros import cros_flash
+from chromite.lib import commandline
+from chromite.lib import cros_test_lib
+
+
+class MockFlashCommand(command_unittest.MockCommand):
+  """Mock out the flash command."""
+  TARGET = 'chromite.cli.cros.cros_flash.FlashCommand'
+  TARGET_CLASS = cros_flash.FlashCommand
+  COMMAND = 'flash'
+
+  def __init__(self, *args, **kwargs):
+    command_unittest.MockCommand.__init__(self, *args, **kwargs)
+
+  def Run(self, inst):
+    command_unittest.MockCommand.Run(self, inst)
+
+
+class CrosFlashTest(cros_test_lib.MockTempDirTestCase,
+                    cros_test_lib.OutputTestCase):
+  """Test calling `cros flash` with various arguments.
+
+  These tests just check that arguments as specified on the command
+  line are properly passed through to flash.Flash(). Testing the
+  actual update flow is done in the flash.Flash() unit tests.
+  """
+
+  IMAGE = '/path/to/image'
+  DEVICE = '1.1.1.1'
+
+  def SetupCommandMock(self, cmd_args):
+    """Set up command mock."""
+    self.cmd_mock = MockFlashCommand(
+        cmd_args, base_args=['--cache-dir', self.tempdir])
+    self.StartPatcher(self.cmd_mock)
+
+  def setUp(self):
+    """Patches objects."""
+    self.cmd_mock = None
+    self.flash_mock = self.PatchObject(flash, 'Flash', autospec=True)
+
+  def VerifyFlashParameters(self, device, image, **kwargs):
+    """Verifies the arguments passed to flash.Flash().
+
+    This function helps verify that command line specifications are
+    parsed properly and handed to flash.Flash() as expected.
+
+    Args:
+      device: expected device hostname; currently only SSH devices
+          are supported.
+      image: expected image parameter.
+      kwargs: keyword arguments expected in the call to flash.Flash().
+          Arguments unspecified here are checked against their default
+          value for `cros flash`.
+    """
+    flash_args, flash_kwargs = self.flash_mock.call_args
+    self.assertEqual(device, flash_args[0].hostname)
+    self.assertEqual(image, flash_args[1])
+    # `cros flash` default options. Must match the configuration in AddParser().
+    expected_kwargs = {
+        'board': None,
+        'install': False,
+        'src_image_to_delta': None,
+        'rootfs_update': True,
+        'stateful_update': True,
+        'clobber_stateful': False,
+        'reboot': True,
+        'wipe': True,
+        'ping': True,
+        'disable_rootfs_verification': False,
+        'clear_cache': False,
+        'yes': False,
+        'force': False,
+        'debug': False}
+    # Overwrite defaults with any variations in this test.
+    expected_kwargs.update(kwargs)
+    self.assertDictEqual(expected_kwargs, flash_kwargs)
+
+  def testDefaults(self):
+    """Tests `cros flash` default values."""
+    self.SetupCommandMock([self.DEVICE, self.IMAGE])
+    self.cmd_mock.inst.Run()
+    self.VerifyFlashParameters(self.DEVICE, self.IMAGE)
+
+  def testDoesNotEnterChroot(self):
+    """Test that cros flash doesn't enter the chroot."""
+    self.SetupCommandMock([self.DEVICE, self.IMAGE])
+    enter_chroot = self.PatchObject(commandline, 'RunInsideChroot')
+    self.cmd_mock.inst.Run()
+    self.assertFalse(enter_chroot.called)
+
+  def testFlashError(self):
+    """Tests that FlashErrors are passed through."""
+    with self.OutputCapturer():
+      self.SetupCommandMock([self.DEVICE, self.IMAGE])
+      self.flash_mock.side_effect = flash.FlashError
+      with self.assertRaises(flash.FlashError):
+        self.cmd_mock.inst.Run()
diff --git a/cli/cros/cros_lint.py b/cli/cros/cros_lint.py
new file mode 100644
index 0000000..f867d9d
--- /dev/null
+++ b/cli/cros/cros_lint.py
@@ -0,0 +1,203 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Run lint checks on the specified files."""
+
+from __future__ import print_function
+
+import functools
+import multiprocessing
+import os
+import sys
+
+from chromite.cbuildbot import constants
+from chromite.cli import command
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import parallel
+
+
+PYTHON_EXTENSIONS = frozenset(['.py'])
+
+# Note these are defined to keep in line with cpplint.py. Technically, we could
+# include additional ones, but cpplint.py would just filter them out.
+CPP_EXTENSIONS = frozenset(['.cc', '.cpp', '.h'])
+
+
+def _GetProjectPath(path):
+  """Find the absolute path of the git checkout that contains |path|."""
+  if git.FindRepoCheckoutRoot(path):
+    manifest = git.ManifestCheckout.Cached(path)
+    return manifest.FindCheckoutFromPath(path).GetPath(absolute=True)
+  else:
+    # Maybe they're running on a file outside of a checkout.
+    # e.g. cros lint ~/foo.py /tmp/test.py
+    return os.path.dirname(path)
+
+
+def _GetPylintrc(path):
+  """Locate the pylintrc file that applies to |path|."""
+  if not path.endswith('.py'):
+    return
+
+  path = os.path.realpath(path)
+  project_path = _GetProjectPath(path)
+  parent = os.path.dirname(path)
+  while project_path and parent.startswith(project_path):
+    pylintrc = os.path.join(parent, 'pylintrc')
+    if os.path.isfile(pylintrc):
+      break
+    parent = os.path.dirname(parent)
+
+  if project_path is None or not os.path.isfile(pylintrc):
+    pylintrc = os.path.join(constants.SOURCE_ROOT, 'chromite', 'pylintrc')
+
+  return pylintrc
+
+
+def _GetPylintGroups(paths):
+  """Return a dictionary mapping pylintrc files to lists of paths."""
+  groups = {}
+  for path in paths:
+    pylintrc = _GetPylintrc(path)
+    if pylintrc:
+      groups.setdefault(pylintrc, []).append(path)
+  return groups
+
+
+def _GetPythonPath(paths):
+  """Return the set of Python library paths to use."""
+  return sys.path + [
+      # Add the Portage installation inside the chroot to the Python path.
+      # This ensures that scripts that need to import portage can do so.
+      os.path.join(constants.SOURCE_ROOT, 'chroot', 'usr', 'lib', 'portage',
+                   'pym'),
+
+      # Scripts outside of chromite expect the scripts in src/scripts/lib to
+      # be importable.
+      os.path.join(constants.CROSUTILS_DIR, 'lib'),
+
+      # Allow platform projects to be imported by name (e.g. crostestutils).
+      os.path.join(constants.SOURCE_ROOT, 'src', 'platform'),
+
+      # Ideally we'd modify meta_path in pylint to handle our virtual chromite
+      # module, but that's not possible currently.  We'll have to deal with
+      # that at some point if we want `cros lint` to work when the dir is not
+      # named 'chromite'.
+      constants.SOURCE_ROOT,
+
+      # Also allow scripts to import from their current directory.
+  ] + list(set(os.path.dirname(x) for x in paths))
+
+
+# The mapping between the "cros lint" --output-format flag and cpplint.py
+# --output flag.
+CPPLINT_OUTPUT_FORMAT_MAP = {
+    'colorized': 'emacs',
+    'msvs': 'vs7',
+    'parseable': 'emacs',
+}
+
+
+def _LinterRunCommand(cmd, debug, **kwargs):
+  """Run the linter with common RunCommand args set as higher levels expect."""
+  return cros_build_lib.RunCommand(cmd, error_code_ok=True, print_cmd=debug,
+                                   debug_level=logging.NOTICE, **kwargs)
+
+
+def _CpplintFile(path, output_format, debug):
+  """Returns result of running cpplint on |path|."""
+  cmd = [os.path.join(constants.DEPOT_TOOLS_DIR, 'cpplint.py')]
+  if output_format != 'default':
+    cmd.append('--output=%s' % CPPLINT_OUTPUT_FORMAT_MAP[output_format])
+  cmd.append(path)
+  return _LinterRunCommand(cmd, debug)
+
+
+def _PylintFile(path, output_format, debug):
+  """Returns result of running pylint on |path|."""
+  pylint = os.path.join(constants.DEPOT_TOOLS_DIR, 'pylint')
+  pylintrc = _GetPylintrc(path)
+  cmd = [pylint, '--rcfile=%s' % pylintrc]
+  if output_format != 'default':
+    cmd.append('--output-format=%s' % output_format)
+  cmd.append(path)
+  extra_env = {'PYTHONPATH': ':'.join(_GetPythonPath([path]))}
+  return _LinterRunCommand(cmd, debug, extra_env=extra_env)
+
+
+def _BreakoutFilesByLinter(files):
+  """Maps a linter method to the list of files to lint."""
+  map_to_return = {}
+  for f in files:
+    extension = os.path.splitext(f)[1]
+    if extension in PYTHON_EXTENSIONS:
+      pylint_list = map_to_return.setdefault(_PylintFile, [])
+      pylint_list.append(f)
+    elif extension in CPP_EXTENSIONS:
+      cpplint_list = map_to_return.setdefault(_CpplintFile, [])
+      cpplint_list.append(f)
+
+  return map_to_return
+
+
+def _Dispatcher(errors, output_format, debug, linter, path):
+  """Call |linter| on |path| and take care of coalescing exit codes/output."""
+  result = linter(path, output_format, debug)
+  if result.returncode:
+    with errors.get_lock():
+      errors.value += 1
+
+
+@command.CommandDecorator('lint')
+class LintCommand(command.CliCommand):
+  """Run lint checks on the specified files."""
+
+  EPILOG = """
+Right now, only supports cpplint and pylint. We may also in the future
+run other checks (e.g. pyflakes, etc.)
+"""
+
+  # The output formats supported by cros lint.
+  OUTPUT_FORMATS = ('default', 'colorized', 'msvs', 'parseable')
+
+  @classmethod
+  def AddParser(cls, parser):
+    super(LintCommand, cls).AddParser(parser)
+    parser.add_argument('files', help='Files to lint', nargs='*')
+    parser.add_argument('--output', default='default',
+                        choices=LintCommand.OUTPUT_FORMATS,
+                        help='Output format to pass to the linters. Supported '
+                        'formats are: default (no option is passed to the '
+                        'linter), colorized, msvs (Visual Studio) and '
+                        'parseable.')
+
+  def Run(self):
+    files = self.options.files
+    if not files:
+      # Running with no arguments is allowed to make the repo upload hook
+      # simple, but print a warning so that if someone runs this manually
+      # they are aware that nothing was linted.
+      logging.warning('No files provided to lint.  Doing nothing.')
+
+    errors = multiprocessing.Value('i')
+    linter_map = _BreakoutFilesByLinter(files)
+    dispatcher = functools.partial(_Dispatcher, errors,
+                                   self.options.output, self.options.debug)
+
+    # Special case one file as it's common -- faster to avoid parallel startup.
+    if sum([len(x) for _, x in linter_map.iteritems()]) == 1:
+      linter, files = linter_map.items()[0]
+      dispatcher(linter, files[0])
+    else:
+      # Run the linter in parallel on the files.
+      with parallel.BackgroundTaskRunner(dispatcher) as q:
+        for linter, files in linter_map.iteritems():
+          for path in files:
+            q.put([linter, path])
+
+    if errors.value:
+      logging.error('linter found errors in %i files', errors.value)
+      sys.exit(1)
diff --git a/cli/cros/cros_lint_unittest b/cli/cros/cros_lint_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cli/cros/cros_lint_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cli/cros/cros_lint_unittest.py b/cli/cros/cros_lint_unittest.py
new file mode 100644
index 0000000..56bae74
--- /dev/null
+++ b/cli/cros/cros_lint_unittest.py
@@ -0,0 +1,20 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module tests the cros lint command."""
+
+from __future__ import print_function
+
+from chromite.cli.cros import cros_lint
+from chromite.lib import cros_test_lib
+
+
+class LintCommandTest(cros_test_lib.TestCase):
+  """Test class for our LintCommand class."""
+
+  def testOutputArgument(self):
+    """Tests that the --output argument mapping for cpplint is complete."""
+    self.assertEqual(
+        set(cros_lint.LintCommand.OUTPUT_FORMATS),
+        set(cros_lint.CPPLINT_OUTPUT_FORMAT_MAP.keys() + ['default']))
diff --git a/cli/cros/cros_payload.py b/cli/cros/cros_payload.py
new file mode 100644
index 0000000..ebac811
--- /dev/null
+++ b/cli/cros/cros_payload.py
@@ -0,0 +1,213 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""cros payload: Show information about an update payload."""
+
+from __future__ import print_function
+
+import itertools
+import os
+import string
+import sys
+import textwrap
+
+from chromite.cbuildbot import constants
+from chromite.cli import command
+
+# Needed for the dev.host.lib import below.
+sys.path.insert(0, os.path.join(constants.SOURCE_ROOT, 'src', 'platform'))
+
+
+def DisplayValue(key, value):
+  """Print out a key, value pair with values left-aligned."""
+  if value != None:
+    print('%-*s %s' % (24, key + ':', value))
+  else:
+    raise ValueError('Cannot display an empty value.')
+
+
+def DisplayHexData(data, indent=0):
+  """Print out binary data as hex values."""
+  for off in range(0, len(data), 16):
+    chunk = data[off:off + 16]
+    print(' ' * indent +
+          ' '.join('%.2x' % ord(c) for c in chunk) +
+          '   ' * (16 - len(chunk)) +
+          ' | ' +
+          ''.join(c if c in string.printable else '.' for c in chunk))
+
+
+@command.CommandDecorator('payload')
+class PayloadCommand(command.CliCommand):
+  """Show basic information about an update payload.
+
+  This command parses an update payload and displays information from
+  its header and manifest.
+  """
+
+  EPILOG = """
+Example:
+  cros payload show chromeos_6716.0.0_daisy_canary-channel_full_snow-mp-v3.bin
+"""
+
+  def __init__(self, options):
+    # TODO(alliewood)(chromium:454629) update once update_payload is moved
+    # into chromite. google.protobuf may not be available outside the chroot.
+    from dev.host.lib import update_payload
+    self._update_payload = update_payload
+
+    super(PayloadCommand, self).__init__(options)
+    self.payload = None
+
+  @classmethod
+  def AddParser(cls, parser):
+    super(PayloadCommand, cls).AddParser(parser)
+    parser.add_argument(
+        'action', choices=['show'],
+        help='Show information about an update payload.')
+    parser.add_argument(
+        'payload_file', type=file,
+        help='The payload file that you want information from.')
+    parser.add_argument('--list_ops', default=False, action='store_true',
+                        help='List the install operations and their extents.')
+    parser.add_argument('--stats', default=False, action='store_true',
+                        help='Show information about overall input/output.')
+    parser.add_argument('--signatures', default=False, action='store_true',
+                        help='Show signatures stored in the payload.')
+
+  def _DisplayHeader(self):
+    """Show information from the payload header."""
+    header = self.payload.header
+    DisplayValue('Payload version', header.version)
+    DisplayValue('Manifest length', header.manifest_len)
+
+  def _DisplayManifest(self):
+    """Show information from the payload manifest."""
+    manifest = self.payload.manifest
+    DisplayValue('Number of operations', len(manifest.install_operations))
+    DisplayValue('Number of kernel ops',
+                 len(manifest.kernel_install_operations))
+    DisplayValue('Block size', manifest.block_size)
+    DisplayValue('Minor version', manifest.minor_version)
+
+  def _DisplaySignatures(self):
+    """Show information about the signatures from the manifest."""
+    manifest = self.payload.manifest
+    if not manifest.HasField('signatures_offset'):
+      print('No signatures stored in the payload')
+      return
+
+    signature_msg = 'offset=%d' % manifest.signatures_offset
+    if manifest.signatures_size:
+      signature_msg += ' (%d bytes)' % manifest.signatures_size
+    DisplayValue('Signature blob', signature_msg)
+    signatures_blob = self.payload.ReadDataBlob(manifest.signatures_offset,
+                                                manifest.signatures_size)
+
+    from dev.host.lib.update_payload import update_metadata_pb2
+    signatures = update_metadata_pb2.Signatures()
+    signatures.ParseFromString(signatures_blob)
+    print('Payload signatures: (%d entries)' % len(signatures.signatures))
+    for signature in signatures.signatures:
+      print('  version=%s, hex_data: (%d bytes)' %
+            (signature.version if signature.HasField('version') else None,
+             len(signature.data)))
+      DisplayHexData(signature.data, indent=4)
+
+
+  def _DisplayOps(self, name, operations):
+    """Show information about the install operations from the manifest.
+
+    The list shown includes operation type, data offset, data length, source
+    extents, source length, destination extents, and destination length.
+
+    Args:
+      name: The name you want displayed above the operation table.
+      operations: The install_operations object that you want to display
+                  information about.
+    """
+    def _DisplayExtents(extents, name):
+      num_blocks = sum([ext.num_blocks for ext in extents])
+      ext_str = ' '.join(
+          '(%s,%s)' % (ext.start_block, ext.num_blocks) for ext in extents)
+      # Make extent list wrap around at 80 chars.
+      ext_str = '\n      '.join(textwrap.wrap(ext_str, 74))
+      extent_plural = 's' if len(extents) > 1 else ''
+      block_plural = 's' if num_blocks > 1 else ''
+      print('    %s: %d extent%s (%d block%s)' %
+            (name, len(extents), extent_plural, num_blocks, block_plural))
+      print('      %s' % ext_str)
+
+    op_dict = self._update_payload.common.OpType.NAMES
+    print('%s:' % name)
+    for op, op_count in itertools.izip(operations, itertools.count()):
+      print('  %d: %s' % (op_count, op_dict[op.type]))
+      if op.HasField('data_offset'):
+        print('    Data offset: %s' % op.data_offset)
+      if op.HasField('data_length'):
+        print('    Data length: %s' % op.data_length)
+      if len(op.src_extents):
+        _DisplayExtents(op.src_extents, 'Source')
+      if len(op.dst_extents):
+        _DisplayExtents(op.dst_extents, 'Destination')
+
+  def _GetStats(self, manifest):
+    """Returns various statistics about a payload file.
+
+    Returns a dictionary containing the number of blocks read during payload
+    application, the number of blocks written, and the number of seeks done
+    when writing during operation application.
+    """
+    read_blocks = 0
+    written_blocks = 0
+    num_write_seeks = 0
+    for operations in (manifest.install_operations,
+                       manifest.kernel_install_operations):
+      last_ext = None
+      for curr_op in operations:
+        read_blocks += sum([ext.num_blocks for ext in curr_op.src_extents])
+        written_blocks += sum([ext.num_blocks for ext in curr_op.dst_extents])
+        for curr_ext in curr_op.dst_extents:
+          # See if the extent is contiguous with the last extent seen.
+          if last_ext and (curr_ext.start_block !=
+                           last_ext.start_block + last_ext.num_blocks):
+            num_write_seeks += 1
+          last_ext = curr_ext
+
+    if manifest.minor_version == 1:
+      # Rootfs and kernel are written during the filesystem copy in version 1.
+      written_blocks += manifest.old_rootfs_info.size / manifest.block_size
+      written_blocks += manifest.old_kernel_info.size / manifest.block_size
+    # Old and new rootfs and kernel are read once during verification.
+    read_blocks += manifest.old_rootfs_info.size / manifest.block_size
+    read_blocks += manifest.old_kernel_info.size / manifest.block_size
+    read_blocks += manifest.new_rootfs_info.size / manifest.block_size
+    read_blocks += manifest.new_kernel_info.size / manifest.block_size
+    stats = {'read_blocks': read_blocks,
+             'written_blocks': written_blocks,
+             'num_write_seeks': num_write_seeks}
+    return stats
+
+  def _DisplayStats(self, manifest):
+    stats = self._GetStats(manifest)
+    DisplayValue('Blocks read', stats['read_blocks'])
+    DisplayValue('Blocks written', stats['written_blocks'])
+    DisplayValue('Seeks when writing', stats['num_write_seeks'])
+
+  def Run(self):
+    """Parse the update payload and display information from it."""
+    self.payload = self._update_payload.Payload(self.options.payload_file)
+    self.payload.Init()
+    self._DisplayHeader()
+    self._DisplayManifest()
+    if self.options.signatures:
+      self._DisplaySignatures()
+    if self.options.stats:
+      self._DisplayStats(self.payload.manifest)
+    if self.options.list_ops:
+      print()
+      self._DisplayOps('Install operations',
+                       self.payload.manifest.install_operations)
+      self._DisplayOps('Kernel install operations',
+                       self.payload.manifest.kernel_install_operations)
diff --git a/cli/cros/cros_payload_unittest b/cli/cros/cros_payload_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cli/cros/cros_payload_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cli/cros/cros_payload_unittest.py b/cli/cros/cros_payload_unittest.py
new file mode 100644
index 0000000..d99f0e9
--- /dev/null
+++ b/cli/cros/cros_payload_unittest.py
@@ -0,0 +1,260 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module tests the cros payload command."""
+
+from __future__ import print_function
+
+import collections
+import os
+import sys
+
+from chromite.cbuildbot import constants
+from chromite.cli.cros import cros_payload
+from chromite.lib import cros_test_lib
+
+# Needed for the dev.host.lib import below.
+sys.path.insert(0, os.path.join(constants.SOURCE_ROOT, 'src', 'platform'))
+
+# TODO(alliewood)(chromium:454629) update once update_payload is moved
+# into chromite
+from dev.host.lib import update_payload
+from dev.host.lib.update_payload import update_metadata_pb2
+
+class FakePayloadError(Exception):
+  """A generic error when using the FakePayload."""
+
+class FakeOption(object):
+  """Fake options object for testing."""
+
+  def __init__(self, **kwargs):
+    self.list_ops = False
+    self.stats = False
+    self.signatures = False
+    for key, val in kwargs.iteritems():
+      setattr(self, key, val)
+    if not hasattr(self, 'payload_file'):
+      self.payload_file = None
+
+class FakeOp(object):
+  """Fake manifest operation for testing."""
+
+  def __init__(self, src_extents, dst_extents, op_type, **kwargs):
+    self.src_extents = src_extents
+    self.dst_extents = dst_extents
+    self.type = op_type
+    for key, val in kwargs.iteritems():
+      setattr(self, key, val)
+
+  def HasField(self, field):
+    return hasattr(self, field)
+
+class FakeManifest(object):
+  """Fake manifest for testing."""
+
+  def __init__(self):
+    FakeExtent = collections.namedtuple('FakeExtent',
+                                        ['start_block', 'num_blocks'])
+    self.install_operations = [FakeOp([],
+                                      [FakeExtent(1, 1), FakeExtent(2, 2)],
+                                      update_payload.common.OpType.REPLACE_BZ,
+                                      dst_length=3*4096,
+                                      data_offset=1,
+                                      data_length=1)]
+    self.kernel_install_operations = [FakeOp(
+        [FakeExtent(1, 1)],
+        [FakeExtent(x, x) for x in xrange(20)],
+        update_payload.common.OpType.SOURCE_COPY,
+        src_length=4096)]
+    self.block_size = 4096
+    self.minor_version = 4
+    FakePartInfo = collections.namedtuple('FakePartInfo', ['size'])
+    self.old_rootfs_info = FakePartInfo(1 * 4096)
+    self.old_kernel_info = FakePartInfo(2 * 4096)
+    self.new_rootfs_info = FakePartInfo(3 * 4096)
+    self.new_kernel_info = FakePartInfo(4 * 4096)
+    self.signatures_offset = None
+    self.signatures_size = None
+
+  def HasField(self, field_name):
+    """Fake HasField method based on the python members."""
+    return hasattr(self, field_name) and getattr(self, field_name) is not None
+
+class FakePayload(object):
+  """Fake payload for testing."""
+
+  def __init__(self):
+    FakeHeader = collections.namedtuple('FakeHeader',
+                                        ['version', 'manifest_len'])
+    self._header = FakeHeader('111', 222)
+    self.header = None
+    self._manifest = FakeManifest()
+    self.manifest = None
+
+    self._blobs = {}
+    self._signatures = update_metadata_pb2.Signatures()
+
+  def Init(self):
+    """Fake Init that sets header and manifest.
+
+    Until Init() is called, header and manifest remain unavailable to the
+    test.
+    """
+    self.header = self._header
+    self.manifest = self._manifest
+
+  def ReadDataBlob(self, offset, length):
+    """Return the blob that should be present at the offset location."""
+    if not offset in self._blobs:
+      raise FakePayloadError('Requested blob at unknown offset %d' % offset)
+    blob = self._blobs[offset]
+    if len(blob) != length:
+      raise FakePayloadError('Read blob with the wrong length (expect: %d, '
+                             'actual: %d)' % (len(blob), length))
+    return blob
+
+  def AddSignature(self, **kwargs):
+    new_signature = self._signatures.signatures.add()
+    for key, val in kwargs.iteritems():
+      setattr(new_signature, key, val)
+    blob = self._signatures.SerializeToString()
+    self._manifest.signatures_offset = 1234
+    self._manifest.signatures_size = len(blob)
+    self._blobs[self._manifest.signatures_offset] = blob
+
+
+class PayloadCommandTest(cros_test_lib.MockOutputTestCase):
+  """Test class for our PayloadCommand class."""
+
+  def testDisplayValue(self):
+    """Verify that DisplayValue prints what we expect."""
+    with self.OutputCapturer() as output:
+      cros_payload.DisplayValue('key', 'value')
+    stdout = output.GetStdout()
+    self.assertEquals(stdout, 'key:                     value\n')
+
+  def testRun(self):
+    """Verify that Run parses and displays the payload like we expect."""
+    payload_cmd = cros_payload.PayloadCommand(FakeOption(action='show'))
+    self.PatchObject(update_payload, 'Payload', return_value=FakePayload())
+
+    with self.OutputCapturer() as output:
+      payload_cmd.Run()
+
+    stdout = output.GetStdout()
+    expected_out = """Payload version:         111
+Manifest length:         222
+Number of operations:    1
+Number of kernel ops:    1
+Block size:              4096
+Minor version:           4
+"""
+    self.assertEquals(stdout, expected_out)
+
+  def testListOps(self):
+    """Verify that the --list_ops option gives the correct output."""
+    payload_cmd = cros_payload.PayloadCommand(FakeOption(list_ops=True,
+                                                         action='show'))
+    self.PatchObject(update_payload, 'Payload', return_value=FakePayload())
+
+    with self.OutputCapturer() as output:
+      payload_cmd.Run()
+
+    stdout = output.GetStdout()
+    expected_out = """Payload version:         111
+Manifest length:         222
+Number of operations:    1
+Number of kernel ops:    1
+Block size:              4096
+Minor version:           4
+
+Install operations:
+  0: REPLACE_BZ
+    Data offset: 1
+    Data length: 1
+    Destination: 2 extents (3 blocks)
+      (1,1) (2,2)
+Kernel install operations:
+  0: SOURCE_COPY
+    Source: 1 extent (1 block)
+      (1,1)
+    Destination: 20 extents (190 blocks)
+      (0,0) (1,1) (2,2) (3,3) (4,4) (5,5) (6,6) (7,7) (8,8) (9,9) (10,10)
+      (11,11) (12,12) (13,13) (14,14) (15,15) (16,16) (17,17) (18,18) (19,19)
+"""
+    self.assertEquals(stdout, expected_out)
+
+  def testStats(self):
+    """Verify that the --stats option works correctly."""
+    payload_cmd = cros_payload.PayloadCommand(FakeOption(stats=True,
+                                                         action='show'))
+    self.PatchObject(update_payload, 'Payload', return_value=FakePayload())
+
+    with self.OutputCapturer() as output:
+      payload_cmd.Run()
+
+    stdout = output.GetStdout()
+    expected_out = """Payload version:         111
+Manifest length:         222
+Number of operations:    1
+Number of kernel ops:    1
+Block size:              4096
+Minor version:           4
+Blocks read:             11
+Blocks written:          193
+Seeks when writing:      18
+"""
+    self.assertEquals(stdout, expected_out)
+
+  def testEmptySignatures(self):
+    """Verify that the --signatures option works with unsigned payloads."""
+    payload_cmd = cros_payload.PayloadCommand(
+        FakeOption(action='show', signatures=True))
+    self.PatchObject(update_payload, 'Payload', return_value=FakePayload())
+
+    with self.OutputCapturer() as output:
+      payload_cmd.Run()
+
+    stdout = output.GetStdout()
+    expected_out = """Payload version:         111
+Manifest length:         222
+Number of operations:    1
+Number of kernel ops:    1
+Block size:              4096
+Minor version:           4
+No signatures stored in the payload
+"""
+    self.assertEquals(stdout, expected_out)
+
+
+  def testSignatures(self):
+    """Verify that the --signatures option shows the present signatures."""
+    payload_cmd = cros_payload.PayloadCommand(
+        FakeOption(action='show', signatures=True))
+    payload = FakePayload()
+    payload.AddSignature(version=1, data='12345678abcdefgh\x00\x01\x02\x03')
+    payload.AddSignature(data='I am a signature so access is yes.')
+    self.PatchObject(update_payload, 'Payload', return_value=payload)
+
+    with self.OutputCapturer() as output:
+      payload_cmd.Run()
+
+    stdout = output.GetStdout()
+    expected_out = """Payload version:         111
+Manifest length:         222
+Number of operations:    1
+Number of kernel ops:    1
+Block size:              4096
+Minor version:           4
+Signature blob:          offset=1234 (64 bytes)
+Payload signatures: (2 entries)
+  version=1, hex_data: (20 bytes)
+    31 32 33 34 35 36 37 38 61 62 63 64 65 66 67 68 | 12345678abcdefgh
+    00 01 02 03                                     | ....
+  version=None, hex_data: (34 bytes)
+    49 20 61 6d 20 61 20 73 69 67 6e 61 74 75 72 65 | I am a signature
+    20 73 6f 20 61 63 63 65 73 73 20 69 73 20 79 65 |  so access is ye
+    73 2e                                           | s.
+"""
+    self.assertEquals(stdout, expected_out)
diff --git a/cli/cros/cros_pinchrome.py b/cli/cros/cros_pinchrome.py
new file mode 100644
index 0000000..a5999a7
--- /dev/null
+++ b/cli/cros/cros_pinchrome.py
@@ -0,0 +1,518 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""cros pinchrome: Pin chrome to an earlier version."""
+
+from __future__ import print_function
+
+import fnmatch
+import glob
+import os
+import re
+import shutil
+import tempfile
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import repository
+from chromite.cli import command
+from chromite.lib import cros_build_lib
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import path_util
+from chromite.lib import portage_util
+from chromite.scripts import cros_mark_as_stable
+
+
+site_config = config_lib.GetConfig()
+
+
+class UprevNotFound(Exception):
+  """Exception to throw when no Chrome Uprev CL is found."""
+
+
+# git utility functions.
+
+
+def CloneWorkingRepo(dest, url, reference, branch):
+  """Clone a git repository with an existing local copy as a reference.
+
+  Also copy the hooks into the new repository.
+
+  Args:
+    dest: The directory to clone into.
+    url: The URL of the repository to clone.
+    reference: Local checkout to draw objects from.
+    branch: The branch to clone.
+  """
+  repository.CloneGitRepo(dest, url, reference=reference,
+                          single_branch=True, branch=branch)
+  for name in glob.glob(os.path.join(reference, '.git', 'hooks', '*')):
+    newname = os.path.join(dest, '.git', 'hooks', os.path.basename(name))
+    shutil.copyfile(name, newname)
+    shutil.copystat(name, newname)
+
+
+# Portage utilities.
+
+def UpdateManifest(ebuild):
+  """Update the manifest for an ebuild.
+
+  Args:
+    ebuild: Path to the ebuild to update the manifest for.
+  """
+  ebuild = path_util.ToChrootPath(os.path.realpath(ebuild))
+  cros_build_lib.RunCommand(['ebuild', ebuild, 'manifest'], quiet=True,
+                            enter_chroot=True)
+
+
+def SplitPVPath(path):
+  """Utility function to run both SplitEbuildPath and SplitPV.
+
+  Args:
+    path: Ebuild path to run those functions on.
+
+  Returns:
+    The output of SplitPV.
+  """
+  return portage_util.SplitPV(portage_util.SplitEbuildPath(path)[2])
+
+
+def RevertStableEBuild(dirname, rev):
+  """Revert the stable ebuilds for a package back to a particular revision.
+
+  Also add/remove the files in git.
+
+  Args:
+    dirname: Path to the ebuild directory.
+    rev: Revision to revert back to.
+
+  Returns:
+    The name of the ebuild reverted to.
+  """
+  package = os.path.basename(dirname.rstrip(os.sep))
+  pattern = '%s-*.ebuild' % package
+
+  # Get rid of existing stable ebuilds.
+  ebuilds = glob.glob(os.path.join(dirname, pattern))
+  for ebuild in ebuilds:
+    parts = SplitPVPath(ebuild)
+    if parts.version != '9999':
+      git.RmPath(ebuild)
+
+  # Bring back the old stable ebuild.
+  names = git.GetObjectAtRev(dirname, './', rev).split()
+  names = fnmatch.filter(names, pattern)
+  names = [name for name in names
+           if SplitPVPath(os.path.join(dirname, name)).version != '9999']
+  if not names:
+    return None
+  assert len(names) == 1
+  name = names[0]
+  git.RevertPath(dirname, name, rev)
+
+  # Update the manifest.
+  UpdateManifest(os.path.join(dirname, name))
+  manifest_path = os.path.join(dirname, 'Manifest')
+  if os.path.exists(manifest_path):
+    git.AddPath(manifest_path)
+  return os.path.join(dirname, name)
+
+
+def RevertBinhostConf(overlay, conf_files, rev):
+  """Revert binhost config files back to a particular revision.
+
+  Args:
+    overlay: The overlay holding the binhost config files.
+    conf_files: A list of config file names.
+    rev: The revision to revert back to.
+  """
+  binhost_dir = os.path.join(overlay, 'chromeos', 'binhost')
+  for conf_file in conf_files:
+    try:
+      git.RevertPath(os.path.join(binhost_dir, 'target'), conf_file, rev)
+    except Exception as e1:
+      try:
+        git.RevertPath(os.path.join(binhost_dir, 'host'), conf_file, rev)
+      except Exception as e2:
+        raise Exception(str(e1) + '\n' + str(e2))
+
+
def MaskNewerPackages(overlay, ebuilds):
  """Mask ebuild versions newer than the ones passed in.

  This creates a new mask file called chromepin which masks ebuilds newer than
  the ones passed in. To undo the masking, just delete that file. The
  mask file is added with git.

  Args:
    overlay: The overlay that will hold the mask file.
    ebuilds: List of ebuilds to set up masks for.
  """
  lines = ['# Pin chrome by masking more recent versions.']
  for ebuild in ebuilds:
    parts = portage_util.SplitEbuildPath(ebuild)
    # parts[0] is the category and parts[2] the package-version; '>' masks
    # anything strictly newer.
    lines.append('>%s' % os.path.join(parts[0], parts[2]))
  mask_file = os.path.join(overlay, MASK_FILE)
  osutils.WriteFile(mask_file, '\n'.join(lines) + '\n')
  git.AddPath(mask_file)
+
+
# Tools to pick the point right before an uprev to pin chrome to and get
# information about it.

# Matches lines like "<conf file>: updating LATEST_RELEASE_CHROME_BINHOST"
# in an uprev CL's commit message; group 'conf' captures the conf file name.
CONF_RE = re.compile(
    r'^\s*(?P<conf>[^:\n]+): updating LATEST_RELEASE_CHROME_BINHOST',
    flags=re.MULTILINE)


# Interesting paths.
# Local checkout and remote URL of the public Chromium OS overlay.
OVERLAY = os.path.join(constants.SOURCE_ROOT,
                       constants.CHROMIUMOS_OVERLAY_DIR)
OVERLAY_URL = (site_config.params.EXTERNAL_GOB_URL +
               '/chromiumos/overlays/chromiumos-overlay')
# Local checkout and remote URL of the private partner overlay.
PRIV_OVERLAY = os.path.join(constants.SOURCE_ROOT, 'src',
                            'private-overlays',
                            'chromeos-partner-overlay')
PRIV_OVERLAY_URL = (site_config.params.INTERNAL_GOB_URL +
                    '/chromeos/overlays/chromeos-partner-overlay')
# Overlay-relative path of the package.mask file used to pin chrome.
MASK_FILE = os.path.join('profiles', 'default', 'linux',
                         'package.mask', 'chromepin')
+
+
class ChromeUprev(object):
  """A class to represent Chrome uprev CLs in the public overlay."""

  def __init__(self, ebuild_dir, before=None):
    """Construct a Chrome uprev object

    Searches git history backwards for the most recent uprev CL and
    extracts its hash, author date, affected binhost conf files, and the
    stable-ebuild rename (old version -> new version) it performed.

    Args:
      ebuild_dir: Path to the directory with the chrome ebuild in it.
      before: CL to work backwards from.

    Raises:
      UprevNotFound: If no uprev CL exists in the searched history.
      Exception: If the uprev CL found does not rename a chromeos-chrome
                 ebuild.
    """
    # Format includes the hash, commit body including subject, and author date.
    cmd = ['log', '-n', '1', '--author', 'chrome-bot', '--grep',
           cros_mark_as_stable.GIT_COMMIT_SUBJECT,
           '--format=format:%H%n%aD%n%B']
    if before:
      # <sha>~ limits the search to commits strictly before |before|.
      cmd.append(str(before) + '~')
    cmd.append('.')
    log = git.RunGit(ebuild_dir, cmd).output
    if not log.strip():
      raise UprevNotFound('No uprev CL was found')

    # Per the --format above: first line is the hash, second the author
    # date, and the remainder is the commit message.
    self.sha, _, log = log.partition('\n')
    self.date, _, message = log.partition('\n')
    # Conf files whose LATEST_RELEASE_CHROME_BINHOST this CL updated.
    self.conf_files = [m.group('conf') for m in CONF_RE.finditer(message)]

    # An uprev shows up in the CL's diff as a rename ('R' status) of the
    # stable chromeos-chrome ebuild from the old version to the new one.
    entries = git.RawDiff(ebuild_dir, '%s^!' % self.sha)
    for entry in entries:
      if entry.status != 'R':
        continue

      from_path = entry.src_file
      to_path = entry.dst_file

      if (os.path.splitext(from_path)[1] != '.ebuild' or
          os.path.splitext(to_path)[1] != '.ebuild'):
        continue

      self.from_parts = SplitPVPath(from_path)
      self.to_parts = SplitPVPath(to_path)
      if (self.from_parts.package != 'chromeos-chrome' or
          self.to_parts.package != 'chromeos-chrome'):
        continue

      break
    else:
      raise Exception('Failed to find chromeos-chrome uprev in CL %s' %
                      self.sha)
+
+
class UprevList(object):
  """A generator which returns chrome uprev CLs in a particular repository.

  It also keeps track of what CLs have been presented so the one the user
  chose can be retrieved.
  """

  def __init__(self, chrome_path):
    """Initialize the class.

    Args:
      chrome_path: The path to the repository to search.
    """
    self.uprevs = []
    self.last = None
    self.chrome_path = chrome_path

  def __iter__(self):
    return self

  def __next__(self):
    # Python 3 iteration protocol; delegate to the Python 2 style next().
    return self.next()

  def next(self):
    # Continue the walk from the most recently returned uprev, or from
    # HEAD on the first call.
    previous = self.last.sha if self.last else None
    try:
      uprev = ChromeUprev(self.chrome_path, before=previous)
    except UprevNotFound:
      raise StopIteration()
    self.last = uprev
    self.uprevs.append(uprev)
    # Present the pre-uprev version together with the CL's date.
    return '%s (%s)' % (uprev.from_parts.version, uprev.date)
+
+
+# Tools to find the binhost updates in the private overlay which go with the
+# ones in the public overlay.
+
class BinHostUprev(object):
  """Class which represents an uprev CL for the private binhost configs."""

  def __init__(self, sha, log):
    """Record the CL's hash and the binhost conf files it updated.

    Args:
      sha: Hash of the uprev commit.
      log: Full log/diff text of the commit, scanned for conf file names.
    """
    self.sha = sha
    self.conf_files = [match.group('conf')
                       for match in CONF_RE.finditer(log)]
+
+
def FindPrivateConfCL(overlay, pkg_dir):
  """Find the private binhost uprev CL which goes with the public one.

  Walks the binhost directory's history one uprev CL at a time until one
  is found whose diff adds a line mentioning |pkg_dir|.

  Args:
    overlay: Path to the private overlay.
    pkg_dir: What the package directory should contain to be considered a
             match.

  Returns:
    A BinHostUprev object representing the CL, or None if no matching CL
    exists in the history.
  """
  binhost_dir = os.path.join(overlay, 'chromeos', 'binhost')

  # Added ('+') diff lines which mention the package directory.
  plus_package_re = re.compile(r'^\+.*%s' % re.escape(pkg_dir),
                               flags=re.MULTILINE)

  sha = None
  while True:
    cmd = ['log', '-n', '1', '--grep', 'LATEST_RELEASE_CHROME_BINHOST',
           '--format=format:%H']
    if sha:
      # Restart the search strictly before the last candidate.
      cmd.append('%s~' % sha)
    cmd.append('.')
    sha = git.RunGit(binhost_dir, cmd).output.strip()
    if not sha:
      return None

    # -M detects renames so the conf file changes show up cleanly.
    cl = git.RunGit(overlay, ['show', '-M', sha]).output
    if plus_package_re.search(cl):
      return BinHostUprev(sha, cl)
+
+
+# The main attraction.
+
@command.CommandDecorator('pinchrome')
class PinchromeCommand(command.CliCommand):
  # pylint: disable=docstring-too-many-newlines
  """Pin chrome to an earlier revision.


  Pinning procedure:

  When pinning chrome, this script first looks through the history of the
  public overlay repository looking for changes which upreved chrome. It shows
  the user what versions chrome has been at recently and when the uprevs
  happened, and lets the user pick a point in that history to go back to.

  Once an old version has been selected, the script creates a change which
  overwrites the chrome ebuild(s) and binhost config files to what they were
  at that version in the public overlay. It also adds entries to the portage
  mask files to prevent newer versions from being installed.

  Next, the script looks for a version of the binhost config file in the
  private overlay directory which corresponds to the one in the public overlay.
  It creates a change which overwrites the binhost config similar to above.

  For safety, these two changes have CQ-DEPEND added to them and refer to each
  other. The script uploads them, expecting the user to go to their review
  pages and send them on their way.


  Unpinning procedure:

  To unpin, this script simply deletes the entries in the portage mask files
  added above. After that, the Chrome PFQ can uprev chrome normally,
  overwriting the ebuilds and binhost configs.
  """

  def __init__(self, options):
    super(PinchromeCommand, self).__init__(options)

    # Make up a branch name which is unlikely to collide.
    self.branch_name = 'chrome_pin_' + cros_build_lib.GetRandomString()

  @classmethod
  def AddParser(cls, parser):
    super(cls, PinchromeCommand).AddParser(parser)
    parser.add_argument('--unpin', help='Unpin chrome.', default=False,
                        action='store_true')
    parser.add_argument('--bug', help='Used in the "BUG" field of CLs.',
                        required=True)
    parser.add_argument('--branch', default='master',
                        help='The branch to pin chrome on (default master).')
    parser.add_argument('--nowipe', help='Preserve the working directory',
                        default=True, dest='wipe', action='store_false')
    parser.add_argument('--dryrun', action='store_true',
                        help='Prepare pinning CLs but don\'t upload them')

  def CommitMessage(self, subject, cq_depend=None, change_id=None):
    """Generate a commit message

    Args:
      subject: The subject of the message.
      cq_depend: An optional CQ-DEPEND target.
      change_id: An optional change ID.

    Returns:
      The commit message.
    """
    message = [
        '%s' % subject,
        '',
        'DO NOT REVERT THIS CL.',
        'In general, reverting chrome (un)pin CLs does not do what you expect.',
        'Instead, use `cros pinchrome` to generate new CLs.',
        '',
        'BUG=%s' % self.options.bug,
        'TEST=None',
    ]
    if cq_depend:
      message += ['CQ-DEPEND=%s' % cq_depend]
    if change_id:
      # A fixed Change-Id lets us amend the CL later (see pin()) while
      # keeping it the same Gerrit change.
      message += [
          '',
          'Change-Id: %s' % change_id,
      ]

    return '\n'.join(message)

  def unpin(self, work_dir):
    """Unpin chrome."""

    # Work on a fresh clone so the user's checkout is untouched.
    overlay = os.path.join(work_dir, 'overlay')
    print('Setting up working directory...')
    CloneWorkingRepo(overlay, OVERLAY_URL, OVERLAY, self.options.branch)
    print('Done')

    mask_file = os.path.join(overlay, MASK_FILE)
    if not os.path.exists(mask_file):
      raise Exception('Mask file not found. Is Chrome pinned?')

    git.CreateBranch(overlay, self.branch_name, track=True,
                     branch_point='origin/%s' % self.options.branch)

    # Unpinning is just deleting the mask file and uploading that change.
    git.RmPath(mask_file)
    git.Commit(overlay, self.CommitMessage('Chrome: Unpin chrome'))
    git.UploadCL(overlay, OVERLAY_URL, self.options.branch,
                 skip=self.options.dryrun)

  def pin(self, work_dir):
    """Pin chrome."""

    # Work on fresh clones of both overlays.
    overlay = os.path.join(work_dir, 'overlay')
    priv_overlay = os.path.join(work_dir, 'priv_overlay')
    print('Setting up working directory...')
    CloneWorkingRepo(overlay, OVERLAY_URL, OVERLAY, self.options.branch)
    CloneWorkingRepo(priv_overlay, PRIV_OVERLAY_URL, PRIV_OVERLAY,
                     self.options.branch)
    print('Done')

    # Interesting paths.
    chrome_dir = os.path.join(overlay, constants.CHROME_CP)
    other_dirs = [os.path.join(overlay, pkg) for pkg in
                  constants.OTHER_CHROME_PACKAGES]

    # Let the user pick what version to pin chrome to.
    uprev_list = UprevList(chrome_dir)
    choice = cros_build_lib.GetChoice('Versions of chrome to pin to:',
                                      uprev_list, group_size=5)
    pin_version = uprev_list.uprevs[choice]
    commit_subject = ('Chrome: Pin to version %s' %
                      pin_version.from_parts.version)

    # Public branch.
    git.CreateBranch(overlay, self.branch_name, track=True,
                     branch_point='origin/%s' % self.options.branch)

    # The parent of the chosen uprev CL, i.e. the tree right before the
    # uprev landed.
    target_sha = pin_version.sha + '~'
    ebs = [RevertStableEBuild(chrome_dir, target_sha)]
    for pkg_dir in other_dirs:
      ebs.append(RevertStableEBuild(pkg_dir, target_sha))
    RevertBinhostConf(overlay, pin_version.conf_files, target_sha)
    # NOTE(review): chromium.json appears to track the same binhost state
    # as the conf files, so roll it back too -- confirm.
    git.RevertPath(os.path.join(overlay, 'chromeos', 'binhost'),
                   'chromium.json', target_sha)
    # Only mask packages whose stable ebuild was actually reverted.
    MaskNewerPackages(overlay, (eb for eb in ebs if eb))

    pub_cid = git.Commit(overlay, 'Public overlay commit')
    if not pub_cid:
      raise Exception('Don\'t know the commit ID of the public overlay CL.')

    # Find out what package directory the binhost configs should point to.
    binhost_dir = os.path.join(overlay, 'chromeos', 'binhost')
    target_file = os.path.join(binhost_dir, 'target', pin_version.conf_files[0])
    host_file = os.path.join(binhost_dir, 'host', pin_version.conf_files[0])
    conf_file = target_file if os.path.exists(target_file) else host_file
    conf_content = osutils.ReadFile(conf_file)
    match = re.search('/(?P<package_dir>[^/\n]*)/packages', conf_content)
    if not match:
      raise Exception('Failed to parse binhost conf %s' % conf_content.strip())
    pkg_dir = match.group('package_dir')

    # Private branch.
    git.CreateBranch(priv_overlay, self.branch_name, track=True,
                     branch_point='origin/%s' % self.options.branch)

    binhost_uprev = FindPrivateConfCL(priv_overlay, pkg_dir)
    if not binhost_uprev:
      raise Exception('Failed to find private binhost uprev.')
    target_sha = binhost_uprev.sha
    RevertBinhostConf(priv_overlay, binhost_uprev.conf_files, target_sha)
    git.RevertPath(os.path.join(priv_overlay, 'chromeos', 'binhost'),
                   'chrome.json', target_sha)

    # The private CL depends on the public one.
    commit_message = self.CommitMessage(commit_subject, pub_cid)
    priv_cid = git.Commit(priv_overlay, commit_message)
    if not priv_cid:
      raise Exception('Don\'t know the commit ID of the private overlay CL.')

    # Update the commit message on the public overlay CL.
    # NOTE(review): the '*' prefix appears to mark a CQ-DEPEND target on
    # the internal (private) Gerrit host -- confirm.
    commit_message = self.CommitMessage(commit_subject, '*' + priv_cid, pub_cid)
    git.Commit(overlay, commit_message, amend=True)

    # Upload the CLs.
    git.UploadCL(overlay, OVERLAY_URL, self.options.branch,
                 skip=self.options.dryrun)
    git.UploadCL(priv_overlay, PRIV_OVERLAY_URL, self.options.branch,
                 skip=self.options.dryrun)

    print('\n** Both of the changes above need to be submitted for chrome '
          'to be pinned. **\n')

  def Run(self):
    """Run cros pinchrome."""
    self.options.Freeze()
    # When run outside the chroot, put the working dir under the chroot's
    # tmp, presumably so it stays accessible from inside the chroot.
    chroot_tmp = os.path.join(constants.SOURCE_ROOT,
                              constants.DEFAULT_CHROOT_DIR, 'tmp')
    tmp_override = None if cros_build_lib.IsInsideChroot() else chroot_tmp
    work_dir = tempfile.mkdtemp(prefix='pinchrome_', dir=tmp_override)
    try:
      if self.options.unpin:
        self.unpin(work_dir)
      else:
        self.pin(work_dir)
    finally:
      if self.options.wipe:
        osutils.RmDir(work_dir)
      else:
        print('Leaving working directory at %s.' % work_dir)
diff --git a/cli/cros/cros_shell.py b/cli/cros/cros_shell.py
new file mode 100644
index 0000000..4664696
--- /dev/null
+++ b/cli/cros/cros_shell.py
@@ -0,0 +1,191 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""cros shell: Open a remote shell on the target device."""
+
+from __future__ import print_function
+
+import argparse
+
+from chromite.cli import command
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import remote_access
+
+
@command.CommandDecorator('shell')
class ShellCommand(command.CliCommand):
  """Opens a remote shell over SSH on the target device.

  Can be used to start an interactive session or execute a command
  remotely. Interactive sessions can be terminated like a normal SSH
  session using Ctrl+D, `exit`, or `logout`.

  Unlike other `cros` commands, this allows for both SSH key and user
  password authentication. Because a password may be transmitted, the
  known_hosts file is used by default to protect against connecting to
  the wrong device.

  The exit code will be the same as the last executed command.
  """

  EPILOG = """
Examples:
  Start an interactive session:
    cros shell <ip>
    cros shell <user>@<ip>:<port>

  Non-interactive remote command:
    cros shell <ip> -- cat var/log/messages

Quoting can be tricky; the rules are the same as with ssh:
  Special symbols will end the command unless quoted:
    cros shell <ip> -- cat /var/log/messages > log.txt   (saves locally)
    cros shell <ip> -- "cat /var/log/messages > log.txt" (saves remotely)

  One set of quotes is consumed locally, so remote commands that
  require quotes will need double quoting:
    cros shell <ip> -- sh -c "exit 42"    (executes: sh -c exit 42)
    cros shell <ip> -- sh -c "'exit 42'"  (executes: sh -c 'exit 42')
"""

  # Override base class property to enable stats upload.
  upload_stats = True

  def __init__(self, options):
    """Initializes ShellCommand."""
    super(ShellCommand, self).__init__(options)
    # ChromiumOSDevice to connect to.
    self.device = None
    # SSH connection settings.
    self.ssh_hostname = None
    self.ssh_port = None
    self.ssh_username = None
    self.ssh_private_key = None
    # Whether to use the SSH known_hosts file or not.
    self.known_hosts = None
    # How to set SSH StrictHostKeyChecking. Can be 'no', 'yes', or 'ask'. Has
    # no effect if |known_hosts| is not True.
    self.host_key_checking = None
    # The command to execute remotely.
    self.command = None

  @classmethod
  def AddParser(cls, parser):
    """Adds a parser."""
    super(cls, ShellCommand).AddParser(parser)
    cls.AddDeviceArgument(parser)
    parser.add_argument(
        '--private-key', type='path', default=None,
        help='SSH identify file (private key).')
    parser.add_argument(
        '--no-known-hosts', action='store_false', dest='known_hosts',
        default=True, help='Do not use a known_hosts file.')
    parser.add_argument(
        'command', nargs=argparse.REMAINDER,
        help='(optional) Command to execute on the device.')

  def _ReadOptions(self):
    """Processes options and set variables."""
    self.ssh_hostname = self.options.device.hostname
    self.ssh_username = self.options.device.username
    self.ssh_port = self.options.device.port
    self.ssh_private_key = self.options.private_key
    self.known_hosts = self.options.known_hosts
    # By default ask the user if a new key is found. SSH will still reject
    # modified keys for existing hosts without asking the user.
    self.host_key_checking = 'ask'
    # argparse doesn't always handle -- correctly.
    self.command = self.options.command
    if self.command and self.command[0] == '--':
      self.command.pop(0)

  def _ConnectSettings(self):
    """Generates the correct SSH connect settings based on our state."""
    # Allow a couple of password attempts before SSH gives up.
    kwargs = {'NumberOfPasswordPrompts': 2}
    if self.known_hosts:
      # Use the default known_hosts and our current key check setting.
      # NOTE(review): None presumably clears any known_hosts override so
      # SSH falls back to its default file -- confirm against
      # CompileSSHConnectSettings().
      kwargs['UserKnownHostsFile'] = None
      kwargs['StrictHostKeyChecking'] = self.host_key_checking
    return remote_access.CompileSSHConnectSettings(**kwargs)

  def _UserConfirmKeyChange(self):
    """Asks the user whether it's OK that a host key has changed.

    A changed key can be fairly common during Chrome OS development, so
    instead of outright rejecting a modified key like SSH does, this
    provides some common reasons a key may have changed to help the
    user decide whether it was legitimate or not.

    _StartSsh() must have been called before this function so that
    |self.device| is valid.

    Returns:
      True if the user is OK with a changed host key.
    """
    return cros_build_lib.BooleanPrompt(
        prolog='The host ID for "%s" has changed since last connect.\n'
               'Some common reasons for this are:\n'
               ' - Device powerwash.\n'
               ' - Device flash from a USB stick.\n'
               ' - Device flash using "--clobber-stateful".\n'
               'Otherwise, please verify that this is the correct device'
               ' before continuing.' % self.device.hostname)

  def _StartSsh(self):
    """Starts an SSH session or executes a remote command.

    Also creates |self.device| if it doesn't yet exist. It's created
    once and saved so that if the user wants to use the default device,
    we only have to go through the discovery procedure the first time.

    Requires that _ReadOptions() has already been called to provide the
    SSH configuration.

    Returns:
      The SSH return code.

    Raises:
      SSHConnectionError on SSH connect failure.
    """
    # Create the ChromiumOSDevice the first time through this function.
    if not self.device:
      # Set |base_dir| to None to avoid the SSH setup commands which
      # could require the user to enter a password multiple times. We don't
      # need any of the additional functionality that |base_dir| enables.
      self.device = remote_access.ChromiumOSDevice(
          self.ssh_hostname,
          port=self.ssh_port,
          username=self.ssh_username,
          base_dir=None,
          private_key=self.ssh_private_key,
          ping=False)
    # An empty |self.command| starts an interactive session; the exit code
    # of the last remote command becomes our return value.
    return self.device.BaseRunCommand(
        self.command,
        connect_settings=self._ConnectSettings(),
        error_code_ok=True,
        mute_output=False,
        redirect_stderr=True,
        capture_output=False).returncode

  def Run(self):
    """Runs `cros shell`."""
    self.options.Freeze()
    self._ReadOptions()
    try:
      return self._StartSsh()
    except remote_access.SSHConnectionError as e:
      # Handle a mismatched host key; mismatched keys are a bit of a pain to
      # fix manually since `ssh-keygen -R` doesn't work within the chroot.
      if e.IsKnownHostsMismatch():
        # The full SSH error message has extra info for the user.
        logging.warning('\n%s', e)
        if self._UserConfirmKeyChange():
          remote_access.RemoveKnownHost(self.device.hostname)
          # The user already OK'd so we can skip the additional SSH check.
          self.host_key_checking = 'no'
          return self._StartSsh()
        else:
          return 1
      raise
diff --git a/cli/cros/cros_shell_unittest b/cli/cros/cros_shell_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cli/cros/cros_shell_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cli/cros/cros_shell_unittest.py b/cli/cros/cros_shell_unittest.py
new file mode 100644
index 0000000..e09ccd3
--- /dev/null
+++ b/cli/cros/cros_shell_unittest.py
@@ -0,0 +1,153 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests the `cros shell` command."""
+
+from __future__ import print_function
+
+from chromite.cli import command_unittest
+from chromite.cli.cros import cros_shell
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import remote_access
+
+
class _KeyMismatchError(remote_access.SSHConnectionError):
  """Test exception to fake a key mismatch."""
  def IsKnownHostsMismatch(self):
    # Always report a known_hosts mismatch so tests can exercise the
    # key-change confirmation flow in ShellCommand.Run().
    return True
+
+
class MockShellCommand(command_unittest.MockCommand):
  """Mock out the `cros shell` command."""
  # Import path and class of the command under test, plus its CLI name.
  TARGET = 'chromite.cli.cros.cros_shell.ShellCommand'
  TARGET_CLASS = cros_shell.ShellCommand
  COMMAND = 'shell'
+
+
class ShellTest(cros_test_lib.MockTempDirTestCase,
                cros_test_lib.OutputTestCase):
  """Test the flow of ShellCommand.run with the SSH methods mocked out."""

  # Arbitrary address used as the positional device argument in tests.
  DEVICE_IP = '1.1.1.1'

  def SetupCommandMock(self, cmd_args):
    """Sets up the `cros shell` command mock."""
    self.cmd_mock = MockShellCommand(
        cmd_args, base_args=['--cache-dir', self.tempdir])
    self.StartPatcher(self.cmd_mock)

  def setUp(self):
    """Patches objects."""
    self.cmd_mock = None

    # Patch any functions we want to control that may get called by a test.
    self.mock_remove_known_host = self.PatchObject(
        remote_access, 'RemoveKnownHost', autospec=True)
    self.mock_prompt = self.PatchObject(
        cros_build_lib, 'BooleanPrompt', autospec=True)

    self.mock_device = self.PatchObject(
        remote_access, 'ChromiumOSDevice', autospec=True).return_value
    self.mock_device.hostname = self.DEVICE_IP
    self.mock_device.connection_type = None
    self.mock_base_run_command = self.mock_device.BaseRunCommand
    # A plain CommandResult stands in for a successful SSH run.
    self.mock_base_run_command.return_value = cros_build_lib.CommandResult()

  def testSshInteractive(self):
    """Tests flow for an interactive session.

    User should not be prompted for input, and SSH should be attempted
    once.
    """
    self.SetupCommandMock([self.DEVICE_IP])
    self.cmd_mock.inst.Run()

    self.assertEqual(self.mock_base_run_command.call_count, 1)
    # Make sure that BaseRunCommand() started an interactive session (no cmd).
    self.assertEqual(self.mock_base_run_command.call_args[0][0], [])
    self.assertFalse(self.mock_prompt.called)

  def testSshNonInteractiveSingleArg(self):
    """Tests a non-interactive command as a single argument.

    Example: cros shell 1.1.1.1 "ls -l /etc"
    """
    self.SetupCommandMock([self.DEVICE_IP, 'ls -l /etc'])
    self.cmd_mock.inst.Run()

    self.assertEqual(self.mock_base_run_command.call_args[0][0],
                     ['ls -l /etc'])

  def testSshNonInteractiveMultipleArgs(self):
    """Tests a non-interactive command as multiple arguments with "--".

    Example: cros shell 1.1.1.1 -- ls -l /etc
    """
    self.SetupCommandMock([self.DEVICE_IP, '--', 'ls', '-l', '/etc'])
    self.cmd_mock.inst.Run()

    self.assertEqual(self.mock_base_run_command.call_args[0][0],
                     ['ls', '-l', '/etc'])

  def testSshReturnValue(self):
    """Tests that `cros shell` returns the exit code of BaseRunCommand()."""
    self.SetupCommandMock([self.DEVICE_IP])
    self.mock_base_run_command.return_value.returncode = 42

    self.assertEqual(self.cmd_mock.inst.Run(), 42)

  def testSshKeyChangeOK(self):
    """Tests a host SSH key changing but the user giving it the OK.

    User should be prompted, SSH should be attempted twice, and host
    keys should be removed.
    """
    self.SetupCommandMock([self.DEVICE_IP])
    error_message = 'Test error message'
    # BaseRunCommand() gives a key mismatch error the first time only.
    self.mock_base_run_command.side_effect = [_KeyMismatchError(error_message),
                                              cros_build_lib.CommandResult()]
    # User chooses to continue.
    self.mock_prompt.return_value = True

    with self.OutputCapturer():
      self.cmd_mock.inst.Run()

    self.AssertOutputContainsWarning(error_message, check_stderr=True)
    self.assertTrue(self.mock_prompt.called)
    self.assertEqual(self.mock_base_run_command.call_count, 2)
    self.assertTrue(self.mock_remove_known_host.called)

  def testSshKeyChangeAbort(self):
    """Tests a host SSH key changing and the user canceling.

    User should be prompted, but SSH should only be attempted once, and
    no host keys should be removed.
    """
    self.SetupCommandMock([self.DEVICE_IP])
    self.mock_base_run_command.side_effect = _KeyMismatchError()
    # User chooses to abort.
    self.mock_prompt.return_value = False

    self.cmd_mock.inst.Run()

    self.assertTrue(self.mock_prompt.called)
    self.assertEqual(self.mock_base_run_command.call_count, 1)
    self.assertFalse(self.mock_remove_known_host.called)

  def testSshConnectError(self):
    """Tests an SSH error other than a host key mismatch.

    User should not be prompted, SSH should only be attempted once, and
    no host keys should be removed.
    """
    self.SetupCommandMock([self.DEVICE_IP])
    self.mock_base_run_command.side_effect = remote_access.SSHConnectionError()

    self.assertRaises(remote_access.SSHConnectionError, self.cmd_mock.inst.Run)

    self.assertFalse(self.mock_prompt.called)
    self.assertEqual(self.mock_base_run_command.call_count, 1)
    self.assertFalse(self.mock_remove_known_host.called)
diff --git a/cli/cros/cros_stage.py b/cli/cros/cros_stage.py
new file mode 100644
index 0000000..c4a1ce3
--- /dev/null
+++ b/cli/cros/cros_stage.py
@@ -0,0 +1,277 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Stage a custom image on a Moblab device or in Google Storage."""
+
+from __future__ import print_function
+
+import httplib
+import os
+import re
+import shutil
+import urllib2
+
+from chromite.cbuildbot import commands
+from chromite.cli import command
+from chromite.cli import flash
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import dev_server_wrapper
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import remote_access
+
+
# Directory the Moblab devserver serves images from.
MOBLAB_STATIC_DIR = '/mnt/moblab/static'
# Scratch area under the static dir where staged images are copied.
MOBLAB_TMP_DIR = os.path.join(MOBLAB_STATIC_DIR, 'tmp')
BOARD_BUILD_DIR = 'usr/local/build'
# URL template asking the Moblab devserver to stage the listed artifacts
# from a path already copied onto the device.
DEVSERVER_STAGE_URL = ('http://%(moblab)s:8080/stage?local_path=%(staged_dir)s'
                       '&artifacts=full_payload,stateful,test_suites,'
                       'control_files,autotest_packages,'
                       'autotest_server_package')
# Naming scheme for custom (non-official) builds staged by this command.
CUSTOM_BUILD_NAME = '%(board)s-custom/%(build)s'
+
+
class CustomImageStagingException(Exception):
  """Thrown when there is an error staging a custom image."""
+
+
def GSURLRegexHelper(gsurl):
  """Helper to do regex matching on a Google Storage URL

  Args:
    gsurl: Google Storage URL to match.

  Returns:
    Regex Match Object with groups(board, type, & build_name) or None if there
    was no match.
  """
  # Expected layout: gs://<bucket>/.../[trybot-]<board>-<type>/<build_name>,
  # where <build_name> looks like R44-7077.0.0 (suffixes may use digits,
  # dots, 'a', 'b', and '-').
  pattern = (r'gs://.*/(trybot-)?(?P<board>[\w-]+)-(?P<type>\w+)/'
             r'(?P<build_name>R\d+-[\d.ab-]+)')
  return re.match(pattern, gsurl)
+
+
+@command.CommandDecorator('stage')
+class StageCommand(command.CliCommand):
+  """Remotely stages an image onto a MobLab device or into Google Storage.
+
+  The image to be staged may be a local custom image built in the chroot or an
+  official image in Google Storage. The test binaries will always come from the
+  local build root regardless of the image source.
+
+  This script generates/copies the update payloads and test binaries required.
+  It then stages them on the Moblab's devserver or copies them into the
+  specified Google Storage Bucket.
+
+  The image name to then use for testing is outputted at the end of this
+  script.
+  """
+
+  EPILOG = """
+To stage a local image path onto a moblab device:
+  cros stage /path/to/board/build/chromiumos-test-image.bin <moblab>
+
+To stage an official image with custom test binaries onto a moblab device:
+  cros stage <gs_image_dir> <moblab>
+
+To stage a local image path into a Google Storage Bucket:
+  cros stage /path/to/board/build/chromiumos-test-image.bin <gs_base_path>
+    --boto_file=<boto_file_path>
+
+NOTES:
+* The autotest bits used to test this image will be the latest in your
+  build sysroot! I.E. if you emerge new autotest changes after producing the
+  image you wish to stage, there is a chance that the changes will not match.
+* The custom image will only stay on the Moblab device for 24 hours at which
+  point it will be wiped.
+"""
+
+  @classmethod
+  def AddParser(cls, parser):
+    """Add parser arguments."""
+    super(StageCommand, cls).AddParser(parser)
+    parser.add_argument(
+        'image', nargs='?', default='latest', help='Path to image we want to '
+        'stage. If a local path, it should be in the format of '
+        '/.../.../board/build/<image>.bin . If a Google Storage path it should'
+        'be in the format of '
+        'gs://<bucket-name>/<board>-<builder type>/<build name>')
+    parser.add_argument(
+        'remote', help='MobLab device that has password-less SSH set up via '
+        'the chroot already. Or Google Storage Bucket in the form of '
+        'gs://<bucket-name>/')
+    parser.add_argument(
+        '--boto_file', dest='boto_file', default=None,
+        help='Path to boto file to use when uploading to Google Storage. If '
+        'none the default chroot boto file is used.')
+
+  def __init__(self, options):
+    """Initializes cros stage."""
+    super(StageCommand, self).__init__(options)
+    self.board = None
+    # Determine if we are staging a local custom image or an official image.
+    if self.options.image.startswith('gs://'):
+      self._remote_image = True
+      self.staged_image_name = self._GenerateImageNameFromGSUrl(
+          self.options.image)
+    else:
+      self._remote_image = False
+      self.staged_image_name = self._GenerateImageNameFromLocalPath(
+          self.options.image)
+    self.stage_directory = os.path.join(MOBLAB_TMP_DIR, self.staged_image_name)
+
+    # Determine if the staging destination is a Moblab or Google Storage.
+    if self.options.remote.startswith('gs://'):
+      self._remote_is_moblab = False
+    else:
+      self._remote_is_moblab = True
+
+  def _GenerateImageNameFromLocalPath(self, image):
+    """Generate the name as which |image| will be staged onto Moblab.
+
+    Args:
+      image: Path to image we want to stage. It should be in the format of
+             /.../.../board/build/<image>.bin
+
+    Returns:
+      Name the image will be staged as.
+
+    Raises:
+      CustomImageStagingException: If the image name supplied is not valid.
+    """
+    realpath = osutils.ExpandPath(image)
+    if not realpath.endswith('.bin'):
+      raise CustomImageStagingException(
+          'Image path: %s does not end in .bin !' % realpath)
+    build_name = os.path.basename(os.path.dirname(realpath))
+    # Custom builds are name with the suffix of '-a1' but the build itself
+    # is missing this suffix in its filesystem. Therefore lets rename the build
+    # name to match the name inside the build.
+    if build_name.endswith('-a1'):
+      build_name = build_name[:-len('-a1')]
+
+    self.board = os.path.basename(os.path.dirname(os.path.dirname(realpath)))
+    return CUSTOM_BUILD_NAME % dict(board=self.board, build=build_name)
+
+  def _GenerateImageNameFromGSUrl(self, image):
+    """Generate the name as which |image| will be staged onto Moblab.
+
+    Args:
+      image: GS Url to the image we want to stage. It should be in the format
+             gs://<bucket-name>/<board>-<builder type>/<build name>
+
+    Returns:
+      Name the image will be staged as.
+
+    Raises:
+      CustomImageStagingException: If the image name supplied is not valid.
+    """
+    match = GSURLRegexHelper(image)
+    if not match:
+      raise CustomImageStagingException(
+          'Image URL: %s is improperly defined!' % image)
+    self.board = match.group('board')
+    return CUSTOM_BUILD_NAME % dict(board=self.board,
+                                    build=match.group('build_name'))
+
+  def _DownloadPayloads(self, tempdir):
+    """Download from GS the update payloads we require.
+
+    Args:
+      tempdir: Temporary Directory to store the downloaded payloads.
+    """
+    gs_context = gs.GSContext(boto_file=self.options.boto_file)
+    gs_context.Copy(os.path.join(self.options.image, 'stateful.tgz'), tempdir)
+    gs_context.Copy(os.path.join(self.options.image, '*_full*'), tempdir)
+
+  def _GeneratePayloads(self, tempdir):
+    """Generate the update payloads we require.
+
+    Args:
+      tempdir: Temporary Directory to store the generated payloads.
+    """
+    dev_server_wrapper.GetUpdatePayloadsFromLocalPath(
+        self.options.image, tempdir, static_dir=flash.DEVSERVER_STATIC_DIR)
+    rootfs_payload = os.path.join(tempdir, dev_server_wrapper.ROOTFS_FILENAME)
+    # Devservers will look for a file named *_full_*.
+    shutil.move(rootfs_payload, os.path.join(tempdir, 'update_full_dev.bin'))
+
+  def _GenerateTestBits(self, tempdir):
+    """Generate and transfer to the Moblab the test bits we require.
+
+    Args:
+      tempdir: Temporary Directory to store the generated test artifacts.
+    """
+    build_root = cros_build_lib.GetSysroot(board=self.board)
+    cwd = os.path.join(build_root, BOARD_BUILD_DIR)
+    tarball_funcs = [commands.BuildAutotestControlFilesTarball,
+                     commands.BuildAutotestPackagesTarball,
+                     commands.BuildAutotestTestSuitesTarball,
+                     commands.BuildAutotestServerPackageTarball]
+    for tarball_func in tarball_funcs:
+      tarball_func(build_root, cwd, tempdir)
+
  def _StageOnMoblab(self, tempdir):
    """Stage the generated payloads and test bits on a moblab device.

    Copies the artifacts over to a temporary staging directory on the device,
    asks the Moblab devserver to stage them into its static directory, and
    always cleans up the temporary staging directory afterwards.

    Args:
      tempdir: Temporary Directory that contains the generated payloads and
               test bits.
    """
    with remote_access.ChromiumOSDeviceHandler(self.options.remote) as device:
      device.RunCommand(['mkdir', '-p', self.stage_directory])
      for f in os.listdir(tempdir):
        device.CopyToDevice(os.path.join(tempdir, f), self.stage_directory)
      # The devserver runs as the moblab user, so it must own the artifacts.
      device.RunCommand(['chown', '-R', 'moblab:moblab',
                         MOBLAB_TMP_DIR])
      # Delete this image from the Devserver in case it was previously staged.
      device.RunCommand(['rm', '-rf', os.path.join(MOBLAB_STATIC_DIR,
                                                   self.staged_image_name)])
      try:
        stage_url = DEVSERVER_STAGE_URL % dict(moblab=self.options.remote,
                                               staged_dir=self.stage_directory)
        res = urllib2.urlopen(stage_url).read()
      except (urllib2.HTTPError, httplib.HTTPException, urllib2.URLError) as e:
        # Best effort: report the failure but do not raise.
        logging.error('Unable to stage artifacts on moblab. Error: %s', e)
      else:
        # The devserver replies with the literal string 'Success' on success.
        if res == 'Success':
          logging.info('\n\nStaging Completed!')
          logging.info('Image is staged on Moblab as %s',
                       self.staged_image_name)
        else:
          logging.info('Staging failed. Error Message: %s', res)
      finally:
        # Always remove the temporary staging directory from the device.
        device.RunCommand(['rm', '-rf', self.stage_directory])
+
+  def _StageOnGS(self, tempdir):
+    """Stage the generated payloads and test bits into a Google Storage bucket.
+
+    Args:
+      tempdir: Temporary Directory that contains the generated payloads and
+               test bits.
+    """
+    gs_context = gs.GSContext(boto_file=self.options.boto_file)
+    for f in os.listdir(tempdir):
+      gs_context.CopyInto(os.path.join(tempdir, f), os.path.join(
+          self.options.remote, self.staged_image_name))
+    logging.info('\n\nStaging Completed!')
+    logging.info('Image is staged in Google Storage as %s',
+                 self.staged_image_name)
+
+  def Run(self):
+    """Perform the cros stage command."""
+    logging.info('Attempting to stage: %s as Image: %s at Location: %s',
+                 self.options.image, self.staged_image_name,
+                 self.options.remote)
+    osutils.SafeMakedirsNonRoot(flash.DEVSERVER_STATIC_DIR)
+
+    with osutils.TempDir() as tempdir:
+      if self._remote_image:
+        self._DownloadPayloads(tempdir)
+      else:
+        self._GeneratePayloads(tempdir)
+      self._GenerateTestBits(tempdir)
+      if self._remote_is_moblab:
+        self._StageOnMoblab(tempdir)
+      else:
+        self._StageOnGS(tempdir)
diff --git a/cli/cros/cros_stage_unittest b/cli/cros/cros_stage_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cli/cros/cros_stage_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cli/cros/cros_stage_unittest.py b/cli/cros/cros_stage_unittest.py
new file mode 100644
index 0000000..4cd7e75
--- /dev/null
+++ b/cli/cros/cros_stage_unittest.py
@@ -0,0 +1,40 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module tests the cros stage command and subfunctions."""
+
+from __future__ import print_function
+
+from chromite.cli.cros import cros_stage
+from chromite.lib import cros_test_lib
+
+
class GSURLRegexHelperTest(cros_test_lib.TestCase):
  """Test class for the GSURLRegexHelper function."""

  def testCorrectInputs(self):
    """Ensure expected inputs work."""
    # Each entry is (url, expected board, expected build name).
    gsurls = [('gs://chromeos-image-archive/peppy-release/R42-6744.0.0',
               'peppy', 'R42-6744.0.0'),
              ('gs://chromeos-image-archive/peppy-release/R42-6744.0.0/',
               'peppy', 'R42-6744.0.0'),
              ('gs://chromeos-image-archive/trybot-peppy-release/'
               'R42-6744.0.0-b77/', 'peppy', 'R42-6744.0.0-b77')]
    for (gsurl, board, build) in gsurls:
      match = cros_stage.GSURLRegexHelper(gsurl)
      self.assertNotEqual(match, None)
      self.assertEqual(match.group('board'), board)
      self.assertEqual(match.group('build_name'), build)

  def testBadInputs(self):
    """Ensure unexpected inputs don't work."""
    # Fix: the original list was missing a comma after the '6744.0.0/' entry,
    # so Python silently concatenated it with the following literal and the
    # 'gs://.../peppy/R42-6744.0.0/' case was never actually exercised.
    gsurls = ['gs://chromeos-image-archive/',
              'gs://chromeos-image-archive/peppy-release/',
              'gs://chromeos-image-archive/peppy-release/6744.0.0/',
              'gs://chromeos-image-archive/peppy/R42-6744.0.0/',
              'gs://chromeos-image-archive/peppy-release/LATEST',
              'http://my_server_name:8080/peppy-release/R42-6744.0.0']
    for gsurl in gsurls:
      match = cros_stage.GSURLRegexHelper(gsurl)
      self.assertEqual(match, None)
diff --git a/cli/cros/lint.py b/cli/cros/lint.py
new file mode 100644
index 0000000..f54c8a6
--- /dev/null
+++ b/cli/cros/lint.py
@@ -0,0 +1,467 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This module is not automatically loaded by the `cros` helper.  The filename
+# would need a "cros_" prefix to make that happen.  It lives here so that it
+# is alongside the cros_lint.py file.
+#
+# For msg namespaces, the 9xxx should generally be reserved for our own use.
+
+"""Additional lint modules loaded by pylint.
+
+This is loaded by pylint directly via its pylintrc file:
+  load-plugins=chromite.cli.cros.lint
+
+Then pylint will import the register function and call it.  So we can have
+as many/few checkers as we want in this one module.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+
+from pylint.checkers import BaseChecker
+from pylint.interfaces import IAstroidChecker
+
+
+# pylint: disable=too-few-public-methods
+
+
class DocStringChecker(BaseChecker):
  """PyLint AST based checker to verify PEP 257 compliance

  See our style guide for more info:
  http://dev.chromium.org/chromium-os/python-style-guidelines#TOC-Describing-arguments-in-docstrings

  """
  # TODO: See about merging with the pep257 project:
  # https://github.com/GreenSteam/pep257

  __implements__ = IAstroidChecker

  # Empty marker classes; pylint uses them as help anchors for each message.
  # pylint: disable=class-missing-docstring,multiple-statements
  class _MessageCP001(object): pass
  class _MessageCP002(object): pass
  class _MessageCP003(object): pass
  class _MessageCP004(object): pass
  class _MessageCP005(object): pass
  class _MessageCP006(object): pass
  class _MessageCP007(object): pass
  class _MessageCP008(object): pass
  class _MessageCP009(object): pass
  class _MessageCP010(object): pass
  class _MessageCP011(object): pass
  class _MessageCP012(object): pass
  class _MessageCP013(object): pass
  class _MessageCP014(object): pass
  class _MessageCP015(object): pass
  # pylint: enable=class-missing-docstring,multiple-statements

  name = 'doc_string_checker'
  priority = -1
  # Template used by several messages to show where in the docstring the
  # problem is: a line offset plus the offending line itself.
  MSG_ARGS = 'offset:%(offset)i: {%(line)s}'
  msgs = {
      'C9001': ('Modules should have docstrings (even a one liner)',
                ('module-missing-docstring'), _MessageCP001),
      'C9002': ('Classes should have docstrings (even a one liner)',
                ('class-missing-docstring'), _MessageCP002),
      'C9003': ('Trailing whitespace in docstring'
                ': %s' % MSG_ARGS,
                ('docstring-trailing-whitespace'), _MessageCP003),
      'C9004': ('Leading whitespace in docstring (excess or missing)'
                ': %s' % MSG_ARGS,
                ('docstring-leading-whitespace'), _MessageCP004),
      'C9005': ('Closing triple quotes should not be cuddled',
                ('docstring-cuddled-quotes'), _MessageCP005),
      'C9006': ('Section names should be preceded by one blank line'
                ': %s' % MSG_ARGS,
                ('docstring-section-newline'), _MessageCP006),
      'C9007': ('Section names should be "Args:", "Returns:", "Yields:", '
                'and "Raises:": %s' % MSG_ARGS,
                ('docstring-section-name'), _MessageCP007),
      'C9008': ('Sections should be in the order: Args, Returns/Yields, Raises',
                ('docstring-section-order'), _MessageCP008),
      'C9009': ('First line should be a short summary',
                ('docstring-first-line'), _MessageCP009),
      'C9010': ('Not all args mentioned in doc string: |%(arg)s|',
                ('docstring-missing-args'), _MessageCP010),
      'C9011': ('Variable args/keywords are named *args/**kwargs, not %(arg)s',
                ('docstring-misnamed-args'), _MessageCP011),
      'C9012': ('Incorrectly formatted Args section: %(arg)s',
                ('docstring-arg-spacing'), _MessageCP012),
      'C9013': ('Too many blank lines in a row: %s' % MSG_ARGS,
                ('docstring-too-many-newlines'), _MessageCP013),
      'C9014': ('Second line should be blank',
                ('docstring-second-line-blank'), _MessageCP014),
      'C9015': ('Section indentation is incorrect: %s' % MSG_ARGS,
                ('docstring-section-indent'), _MessageCP015),
  }
  options = ()

  # TODO: Should we enforce Examples?
  VALID_SECTIONS = ('Args', 'Returns', 'Yields', 'Raises',)

  def visit_function(self, node):
    """Verify function docstrings"""
    if node.doc:
      lines = node.doc.split('\n')
      self._check_common(node, lines)
      self._check_last_line_function(node, lines)
      self._check_section_lines(node, lines)
      self._check_all_args_in_doc(node, lines)
      self._check_func_signature(node)
    else:
      # This is what C0111 already does for us, so ignore.
      pass

  def visit_module(self, node):
    """Verify module docstrings"""
    if node.doc:
      self._check_common(node)
    else:
      # Ignore stub __init__.py files.
      if os.path.basename(node.file) == '__init__.py':
        return
      self.add_message('C9001', node=node)

  def visit_class(self, node):
    """Verify class docstrings"""
    if node.doc:
      self._check_common(node)
    else:
      self.add_message('C9002', node=node, line=node.fromlineno)

  def _check_common(self, node, lines=None):
    """Common checks we enforce on all docstrings"""
    if lines is None:
      lines = node.doc.split('\n')

    funcs = (
        self._check_first_line,
        self._check_second_line_blank,
        self._check_whitespace,
        self._check_last_line,
    )
    for f in funcs:
      f(node, lines)

  def _check_first_line(self, node, lines):
    """Make sure first line is a short summary by itself"""
    if lines[0] == '':
      self.add_message('C9009', node=node, line=node.fromlineno)

  def _check_second_line_blank(self, node, lines):
    """Make sure the second line is blank"""
    if len(lines) > 1 and lines[1] != '':
      self.add_message('C9014', node=node, line=node.fromlineno)

  def _check_whitespace(self, node, lines):
    """Verify whitespace is sane"""
    # Make sure first line doesn't have leading whitespace.
    if lines[0].lstrip() != lines[0]:
      margs = {'offset': 0, 'line': lines[0]}
      self.add_message('C9004', node=node, line=node.fromlineno, args=margs)

    # Verify no trailing whitespace.
    # We skip the last line since it's supposed to be pure whitespace.
    #
    # Also check for multiple blank lines in a row.
    last_blank = False
    for i, l in enumerate(lines[:-1]):
      margs = {'offset': i, 'line': l}

      if l.rstrip() != l:
        self.add_message('C9003', node=node, line=node.fromlineno, args=margs)

      curr_blank = l == ''
      if last_blank and curr_blank:
        self.add_message('C9013', node=node, line=node.fromlineno, args=margs)
      last_blank = curr_blank

    # Now specially handle the last line.
    l = lines[-1]
    if l.strip() != '' and l.rstrip() != l:
      margs = {'offset': len(lines), 'line': l}
      self.add_message('C9003', node=node, line=node.fromlineno, args=margs)

  def _check_last_line(self, node, lines):
    """Make sure last line is all by itself"""
    if len(lines) > 1:
      if lines[-1].strip() != '':
        self.add_message('C9005', node=node, line=node.fromlineno)

  def _check_last_line_function(self, node, lines):
    """Make sure last line is indented"""
    if len(lines) > 1:
      # The -1 line holds the """ itself and that should be indented.
      if lines[-1] == '':
        margs = {'offset': len(lines) - 1, 'line': lines[-1]}
        self.add_message('C9005', node=node, line=node.fromlineno, args=margs)

      # The last line should not be blank.
      if lines[-2] == '':
        margs = {'offset': len(lines) - 2, 'line': lines[-2]}
        self.add_message('C9003', node=node, line=node.fromlineno, args=margs)

  def _check_section_lines(self, node, lines):
    """Verify each section (Args/Returns/Yields/Raises) is sane"""
    # For each valid section, the index (into lines[1:]) where we saw its
    # header; -1 means the section was not present.
    lineno_sections = [-1] * len(self.VALID_SECTIONS)
    invalid_sections = (
        # Handle common misnamings.
        'arg', 'argument', 'arguments',
        'ret', 'rets', 'return',
        'yield', 'yeild', 'yeilds',
        'raise', 'throw', 'throws',
    )

    last = lines[0].strip()
    for i, line in enumerate(lines[1:]):
      margs = {'offset': i + 1, 'line': line}
      l = line.strip()

      # Catch semi-common javadoc style.
      if l.startswith('@param') or l.startswith('@return'):
        self.add_message('C9007', node=node, line=node.fromlineno, args=margs)

      # See if we can detect incorrect behavior.
      section = l.split(':', 1)[0]
      if section in self.VALID_SECTIONS or section.lower() in invalid_sections:
        # Make sure it has some number of leading whitespace.
        if not line.startswith(' '):
          self.add_message('C9004', node=node, line=node.fromlineno, args=margs)

        # Make sure it has a single trailing colon.
        if l != '%s:' % section:
          self.add_message('C9007', node=node, line=node.fromlineno, args=margs)

        # Make sure it's valid.
        if section.lower() in invalid_sections:
          self.add_message('C9007', node=node, line=node.fromlineno, args=margs)
        else:
          # Gather the order of the sections.
          lineno_sections[self.VALID_SECTIONS.index(section)] = i

        # Verify blank line before it.
        if last != '':
          self.add_message('C9006', node=node, line=node.fromlineno, args=margs)

      last = l

    # Make sure the sections are in the right order.
    valid_lineno = lambda x: x >= 0
    lineno_sections = filter(valid_lineno, lineno_sections)
    if lineno_sections != sorted(lineno_sections):
      self.add_message('C9008', node=node, line=node.fromlineno)

    # Check the indentation level on all the sections.
    # The -1 line holds the trailing """ itself and that should be indented to
    # the correct number of spaces.  All checks below are relative to this.  If
    # it is off, then these checks might report weird errors, but that's ok as
    # ultimately the docstring is still wrong :).
    indent_len = len(lines[-1])
    for lineno in lineno_sections:
      # First the section header (e.g. Args:).
      # Stored indexes are offsets into lines[1:], so +1 maps back to |lines|.
      lineno += 1
      line = lines[lineno]
      if len(line) - len(line.lstrip(' ')) != indent_len:
        margs = {'offset': lineno, 'line': line}
        self.add_message('C9015', node=node, line=node.fromlineno, args=margs)

  def _check_all_args_in_doc(self, node, lines):
    """All function arguments are mentioned in doc"""
    if not hasattr(node, 'argnames'):
      return

    # Locate the start of the args section.
    arg_lines = []
    for l in lines:
      if arg_lines:
        # A blank line or the next section header ends the Args section.
        if l.strip() in [''] + ['%s:' % x for x in self.VALID_SECTIONS]:
          break
      elif l.strip() != 'Args:':
        continue
      arg_lines.append(l)
    else:
      # If they don't have an Args section, then give it a pass.
      return

    # Now verify all args exist.
    # TODO: Should we verify arg order matches doc order ?
    # TODO: Should we check indentation of wrapped docs ?
    missing_args = []
    for arg in node.args.args:
      # Ignore class related args.
      if arg.name in ('cls', 'self'):
        continue
      # Ignore ignored args.
      if arg.name.startswith('_'):
        continue

      for l in arg_lines:
        aline = l.lstrip()
        if aline.startswith('%s:' % arg.name):
          # Expect exactly one space between the colon and the description.
          amsg = aline[len(arg.name) + 1:]
          if len(amsg) and len(amsg) - len(amsg.lstrip()) != 1:
            margs = {'arg': l}
            self.add_message('C9012', node=node, line=node.fromlineno,
                             args=margs)
          break
      else:
        # The for-else fires when no doc line mentioned this arg.
        missing_args.append(arg.name)

    if missing_args:
      margs = {'arg': '|, |'.join(missing_args)}
      self.add_message('C9010', node=node, line=node.fromlineno, args=margs)

  def _check_func_signature(self, node):
    """Require *args to be named args, and **kwargs kwargs"""
    vararg = node.args.vararg
    if vararg and vararg != 'args' and vararg != '_args':
      margs = {'arg': vararg}
      self.add_message('C9011', node=node, line=node.fromlineno, args=margs)

    kwarg = node.args.kwarg
    if kwarg and kwarg != 'kwargs' and kwarg != '_kwargs':
      margs = {'arg': kwarg}
      self.add_message('C9011', node=node, line=node.fromlineno, args=margs)
+
+
class Py3kCompatChecker(BaseChecker):
  """Make sure we enforce py3k compatible features"""

  __implements__ = IAstroidChecker

  # pylint: disable=class-missing-docstring,multiple-statements
  class _MessageR9100(object): pass
  # pylint: enable=class-missing-docstring,multiple-statements

  name = 'py3k_compat_checker'
  priority = -1
  MSG_ARGS = 'offset:%(offset)i: {%(line)s}'
  msgs = {
      'R9100': ('Missing "from __future__ import print_function" line',
                ('missing-print-function'), _MessageR9100),
  }
  options = ()

  def __init__(self, *args, **kwargs):
    super(Py3kCompatChecker, self).__init__(*args, **kwargs)
    # Whether this module imported print_function from __future__.
    self.seen_print_func = False
    # Whether this module had any import statements at all.
    self.saw_imports = False

  def close(self):
    """Called when done processing module"""
    if not self.seen_print_func:
      # Do not warn if module doesn't import anything at all (like
      # empty __init__.py files).
      if self.saw_imports:
        self.add_message('R9100')

  def _check_print_function(self, node):
    """Verify print_function is imported"""
    if node.modname == '__future__':
      for name, _ in node.names:
        if name == 'print_function':
          self.seen_print_func = True

  def visit_from(self, node):
    """Process 'from' statements"""
    self.saw_imports = True
    self._check_print_function(node)

  def visit_import(self, _node):
    """Process 'import' statements"""
    self.saw_imports = True
+
+
class SourceChecker(BaseChecker):
  """Make sure we enforce rules on the source."""

  __implements__ = IAstroidChecker

  # pylint: disable=class-missing-docstring,multiple-statements
  class _MessageR9200(object): pass
  class _MessageR9201(object): pass
  class _MessageR9202(object): pass
  # pylint: enable=class-missing-docstring,multiple-statements

  name = 'source_checker'
  priority = -1
  MSG_ARGS = 'offset:%(offset)i: {%(line)s}'
  msgs = {
      'R9200': ('Shebang should be #!/usr/bin/python2 or #!/usr/bin/python3',
                ('bad-shebang'), _MessageR9200),
      'R9201': ('Shebang is missing, but file is executable',
                ('missing-shebang'), _MessageR9201),
      'R9202': ('Shebang is set, but file is not executable',
                ('spurious-shebang'), _MessageR9202),
  }
  options = ()

  def visit_module(self, node):
    """Called when the whole file has been read"""
    stream = node.file_stream
    # Rewind so we inspect the first line regardless of earlier reads.
    stream.seek(0)
    self._check_shebang(node, stream)

  def _check_shebang(self, _node, stream):
    """Verify the shebang is version specific"""
    st = os.fstat(stream.fileno())
    mode = st.st_mode
    # Any of the user/group/other execute bits marks the file executable.
    executable = bool(mode & 0o0111)

    shebang = stream.readline()
    if shebang[0:2] != '#!':
      # No shebang at all; only a problem when the file is executable.
      if executable:
        self.add_message('R9201')
      return
    elif not executable:
      self.add_message('R9202')

    parts = shebang.split()
    if parts[0] not in ('#!/usr/bin/python2', '#!/usr/bin/python3'):
      self.add_message('R9200')
+
+
class ChromiteLoggingChecker(BaseChecker):
  """Make sure we enforce rules on importing logging."""

  __implements__ = IAstroidChecker

  # pylint: disable=class-missing-docstring,multiple-statements
  class _MessageR9301(object): pass
  # pylint: enable=class-missing-docstring,multiple-statements

  name = 'chromite_logging_checker'
  priority = -1
  MSG_ARGS = 'offset:%(offset)i: {%(line)s}'
  msgs = {
      'R9301': ('logging is deprecated. Use "from chromite.lib import '
                'cros_logging as logging" to import chromite/lib/cros_logging',
                ('cros-logging-import'), _MessageR9301),
  }
  options = ()
  # This checker is disabled by default because we only want to disallow "import
  # logging" in chromite and not in other places cros lint is used. To enable
  # this checker, modify the pylintrc file.
  enabled = False

  def visit_import(self, node):
    """Called when node is an import statement."""
    # Flag any plain 'import logging' (aliased or not).
    for name, _ in node.names:
      if name == 'logging':
        self.add_message('R9301', line=node.lineno)
+
+
def register(linter):
  """pylint will call this func to register all our checkers"""
  # Walk all the classes in this module and register ours, skipping the
  # base classes we imported from pylint itself.
  this_module = sys.modules[__name__]
  skipped = ('BaseChecker', 'IAstroidChecker')
  for member in dir(this_module):
    if member.endswith('Checker') and member not in skipped:
      checker_class = getattr(this_module, member)
      linter.register_checker(checker_class(linter))
diff --git a/cli/cros/lint_unittest b/cli/cros/lint_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/cli/cros/lint_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cli/cros/lint_unittest.py b/cli/cros/lint_unittest.py
new file mode 100644
index 0000000..b78c655
--- /dev/null
+++ b/cli/cros/lint_unittest.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the lint module."""
+
+from __future__ import print_function
+
+import collections
+import StringIO
+
+from chromite.cli.cros import lint
+from chromite.lib import cros_test_lib
+
+
+# pylint: disable=protected-access
+
+
class TestNode(object):
  """Object good enough to stand in for lint funcs"""

  Args = collections.namedtuple('Args', ('args', 'vararg', 'kwarg'))
  Arg = collections.namedtuple('Arg', ('name',))

  def __init__(self, doc='', fromlineno=0, path='foo.py', args=(), vararg='',
               kwarg='', names=None, lineno=0):
    self.doc = doc
    self.lines = doc.split('\n')
    self.fromlineno = fromlineno
    self.lineno = lineno
    self.file = path
    # Wrap each plain arg name in an Arg record like astroid would.
    arg_objs = [self.Arg(name=arg_name) for arg_name in args]
    self.args = self.Args(args=arg_objs, vararg=vararg, kwarg=kwarg)
    self.names = [('name', None)] if names is None else names

  def argnames(self):
    return self.args
+
+
class CheckerTestCase(cros_test_lib.TestCase):
  """Helpers for Checker modules"""

  def add_message(self, msg_id, node=None, line=None, args=None):
    """Capture lint checks"""
    # We include node.doc here explicitly so the pretty assert message
    # includes it in the output automatically.
    doc = node.doc if node else ''
    self.results.append((msg_id, doc, line, args))

  def setUp(self):
    assert hasattr(self, 'CHECKER'), 'TestCase must set CHECKER'

    # Messages the checker emitted; add_message above is monkey-patched in
    # so each (msg_id, doc, line, args) tuple lands here.
    self.results = []
    self.checker = self.CHECKER()
    self.checker.add_message = self.add_message
+
+
class DocStringCheckerTest(CheckerTestCase):
  """Tests for DocStringChecker module"""

  # Docstrings the checker must accept without emitting any message.
  GOOD_FUNC_DOCSTRINGS = (
      'Some string',
      """Short summary

      Body of text.
      """,
      """line o text

      Body and comments on
      more than one line.

      Args:
        moo: cow

      Returns:
        some value

      Raises:
        something else
      """,
      """Short summary.

      Args:
        fat: cat

      Yields:
        a spoon
      """,
  )

  # Docstrings that must each trigger at least one lint message.
  BAD_FUNC_DOCSTRINGS = (
      """
      bad first line
      """,
      """The first line is good
      but the second one isn't
      """,
      """ whitespace is wrong""",
      """whitespace is wrong	""",
      """ whitespace is wrong

      Multiline tickles differently.
      """,
      """Should be no trailing blank lines

      Returns:
        a value

      """,
      """ok line

      cuddled end""",
      """we want Args/Returns not Arguments/Return

      Arguments:
      Return:
      """,
      """section order is wrong here

      Raises:
      Returns:
      """,
      """sections lack whitespace between them

      Args:
        foo: bar
      Returns:
        yeah
      """,
      """yields is misspelled

      Yield:
        a car
      """,
      """Section name has bad spacing

      Args:\x20\x20\x20
        key: here
      """,
      """too many blank lines


      Returns:
        None
      """,
      """wrongly uses javadoc

      @returns None
      """,
      """the indentation is incorrect

        Args:
          some: day
      """,
  )

  # The current linter isn't good enough yet to detect these.
  TODO_BAD_FUNC_DOCSTRINGS = (
      """The returns section isn't a proper section

      Args:
        bloop: de

      returns something
      """,
  )

  # Checker under test; instantiated by CheckerTestCase.setUp.
  CHECKER = lint.DocStringChecker
+
+  def testGood_visit_function(self):
+    """Allow known good docstrings"""
+    for dc in self.GOOD_FUNC_DOCSTRINGS:
+      self.results = []
+      node = TestNode(doc=dc)
+      self.checker.visit_function(node)
+      self.assertEqual(self.results, [],
+                       msg='docstring was not accepted:\n"""%s"""' % dc)
+
+  def testBad_visit_function(self):
+    """Reject known bad docstrings"""
+    for dc in self.BAD_FUNC_DOCSTRINGS:
+      self.results = []
+      node = TestNode(doc=dc)
+      self.checker.visit_function(node)
+      self.assertNotEqual(self.results, [],
+                          msg='docstring was not rejected:\n"""%s"""' % dc)
+
  def testSmoke_visit_module(self):
    """Smoke test for modules"""
    # A module with a docstring is fine.
    self.checker.visit_module(TestNode(doc='foo'))
    self.assertEqual(self.results, [])
    # Stub __init__.py files get a pass even without a docstring.
    self.checker.visit_module(TestNode(doc='', path='/foo/__init__.py'))
    self.assertEqual(self.results, [])
+
+  def testSmoke_visit_class(self):
+    """Smoke test for classes"""
+    self.checker.visit_class(TestNode(doc='bar'))
+
+  def testGood_check_first_line(self):
+    """Verify _check_first_line accepts good inputs"""
+    docstrings = (
+        'Some string',
+    )
+    for dc in docstrings:
+      self.results = []
+      node = TestNode(doc=dc)
+      self.checker._check_first_line(node, node.lines)
+      self.assertEqual(self.results, [],
+                       msg='docstring was not accepted:\n"""%s"""' % dc)
+
+  def testBad_check_first_line(self):
+    """Verify _check_first_line rejects bad inputs"""
+    docstrings = (
+        '\nSome string\n',
+    )
+    for dc in docstrings:
+      self.results = []
+      node = TestNode(doc=dc)
+      self.checker._check_first_line(node, node.lines)
+      self.assertEqual(len(self.results), 1)
+
+  def testGood_check_second_line_blank(self):
+    """Verify _check_second_line_blank accepts good inputs"""
+    docstrings = (
+        'Some string\n\nThis is the third line',
+        'Some string',
+    )
+    for dc in docstrings:
+      self.results = []
+      node = TestNode(doc=dc)
+      self.checker._check_second_line_blank(node, node.lines)
+      self.assertEqual(self.results, [],
+                       msg='docstring was not accepted:\n"""%s"""' % dc)
+
+  def testBad_check_second_line_blank(self):
+    """Verify _check_second_line_blank rejects bad inputs"""
+    docstrings = (
+        'Some string\nnonempty secondline',
+    )
+    for dc in docstrings:
+      self.results = []
+      node = TestNode(doc=dc)
+      self.checker._check_second_line_blank(node, node.lines)
+      self.assertEqual(len(self.results), 1)
+
+  def testGoodFuncVarKwArg(self):
+    """Check valid inputs for *args and **kwargs"""
+    for vararg in (None, 'args', '_args'):
+      for kwarg in (None, 'kwargs', '_kwargs'):
+        self.results = []
+        node = TestNode(vararg=vararg, kwarg=kwarg)
+        self.checker._check_func_signature(node)
+        self.assertEqual(len(self.results), 0)
+
+  def testMisnamedFuncVarKwArg(self):
+    """Reject anything but *args and **kwargs"""
+    for vararg in ('arg', 'params', 'kwargs', '_moo'):
+      self.results = []
+      node = TestNode(vararg=vararg)
+      self.checker._check_func_signature(node)
+      self.assertEqual(len(self.results), 1)
+
+    for kwarg in ('kwds', '_kwds', 'args', '_moo'):
+      self.results = []
+      node = TestNode(kwarg=kwarg)
+      self.checker._check_func_signature(node)
+      self.assertEqual(len(self.results), 1)
+
+  def testGoodFuncArgs(self):
+    """Verify normal args in Args are allowed"""
+    datasets = (
+        ("""args are correct, and cls is ignored
+
+         Args:
+           moo: cow
+         """,
+         ('cls', 'moo',), None, None,
+        ),
+        ("""args are correct, and self is ignored
+
+         Args:
+           moo: cow
+           *args: here
+         """,
+         ('self', 'moo',), 'args', 'kwargs',
+        ),
+        ("""args are allowed to wrap
+
+         Args:
+           moo:
+             a big fat cow
+             that takes many lines
+             to describe its fatness
+         """,
+         ('moo',), None, 'kwargs',
+        ),
+    )
+    for dc, args, vararg, kwarg in datasets:
+      self.results = []
+      node = TestNode(doc=dc, args=args, vararg=vararg, kwarg=kwarg)
+      self.checker._check_all_args_in_doc(node, node.lines)
+      self.assertEqual(len(self.results), 0)
+
+  def testBadFuncArgs(self):
+    """Verify bad/missing args in Args are caught"""
+    datasets = (
+        ("""missing 'bar'
+
+         Args:
+           moo: cow
+         """,
+         ('moo', 'bar',),
+        ),
+        ("""missing 'cow' but has 'bloop'
+
+         Args:
+           moo: cow
+         """,
+         ('bloop',),
+        ),
+        ("""too much space after colon
+
+         Args:
+           moo:  cow
+         """,
+         ('moo',),
+        ),
+        ("""not enough space after colon
+
+         Args:
+           moo:cow
+         """,
+         ('moo',),
+        ),
+    )
+    for dc, args in datasets:
+      self.results = []
+      node = TestNode(doc=dc, args=args)
+      self.checker._check_all_args_in_doc(node, node.lines)
+      self.assertEqual(len(self.results), 1)
+
+
+class ChromiteLoggingCheckerTest(CheckerTestCase):
+  """Tests for ChromiteLoggingChecker module"""
+
+  CHECKER = lint.ChromiteLoggingChecker
+
+  def testLoggingImported(self):
+    """Test that import logging is flagged."""
+    node = TestNode(names=[('logging', None)], lineno=15)
+    self.checker.visit_import(node)
+    self.assertEqual(self.results, [('R9301', '', 15, None)])
+
+  def testLoggingNotImported(self):
+    """Test that importing something else (not logging) is not flagged."""
+    node = TestNode(names=[('myModule', None)], lineno=15)
+    self.checker.visit_import(node)
+    self.assertEqual(self.results, [])
+
+
+class SourceCheckerTest(CheckerTestCase):
+  """Tests for SourceChecker module"""
+
+  CHECKER = lint.SourceChecker
+
+  def _testShebang(self, shebangs, exp, fileno):
+    """Helper for shebang tests"""
+    for shebang in shebangs:
+      self.results = []
+      node = TestNode()
+      stream = StringIO.StringIO(shebang)
+      stream.fileno = lambda: fileno
+      self.checker._check_shebang(node, stream)
+      self.assertEqual(len(self.results), exp,
+                       msg='processing shebang failed: %r' % shebang)
+
+  def testBadShebangNoExec(self):
+    """Verify _check_shebang rejects bad shebangs"""
+    shebangs = (
+        '#!/usr/bin/python\n',
+        '#! /usr/bin/python2 \n',
+        '#!/usr/bin/env python3\n',
+    )
+    with open('/dev/null') as f:
+      self._testShebang(shebangs, 2, f.fileno())
+
+  def testGoodShebang(self):
+    """Verify _check_shebang accepts good shebangs"""
+    shebangs = (
+        '#!/usr/bin/python2\n',
+        '#!/usr/bin/python2  \n',
+        '#!/usr/bin/python3\n',
+        '#!/usr/bin/python3\t\n',
+    )
+    with open('/bin/sh') as f:
+      self._testShebang(shebangs, 0, f.fileno())
diff --git a/cli/cros/tests/__init__.py b/cli/cros/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cli/cros/tests/__init__.py
diff --git a/cli/cros/tests/cros_vm_test b/cli/cros/tests/cros_vm_test
new file mode 120000
index 0000000..f54a630
--- /dev/null
+++ b/cli/cros/tests/cros_vm_test
@@ -0,0 +1 @@
+../../../scripts/wrapper.py
\ No newline at end of file
diff --git a/cli/cros/tests/cros_vm_test.py b/cli/cros/tests/cros_vm_test.py
new file mode 100644
index 0000000..ca7fb8a
--- /dev/null
+++ b/cli/cros/tests/cros_vm_test.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Integration VM test for cros commands."""
+
+from __future__ import print_function
+
+from chromite.cli import command_vm_test
+from chromite.lib import commandline
+
+
+class CrosVMTest(command_vm_test.CommandVMTest):
+  """Test class for cros commands."""
+
+  def BuildCommand(self, command, device=None, pos_args=None, opt_args=None):
+    """Builds a cros command.
+
+    Args:
+      command: The subcommand to build on (e.g. 'flash', 'deploy').
+      device: The device's address for the command.
+      pos_args: A list of positional arguments for the command.
+      opt_args: A list of optional arguments for the command.
+
+    Returns:
+      A full cros command as a list.
+    """
+    cmd = ['cros', command]
+    if opt_args:
+      cmd.extend(opt_args)
+    if device:
+      if command == 'devices':
+        # The device argument is optional for 'cros devices' command.
+        cmd.extend(['--device', device])
+      else:
+        cmd.append(device)
+    if pos_args:
+      cmd.extend(pos_args)
+    return cmd
+
+
+def _ParseArguments(argv):
+  """Parses command-line arguments."""
+  parser = commandline.ArgumentParser(caching=True)
+  parser.add_argument(
+      '--board', required=True, help='Board for the VM to run tests.')
+  parser.add_argument(
+      '--image_path', required=True, type='path',
+      help='Path to the image for the VM to run tests.')
+  return parser.parse_args(argv)
+
+
+def main(argv):
+  """Main function of the script."""
+  options = _ParseArguments(argv)
+  options.Freeze()
+  test = CrosVMTest(options.board, options.image_path)
+  test.Run()
diff --git a/cli/deploy.py b/cli/deploy.py
new file mode 100644
index 0000000..65d4146
--- /dev/null
+++ b/cli/deploy.py
@@ -0,0 +1,1009 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Deploy packages onto a target device."""
+
+from __future__ import print_function
+
+import fnmatch
+import functools
+import json
+import os
+
+from chromite.cli import command
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import operation
+from chromite.lib import portage_util
+from chromite.lib import remote_access
+try:
+  import portage
+except ImportError:
+  if cros_build_lib.IsInsideChroot():
+    raise
+
+
+_DEVICE_BASE_DIR = '/usr/local/tmp/cros-deploy'
+# This is defined in src/platform/dev/builder.py
+_STRIPPED_PACKAGES_DIR = 'stripped-packages'
+
+_MAX_UPDATES_NUM = 10
+_MAX_UPDATES_WARNING = (
+    'You are about to update a large number of installed packages, which '
+    'might take a long time, fail midway, or leave the target in an '
+    'inconsistent state. It is highly recommended that you flash a new image '
+    'instead.')
+
+
+class DeployError(Exception):
+  """Thrown when an unrecoverable error is encountered during deploy."""
+
+
+class BrilloDeployOperation(operation.ProgressBarOperation):
+  """ProgressBarOperation specific for brillo deploy."""
+  MERGE_EVENTS = ['NOTICE: Copying', 'NOTICE: Installing',
+                  'Calculating dependencies', '... done!', 'Extracting info',
+                  'Installing (1 of 1)', 'has been installed.']
+  UNMERGE_EVENTS = ['NOTICE: Unmerging', 'has been uninstalled.']
+
+  def __init__(self, pkg_count, emerge):
+    """Construct BrilloDeployOperation object.
+
+    Args:
+      pkg_count: number of packages being built.
+      emerge: True if emerge, False if unmerge.
+    """
+    super(BrilloDeployOperation, self).__init__()
+    if emerge:
+      self._events = self.MERGE_EVENTS
+    else:
+      self._events = self.UNMERGE_EVENTS
+    self._total = pkg_count * len(self._events)
+    self._completed = 0
+
+  def ParseOutput(self, output=None):
+    """Parse the output of brillo deploy to update a progress bar."""
+    stdout = self._stdout.read()
+    stderr = self._stderr.read()
+    output = stdout + stderr
+    for event in self._events:
+      self._completed += output.count(event)
+    self.ProgressBar(float(self._completed) / self._total)
+
+
+class _InstallPackageScanner(object):
+  """Finds packages that need to be installed on a target device.
+
+  Scans the sysroot bintree, beginning with a user-provided list of packages,
+  to find all packages that need to be installed. If so instructed,
+  transitively scans forward (mandatory) and backward (optional) dependencies
+  as well. A package will be installed if missing on the target (mandatory
+  packages only), or it will be updated if its sysroot version and build time
+  are different from the target. Common usage:
+
+    pkg_scanner = _InstallPackageScanner(sysroot)
+    pkgs = pkg_scanner.Run(...)
+  """
+
+  class VartreeError(Exception):
+    """An error in the processing of the installed packages tree."""
+
+  class BintreeError(Exception):
+    """An error in the processing of the source binpkgs tree."""
+
+  class PkgInfo(object):
+    """A record containing package information."""
+
+    __slots__ = ('cpv', 'build_time', 'rdeps_raw', 'rdeps', 'rev_rdeps')
+
+    def __init__(self, cpv, build_time, rdeps_raw, rdeps=None, rev_rdeps=None):
+      self.cpv = cpv
+      self.build_time = build_time
+      self.rdeps_raw = rdeps_raw
+      self.rdeps = set() if rdeps is None else rdeps
+      self.rev_rdeps = set() if rev_rdeps is None else rev_rdeps
+
+  # Python snippet for dumping vartree info on the target. Instantiate using
+  # _GetVartreeSnippet().
+  _GET_VARTREE = """
+import portage
+import json
+trees = portage.create_trees(target_root='%(root)s', config_root='/')
+vartree = trees['%(root)s']['vartree']
+pkg_info = []
+for cpv in vartree.dbapi.cpv_all():
+  slot, rdep_raw, build_time = vartree.dbapi.aux_get(
+      cpv, ('SLOT', 'RDEPEND', 'BUILD_TIME'))
+  pkg_info.append((cpv, slot, rdep_raw, build_time))
+
+print(json.dumps(pkg_info))
+"""
+
+  def __init__(self, sysroot):
+    self.sysroot = sysroot
+    # Members containing the sysroot (binpkg) and target (installed) package DB.
+    self.target_db = None
+    self.binpkgs_db = None
+    # Members for managing the dependency resolution work queue.
+    self.queue = None
+    self.seen = None
+    self.listed = None
+
+  @staticmethod
+  def _GetCP(cpv):
+    """Returns the CP value for a given CPV string."""
+    attrs = portage_util.SplitCPV(cpv, strict=False)
+    if not (attrs.category and attrs.package):
+      raise ValueError('Cannot get CP value for %s' % cpv)
+    return os.path.join(attrs.category, attrs.package)
+
+  @staticmethod
+  def _InDB(cp, slot, db):
+    """Returns whether CP and slot are found in a database (if provided)."""
+    cp_slots = db.get(cp) if db else None
+    return cp_slots is not None and (not slot or slot in cp_slots)
+
+  @staticmethod
+  def _AtomStr(cp, slot):
+    """Returns 'CP:slot' if slot is non-empty, else just 'CP'."""
+    return '%s:%s' % (cp, slot) if slot else cp
+
+  @classmethod
+  def _GetVartreeSnippet(cls, root='/'):
+    """Returns a code snippet for dumping the vartree on the target.
+
+    Args:
+      root: The installation root.
+
+    Returns:
+      The said code snippet (string) with parameters filled in.
+    """
+    return cls._GET_VARTREE % {'root': root}
+
+  @classmethod
+  def _StripDepAtom(cls, dep_atom, installed_db=None):
+    """Strips a dependency atom and returns a (CP, slot) pair."""
+    # TODO(garnold) This is a gross simplification of ebuild dependency
+    # semantics, stripping and ignoring various qualifiers (versions, slots,
+    # USE flag, negation) and will likely need to be fixed. chromium:447366.
+
+    # Ignore unversioned blockers, leaving them for the user to resolve.
+    if dep_atom[0] == '!' and dep_atom[1] not in '<=>~':
+      return None, None
+
+    cp = dep_atom
+    slot = None
+    require_installed = False
+
+    # Versioned blockers should be updated, but only if already installed.
+    # These are often used for forcing cascaded updates of multiple packages,
+    # so we're treating them as ordinary constraints with hopes that it'll lead
+    # to the desired result.
+    if cp.startswith('!'):
+      cp = cp.lstrip('!')
+      require_installed = True
+
+    # Remove USE flags.
+    if '[' in cp:
+      cp = cp[:cp.index('[')] + cp[cp.index(']') + 1:]
+
+    # Separate the slot qualifier and strip off subslots.
+    if ':' in cp:
+      cp, slot = cp.split(':')
+      for delim in ('/', '='):
+        slot = slot.split(delim, 1)[0]
+
+    # Strip version wildcards (right), comparators (left).
+    cp = cp.rstrip('*')
+    cp = cp.lstrip('<=>~')
+
+    # Turn into CP form.
+    cp = cls._GetCP(cp)
+
+    if require_installed and not cls._InDB(cp, None, installed_db):
+      return None, None
+
+    return cp, slot
+
+  @classmethod
+  def _ProcessDepStr(cls, dep_str, installed_db, avail_db):
+    """Resolves and returns a list of dependencies from a dependency string.
+
+    This parses a dependency string and returns a list of package names and
+    slots. Other atom qualifiers (version, sub-slot, block) are ignored. When
+    resolving disjunctive deps, we include all choices that are fully present
+    in |installed_db|. If none is present, we choose an arbitrary one that is
+    available.
+
+    Args:
+      dep_str: A raw dependency string.
+      installed_db: A database of installed packages.
+      avail_db: A database of packages available for installation.
+
+    Returns:
+      A list of pairs (CP, slot).
+
+    Raises:
+      ValueError: the dependencies string is malformed.
+    """
+    def ProcessSubDeps(dep_exp, disjunct):
+      """Parses and processes a dependency (sub)expression."""
+      deps = set()
+      default_deps = set()
+      sub_disjunct = False
+      for dep_sub_exp in dep_exp:
+        sub_deps = set()
+
+        if isinstance(dep_sub_exp, (list, tuple)):
+          sub_deps = ProcessSubDeps(dep_sub_exp, sub_disjunct)
+          sub_disjunct = False
+        elif sub_disjunct:
+          raise ValueError('Malformed disjunctive operation in deps')
+        elif dep_sub_exp == '||':
+          sub_disjunct = True
+        elif dep_sub_exp.endswith('?'):
+          raise ValueError('Dependencies contain a conditional')
+        else:
+          cp, slot = cls._StripDepAtom(dep_sub_exp, installed_db)
+          if cp:
+            sub_deps = set([(cp, slot)])
+          elif disjunct:
+            raise ValueError('Atom in disjunct ignored')
+
+        # Handle sub-deps of a disjunctive expression.
+        if disjunct:
+          # Make the first available choice the default, for use in case that
+          # no option is installed.
+          if (not default_deps and avail_db is not None and
+              all([cls._InDB(cp, slot, avail_db) for cp, slot in sub_deps])):
+            default_deps = sub_deps
+
+          # If not all sub-deps are installed, then don't consider them.
+          if not all([cls._InDB(cp, slot, installed_db)
+                      for cp, slot in sub_deps]):
+            sub_deps = set()
+
+        deps.update(sub_deps)
+
+      return deps or default_deps
+
+    try:
+      return ProcessSubDeps(portage.dep.paren_reduce(dep_str), False)
+    except portage.exception.InvalidDependString as e:
+      raise ValueError('Invalid dep string: %s' % e)
+    except ValueError as e:
+      raise ValueError('%s: %s' % (e, dep_str))
+
+  def _BuildDB(self, cpv_info, process_rdeps, process_rev_rdeps,
+               installed_db=None):
+    """Returns a database of packages given a list of CPV info.
+
+    Args:
+      cpv_info: A list of tuples containing package CPV and attributes.
+      process_rdeps: Whether to populate forward dependencies.
+      process_rev_rdeps: Whether to populate reverse dependencies.
+      installed_db: A database of installed packages for filtering disjunctive
+        choices against; if None, using own built database.
+
+    Returns:
+      A map from CP values to another dictionary that maps slots to package
+      attribute tuples. Tuples contain a CPV value (string), build time
+      (string), runtime dependencies (set), and reverse dependencies (set,
+      empty if not populated).
+
+    Raises:
+      ValueError: If more than one CPV occupies a single slot.
+    """
+    db = {}
+    logging.debug('Populating package DB...')
+    for cpv, slot, rdeps_raw, build_time in cpv_info:
+      cp = self._GetCP(cpv)
+      cp_slots = db.setdefault(cp, dict())
+      if slot in cp_slots:
+        raise ValueError('More than one package found for %s' %
+                         self._AtomStr(cp, slot))
+      logging.debug(' %s -> %s, built %s, raw rdeps: %s',
+                    self._AtomStr(cp, slot), cpv, build_time, rdeps_raw)
+      cp_slots[slot] = self.PkgInfo(cpv, build_time, rdeps_raw)
+
+    avail_db = db
+    if installed_db is None:
+      installed_db = db
+      avail_db = None
+
+    # Add approximate forward dependencies.
+    if process_rdeps:
+      logging.debug('Populating forward dependencies...')
+      for cp, cp_slots in db.iteritems():
+        for slot, pkg_info in cp_slots.iteritems():
+          pkg_info.rdeps.update(self._ProcessDepStr(pkg_info.rdeps_raw,
+                                                    installed_db, avail_db))
+          logging.debug(' %s (%s) processed rdeps: %s',
+                        self._AtomStr(cp, slot), pkg_info.cpv,
+                        ' '.join([self._AtomStr(rdep_cp, rdep_slot)
+                                  for rdep_cp, rdep_slot in pkg_info.rdeps]))
+
+    # Add approximate reverse dependencies (optional).
+    if process_rev_rdeps:
+      logging.debug('Populating reverse dependencies...')
+      for cp, cp_slots in db.iteritems():
+        for slot, pkg_info in cp_slots.iteritems():
+          for rdep_cp, rdep_slot in pkg_info.rdeps:
+            to_slots = db.get(rdep_cp)
+            if not to_slots:
+              continue
+
+            for to_slot, to_pkg_info in to_slots.iteritems():
+              if rdep_slot and to_slot != rdep_slot:
+                continue
+              logging.debug(' %s (%s) added as rev rdep for %s (%s)',
+                            self._AtomStr(cp, slot), pkg_info.cpv,
+                            self._AtomStr(rdep_cp, to_slot), to_pkg_info.cpv)
+              to_pkg_info.rev_rdeps.add((cp, slot))
+
+    return db
+
+  def _InitTargetVarDB(self, device, root, process_rdeps, process_rev_rdeps):
+    """Initializes a dictionary of packages installed on |device|."""
+    get_vartree_script = self._GetVartreeSnippet(root)
+    try:
+      result = device.GetAgent().RemoteSh('python', remote_sudo=True,
+                                          input=get_vartree_script)
+    except cros_build_lib.RunCommandError as e:
+      logging.error('Cannot get target vartree:\n%s', e.result.error)
+      raise
+
+    try:
+      self.target_db = self._BuildDB(json.loads(result.output),
+                                     process_rdeps, process_rev_rdeps)
+    except ValueError as e:
+      raise self.VartreeError(str(e))
+
+  def _InitBinpkgDB(self, process_rdeps):
+    """Initializes a dictionary of binary packages for updating the target."""
+    # Get build root trees; portage indexes require a trailing '/'.
+    build_root = os.path.join(self.sysroot, '')
+    trees = portage.create_trees(target_root=build_root, config_root=build_root)
+    bintree = trees[build_root]['bintree']
+    binpkgs_info = []
+    for cpv in bintree.dbapi.cpv_all():
+      slot, rdep_raw, build_time = bintree.dbapi.aux_get(
+          cpv, ['SLOT', 'RDEPEND', 'BUILD_TIME'])
+      binpkgs_info.append((cpv, slot, rdep_raw, build_time))
+
+    try:
+      self.binpkgs_db = self._BuildDB(binpkgs_info, process_rdeps, False,
+                                      installed_db=self.target_db)
+    except ValueError as e:
+      raise self.BintreeError(str(e))
+
+  def _InitDepQueue(self):
+    """Initializes the dependency work queue."""
+    self.queue = set()
+    self.seen = {}
+    self.listed = set()
+
+  def _EnqDep(self, dep, listed, optional):
+    """Enqueues a dependency if not seen before or if turned non-optional."""
+    if dep in self.seen and (optional or not self.seen[dep]):
+      return False
+
+    self.queue.add(dep)
+    self.seen[dep] = optional
+    if listed:
+      self.listed.add(dep)
+    return True
+
+  def _DeqDep(self):
+    """Dequeues and returns a dependency, its listed and optional flags.
+
+    This returns listed packages first, if any are present, to ensure that we
+    correctly mark them as such when they are first being processed.
+    """
+    if self.listed:
+      dep = self.listed.pop()
+      self.queue.remove(dep)
+      listed = True
+    else:
+      dep = self.queue.pop()
+      listed = False
+
+    return dep, listed, self.seen[dep]
+
+  def _FindPackageMatches(self, cpv_pattern):
+    """Returns list of binpkg (CP, slot) pairs that match |cpv_pattern|.
+
+    This breaks |cpv_pattern| into its C, P and V components, each of which
+    may or may not be present or contain wildcards. It then scans the
+    binpkgs database to find all atoms that match these components, returning a
+    list of CP and slot qualifiers. When the pattern does not specify a version,
+    or when a CP has only one slot in the binpkgs database, we omit the slot
+    qualifier in the result.
+
+    Args:
+      cpv_pattern: A CPV pattern, potentially partial and/or having wildcards.
+
+    Returns:
+      A list of (CPV, slot) pairs of packages in the binpkgs database that
+      match the pattern.
+    """
+    attrs = portage_util.SplitCPV(cpv_pattern, strict=False)
+    cp_pattern = os.path.join(attrs.category or '*', attrs.package or '*')
+    matches = []
+    for cp, cp_slots in self.binpkgs_db.iteritems():
+      if not fnmatch.fnmatchcase(cp, cp_pattern):
+        continue
+
+      # If no version attribute was given or there's only one slot, omit the
+      # slot qualifier.
+      if not attrs.version or len(cp_slots) == 1:
+        matches.append((cp, None))
+      else:
+        cpv_pattern = '%s-%s' % (cp, attrs.version)
+        for slot, pkg_info in cp_slots.iteritems():
+          if fnmatch.fnmatchcase(pkg_info.cpv, cpv_pattern):
+            matches.append((cp, slot))
+
+    return matches
+
+  def _FindPackage(self, pkg):
+    """Returns the (CP, slot) pair for a package matching |pkg|.
+
+    Args:
+      pkg: Path to a binary package or a (partial) package CPV specifier.
+
+    Returns:
+      A (CP, slot) pair for the given package; slot may be None (unspecified).
+
+    Raises:
+      ValueError: if |pkg| is not a binpkg file nor does it match something
+      that's in the bintree.
+    """
+    if pkg.endswith('.tbz2') and os.path.isfile(pkg):
+      package = os.path.basename(os.path.splitext(pkg)[0])
+      category = os.path.basename(os.path.dirname(pkg))
+      return self._GetCP(os.path.join(category, package)), None
+
+    matches = self._FindPackageMatches(pkg)
+    if not matches:
+      raise ValueError('No package found for %s' % pkg)
+
+    idx = 0
+    if len(matches) > 1:
+      # Ask user to pick among multiple matches.
+      idx = cros_build_lib.GetChoice('Multiple matches found for %s: ' % pkg,
+                                     ['%s:%s' % (cp, slot) if slot else cp
+                                      for cp, slot in matches])
+
+    return matches[idx]
+
+  def _NeedsInstall(self, cpv, slot, build_time, optional):
+    """Returns whether a package needs to be installed on the target.
+
+    Args:
+      cpv: Fully qualified CPV (string) of the package.
+      slot: Slot identifier (string).
+      build_time: The BUILD_TIME value (string) of the binpkg.
+      optional: Whether package is optional on the target.
+
+    Returns:
+      A tuple (install, update) indicating whether to |install| the package and
+      whether it is an |update| to an existing package.
+
+    Raises:
+      ValueError: if slot is not provided.
+    """
+    # If not checking installed packages, always install.
+    if not self.target_db:
+      return True, False
+
+    cp = self._GetCP(cpv)
+    target_pkg_info = self.target_db.get(cp, dict()).get(slot)
+    if target_pkg_info is not None:
+      if cpv != target_pkg_info.cpv:
+        attrs = portage_util.SplitCPV(cpv)
+        target_attrs = portage_util.SplitCPV(target_pkg_info.cpv)
+        logging.debug('Updating %s: version (%s) different on target (%s)',
+                      cp, attrs.version, target_attrs.version)
+        return True, True
+
+      if build_time != target_pkg_info.build_time:
+        logging.debug('Updating %s: build time (%s) different on target (%s)',
+                      cpv, build_time, target_pkg_info.build_time)
+        return True, True
+
+      logging.debug('Not updating %s: already up-to-date (%s, built %s)',
+                    cp, target_pkg_info.cpv, target_pkg_info.build_time)
+      return False, False
+
+    if optional:
+      logging.debug('Not installing %s: missing on target but optional', cp)
+      return False, False
+
+    logging.debug('Installing %s: missing on target and non-optional (%s)',
+                  cp, cpv)
+    return True, False
+
+  def _ProcessDeps(self, deps, reverse):
+    """Enqueues dependencies for processing.
+
+    Args:
+      deps: List of dependencies to enqueue.
+      reverse: Whether these are reverse dependencies.
+    """
+    if not deps:
+      return
+
+    logging.debug('Processing %d %s dep(s)...', len(deps),
+                  'reverse' if reverse else 'forward')
+    num_already_seen = 0
+    for dep in deps:
+      if self._EnqDep(dep, False, reverse):
+        logging.debug(' Queued dep %s', dep)
+      else:
+        num_already_seen += 1
+
+    if num_already_seen:
+      logging.debug('%d dep(s) already seen', num_already_seen)
+
+  def _ComputeInstalls(self, process_rdeps, process_rev_rdeps):
+    """Returns a dictionary of packages that need to be installed on the target.
+
+    Args:
+      process_rdeps: Whether to trace forward dependencies.
+      process_rev_rdeps: Whether to trace backward dependencies as well.
+
+    Returns:
+      A dictionary mapping CP values (string) to tuples containing a CPV
+      (string), a slot (string), a boolean indicating whether the package
+      was initially listed in the queue, and a boolean indicating whether this
+      is an update to an existing package.
+    """
+    installs = {}
+    while self.queue:
+      dep, listed, optional = self._DeqDep()
+      cp, required_slot = dep
+      if cp in installs:
+        logging.debug('Already updating %s', cp)
+        continue
+
+      cp_slots = self.binpkgs_db.get(cp, dict())
+      logging.debug('Checking packages matching %s%s%s...', cp,
+                    ' (slot: %s)' % required_slot if required_slot else '',
+                    ' (optional)' if optional else '')
+      num_processed = 0
+      for slot, pkg_info in cp_slots.iteritems():
+        if required_slot and slot != required_slot:
+          continue
+
+        num_processed += 1
+        logging.debug(' Checking %s...', pkg_info.cpv)
+
+        install, update = self._NeedsInstall(pkg_info.cpv, slot,
+                                             pkg_info.build_time, optional)
+        if not install:
+          continue
+
+        installs[cp] = (pkg_info.cpv, slot, listed, update)
+
+        # Add forward and backward runtime dependencies to queue.
+        if process_rdeps:
+          self._ProcessDeps(pkg_info.rdeps, False)
+        if process_rev_rdeps:
+          target_pkg_info = self.target_db.get(cp, dict()).get(slot)
+          if target_pkg_info:
+            self._ProcessDeps(target_pkg_info.rev_rdeps, True)
+
+      if num_processed == 0:
+        logging.warning('No qualified bintree package corresponding to %s', cp)
+
+    return installs
+
+  def _SortInstalls(self, installs):
+    """Returns a sorted list of packages to install.
+
+    Performs a topological sort based on dependencies found in the binary
+    package database.
+
+    Args:
+      installs: Dictionary of packages to install indexed by CP.
+
+    Returns:
+      A list of package CPVs (string).
+
+    Raises:
+      ValueError: If dependency graph contains a cycle.
+    """
+    not_visited = set(installs.keys())
+    curr_path = []
+    sorted_installs = []
+
+    def SortFrom(cp):
+      """Traverses dependencies recursively, emitting nodes in reverse order."""
+      cpv, slot, _, _ = installs[cp]
+      if cpv in curr_path:
+        raise ValueError('Dependencies contain a cycle: %s -> %s' %
+                         (' -> '.join(curr_path[curr_path.index(cpv):]), cpv))
+      curr_path.append(cpv)
+      for rdep_cp, _ in self.binpkgs_db[cp][slot].rdeps:
+        if rdep_cp in not_visited:
+          not_visited.remove(rdep_cp)
+          SortFrom(rdep_cp)
+
+      sorted_installs.append(cpv)
+      curr_path.pop()
+
+    # So long as there's more packages, keep expanding dependency paths.
+    while not_visited:
+      SortFrom(not_visited.pop())
+
+    return sorted_installs
+
+  def _EnqListedPkg(self, pkg):
+    """Finds and enqueues a listed package."""
+    cp, slot = self._FindPackage(pkg)
+    if cp not in self.binpkgs_db:
+      raise self.BintreeError('Package %s not found in binpkgs tree' % pkg)
+    self._EnqDep((cp, slot), True, False)
+
+  def _EnqInstalledPkgs(self):
+    """Enqueues all available binary packages that are already installed."""
+    for cp, cp_slots in self.binpkgs_db.iteritems():
+      target_cp_slots = self.target_db.get(cp)
+      if target_cp_slots:
+        for slot in cp_slots.iterkeys():
+          if slot in target_cp_slots:
+            self._EnqDep((cp, slot), True, False)
+
+  def Run(self, device, root, listed_pkgs, update, process_rdeps,
+          process_rev_rdeps):
+    """Computes the list of packages that need to be installed on a target.
+
+    Args:
+      device: Target handler object.
+      root: Package installation root.
+      listed_pkgs: Package names/files listed by the user.
+      update: Whether to read the target's installed package database.
+      process_rdeps: Whether to trace forward dependencies.
+      process_rev_rdeps: Whether to trace backward dependencies as well.
+
+    Returns:
+      A tuple (sorted, listed, num_updates) where |sorted| is a list of package
+      CPVs (string) to install on the target in an order that satisfies their
+      inter-dependencies, |listed| the subset that was requested by the user,
+      and |num_updates| the number of packages being installed over preexisting
+      versions. Note that installation order should be reversed for removal.
+    """
+    if process_rev_rdeps and not process_rdeps:
+      raise ValueError('Must process forward deps when processing rev deps')
+    if process_rdeps and not update:
+      raise ValueError('Must check installed packages when processing deps')
+
+    if update:
+      logging.info('Initializing target installed packages database...')
+      self._InitTargetVarDB(device, root, process_rdeps, process_rev_rdeps)
+
+    logging.info('Initializing binary packages database...')
+    self._InitBinpkgDB(process_rdeps)
+
+    logging.info('Finding listed package(s)...')
+    self._InitDepQueue()
+    for pkg in listed_pkgs:
+      if pkg == '@installed':
+        if not update:
+          raise ValueError(
+              'Must check installed packages when updating all of them.')
+        self._EnqInstalledPkgs()
+      else:
+        self._EnqListedPkg(pkg)
+
+    logging.info('Computing set of packages to install...')
+    installs = self._ComputeInstalls(process_rdeps, process_rev_rdeps)
+
+    num_updates = 0
+    listed_installs = []
+    # Use |updated| so we don't shadow the |update| parameter above.
+    for cpv, _, listed, updated in installs.itervalues():
+      if listed:
+        listed_installs.append(cpv)
+      if updated:
+        num_updates += 1
+
+    logging.info('Processed %d package(s), %d will be installed, %d are '
+                 'updating existing packages',
+                 len(self.seen), len(installs), num_updates)
+
+    sorted_installs = self._SortInstalls(installs)
+    return sorted_installs, listed_installs, num_updates
+
+
def _Emerge(device, pkg_path, root, extra_args=None):
  """Copies |pkg| to |device| and emerges it.

  Args:
    device: A ChromiumOSDevice object.
    pkg_path: A path to a binary package.
    root: Package installation root path.
    extra_args: Extra arguments to pass to emerge.

  Raises:
    DeployError: Unrecoverable error during emerge.
  """
  pkgroot = os.path.join(device.work_dir, 'packages')
  pkg_name = os.path.basename(pkg_path)
  # Keep the package's parent directory name so the device-side PKGDIR layout
  # mirrors the local binary package layout.
  pkg_dirname = os.path.basename(os.path.dirname(pkg_path))
  pkg_dir = os.path.join(pkgroot, pkg_dirname)
  portage_tmpdir = os.path.join(device.work_dir, 'portage-tmp')
  # Clean out the dirs first if we had a previous emerge on the device so as to
  # free up space for this emerge.  The last emerge gets implicitly cleaned up
  # when the device connection deletes its work_dir.
  device.RunCommand(
      ['rm', '-rf', pkg_dir, portage_tmpdir, '&&',
       'mkdir', '-p', pkg_dir, portage_tmpdir], remote_sudo=True)

  # This message is read by BrilloDeployOperation.
  logging.notice('Copying %s to device.', pkg_name)
  device.CopyToDevice(pkg_path, pkg_dir, remote_sudo=True)

  logging.info('Use portage temp dir %s', portage_tmpdir)

  # This message is read by BrilloDeployOperation.
  logging.notice('Installing %s.', pkg_name)
  # From here on, |pkg_path| refers to the device-side copy of the package.
  pkg_path = os.path.join(pkg_dir, pkg_name)

  # We set PORTAGE_CONFIGROOT to '/usr/local' because by default all
  # chromeos-base packages will be skipped due to the configuration
  # in /etc/portage/make.profile/package.provided. However, there is
  # a known bug that /usr/local/etc/portage is not setup properly
  # (crbug.com/312041). This does not affect `cros deploy` because
  # we do not use the preset PKGDIR.
  extra_env = {
      'FEATURES': '-sandbox',
      'PKGDIR': pkgroot,
      'PORTAGE_CONFIGROOT': '/usr/local',
      'PORTAGE_TMPDIR': portage_tmpdir,
      'PORTDIR': device.work_dir,
      'CONFIG_PROTECT': '-*',
  }
  cmd = ['emerge', '--usepkg', pkg_path, '--root=%s' % root]
  if extra_args:
    cmd.append(extra_args)

  try:
    device.RunCommand(cmd, extra_env=extra_env, remote_sudo=True,
                      capture_output=False, debug_level=logging.INFO)
  except Exception:
    logging.error('Failed to emerge package %s', pkg_name)
    raise
  else:
    logging.notice('%s has been installed.', pkg_name)
+
+
def _GetPackagesByCPV(cpvs, strip, sysroot):
  """Returns paths to binary packages corresponding to |cpvs|.

  Args:
    cpvs: List of CPV components given by portage_util.SplitCPV().
    strip: True to run strip_package.
    sysroot: Sysroot path.

  Returns:
    List of paths corresponding to |cpvs|.

  Raises:
    DeployError: If a package is missing.
  """
  packages_dir = None
  if strip:
    # Strip the packages first; on success their stripped copies are looked
    # up under the dedicated stripped-packages directory.
    strip_cmd = ['strip_package', '--sysroot', sysroot]
    strip_cmd += [os.path.join(cpv.category, str(cpv.pv)) for cpv in cpvs]
    try:
      cros_build_lib.RunCommand(strip_cmd)
    except cros_build_lib.RunCommandError:
      logging.error('Cannot strip packages %s',
                    ' '.join([str(cpv) for cpv in cpvs]))
      raise
    packages_dir = _STRIPPED_PACKAGES_DIR

  pkg_paths = []
  for cpv in cpvs:
    pkg_path = portage_util.GetBinaryPackagePath(
        cpv.category, cpv.package, cpv.version, sysroot=sysroot,
        packages_dir=packages_dir)
    if not pkg_path:
      raise DeployError('Missing package %s.' % cpv)
    pkg_paths.append(pkg_path)

  return pkg_paths
+
+
def _GetPackagesPaths(pkgs, strip, sysroot):
  """Returns paths to binary |pkgs|.

  Each package argument may be specified as a filename, in which case it is
  returned as-is, or it may be a CPV value, in which case it is stripped (if
  instructed) and a path to it is returned.

  Args:
    pkgs: List of package arguments.
    strip: Whether or not to run strip_package for CPV packages.
    sysroot: The sysroot path.

  Returns:
    List of paths corresponding to |pkgs|.
  """
  # Positions of arguments that are not existing files; those are CPV specs
  # that must be resolved to binary package paths.
  cpv_indexes = [i for i, pkg in enumerate(pkgs) if not os.path.isfile(pkg)]
  cpvs = [portage_util.SplitCPV(pkgs[i]) for i in cpv_indexes]

  cpv_paths = cpvs and _GetPackagesByCPV(cpvs, strip, sysroot)
  result = list(pkgs)
  for idx, resolved_path in zip(cpv_indexes, cpv_paths):
    result[idx] = resolved_path
  return result
+
+
def _Unmerge(device, pkg, root):
  """Unmerges |pkg| on |device|.

  Args:
    device: A RemoteDevice object.
    pkg: A package name.
    root: Package installation root path.
  """
  pkg_name = os.path.basename(pkg)
  # This message is read by BrilloDeployOperation.
  logging.notice('Unmerging %s.', pkg_name)
  cmd = ['qmerge', '--yes']
  # Check if qmerge is available on the device. If not, use emerge.
  if device.RunCommand(
      ['qmerge', '--version'], error_code_ok=True).returncode != 0:
    cmd = ['emerge']

  cmd.extend(['--unmerge', pkg, '--root=%s' % root])
  try:
    # Always showing the emerge output for clarity.
    device.RunCommand(cmd, capture_output=False, remote_sudo=True,
                      debug_level=logging.INFO)
  except Exception:
    # Re-raise after logging so the caller can handle cleanup/reporting.
    logging.error('Failed to unmerge package %s', pkg_name)
    raise
  else:
    logging.notice('%s has been uninstalled.', pkg_name)
+
+
def _ConfirmDeploy(num_updates):
  """Returns whether we can continue deployment."""
  if num_updates <= _MAX_UPDATES_NUM:
    return True
  # Too many packages are being updated at once; warn and let the user decide.
  logging.warning(_MAX_UPDATES_WARNING)
  return cros_build_lib.BooleanPrompt(default=False)
+
+
def _EmergePackages(pkgs, device, strip, sysroot, root, emerge_args):
  """Calls _Emerge for each package in |pkgs|."""
  pkg_paths = _GetPackagesPaths(pkgs, strip, sysroot)
  for pkg_path in pkg_paths:
    _Emerge(device, pkg_path, root, extra_args=emerge_args)
+
+
def _UnmergePackages(pkgs, device, root):
  """Calls _Unmerge for each package in |pkgs|."""
  for pkg_name in pkgs:
    _Unmerge(device, pkg_name, root)
+
+
def Deploy(device, packages, board=None, emerge=True, update=False, deep=False,
           deep_rev=False, clean_binpkg=True, root='/', strip=True,
           emerge_args=None, ssh_private_key=None, ping=True, force=False,
           dry_run=False):
  """Deploys packages to a device.

  Args:
    device: commandline.Device object; None to use the default device.
    packages: List of packages (strings) to deploy to device.
    board: Board to use; None to automatically detect.
    emerge: True to emerge package, False to unmerge.
    update: Check installed version on device.
    deep: Install dependencies also. Implies |update|.
    deep_rev: Install reverse dependencies. Implies |deep|.
    clean_binpkg: Clean outdated binary packages.
    root: Package installation root path.
    strip: Run strip_package to filter out preset paths in the package.
    emerge_args: Extra arguments to pass to emerge.
    ssh_private_key: Path to an SSH private key file; None to use test keys.
    ping: True to ping the device before trying to connect.
    force: Ignore sanity checks and prompts.
    dry_run: Print deployment plan but do not deploy anything.

  Raises:
    ValueError: Invalid parameter or parameter combination.
    DeployError: Unrecoverable failure during deploy.
  """
  # Normalize implied flags: deep_rev => deep => update.
  if deep_rev:
    deep = True
  if deep:
    update = True

  if not packages:
    raise DeployError('No packages provided, nothing to deploy.')

  if update and not emerge:
    raise ValueError('Cannot update and unmerge.')

  if device:
    hostname, username, port = device.hostname, device.username, device.port
  else:
    hostname, username, port = None, None, None

  lsb_release = None
  sysroot = None
  try:
    # |device| is rebound here to the connected handler's device object.
    with remote_access.ChromiumOSDeviceHandler(
        hostname, port=port, username=username, private_key=ssh_private_key,
        base_dir=_DEVICE_BASE_DIR, ping=ping) as device:
      lsb_release = device.lsb_release

      board = cros_build_lib.GetBoard(device_board=device.board,
                                      override_board=board)
      if not force and board != device.board:
        raise DeployError('Device (%s) is incompatible with board %s. Use '
                          '--force to deploy anyway.' % (device, board))

      sysroot = cros_build_lib.GetSysroot(board=board)

      if clean_binpkg:
        logging.notice('Cleaning outdated binary packages from %s', sysroot)
        portage_util.CleanOutdatedBinaryPackages(sysroot)

      if not device.IsDirWritable(root):
        # Only remounts rootfs if the given root is not writable.
        if not device.MountRootfsReadWrite():
          raise DeployError('Cannot remount rootfs as read-write. Exiting.')

      # Obtain list of packages to upgrade/remove.
      pkg_scanner = _InstallPackageScanner(sysroot)
      pkgs, listed, num_updates = pkg_scanner.Run(
          device, root, packages, update, deep, deep_rev)
      if emerge:
        action_str = 'emerge'
      else:
        # Removal must happen in reverse dependency order.
        pkgs.reverse()
        action_str = 'unmerge'

      if not pkgs:
        logging.notice('No packages to %s', action_str)
        return

      logging.notice('These are the packages to %s:', action_str)
      for i, pkg in enumerate(pkgs):
        # '*' marks packages explicitly requested by the user.
        logging.notice('%s %d) %s', '*' if pkg in listed else ' ', i + 1, pkg)

      if dry_run or not _ConfirmDeploy(num_updates):
        return

      # Select function (emerge or unmerge) and bind args.
      if emerge:
        func = functools.partial(_EmergePackages, pkgs, device, strip,
                                 sysroot, root, emerge_args)
      else:
        func = functools.partial(_UnmergePackages, pkgs, device, root)

      # Call the function with the progress bar or with normal output.
      if command.UseProgressBar():
        op = BrilloDeployOperation(len(pkgs), emerge)
        op.Run(func, log_level=logging.DEBUG)
      else:
        func()

      logging.warning('Please restart any updated services on the device, '
                      'or just reboot it.')
  except Exception:
    # Log the device's LSB details to aid debugging before re-raising.
    if lsb_release:
      lsb_entries = sorted(lsb_release.items())
      logging.info('Following are the LSB version details of the device:\n%s',
                   '\n'.join('%s=%s' % (k, v) for k, v in lsb_entries))
    raise
diff --git a/cli/deploy_unittest b/cli/deploy_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cli/deploy_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cli/deploy_unittest.py b/cli/deploy_unittest.py
new file mode 100644
index 0000000..ceb41dd
--- /dev/null
+++ b/cli/deploy_unittest.py
@@ -0,0 +1,394 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the deploy module."""
+
+from __future__ import print_function
+
+import json
+import multiprocessing
+import os
+
+from chromite.cli import command
+from chromite.cli import deploy
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import portage_util
+from chromite.lib import remote_access
+try:
+  import portage
+except ImportError:
+  if cros_build_lib.IsInsideChroot():
+    raise
+
+
+# pylint: disable=protected-access
+
+
class ChromiumOSDeviceFake(object):
  """Fake for device."""

  def __init__(self):
    # Connection attributes default to None, as for an unconfigured device.
    self.lsb_release = None
    self.hostname = None
    self.username = None
    self.port = None
    self.board = 'board'

  def IsDirWritable(self, _):
    # Pretend every directory on the device is writable.
    return True
+
+
class ChromiumOSDeviceHandlerFake(object):
  """Fake for chromite.lib.remote_access.ChomiumOSDeviceHandler."""

  class RemoteAccessFake(object):
    """Fake for chromite.lib.remote_access.RemoteAccess."""

    def __init__(self):
      # Tests assign this attribute to control what RemoteSh() returns.
      self.remote_sh_output = None

    def RemoteSh(self, *_args, **_kwargs):
      return cros_build_lib.CommandResult(output=self.remote_sh_output)

  def __init__(self, *_args, **_kwargs):
    self._agent = self.RemoteAccessFake()
    self.device = ChromiumOSDeviceFake()

  # TODO(dpursell): Mock remote access object in cros_test_lib (brbug.com/986).
  def GetAgent(self):
    return self._agent

  def __enter__(self):
    # Each context entry hands out a fresh fake device.
    return ChromiumOSDeviceFake()

  def __exit__(self, _type, _value, _traceback):
    pass
+
+
class BrilloDeployOperationFake(deploy.BrilloDeployOperation):
  """Fake for deploy.BrilloDeployOperation."""
  def __init__(self, pkg_count, emerge, queue):
    super(BrilloDeployOperationFake, self).__init__(pkg_count, emerge)
    # Queue used to synchronize the test driver with ParseOutput() calls.
    self._queue = queue

  def ParseOutput(self, output=None):
    super(BrilloDeployOperationFake, self).ParseOutput(output)
    # Signal the test that one parse step has completed.
    self._queue.put('advance')
+
+
class DbApiFake(object):
  """Fake for Portage dbapi."""

  def __init__(self, pkgs):
    # Map each CPV to the metadata keys that aux_get() can serve.
    self.pkg_db = {
        cpv: {'SLOT': slot, 'RDEPEND': rdeps_raw, 'BUILD_TIME': build_time}
        for cpv, slot, rdeps_raw, build_time in pkgs}

  def cpv_all(self):
    """Returns all known package CPVs."""
    return self.pkg_db.keys()

  def aux_get(self, cpv, keys):
    """Returns the metadata values of |cpv| for the requested |keys|."""
    pkg_info = self.pkg_db[cpv]
    return [pkg_info[key] for key in keys]
+
+
class PackageScannerFake(object):
  """Fake for PackageScanner."""

  def __init__(self, packages):
    # Canned results served by Run() regardless of its arguments.
    self.pkgs = packages
    self.listed = []
    self.num_updates = None

  def Run(self, _device, _root, _packages, _update, _deep, _deep_rev):
    """Returns the canned scan results."""
    return self.pkgs, self.listed, self.num_updates
+
+
class PortageTreeFake(object):
  """Fake for Portage tree."""

  def __init__(self, dbapi):
    # Expose the dbapi attribute just like the real Portage tree object.
    self.dbapi = dbapi
+
+
class TestInstallPackageScanner(cros_test_lib.MockOutputTestCase):
  """Test the update package scanner."""
  _BOARD = 'foo_board'
  _BUILD_ROOT = '/build/%s' % _BOARD
  # Simulated target vardb entries: (CPV, slot, RDEPEND, BUILD_TIME) per
  # installed package. app1 depends on app2 (and blocks app3); app4 depends
  # on app1 and app5.
  _VARTREE = [
      ('foo/app1-1.2.3-r4', '0', 'foo/app2 !foo/app3', '1413309336'),
      ('foo/app2-4.5.6-r7', '0', '', '1413309336'),
      ('foo/app4-2.0.0-r1', '0', 'foo/app1 foo/app5', '1413309336'),
      ('foo/app5-3.0.7-r3', '0', '', '1413309336'),
  ]

  def setUp(self):
    """Patch imported modules."""
    self.PatchObject(cros_build_lib, 'GetChoice', return_value=0)
    self.device = ChromiumOSDeviceHandlerFake()
    self.scanner = deploy._InstallPackageScanner(self._BUILD_ROOT)

  def SetupVartree(self, vartree_pkgs):
    """Makes the fake device report |vartree_pkgs| as its installed DB."""
    self.device.GetAgent().remote_sh_output = json.dumps(vartree_pkgs)

  def SetupBintree(self, bintree_pkgs):
    """Patches portage to expose |bintree_pkgs| as the local binary tree."""
    bintree = PortageTreeFake(DbApiFake(bintree_pkgs))
    build_root = os.path.join(self._BUILD_ROOT, '')
    portage_db = {build_root: {'bintree': bintree}}
    self.PatchObject(portage, 'create_trees', return_value=portage_db)

  def ValidatePkgs(self, actual, expected, constraints=None):
    """Checks package set equality and optional ordering |constraints|."""
    # Containing exactly the same packages.
    self.assertEquals(sorted(expected), sorted(actual))
    # Packages appear in the right order.
    if constraints is not None:
      for needs, needed in constraints:
        self.assertGreater(actual.index(needs), actual.index(needed))

  def testRunUpdatedVersion(self):
    """A binary package with a newer version gets installed."""
    self.SetupVartree(self._VARTREE)
    app1 = 'foo/app1-1.2.5-r4'
    self.SetupBintree([
        (app1, '0', 'foo/app2 !foo/app3', '1413309336'),
        ('foo/app2-4.5.6-r7', '0', '', '1413309336'),
    ])
    installs, listed, num_updates = self.scanner.Run(
        self.device, '/', ['app1'], True, True, True)
    self.ValidatePkgs(installs, [app1])
    self.ValidatePkgs(listed, [app1])
    self.assertEquals(num_updates, 1)

  def testRunUpdatedBuildTime(self):
    """Same version but newer BUILD_TIME still counts as an update."""
    self.SetupVartree(self._VARTREE)
    app1 = 'foo/app1-1.2.3-r4'
    self.SetupBintree([
        (app1, '0', 'foo/app2 !foo/app3', '1413309350'),
        ('foo/app2-4.5.6-r7', '0', '', '1413309336'),
    ])
    installs, listed, num_updates = self.scanner.Run(
        self.device, '/', ['app1'], True, True, True)
    self.ValidatePkgs(installs, [app1])
    self.ValidatePkgs(listed, [app1])
    self.assertEquals(num_updates, 1)

  def testRunExistingDepUpdated(self):
    """An updated dependency of a listed package is pulled in as well."""
    self.SetupVartree(self._VARTREE)
    app1 = 'foo/app1-1.2.5-r2'
    app2 = 'foo/app2-4.5.8-r3'
    self.SetupBintree([
        (app1, '0', 'foo/app2 !foo/app3', '1413309350'),
        (app2, '0', '', '1413309350'),
    ])
    installs, listed, num_updates = self.scanner.Run(
        self.device, '/', ['app1'], True, True, True)
    self.ValidatePkgs(installs, [app1, app2], constraints=[(app1, app2)])
    self.ValidatePkgs(listed, [app1])
    self.assertEquals(num_updates, 2)

  def testRunMissingDepUpdated(self):
    """A dependency not yet installed is installed, not counted as update."""
    self.SetupVartree(self._VARTREE)
    app1 = 'foo/app1-1.2.5-r2'
    app6 = 'foo/app6-1.0.0-r1'
    self.SetupBintree([
        (app1, '0', 'foo/app2 !foo/app3 foo/app6', '1413309350'),
        ('foo/app2-4.5.6-r7', '0', '', '1413309336'),
        (app6, '0', '', '1413309350'),
    ])
    installs, listed, num_updates = self.scanner.Run(
        self.device, '/', ['app1'], True, True, True)
    self.ValidatePkgs(installs, [app1, app6], constraints=[(app1, app6)])
    self.ValidatePkgs(listed, [app1])
    self.assertEquals(num_updates, 1)

  def testRunExistingRevDepUpdated(self):
    """An installed reverse dependency is updated alongside the package."""
    self.SetupVartree(self._VARTREE)
    app1 = 'foo/app1-1.2.5-r2'
    app4 = 'foo/app4-2.0.1-r3'
    self.SetupBintree([
        (app1, '0', 'foo/app2 !foo/app3', '1413309350'),
        (app4, '0', 'foo/app1 foo/app5', '1413309350'),
        ('foo/app5-3.0.7-r3', '0', '', '1413309336'),
    ])
    installs, listed, num_updates = self.scanner.Run(
        self.device, '/', ['app1'], True, True, True)
    self.ValidatePkgs(installs, [app1, app4], constraints=[(app4, app1)])
    self.ValidatePkgs(listed, [app1])
    self.assertEquals(num_updates, 2)

  def testRunMissingRevDepNotUpdated(self):
    """A reverse dependency that is not installed is not pulled in."""
    self.SetupVartree(self._VARTREE)
    app1 = 'foo/app1-1.2.5-r2'
    app6 = 'foo/app6-1.0.0-r1'
    self.SetupBintree([
        (app1, '0', 'foo/app2 !foo/app3', '1413309350'),
        (app6, '0', 'foo/app1', '1413309350'),
    ])
    installs, listed, num_updates = self.scanner.Run(
        self.device, '/', ['app1'], True, True, True)
    self.ValidatePkgs(installs, [app1])
    self.ValidatePkgs(listed, [app1])
    self.assertEquals(num_updates, 1)

  def testRunTransitiveDepsUpdated(self):
    """Updates propagate through forward and reverse dependency chains."""
    self.SetupVartree(self._VARTREE)
    app1 = 'foo/app1-1.2.5-r2'
    app2 = 'foo/app2-4.5.8-r3'
    app4 = 'foo/app4-2.0.0-r1'
    app5 = 'foo/app5-3.0.8-r2'
    self.SetupBintree([
        (app1, '0', 'foo/app2 !foo/app3', '1413309350'),
        (app2, '0', '', '1413309350'),
        (app4, '0', 'foo/app1 foo/app5', '1413309350'),
        (app5, '0', '', '1413309350'),
    ])
    installs, listed, num_updates = self.scanner.Run(
        self.device, '/', ['app1'], True, True, True)
    self.ValidatePkgs(installs, [app1, app2, app4, app5],
                      constraints=[(app1, app2), (app4, app1), (app4, app5)])
    self.ValidatePkgs(listed, [app1])
    self.assertEquals(num_updates, 4)

  def testRunDisjunctiveDepsExistingUpdated(self):
    """An any-of dependency is satisfied by an already-installed option."""
    self.SetupVartree(self._VARTREE)
    app1 = 'foo/app1-1.2.5-r2'
    self.SetupBintree([
        (app1, '0', '|| ( foo/app6 foo/app2 ) !foo/app3', '1413309350'),
        ('foo/app2-4.5.6-r7', '0', '', '1413309336'),
    ])
    installs, listed, num_updates = self.scanner.Run(
        self.device, '/', ['app1'], True, True, True)
    self.ValidatePkgs(installs, [app1])
    self.ValidatePkgs(listed, [app1])
    self.assertEquals(num_updates, 1)

  def testRunDisjunctiveDepsDefaultUpdated(self):
    """When no any-of option is installed, one available option is used."""
    self.SetupVartree(self._VARTREE)
    app1 = 'foo/app1-1.2.5-r2'
    app7 = 'foo/app7-1.0.0-r1'
    self.SetupBintree([
        (app1, '0', '|| ( foo/app6 foo/app7 ) !foo/app3', '1413309350'),
        (app7, '0', '', '1413309350'),
    ])
    installs, listed, num_updates = self.scanner.Run(
        self.device, '/', ['app1'], True, True, True)
    self.ValidatePkgs(installs, [app1, app7], constraints=[(app1, app7)])
    self.ValidatePkgs(listed, [app1])
    self.assertEquals(num_updates, 1)
+
+
class TestDeploy(cros_test_lib.ProgressBarTestCase):
  """Test deploy.Deploy."""

  @staticmethod
  def FakeGetPackagesByCPV(cpvs, _strip, _sysroot):
    """Fabricates a binary package path for each CPV."""
    return ['/path/to/%s.tbz2' % cpv.pv for cpv in cpvs]

  def setUp(self):
    # Stub out all device/portage interactions so Deploy() runs locally.
    self.PatchObject(remote_access, 'ChromiumOSDeviceHandler',
                     side_effect=ChromiumOSDeviceHandlerFake)
    self.PatchObject(cros_build_lib, 'GetBoard', return_value=None)
    self.PatchObject(cros_build_lib, 'GetSysroot', return_value='sysroot')
    self.package_scanner = self.PatchObject(deploy, '_InstallPackageScanner')
    self.get_packages_paths = self.PatchObject(
        deploy, '_GetPackagesByCPV', side_effect=self.FakeGetPackagesByCPV)
    self.emerge = self.PatchObject(deploy, '_Emerge', return_value=None)
    self.unmerge = self.PatchObject(deploy, '_Unmerge', return_value=None)

  def testDeployEmerge(self):
    """Test that deploy._Emerge is called for each package."""

    _BINPKG = '/path/to/bar-1.2.5.tbz2'
    def FakeIsFile(fname):
      # Only the pre-built binary package path is treated as a file.
      return fname == _BINPKG

    packages = ['some/foo-1.2.3', _BINPKG, 'some/foobar-2.0']
    self.package_scanner.return_value = PackageScannerFake(packages)
    self.PatchObject(os.path, 'isfile', side_effect=FakeIsFile)

    deploy.Deploy(None, ['package'], force=True, clean_binpkg=False)

    # Check that package names were correctly resolved into binary packages.
    self.get_packages_paths.assert_called_once_with(
        [portage_util.SplitCPV(p) for p in packages if p != _BINPKG],
        True, 'sysroot')
    # Check that deploy._Emerge is called the right number of times.
    self.assertEqual(self.emerge.call_count, len(packages))
    self.assertEqual(self.unmerge.call_count, 0)

  def testDeployUnmerge(self):
    """Test that deploy._Unmerge is called for each package."""
    packages = ['foo', 'bar', 'foobar']
    self.package_scanner.return_value = PackageScannerFake(packages)

    deploy.Deploy(None, ['package'], force=True, clean_binpkg=False,
                  emerge=False)

    # Check that deploy._Unmerge is called the right number of times.
    self.assertEqual(self.emerge.call_count, 0)
    self.assertEqual(self.unmerge.call_count, len(packages))

  def testDeployMergeWithProgressBar(self):
    """Test that BrilloDeployOperation.Run() is called for merge."""
    packages = ['foo', 'bar', 'foobar']
    self.package_scanner.return_value = PackageScannerFake(packages)

    run = self.PatchObject(deploy.BrilloDeployOperation, 'Run',
                           return_value=None)

    self.PatchObject(command, 'UseProgressBar', return_value=True)
    deploy.Deploy(None, ['package'], force=True, clean_binpkg=False)

    # Check that BrilloDeployOperation.Run was called.
    self.assertTrue(run.called)

  def testDeployUnmergeWithProgressBar(self):
    """Test that BrilloDeployOperation.Run() is called for unmerge."""
    packages = ['foo', 'bar', 'foobar']
    self.package_scanner.return_value = PackageScannerFake(packages)

    run = self.PatchObject(deploy.BrilloDeployOperation, 'Run',
                           return_value=None)

    self.PatchObject(command, 'UseProgressBar', return_value=True)
    deploy.Deploy(None, ['package'], force=True, clean_binpkg=False,
                  emerge=False)

    # Check that BrilloDeployOperation.Run was called.
    self.assertTrue(run.called)

  def testBrilloDeployMergeOperation(self):
    """Test that BrilloDeployOperation works for merge."""
    def func(queue):
      # Print each expected merge event only after ParseOutput signals it.
      for event in op.MERGE_EVENTS:
        queue.get()
        print(event)

    queue = multiprocessing.Queue()
    # Emerge one package.
    op = BrilloDeployOperationFake(1, True, queue)

    with self.OutputCapturer():
      op.Run(func, queue)

    # Check that the progress bar prints correctly.
    self.AssertProgressBarAllEvents(len(op.MERGE_EVENTS))

  def testBrilloDeployUnmergeOperation(self):
    """Test that BrilloDeployOperation works for unmerge."""
    def func(queue):
      # Print each expected unmerge event only after ParseOutput signals it.
      for event in op.UNMERGE_EVENTS:
        queue.get()
        print(event)

    queue = multiprocessing.Queue()
    # Unmerge one package.
    op = BrilloDeployOperationFake(1, False, queue)

    with self.OutputCapturer():
      op.Run(func, queue)

    # Check that the progress bar prints correctly.
    self.AssertProgressBarAllEvents(len(op.UNMERGE_EVENTS))
diff --git a/cli/flash.py b/cli/flash.py
new file mode 100644
index 0000000..4863e40
--- /dev/null
+++ b/cli/flash.py
@@ -0,0 +1,810 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Install/copy the image to the device."""
+
+from __future__ import print_function
+
+import cStringIO
+import os
+import re
+import shutil
+import tempfile
+import time
+
+from chromite.cbuildbot import constants
+from chromite.cli import command
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import dev_server_wrapper as ds_wrapper
+from chromite.lib import operation
+from chromite.lib import osutils
+from chromite.lib import path_util
+from chromite.lib import remote_access
+
+
+DEVSERVER_STATIC_DIR = path_util.FromChrootPath(
+    os.path.join(constants.CHROOT_SOURCE_ROOT, 'devserver', 'static'))
+
+
class UsbImagerOperation(operation.ProgressBarOperation):
  """Progress bar for flashing image to operation."""

  def __init__(self, image):
    super(UsbImagerOperation, self).__init__()
    # Total image size in bytes; denominator for the progress fraction.
    self._size = os.path.getsize(image)
    self._transferred = 0.
    # Matches the byte counts that dd reports in its status output.
    self._bytes = re.compile(r'(\d+) bytes')

  def _GetDDPid(self):
    """Get the Pid of dd."""
    try:
      pids = cros_build_lib.RunCommand(['pgrep', 'dd'], capture_output=True,
                                       print_cmd=False).output
      # pgrep matches by name only; confirm the pid is really our child 'dd'.
      for pid in pids.splitlines():
        if osutils.IsChildProcess(int(pid), name='dd'):
          return int(pid)
      return -1
    except cros_build_lib.RunCommandError:
      # If dd isn't still running, then we assume that it is finished.
      return -1

  def _PingDD(self, dd_pid):
    """Send USR1 signal to dd to get status update."""
    try:
      cmd = ['kill', '-USR1', str(dd_pid)]
      cros_build_lib.SudoRunCommand(cmd, print_cmd=False)
    except cros_build_lib.RunCommandError:
      # Here we assume that dd finished in the background.
      return

  def ParseOutput(self, output=None):
    """Parse the output of dd to update progress bar."""
    dd_pid = self._GetDDPid()
    if dd_pid == -1:
      return

    self._PingDD(dd_pid)

    if output is None:
      # Pull whatever dd wrote to the captured stdout/stderr streams.
      stdout = self._stdout.read()
      stderr = self._stderr.read()
      output = stdout + stderr

    match = self._bytes.search(output)
    if match:
      # NOTE(review): stored as a string; float() below does the conversion.
      self._transferred = match.groups()[0]

    self.ProgressBar(float(self._transferred) / self._size)
+
+
def _IsFilePathGPTDiskImage(file_path):
  """Determines if a file is a valid GPT disk.

  Args:
    file_path: Path to the file to test.

  Returns:
    True if |file_path| is a regular file carrying a GPT signature.
  """
  if not os.path.isfile(file_path):
    return False
  with cros_build_lib.Open(file_path) as image_file:
    # Look for the MBR boot signature (0x55 0xaa) at offset 0x1fe followed
    # immediately by the 'EFI PART' GPT header signature.
    image_file.seek(0x1fe)
    return image_file.read(10) == '\x55\xaaEFI PART'
+
+
def _ChooseImageFromDirectory(dir_path):
  """Lists all image files in |dir_path| and ask user to select one.

  Args:
    dir_path: Path to the directory.

  Raises:
    ValueError: If the directory contains no disk images.
  """
  candidates = [name for name in os.listdir(dir_path)
                if _IsFilePathGPTDiskImage(os.path.join(dir_path, name))]
  candidates.sort()
  if not candidates:
    raise ValueError('No image found in %s.' % dir_path)

  idx = 0
  if len(candidates) > 1:
    # More than one image: let the user pick.
    idx = cros_build_lib.GetChoice(
        'Multiple images found in %s. Please select one to continue:' % (
            (dir_path,)),
        candidates)

  return os.path.join(dir_path, candidates[idx])
+
+
class FlashError(Exception):
  """Raised when flashing fails in a way we cannot recover from."""
+
+
+class USBImager(object):
+  """Copy image to the target removable device."""
+
  def __init__(self, device, board, image, debug=False, install=False,
               yes=False):
    """Initializes USBImager."""
    self.device = device
    # Fall back to the default board when none is given.
    self.board = board if board else cros_build_lib.GetDefaultBoard()
    self.image = image
    self.debug = debug
    self.debug_level = logging.DEBUG if debug else logging.INFO
    self.install = install
    # When True, skip interactive confirmation prompts.
    self.yes = yes
+
  def DeviceNameToPath(self, device_name):
    """Returns the device node path for |device_name| (e.g. 'sdb' -> '/dev/sdb')."""
    return '/dev/%s' % device_name
+
+  def GetRemovableDeviceDescription(self, device):
+    """Returns a informational description of the removable |device|.
+
+    Args:
+      device: the device name (e.g. sdc).
+
+    Returns:
+      A string describing |device| (e.g. Patriot Memory 7918 MB).
+    """
+    desc = [
+        osutils.GetDeviceInfo(device, keyword='manufacturer'),
+        osutils.GetDeviceInfo(device, keyword='product'),
+        osutils.GetDeviceSize(self.DeviceNameToPath(device)),
+        '(%s)' % self.DeviceNameToPath(device),
+    ]
+    return ' '.join([x for x in desc if x])
+
+  def ListAllRemovableDevices(self):
+    """Returns a list of removable devices.
+
+    Returns:
+      A list of device names (e.g. ['sdb', 'sdc']).
+    """
+    devices = osutils.ListBlockDevices()
+    removable_devices = []
+    for d in devices:
+      if d.TYPE == 'disk' and d.RM == '1':
+        removable_devices.append(d.NAME)
+
+    return removable_devices
+
+  def ChooseRemovableDevice(self, devices):
+    """Lists all removable devices and asks user to select/confirm.
+
+    Args:
+      devices: a list of device names (e.g. ['sda', 'sdb']).
+
+    Returns:
+      The device name chosen by the user.
+    """
+    idx = cros_build_lib.GetChoice(
+        'Removable device(s) found. Please select/confirm to continue:',
+        [self.GetRemovableDeviceDescription(x) for x in devices])
+
+    return devices[idx]
+
  def InstallImageToDevice(self, image, device):
    """Installs |image| to the removable |device|.

    Args:
      image: Path to the image to copy.
      device: Device to copy to.
    """
    cmd = [
        'chromeos-install',
        '--yes',
        '--skip_src_removable',
        '--skip_dst_removable',
        '--payload_image=%s' % image,
        '--dst=%s' % device,
        '--skip_postinstall',
    ]
    # chromeos-install needs root privileges to partition and write the device.
    cros_build_lib.SudoRunCommand(cmd)
+
+  def CopyImageToDevice(self, image, device):
+    """Copies |image| to the removable |device|.
+
+    Args:
+      image: Path to the image to copy.
+      device: Device to copy to.
+    """
+    # iflag=fullblock makes dd accumulate full 4M input blocks;
+    # oflag=sync writes each block synchronously to the device.
+    cmd = ['dd', 'if=%s' % image, 'of=%s' % device, 'bs=4M', 'iflag=fullblock',
+           'oflag=sync']
+    # Show a progress bar only when the logger would emit NOTICE output;
+    # otherwise just run dd quietly.
+    if logging.getLogger().getEffectiveLevel() <= logging.NOTICE:
+      op = UsbImagerOperation(image)
+      op.Run(cros_build_lib.SudoRunCommand, cmd, debug_level=logging.NOTICE,
+             update_period=0.5)
+    else:
+      cros_build_lib.SudoRunCommand(
+          cmd, debug_level=logging.NOTICE,
+          print_cmd=logging.getLogger().getEffectiveLevel() < logging.NOTICE)
+
+    # Flush any remaining buffered writes before the device is removed.
+    cros_build_lib.SudoRunCommand(['sync'], debug_level=self.debug_level)
+
+  def _GetImagePath(self):
+    """Returns the image path to use.
+
+    Resolves self.image in one of three ways: use a local file directly
+    (after sanity-checking it looks like a GPT disk image), let the user
+    pick a *.bin from a local directory, or translate an xbuddy path.
+
+    Raises:
+      FlashError: If the user aborts after a non-disk-image warning.
+    """
+    image_path = translated_path = None
+    if os.path.isfile(self.image):
+      # Warn (unless --yes) when the file lacks the GPT magic bytes --
+      # commonly the user passed a still-tarred image.
+      if not self.yes and not _IsFilePathGPTDiskImage(self.image):
+        # TODO(wnwen): Open the tarball and if there is just one file in it,
+        #     use that instead. Existing code in upload_symbols.py.
+        if cros_build_lib.BooleanPrompt(
+            prolog='The given image file is not a valid disk image. Perhaps '
+                   'you forgot to untar it.',
+            prompt='Terminate the current flash process?'):
+          raise FlashError('Update terminated by user.')
+      image_path = self.image
+    elif os.path.isdir(self.image):
+      # Ask user which image (*.bin) in the folder to use.
+      image_path = _ChooseImageFromDirectory(self.image)
+    else:
+      # Translate the xbuddy path to get the exact image to use.
+      translated_path, _ = ds_wrapper.GetImagePathWithXbuddy(
+          self.image, self.board, static_dir=DEVSERVER_STATIC_DIR)
+      image_path = ds_wrapper.TranslatedPathToLocalPath(
+          translated_path, DEVSERVER_STATIC_DIR)
+
+    logging.info('Using image %s', translated_path or image_path)
+    return image_path
+
+  def Run(self):
+    """Image the removable device.
+
+    Validates/selects the target device, resolves the image path, then
+    either installs (chromeos-install) or raw-copies (dd) the image to it.
+
+    Raises:
+      FlashError: If the given device path does not exist, the user declines
+        to flash a non-removable device, or no removable device is found.
+    """
+    devices = self.ListAllRemovableDevices()
+
+    if self.device:
+      # If user specified a device path, check if it exists.
+      if not os.path.exists(self.device):
+        raise FlashError('Device path %s does not exist.' % self.device)
+
+      # Then check if it is removable.
+      if self.device not in [self.DeviceNameToPath(x) for x in devices]:
+        msg = '%s is not a removable device.' % self.device
+        # --yes skips the confirmation; otherwise the user must confirm
+        # before a non-removable device is used.
+        if not (self.yes or cros_build_lib.BooleanPrompt(
+            default=False, prolog=msg)):
+          raise FlashError('You can specify usb:// to choose from a list of '
+                           'removable devices.')
+    target = None
+    if self.device:
+      # Get device name from path (e.g. sdc in /dev/sdc).
+      target = self.device.rsplit(os.path.sep, 1)[-1]
+    elif devices:
+      # Ask user to choose from the list.
+      target = self.ChooseRemovableDevice(devices)
+    else:
+      raise FlashError('No removable devices detected.')
+
+    image_path = self._GetImagePath()
+    try:
+      device = self.DeviceNameToPath(target)
+      if self.install:
+        self.InstallImageToDevice(image_path, device)
+      else:
+        self.CopyImageToDevice(image_path, device)
+    except cros_build_lib.RunCommandError:
+      # Deliberately non-fatal: log the failure instead of re-raising.
+      logging.error('Failed copying image to device %s',
+                    self.DeviceNameToPath(target))
+
+
+class FileImager(USBImager):
+  """Copy image to the target path."""
+
+  def Run(self):
+    """Copy the image to the path specified by self.device.
+
+    Raises:
+      FlashError: If self.device does not exist.
+    """
+    if not os.path.exists(self.device):
+      raise FlashError('Path %s does not exist.' % self.device)
+
+    image_path = self._GetImagePath()
+    # If the destination is a directory, the image keeps its basename there.
+    if os.path.isdir(self.device):
+      logging.info('Copying to %s',
+                   os.path.join(self.device, os.path.basename(image_path)))
+    else:
+      logging.info('Copying to %s', self.device)
+    try:
+      shutil.copy(image_path, self.device)
+    except IOError:
+      # Deliberately non-fatal: log the failure instead of re-raising.
+      logging.error('Failed to copy image %s to %s', image_path, self.device)
+
+
+class RemoteDeviceUpdater(object):
+  """Performs update on a remote device."""
+  DEVSERVER_FILENAME = 'devserver.py'
+  STATEFUL_UPDATE_BIN = '/usr/bin/stateful_update'
+  UPDATE_ENGINE_BIN = 'update_engine_client'
+  # Root working directory on the device. This directory is in the
+  # stateful partition and thus has enough space to store the payloads.
+  DEVICE_BASE_DIR = '/mnt/stateful_partition/cros-flash'
+  UPDATE_CHECK_INTERVAL_PROGRESSBAR = 0.5
+  UPDATE_CHECK_INTERVAL_NORMAL = 10
+
+  def __init__(self, ssh_hostname, ssh_port, image, stateful_update=True,
+               rootfs_update=True, clobber_stateful=False, reboot=True,
+               board=None, src_image_to_delta=None, wipe=True, debug=False,
+               yes=False, force=False, ping=True,
+               disable_verification=False):
+    """Initializes RemoteDeviceUpdater"""
+    if not stateful_update and not rootfs_update:
+      raise ValueError('No update operation to perform; either stateful or'
+                       ' rootfs partitions must be updated.')
+    self.tempdir = tempfile.mkdtemp(prefix='cros-flash')
+    self.ssh_hostname = ssh_hostname
+    self.ssh_port = ssh_port
+    self.image = image
+    self.board = board
+    self.src_image_to_delta = src_image_to_delta
+    self.do_stateful_update = stateful_update
+    self.do_rootfs_update = rootfs_update
+    self.disable_verification = disable_verification
+    self.clobber_stateful = clobber_stateful
+    self.reboot = reboot
+    self.debug = debug
+    self.ping = ping
+    # Do not wipe if debug is set.
+    self.wipe = wipe and not debug
+    self.yes = yes
+    self.force = force
+
+  # pylint: disable=unbalanced-tuple-unpacking
+  @classmethod
+  def GetUpdateStatus(cls, device, keys=None):
+    """Returns the status of the update engine on the |device|.
+
+    Retrieves the status from update engine and confirms all keys are
+    in the status.
+
+    Args:
+      device: A ChromiumOSDevice object.
+      keys: the keys to look for in the status result (defaults to
+        ['CURRENT_OP']).
+
+    Returns:
+      A list of values in the order of |keys|.
+    """
+    keys = ['CURRENT_OP'] if not keys else keys
+    result = device.RunCommand([cls.UPDATE_ENGINE_BIN, '--status'],
+                               capture_output=True)
+    if not result.output:
+      raise Exception('Cannot get update status')
+
+    try:
+      status = cros_build_lib.LoadKeyValueFile(
+          cStringIO.StringIO(result.output))
+    except ValueError:
+      raise ValueError('Cannot parse update status')
+
+    values = []
+    for key in keys:
+      if key not in status:
+        raise ValueError('Missing %s in the update engine status')
+
+      values.append(status.get(key))
+
+    return values
+
+  def UpdateStateful(self, device, payload, clobber=False):
+    """Update the stateful partition of the device.
+
+    Args:
+      device: The ChromiumOSDevice object to update.
+      payload: The path to the update payload.
+      clobber: Clobber stateful partition (defaults to False).
+    """
+    # Copy latest stateful_update to device.
+    stateful_update_bin = path_util.FromChrootPath(self.STATEFUL_UPDATE_BIN)
+    device.CopyToWorkDir(stateful_update_bin)
+    msg = 'Updating stateful partition'
+    logging.info('Copying stateful payload to device...')
+    device.CopyToWorkDir(payload)
+    cmd = ['sh',
+           os.path.join(device.work_dir,
+                        os.path.basename(self.STATEFUL_UPDATE_BIN)),
+           os.path.join(device.work_dir, os.path.basename(payload))]
+
+    if clobber:
+      cmd.append('--stateful_change=clean')
+      msg += ' with clobber enabled'
+
+    logging.info('%s...', msg)
+    try:
+      device.RunCommand(cmd)
+    except cros_build_lib.RunCommandError:
+      logging.error('Faild to perform stateful partition update.')
+
+  def _CopyDevServerPackage(self, device, tempdir):
+    """Copy devserver package to work directory of device.
+
+    Args:
+      device: The ChromiumOSDevice object to copy the package to.
+      tempdir: The directory to temporarily store devserver package.
+    """
+    logging.info('Copying devserver package to device...')
+    src_dir = os.path.join(tempdir, 'src')
+    osutils.RmDir(src_dir, ignore_missing=True)
+    shutil.copytree(
+        ds_wrapper.DEVSERVER_PKG_DIR, src_dir,
+        ignore=shutil.ignore_patterns('*.pyc', 'tmp*', '.*', 'static', '*~'))
+    device.CopyToWorkDir(src_dir)
+    return os.path.join(device.work_dir, os.path.basename(src_dir))
+
+  def SetupRootfsUpdate(self, device):
+    """Makes sure |device| is ready for rootfs update."""
+    logging.info('Checking if update engine is idle...')
+    status, = self.GetUpdateStatus(device)
+    if status == 'UPDATE_STATUS_UPDATED_NEED_REBOOT':
+      logging.info('Device needs to reboot before updating...')
+      device.Reboot()
+      status, = self.GetUpdateStatus(device)
+
+    if status != 'UPDATE_STATUS_IDLE':
+      raise FlashError('Update engine is not idle. Status: %s' % status)
+
+  def UpdateRootfs(self, device, payload, tempdir):
+    """Update the rootfs partition of the device.
+
+    Args:
+      device: The ChromiumOSDevice object to update.
+      payload: The path to the update payload.
+      tempdir: The directory to store temporary files.
+    """
+    # Setup devserver and payload on the target device.
+    static_dir = os.path.join(device.work_dir, 'static')
+    payload_dir = os.path.join(static_dir, 'pregenerated')
+    src_dir = self._CopyDevServerPackage(device, tempdir)
+    device.RunCommand(['mkdir', '-p', payload_dir])
+    logging.info('Copying rootfs payload to device...')
+    device.CopyToDevice(payload, payload_dir)
+    devserver_bin = os.path.join(src_dir, self.DEVSERVER_FILENAME)
+    ds = ds_wrapper.RemoteDevServerWrapper(
+        device, devserver_bin, static_dir=static_dir, log_dir=device.work_dir)
+
+    logging.info('Updating rootfs partition')
+    try:
+      ds.Start()
+      # Use the localhost IP address to ensure that update engine
+      # client can connect to the devserver.
+      omaha_url = ds.GetDevServerURL(
+          ip='127.0.0.1', port=ds.port, sub_dir='update/pregenerated')
+      cmd = [self.UPDATE_ENGINE_BIN, '-check_for_update',
+             '-omaha_url=%s' % omaha_url]
+      device.RunCommand(cmd)
+
+      # If we are using a progress bar, update it every 0.5s instead of 10s.
+      if command.UseProgressBar():
+        update_check_interval = self.UPDATE_CHECK_INTERVAL_PROGRESSBAR
+        oper = operation.ProgressBarOperation()
+      else:
+        update_check_interval = self.UPDATE_CHECK_INTERVAL_NORMAL
+        oper = None
+      end_message_not_printed = True
+
+      # Loop until update is complete.
+      while True:
+        op, progress = self.GetUpdateStatus(device, ['CURRENT_OP', 'PROGRESS'])
+        logging.info('Waiting for update...status: %s at progress %s',
+                     op, progress)
+
+        if op == 'UPDATE_STATUS_UPDATED_NEED_REBOOT':
+          logging.notice('Update completed.')
+          break
+
+        if op == 'UPDATE_STATUS_IDLE':
+          raise FlashError(
+              'Update failed with unexpected update status: %s' % op)
+
+        if oper is not None:
+          if op == 'UPDATE_STATUS_DOWNLOADING':
+            oper.ProgressBar(float(progress))
+          elif end_message_not_printed and op == 'UPDATE_STATUS_FINALIZING':
+            oper.Cleanup()
+            logging.notice('Finalizing image.')
+            end_message_not_printed = False
+
+        time.sleep(update_check_interval)
+
+      ds.Stop()
+    except Exception:
+      logging.error('Rootfs update failed.')
+      logging.warning(ds.TailLog() or 'No devserver log is available.')
+      raise
+    finally:
+      ds.Stop()
+      device.CopyFromDevice(ds.log_file,
+                            os.path.join(tempdir, 'target_devserver.log'),
+                            error_code_ok=True)
+      device.CopyFromDevice('/var/log/update_engine.log', tempdir,
+                            follow_symlinks=True,
+                            error_code_ok=True)
+
+  def _CheckPayloads(self, payload_dir):
+    """Checks that all update payloads exists in |payload_dir|."""
+    filenames = []
+    filenames += [ds_wrapper.ROOTFS_FILENAME] if self.do_rootfs_update else []
+    if self.do_stateful_update:
+      filenames += [ds_wrapper.STATEFUL_FILENAME]
+    for fname in filenames:
+      payload = os.path.join(payload_dir, fname)
+      if not os.path.exists(payload):
+        raise FlashError('Payload %s does not exist!' % payload)
+
+  def Verify(self, old_root_dev, new_root_dev):
+    """Verifies that the root deivce changed after reboot."""
+    assert new_root_dev and old_root_dev
+    if new_root_dev == old_root_dev:
+      raise FlashError(
+          'Failed to boot into the new version. Possibly there was a '
+          'signing problem, or an automated rollback occurred because '
+          'your new image failed to boot.')
+
+  @classmethod
+  def GetRootDev(cls, device):
+    """Get the current root device on |device|."""
+    rootdev = device.RunCommand(
+        ['rootdev', '-s'], capture_output=True).output.strip()
+    logging.debug('Current root device is %s', rootdev)
+    return rootdev
+
+  def Cleanup(self):
+    """Cleans up the temporary directory."""
+    if self.wipe:
+      logging.info('Cleaning up temporary working directory...')
+      osutils.RmDir(self.tempdir)
+    else:
+      logging.info('You can find the log files and/or payloads in %s',
+                   self.tempdir)
+
+  def _CanRunDevserver(self, device, tempdir):
+    """We can run devserver on |device|.
+
+    If the stateful partition is corrupted, Python or other packages
+    (e.g. cherrypy) needed for rootfs update may be missing on |device|.
+
+    This will also use `ldconfig` to update library paths on the target
+    device if it looks like that's causing problems, which is necessary
+    for base images.
+
+    Args:
+      device: A ChromiumOSDevice object.
+      tempdir: A temporary directory to store files.
+
+    Returns:
+      True if we can start devserver; False otherwise.
+    """
+    logging.info('Checking if we can run devserver on the device.')
+    src_dir = self._CopyDevServerPackage(device, tempdir)
+    devserver_bin = os.path.join(src_dir, self.DEVSERVER_FILENAME)
+    devserver_check_command = ['python', devserver_bin, '--help']
+    try:
+      device.RunCommand(devserver_check_command)
+    except cros_build_lib.RunCommandError as e:
+      logging.warning('Cannot start devserver: %s', e)
+      if 'python: error while loading shared libraries' in str(e):
+        logging.info('Attempting to correct device library paths...')
+        try:
+          device.RunCommand(['ldconfig', '-r', '/'])
+          device.RunCommand(devserver_check_command)
+          logging.info('Library path correction successful.')
+          return True
+        except cros_build_lib.RunCommandError as e2:
+          logging.warning('Library path correction failed: %s', e2)
+
+      return False
+
+    return True
+
+  def Run(self):
+    """Performs remote device update."""
+    old_root_dev, new_root_dev = None, None
+    try:
+      device_connected = False
+      with remote_access.ChromiumOSDeviceHandler(
+          self.ssh_hostname, port=self.ssh_port,
+          base_dir=self.DEVICE_BASE_DIR, ping=self.ping) as device:
+        device_connected = True
+
+        payload_dir = self.tempdir
+        if os.path.isdir(self.image):
+          # If the given path is a directory, we use the provided update
+          # payload(s) in the directory.
+          payload_dir = self.image
+          logging.info('Using provided payloads in %s', payload_dir)
+        elif os.path.isfile(self.image):
+          # If the given path is an image, make sure devserver can access it
+          # and generate payloads.
+          logging.info('Using image %s', self.image)
+          ds_wrapper.GetUpdatePayloadsFromLocalPath(
+              self.image, payload_dir,
+              src_image_to_delta=self.src_image_to_delta,
+              static_dir=DEVSERVER_STATIC_DIR)
+        else:
+          self.board = cros_build_lib.GetBoard(device_board=device.board,
+                                               override_board=self.board,
+                                               force=self.yes)
+          if not self.board:
+            raise FlashError('No board identified')
+
+          if not self.force and self.board != device.board:
+            # If a board was specified, it must be compatible with the device.
+            raise FlashError('Device (%s) is incompatible with board %s',
+                             device.board, self.board)
+
+          logging.info('Board is %s', self.board)
+
+          # Translate the xbuddy path to get the exact image to use.
+          translated_path, resolved_path = ds_wrapper.GetImagePathWithXbuddy(
+              self.image, self.board, static_dir=DEVSERVER_STATIC_DIR,
+              lookup_only=True)
+          logging.info('Using image %s', translated_path)
+          # Convert the translated path to be used in the update request.
+          image_path = ds_wrapper.ConvertTranslatedPath(resolved_path,
+                                                        translated_path)
+
+          # Launch a local devserver to generate/serve update payloads.
+          ds_wrapper.GetUpdatePayloads(
+              image_path, payload_dir, board=self.board,
+              src_image_to_delta=self.src_image_to_delta,
+              static_dir=DEVSERVER_STATIC_DIR)
+
+        # Verify that all required payloads are in the payload directory.
+        self._CheckPayloads(payload_dir)
+
+        restore_stateful = False
+        if (not self._CanRunDevserver(device, self.tempdir) and
+            self.do_rootfs_update):
+          msg = ('Cannot start devserver! The stateful partition may be '
+                 'corrupted.')
+          prompt = 'Attempt to restore the stateful partition?'
+          restore_stateful = self.yes or cros_build_lib.BooleanPrompt(
+              prompt=prompt, default=False, prolog=msg)
+          if not restore_stateful:
+            raise FlashError('Cannot continue to perform rootfs update!')
+
+        if restore_stateful:
+          logging.warning('Restoring the stateful partition...')
+          payload = os.path.join(payload_dir, ds_wrapper.STATEFUL_FILENAME)
+          self.UpdateStateful(device, payload, clobber=self.clobber_stateful)
+          device.Reboot()
+          if self._CanRunDevserver(device, self.tempdir):
+            logging.info('Stateful partition restored.')
+          else:
+            raise FlashError('Unable to restore stateful partition.')
+
+        # Perform device updates.
+        if self.do_rootfs_update:
+          self.SetupRootfsUpdate(device)
+          # Record the current root device. This must be done after
+          # SetupRootfsUpdate because SetupRootfsUpdate may reboot the
+          # device if there is a pending update, which changes the
+          # root device.
+          old_root_dev = self.GetRootDev(device)
+          payload = os.path.join(payload_dir, ds_wrapper.ROOTFS_FILENAME)
+          self.UpdateRootfs(device, payload, self.tempdir)
+          logging.info('Rootfs update completed.')
+
+        if self.do_stateful_update and not restore_stateful:
+          payload = os.path.join(payload_dir, ds_wrapper.STATEFUL_FILENAME)
+          self.UpdateStateful(device, payload, clobber=self.clobber_stateful)
+          logging.info('Stateful update completed.')
+
+        if self.reboot:
+          logging.notice('Rebooting device...')
+          device.Reboot()
+          if self.clobber_stateful:
+            # --clobber-stateful wipes the stateful partition and the
+            # working directory on the device no longer exists. To
+            # remedy this, we recreate the working directory here.
+            device.BaseRunCommand(['mkdir', '-p', device.work_dir])
+
+        if self.do_rootfs_update and self.reboot:
+          logging.notice('Verifying that the device has been updated...')
+          new_root_dev = self.GetRootDev(device)
+          self.Verify(old_root_dev, new_root_dev)
+
+        if self.disable_verification:
+          logging.info('Disabling rootfs verification on the device...')
+          device.DisableRootfsVerification()
+
+    except Exception:
+      logging.error('Device update failed.')
+      if device_connected and device.lsb_release:
+        lsb_entries = sorted(device.lsb_release.items())
+        logging.info('Following are the LSB version details of the device:\n%s',
+                     '\n'.join('%s=%s' % (k, v) for k, v in lsb_entries))
+      raise
+    else:
+      logging.notice('Update performed successfully.')
+    finally:
+      self.Cleanup()
+
+
+def Flash(device, image, board=None, install=False, src_image_to_delta=None,
+          rootfs_update=True, stateful_update=True, clobber_stateful=False,
+          reboot=True, wipe=True, ping=True, disable_rootfs_verification=False,
+          clear_cache=False, yes=False, force=False, debug=False):
+  """Flashes a device, USB drive, or file with an image.
+
+  This provides functionality common to `cros flash` and `brillo flash`
+  so that they can parse the commandline separately but still use the
+  same underlying functionality.
+
+  Args:
+    device: commandline.Device object; None to use the default device.
+    image: Path (string) to the update image. Can be a local or xbuddy path;
+        non-existent local paths are converted to xbuddy.
+    board: Board to use; None to automatically detect.
+    install: Install to USB using base disk layout; USB |device| scheme only.
+    src_image_to_delta: Local path to an image to be used as the base to
+        generate delta payloads; SSH |device| scheme only.
+    rootfs_update: Update rootfs partition; SSH |device| scheme only.
+    stateful_update: Update stateful partition; SSH |device| scheme only.
+    clobber_stateful: Clobber stateful partition; SSH |device| scheme only.
+    reboot: Reboot device after update; SSH |device| scheme only.
+    wipe: Wipe temporary working directory; SSH |device| scheme only.
+    ping: Ping the device before attempting update; SSH |device| scheme only.
+    disable_rootfs_verification: Remove rootfs verification after update; SSH
+        |device| scheme only.
+    clear_cache: Clear the devserver static directory.
+    yes: Assume "yes" for any prompt.
+    force: Ignore sanity checks and prompts. Overrides |yes| if True.
+    debug: Print additional debugging messages.
+
+  Raises:
+    FlashError: An unrecoverable error occurred.
+    ValueError: Invalid parameter combination.
+  """
+  # |force| implies |yes|: skip all prompts when sanity checks are ignored.
+  if force:
+    yes = True
+
+  if clear_cache:
+    logging.info('Clearing the cache...')
+    ds_wrapper.DevServerWrapper.WipeStaticDirectory(DEVSERVER_STATIC_DIR)
+
+  try:
+    osutils.SafeMakedirsNonRoot(DEVSERVER_STATIC_DIR)
+  except OSError:
+    logging.error('Failed to create %s', DEVSERVER_STATIC_DIR)
+
+  # --install is only meaningful for USB targets and requires the chroot.
+  if install:
+    if not device or device.scheme != commandline.DEVICE_SCHEME_USB:
+      raise ValueError(
+          '--install can only be used when writing to a USB device')
+    if not cros_build_lib.IsInsideChroot():
+      raise ValueError('--install can only be used inside the chroot')
+
+  # Dispatch on the device scheme: SSH (default), USB, or plain file path.
+  if not device or device.scheme == commandline.DEVICE_SCHEME_SSH:
+    if device:
+      hostname, port = device.hostname, device.port
+    else:
+      hostname, port = None, None
+    logging.notice('Preparing to update the remote device %s', hostname)
+    updater = RemoteDeviceUpdater(
+        hostname,
+        port,
+        image,
+        board=board,
+        src_image_to_delta=src_image_to_delta,
+        rootfs_update=rootfs_update,
+        stateful_update=stateful_update,
+        clobber_stateful=clobber_stateful,
+        reboot=reboot,
+        wipe=wipe,
+        debug=debug,
+        yes=yes,
+        force=force,
+        ping=ping,
+        disable_verification=disable_rootfs_verification)
+    updater.Run()
+  elif device.scheme == commandline.DEVICE_SCHEME_USB:
+    path = osutils.ExpandPath(device.path) if device.path else ''
+    logging.info('Preparing to image the removable device %s', path)
+    imager = USBImager(path,
+                       board,
+                       image,
+                       debug=debug,
+                       install=install,
+                       yes=yes)
+    imager.Run()
+  elif device.scheme == commandline.DEVICE_SCHEME_FILE:
+    logging.info('Preparing to copy image to %s', device.path)
+    imager = FileImager(device.path,
+                        board,
+                        image,
+                        debug=debug,
+                        yes=yes)
+    imager.Run()
diff --git a/cli/flash_unittest b/cli/flash_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/cli/flash_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/cli/flash_unittest.py b/cli/flash_unittest.py
new file mode 100644
index 0000000..2a7a868
--- /dev/null
+++ b/cli/flash_unittest.py
@@ -0,0 +1,294 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the flash module."""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from chromite.cli import flash
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_logging as logging
+from chromite.lib import cros_test_lib
+from chromite.lib import dev_server_wrapper
+from chromite.lib import osutils
+from chromite.lib import partial_mock
+from chromite.lib import remote_access
+from chromite.lib import workspace_lib
+
+
+class RemoteDeviceUpdaterMock(partial_mock.PartialCmdMock):
+  """Mock out RemoteDeviceUpdater."""
+  TARGET = 'chromite.cli.flash.RemoteDeviceUpdater'
+  ATTRS = ('UpdateStateful', 'UpdateRootfs', 'SetupRootfsUpdate', 'Verify')
+
+  def __init__(self):
+    partial_mock.PartialCmdMock.__init__(self)
+
+  def UpdateStateful(self, _inst, *_args, **_kwargs):
+    """Mock out UpdateStateful."""
+
+  def UpdateRootfs(self, _inst, *_args, **_kwargs):
+    """Mock out UpdateRootfs."""
+
+  def SetupRootfsUpdate(self, _inst, *_args, **_kwargs):
+    """Mock out SetupRootfsUpdate."""
+
+  def Verify(self, _inst, *_args, **_kwargs):
+    """Mock out Verify."""
+
+
+class RemoteDeviceUpdaterTest(cros_test_lib.MockTempDirTestCase):
+  """Test the flow of flash.Flash() with RemoteDeviceUpdater."""
+
+  IMAGE = '/path/to/image'
+  DEVICE = commandline.Device(scheme=commandline.DEVICE_SCHEME_SSH,
+                              hostname='1.1.1.1')
+
+  def setUp(self):
+    """Patches objects."""
+    self.updater_mock = self.StartPatcher(RemoteDeviceUpdaterMock())
+    self.PatchObject(dev_server_wrapper, 'GenerateXbuddyRequest',
+                     return_value='xbuddy/local/latest')
+    self.PatchObject(dev_server_wrapper, 'DevServerWrapper')
+    self.PatchObject(dev_server_wrapper, 'GetImagePathWithXbuddy',
+                     return_value=('taco-paladin/R36/chromiumos_test_image.bin',
+                                   'remote/taco-paladin/R36/test'))
+    self.PatchObject(dev_server_wrapper, 'GetUpdatePayloads')
+    self.PatchObject(remote_access, 'CHECK_INTERVAL', new=0)
+    self.PatchObject(remote_access, 'ChromiumOSDevice')
+    self.PatchObject(workspace_lib, 'WorkspacePath', return_value=None)
+
+  def testUpdateAll(self):
+    """Tests that both stateful and rootfs update methods are called."""
+    # os.path.exists is patched True so payload checks pass.
+    with mock.patch('os.path.exists', return_value=True):
+      flash.Flash(self.DEVICE, self.IMAGE)
+      self.assertTrue(self.updater_mock.patched['UpdateStateful'].called)
+      self.assertTrue(self.updater_mock.patched['UpdateRootfs'].called)
+
+  def testUpdateStateful(self):
+    """Tests that only the stateful update method is called."""
+    with mock.patch('os.path.exists', return_value=True):
+      flash.Flash(self.DEVICE, self.IMAGE, rootfs_update=False)
+      self.assertTrue(self.updater_mock.patched['UpdateStateful'].called)
+      self.assertFalse(self.updater_mock.patched['UpdateRootfs'].called)
+
+  def testUpdateRootfs(self):
+    """Tests that only the rootfs update method is called."""
+    with mock.patch('os.path.exists', return_value=True):
+      flash.Flash(self.DEVICE, self.IMAGE, stateful_update=False)
+      self.assertFalse(self.updater_mock.patched['UpdateStateful'].called)
+      self.assertTrue(self.updater_mock.patched['UpdateRootfs'].called)
+
+  def testMissingPayloads(self):
+    """Tests we raise FlashError when payloads are missing."""
+    with mock.patch('os.path.exists', return_value=False):
+      self.assertRaises(flash.FlashError, flash.Flash, self.DEVICE, self.IMAGE)
+
+
+class USBImagerMock(partial_mock.PartialCmdMock):
+  """Mock out USBImager."""
+  TARGET = 'chromite.cli.flash.USBImager'
+  ATTRS = ('CopyImageToDevice', 'InstallImageToDevice',
+           'ChooseRemovableDevice', 'ListAllRemovableDevices',
+           'GetRemovableDeviceDescription')
+  VALID_IMAGE = True
+
+  def __init__(self):
+    partial_mock.PartialCmdMock.__init__(self)
+
+  def CopyImageToDevice(self, _inst, *_args, **_kwargs):
+    """Mock out CopyImageToDevice."""
+
+  def InstallImageToDevice(self, _inst, *_args, **_kwargs):
+    """Mock out InstallImageToDevice."""
+
+  def ChooseRemovableDevice(self, _inst, *_args, **_kwargs):
+    """Mock out ChooseRemovableDevice."""
+
+  def ListAllRemovableDevices(self, _inst, *_args, **_kwargs):
+    """Mock out ListAllRemovableDevices; returns fake device names."""
+    return ['foo', 'taco', 'milk']
+
+  def GetRemovableDeviceDescription(self, _inst, *_args, **_kwargs):
+    """Mock out GetRemovableDeviceDescription."""
+
+
class USBImagerTest(cros_test_lib.MockTempDirTestCase):
  """Test the flow of flash.Flash() with USBImager."""
  # Fake image path; os.path.exists is patched in setUp so it never
  # needs to exist on disk.
  IMAGE = '/path/to/image'

  def Device(self, path):
    """Create a USB device for passing to flash.Flash()."""
    return commandline.Device(scheme=commandline.DEVICE_SCHEME_USB,
                              path=path)

  def setUp(self):
    """Patches objects."""
    # Replace the real USBImager methods with recording mocks so tests can
    # assert which imaging path Flash() took.
    self.usb_mock = USBImagerMock()
    self.imager_mock = self.StartPatcher(self.usb_mock)
    self.PatchObject(dev_server_wrapper, 'GenerateXbuddyRequest',
                     return_value='xbuddy/local/latest')
    self.PatchObject(dev_server_wrapper, 'DevServerWrapper')
    self.PatchObject(dev_server_wrapper, 'GetImagePathWithXbuddy',
                     return_value=('taco-paladin/R36/chromiumos_test_image.bin',
                                   'remote/taco-paladin/R36/test'))
    self.PatchObject(os.path, 'exists', return_value=True)
    # Default to images passing the GPT magic-byte check; individual tests
    # flip this to exercise the bad-image prompt.
    self.isgpt_mock = self.PatchObject(flash, '_IsFilePathGPTDiskImage',
                                       return_value=True)
    self.PatchObject(workspace_lib, 'WorkspacePath', return_value=None)

  def testLocalImagePathCopy(self):
    """Tests that imaging methods are called correctly."""
    with mock.patch('os.path.isfile', return_value=True):
      flash.Flash(self.Device('/dev/foo'), self.IMAGE)
      self.assertTrue(self.imager_mock.patched['CopyImageToDevice'].called)

  def testLocalImagePathInstall(self):
    """Tests that imaging methods are called correctly."""
    with mock.patch('os.path.isfile', return_value=True):
      flash.Flash(self.Device('/dev/foo'), self.IMAGE, board='taco',
                  install=True)
      self.assertTrue(self.imager_mock.patched['InstallImageToDevice'].called)

  def testLocalBadImagePath(self):
    """Tests that using an image not having the magic bytes has prompt."""
    self.isgpt_mock.return_value = False
    with mock.patch('os.path.isfile', return_value=True):
      with mock.patch.object(cros_build_lib, 'BooleanPrompt') as mock_prompt:
        mock_prompt.return_value = False
        flash.Flash(self.Device('/dev/foo'), self.IMAGE)
        self.assertTrue(mock_prompt.called)

  def testNonLocalImagePath(self):
    """Tests that we try to get the image path using xbuddy."""
    with mock.patch.object(
        dev_server_wrapper,
        'GetImagePathWithXbuddy',
        return_value=('translated/xbuddy/path',
                      'resolved/xbuddy/path')) as mock_xbuddy:
      # Neither a file nor a directory: forces the xbuddy resolution path.
      with mock.patch('os.path.isfile', return_value=False):
        with mock.patch('os.path.isdir', return_value=False):
          flash.Flash(self.Device('/dev/foo'), self.IMAGE)
          self.assertTrue(mock_xbuddy.called)

  def testConfirmNonRemovableDevice(self):
    """Tests that we ask user to confirm if the device is not removable."""
    # '/dev/dummy' is not in the mock's removable-device list.
    with mock.patch.object(cros_build_lib, 'BooleanPrompt') as mock_prompt:
      flash.Flash(self.Device('/dev/dummy'), self.IMAGE)
      self.assertTrue(mock_prompt.called)

  def testSkipPromptNonRemovableDevice(self):
    """Tests that we skip the prompt for non-removable with --yes."""
    with mock.patch.object(cros_build_lib, 'BooleanPrompt') as mock_prompt:
      flash.Flash(self.Device('/dev/dummy'), self.IMAGE, yes=True)
      self.assertFalse(mock_prompt.called)

  def testChooseRemovableDevice(self):
    """Tests that we ask user to choose a device if none is given."""
    flash.Flash(self.Device(''), self.IMAGE)
    self.assertTrue(self.imager_mock.patched['ChooseRemovableDevice'].called)
+
+
class UsbImagerOperationTest(cros_build_lib_unittest.RunCommandTestCase):
  """Tests for flash.UsbImagerOperation."""
  # pylint: disable=protected-access

  def setUp(self):
    # Skip the real constructor; the tests below drive the operation's
    # methods directly.
    self.PatchObject(flash.UsbImagerOperation, '__init__', return_value=None)

  def testUsbImagerOperationCalled(self):
    """Test that flash.UsbImagerOperation is called when log level <= NOTICE."""
    expected_cmd = ['dd', 'if=foo', 'of=bar', 'bs=4M', 'iflag=fullblock',
                    'oflag=sync']
    usb_imager = flash.USBImager('dummy_device', 'board', 'foo')
    run_mock = self.PatchObject(flash.UsbImagerOperation, 'Run')
    self.PatchObject(logging.Logger, 'getEffectiveLevel',
                     return_value=logging.NOTICE)
    usb_imager.CopyImageToDevice('foo', 'bar')

    # Check that flash.UsbImagerOperation.Run() is called correctly.
    run_mock.assert_called_with(cros_build_lib.SudoRunCommand, expected_cmd,
                                debug_level=logging.NOTICE, update_period=0.5)

  def testSudoRunCommandCalled(self):
    """Test that SudoRunCommand is called when log level > NOTICE."""
    expected_cmd = ['dd', 'if=foo', 'of=bar', 'bs=4M', 'iflag=fullblock',
                    'oflag=sync']
    usb_imager = flash.USBImager('dummy_device', 'board', 'foo')
    run_mock = self.PatchObject(cros_build_lib, 'SudoRunCommand')
    self.PatchObject(logging.Logger, 'getEffectiveLevel',
                     return_value=logging.WARNING)
    usb_imager.CopyImageToDevice('foo', 'bar')

    # Check that SudoRunCommand() is called correctly.
    run_mock.assert_any_call(expected_cmd, debug_level=logging.NOTICE,
                             print_cmd=False)

  def testPingDD(self):
    """Test that UsbImagerOperation._PingDD() sends the correct signal."""
    # SIGUSR1 makes dd report its progress.
    expected_cmd = ['kill', '-USR1', '5']
    run_mock = self.PatchObject(cros_build_lib, 'SudoRunCommand')
    op = flash.UsbImagerOperation('foo')
    op._PingDD(5)

    # Check that SudoRunCommand was called correctly.
    run_mock.assert_called_with(expected_cmd, print_cmd=False)

  def testGetDDPidFound(self):
    """Check that the expected pid is returned for _GetDDPid()."""
    expected_pid = 5
    op = flash.UsbImagerOperation('foo')
    # The first pid in the command output that is our child is returned.
    self.PatchObject(osutils, 'IsChildProcess', return_value=True)
    self.rc.AddCmdResult(partial_mock.Ignore(),
                         output='%d\n10\n' % expected_pid)

    pid = op._GetDDPid()

    # Check that the correct pid was returned.
    self.assertEqual(pid, expected_pid)

  def testGetDDPidNotFound(self):
    """Check that -1 is returned for _GetDDPid() if the pids aren't valid."""
    expected_pid = -1
    op = flash.UsbImagerOperation('foo')
    # No listed pid is a child of ours, so the lookup fails with -1.
    self.PatchObject(osutils, 'IsChildProcess', return_value=False)
    self.rc.AddCmdResult(partial_mock.Ignore(), output='5\n10\n')

    pid = op._GetDDPid()

    # Check that the correct pid was returned.
    self.assertEqual(pid, expected_pid)
+
+
class FlashUtilTest(cros_test_lib.MockTempDirTestCase):
  """Tests the helpers from cli.flash."""

  def testChooseImage(self):
    """Tests that we can detect a GPT image."""
    # pylint: disable=protected-access

    # NOTE(review): PatchObject starts the patch immediately; using its
    # return value as a context manager relies on MagicMock's __enter__ --
    # confirm this is the intended idiom here.
    with self.PatchObject(flash, '_IsFilePathGPTDiskImage', return_value=True):
      # No images defined. Choosing the image should raise an error.
      with self.assertRaises(ValueError):
        flash._ChooseImageFromDirectory(self.tempdir)

      file_a = os.path.join(self.tempdir, 'a')
      osutils.Touch(file_a)
      # Only one image available, it should be selected automatically.
      self.assertEqual(file_a, flash._ChooseImageFromDirectory(self.tempdir))

      osutils.Touch(os.path.join(self.tempdir, 'b'))
      file_c = os.path.join(self.tempdir, 'c')
      osutils.Touch(file_c)
      osutils.Touch(os.path.join(self.tempdir, 'd'))

      # Multiple images available, we should ask the user to select the right
      # image.  GetChoice returning index 2 picks 'c' in sorted order.
      with self.PatchObject(cros_build_lib, 'GetChoice', return_value=2):
        self.assertEqual(file_c, flash._ChooseImageFromDirectory(self.tempdir))
diff --git a/codereview.settings b/codereview.settings
new file mode 100644
index 0000000..59b5aae
--- /dev/null
+++ b/codereview.settings
@@ -0,0 +1,6 @@
+# This file is used by git-cl to get repository specific information.
+GERRIT_HOST: True
+GERRIT_PORT: True
+CODE_REVIEW_SERVER: https://chromium-review.googlesource.com
+# TODO: Should there be a tryserver url?
+#TRYSERVER_SVN_URL: svn://svn.chromium.org/xxx-try
diff --git a/compute/README b/compute/README
new file mode 100644
index 0000000..a8b9f1a
--- /dev/null
+++ b/compute/README
@@ -0,0 +1,9 @@
+To run the GCE commands, you need to install the gcloud tool:
+  curl https://sdk.cloud.google.com | bash
+
+Please run `gcloud auth login` to authenticate your account.
+
+When running `gcloud compute ssh` for the first time, you will be
+prompted to generate ssh keys. The scripts in this directory require
+you to have an empty passphrase, so that you are not prompted to
+enter the passphrase for every command.
diff --git a/compute/__init__.py b/compute/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/compute/__init__.py
diff --git a/compute/bot_constants.py b/compute/bot_constants.py
new file mode 100644
index 0000000..aa8e1ef
--- /dev/null
+++ b/compute/bot_constants.py
@@ -0,0 +1,47 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module contains constants for Chrome OS bots."""
+
+from __future__ import print_function
+
+import os
+
+
# Buildbot working directory and the account the bots run as.
BUILDBOT_DIR = '/b'
BUILDBOT_USER = 'chrome-bot'

# Source repositories cloned while setting up a bot.
CHROMITE_URL = 'https://chromium.googlesource.com/chromiumos/chromite'
DEPOT_TOOLS_URL = ('https://chromium.googlesource.com/chromium/tools/'
                   'depot_tools.git')
BUILDBOT_GIT_REPO = ('https://chrome-internal.googlesource.com/chrome/tools/'
                     'build/internal.DEPS')
CHROMIUM_BUILD_URL = 'https://chromium.googlesource.com/chromium/src/build'
GCOMPUTE_TOOLS_URL = 'https://gerrit.googlesource.com/gcompute-tools'

# The BOT_CREDS_DIR is required to set up a GCE bot. The directory
# should contain:
#   - SVN_PASSWORD_FILE: password for svn.
#   - TREE_STATUS_PASSWORD_FILE: password for updating tree status.
#   - CIDB_CREDS_DIR: A directory containing cidb credentials.
#   - BUILDBOT_PASSWORD_FILE: password for buildbot.
#   - HOST_ENTRIES: entries to append to /etc/hosts
#   - GMAIL_CREDENTIALS_FILE: credentials to access Gmail API.
BOT_CREDS_DIR_ENV_VAR = 'BOT_CREDENTIALS_DIR'
SVN_PASSWORD_FILE = 'svn_password'
TREE_STATUS_PASSWORD_FILE = '.status_password_chromiumos'
CIDB_CREDS_DIR = '.cidb_creds'
BUILDBOT_PASSWORD_FILE = '.bot_password'
HOST_ENTRIES = 'host_entries'
GMAIL_CREDENTIALS_FILE = '.gmail_credentials'

# This path is used to store credentials on the GCE machine during botifying.
BOT_CREDS_TMP_PATH = os.path.join(os.path.sep, 'tmp', 'bot-credentials')

# Identity used when the bot talks to Chromium svn repositories.
BUILDBOT_SVN_USER = '%s@google.com' % BUILDBOT_USER
CHROMIUM_SVN_HOSTS = ('svn.chromium.org',)
CHROMIUM_SVN_REPOS = ('chrome', 'chrome-internal', 'leapfrog-internal')

# Identity used for git commits made by the bot.
GIT_USER_NAME = 'chrome-bot'
GIT_USER_EMAIL = '%s@chromium.org' % GIT_USER_NAME
diff --git a/compute/compute_configs.py b/compute/compute_configs.py
new file mode 100644
index 0000000..502db86
--- /dev/null
+++ b/compute/compute_configs.py
@@ -0,0 +1,90 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module contains Google Compute Engine configurations."""
+
+from __future__ import print_function
+
+from chromite.cbuildbot import constants
+
+
# Metadata keys to tag our GCE artifacts with.
METADATA_IMAGE_NAME = 'cros-image'

PROJECT = 'chromeos-bot'
DEFAULT_BASE_IMAGE = 'ubuntu-14-04-server-v20150324'
DEFAULT_IMAGE_NAME = 'chromeos-bot-v5'
DEFAULT_ZONE = 'us-east1-a'
DEFAULT_MACHINE_TYPE_16 = 'n1-highmem-16'
DEFAULT_SCOPES = ('https://www.googleapis.com/auth/devstorage.full_control',
                  'https://www.googleapis.com/auth/gerritcodereview')

# TODO: We do not archive the official images to Google Storage yet
# because the image creation process for this path does not allow
# the rootfs to be larger than 10GB.
GS_IMAGE_ARCHIVE_BASE_URL = '%s/gce-images' % constants.DEFAULT_ARCHIVE_BUCKET
IMAGE_SUFFIX = '.tar.gz'

BOOT_DISK = '/dev/sda'
# TODO: Automatically detects the partitions.
DRIVES = ('sda1',)


# Named instance configs, keyed by config name.
configs = {}

# Minimal config used while building a new bot image.
configs['image-creation'] = dict(
    zone=DEFAULT_ZONE,
    scopes=DEFAULT_SCOPES,
)
IMAGE_CREATION_CONFIG = configs['image-creation']

# The default config for Chrome OS builders.
configs['cros-bot'] = dict(
    machine_type=DEFAULT_MACHINE_TYPE_16,
    zone=DEFAULT_ZONE,
    image=DEFAULT_IMAGE_NAME,
    scopes=DEFAULT_SCOPES,
)

# The default config for Chrome OS PreCQ builders.
configs['cros-precq-bot'] = dict(
    machine_type=DEFAULT_MACHINE_TYPE_16,
    zone=DEFAULT_ZONE,
    image=DEFAULT_IMAGE_NAME,
    scopes=DEFAULT_SCOPES,
)

# A light-weight config for light jobs, like boardless masters.
configs['cros-master'] = dict(
    machine_type='n1-standard-8',
    zone=DEFAULT_ZONE,
    image=DEFAULT_IMAGE_NAME,
    scopes=DEFAULT_SCOPES,
)

# A wimpy config for testing purposes.
configs['cros-test'] = dict(
    machine_type='n1-standard-1',
    zone=DEFAULT_ZONE,
    image=DEFAULT_IMAGE_NAME,
    scopes=DEFAULT_SCOPES,
)

# Config to use to launch an instance with the image created for the purposes of
# testing changes to cros_compute.
configs['cros-bot-testing'] = dict(
    machine_type=DEFAULT_MACHINE_TYPE_16,
    zone=DEFAULT_ZONE,
    image='%s-testing' % DEFAULT_IMAGE_NAME,
    scopes=DEFAULT_SCOPES,
)

# Same as cros-bot, but launch in the staging network.
configs['cros-bot-staging'] = dict(
    machine_type=DEFAULT_MACHINE_TYPE_16,
    zone=DEFAULT_ZONE,
    image=DEFAULT_IMAGE_NAME,
    scopes=DEFAULT_SCOPES,
    network='staging-network',
)
diff --git a/compute/cros_compute b/compute/cros_compute
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/compute/cros_compute
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/compute/cros_compute.py b/compute/cros_compute.py
new file mode 100644
index 0000000..795ada3
--- /dev/null
+++ b/compute/cros_compute.py
@@ -0,0 +1,270 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to run Chrome OS-specific GCE commands .
+
+For managing VM instances in the fleet, see go/cros-gce-instance-admin
+For updating VM images for the fleet, see go/cros-gce-image-admin
+"""
+
+from __future__ import print_function
+
+import os
+
+from chromite.compute import bot_constants
+from chromite.compute import compute_configs
+from chromite.compute import gcloud
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import osutils
+
+
# Supported targets.
TARGETS = (
    'instances',
    'images',
    'disks',
)

# Supported operations, keyed by target.
OPERATIONS = {
    'instances': ('create', 'delete', 'list', 'ssh',),
    'images': ('create', 'delete', 'list',),
    'disks': ('list',),
}

# All supported operations (the flattened union across all targets).
ALL_OPERATIONS = set(cros_build_lib.iflatten_instance(OPERATIONS.values()))
+
+
def BotifyInstance(instance, project, zone, testing=False):
  """Transforms the |instance| to a Chrome OS bot.

  Perform necessary tasks to clone the chromite repository on the
  |instance| and run setup scripts as BUILDBOT_USER.

  The majority of the setup logic is (and should be) in the scripts that
  run directly on the |instance| because `gcloud compute ssh` incurs
  additional overhead on every invocation. (e.g. instance name to IP
  lookup, copying public key if needed, etc).

  Args:
    instance: Name of the GCE instance.
    project: GCloud Project that the |instance| belongs to.
    zone: Zone of the GCE instance.
    testing: If set, copy the current chromite directory to |instance|.
      Otherwise, `git clone` the chromite repository.

  Raises:
    ValueError: If the BOT_CREDENTIALS_DIR environment variable is not set.
  """
  # TODO: To speed this up, we can switch to run remote commands using
  # remote_access.RemoteAgent wrapper. We'd only need to run `gcloud
  # compute ssh` once to initiate the first SSH connection (which
  # copies the public key).
  gcctx = gcloud.GCContext(project, zone=zone)
  gcctx.SSH(instance, cmd='umask 0022')
  # Set up buildbot user and grant it sudo rights.
  gcctx.SSH(instance,
            cmd=('sudo adduser --disabled-password --gecos "" %s'
                 % bot_constants.BUILDBOT_USER))
  gcctx.SSH(instance,
            cmd='sudo adduser %s sudo' % bot_constants.BUILDBOT_USER)
  gcctx.SSH(
      instance,
      cmd=('sudo awk \'BEGIN{print "%%%s ALL=NOPASSWD: ALL" >>"/etc/sudoers"}\''
           % bot_constants.BUILDBOT_USER))

  # Copy bot credentials to a temporary location.
  dest_path = bot_constants.BOT_CREDS_TMP_PATH
  src_path = os.getenv(bot_constants.BOT_CREDS_DIR_ENV_VAR)
  if not src_path:
    # Fixed: the two adjacent string literals previously concatenated to
    # "necessaryto" because the first one lacked a trailing space.
    raise ValueError('Environment variable %s is not set. This is necessary '
                     'to set up credentials for the bot.'
                     % bot_constants.BOT_CREDS_DIR_ENV_VAR)
  gcctx.CopyFilesToInstance(instance, src_path, dest_path)
  gcctx.SSH(
      instance,
      cmd='sudo chown -R %s:%s %s' % (bot_constants.BUILDBOT_USER,
                                      bot_constants.BUILDBOT_USER,
                                      dest_path))
  # Set the credential files/directories to the correct mode.
  gcctx.SSH(
      instance,
      cmd=r'sudo find %s -type d -exec chmod 700 {} \;' % dest_path)
  gcctx.SSH(
      instance,
      cmd=r'sudo find %s -type f -exec chmod 600 {} \;' % dest_path)

  # Bootstrap by copying chromite to the temporary directory.
  base_dir = '/tmp'
  if testing:
    # Copy the current chromite directory. This allows all local
    # changes to be copied to the temporary instance for testing.
    chromite_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    with osutils.TempDir(prefix='cros_compute') as tempdir:
      chromite_tarball = os.path.join(tempdir, 'chromite.tar.gz')
      cros_build_lib.RunCommand(
          ['tar', '--exclude=.git', '--exclude=third_party',
           '--exclude=appengine', '-czf', chromite_tarball, 'chromite'],
          cwd=os.path.dirname(chromite_dir))
      dest_path = os.path.join(base_dir, os.path.basename(chromite_tarball))
      gcctx.CopyFilesToInstance(instance, chromite_tarball, dest_path)
      gcctx.SSH(instance, cmd='tar xzf %s -C %s' % (dest_path, base_dir))
  else:
    # Install git to clone chromite.
    gcctx.SSH(instance, cmd='sudo apt-get install git')
    gcctx.SSH(instance, cmd='cd %s && git clone %s' % (
        base_dir, bot_constants.CHROMITE_URL))

  # Run the setup script as BUILDBOT_USER.
  gcctx.SSH(
      instance,
      cmd='sudo su %s -c %s' % (
          bot_constants.BUILDBOT_USER,
          os.path.join(base_dir, 'chromite', 'compute', 'setup_bot')))
+
+
def CreateImageForCrosBots(project, zone, address=None, testing=False):
  """Create a new image for cros bots.

  Args:
    project: GCloud project to create the image in.
    zone: Zone used for the temporary instance and its boot disk.
    address: Optional external IP to assign to the temporary instance.
    testing: If set, append '-testing' to the image name (and botify with
      the local chromite checkout).
  """
  gcctx = gcloud.GCContext(project, zone=zone, quiet=True)
  # The name of the image to create.
  image = compute_configs.DEFAULT_IMAGE_NAME
  if testing:
    image = '%s-testing' % image

  # Create a temporary instance and botify it.
  instance = ('chromeos-temp-%s'
              % cros_build_lib.GetRandomString())
  gcctx.CreateInstance(instance, image=compute_configs.DEFAULT_BASE_IMAGE,
                       address=address, **compute_configs.IMAGE_CREATION_CONFIG)
  try:
    BotifyInstance(instance, project, zone, testing=testing)
  except:
    # Clean up the temp instance.
    gcctx.DeleteInstance(instance)
    raise

  # Delete the instance but keep its boot disk so the image can be
  # created from it.
  gcctx.DeleteInstance(instance, keep_disks='boot')
  disk = instance
  try:
    # Create image from source disk. By default the name of the boot
    # disk is the same as the name of the instance.
    gcctx.CreateImage(image, disk=disk)
  finally:
    gcctx.DeleteDisk(disk)
+
+
def main(argv):
  """Entry point for cros_compute.

  Parses |argv|, validates the target/operation pair, and dispatches to
  the matching GCContext call.

  Args:
    argv: Command line arguments (excluding the program name).
  """
  parser = commandline.ArgumentParser(description=__doc__)

  parser.add_argument(
      'target', choices=TARGETS, help='Operation target')
  parser.add_argument(
      'operation', type=str, choices=ALL_OPERATIONS,
      help='Operation type. Valid operations depend on target.')
  parser.add_argument(
      '--quiet', '-q', action='store_true',
      help='Do not prompt user for verification.')
  parser.add_argument(
      '--project', type=str, default=compute_configs.PROJECT,
      help='Project name')
  parser.add_argument(
      '--zone', type=str, default=compute_configs.DEFAULT_ZONE,
      help='Zone to run the command against')
  parser.add_argument(
      '--address', type=str, default=None,
      help='IP to assign to the instance')

  group = parser.add_argument_group(
      'Instance options (use with target: instances)')
  group.add_argument(
      '--instance', type=str, default=None, help='Instance name')

  group = parser.add_argument_group(
      'Instance creation options '
      '(use with target: instances, operation: create)')
  group.add_argument(
      '--build-disk', type=str, default=None, help='Build disk')
  group.add_argument(
      '--creds-disk', type=str, default=None, help='Credentials disk')
  group.add_argument(
      '--image', type=str, default=None, help='Image name')
  parser.add_argument(
      '--config', type=str, default=None,
      help='Config to create the instance from')

  group = parser.add_argument_group(
      'Image creation options '
      '(use with target: images, operation: create)')
  group.add_argument(
      '--testing', default=False, action='store_true',
      help='This option is mainly for testing changes to the official '
           'Chrome OS bot image creation process. If set true, it copies '
           'the current chromite directory onto the instance to preserve '
           'all local changes. It also appends the image name with -testing.')

  opts = parser.parse_args(argv)
  opts.Freeze()

  if opts.operation not in OPERATIONS[opts.target]:
    # Report the offending operation (previously this printed the target
    # name, which made the error message misleading).
    cros_build_lib.Die(
        'Unknown operation %s. Valid operations are %s' % (
            opts.operation, OPERATIONS[opts.target]))

  gcctx = gcloud.GCContext(opts.project, zone=opts.zone)
  if opts.target == 'images':
    # Operations against images.
    if opts.operation == 'create':
      # Create a new image for Chrome OS bots. The name of the base
      # image and the image to create are defined in compute_configs.
      CreateImageForCrosBots(opts.project, opts.zone, testing=opts.testing,
                             address=opts.address)
    elif opts.operation == 'delete':
      gcctx.DeleteImage(opts.image)
    elif opts.operation == 'list':
      gcctx.ListImages()

  elif opts.target == 'instances':
    # Operations against instances.
    if opts.operation == 'create':
      if not opts.instance:
        cros_build_lib.Die('Please specify the instance name (--instance)')

      if not opts.image and not opts.config:
        cros_build_lib.Die(
            'At least one of the two options should be specified: '
            'source image (--image) or the builder (--config)')
      if opts.config:
        config = compute_configs.configs.get(opts.config, None)
        if config is None:
          cros_build_lib.Die('Unknown config %s' % opts.config)
        # Copy before mutating so the shared config dict stays pristine.
        config = dict(config)
      else:
        config = {}

      # --image overrides the image named in the config.
      if opts.image:
        config['image'] = opts.image
      if opts.build_disk or opts.creds_disk:
        disks = []
        if opts.build_disk:
          disks.append({'name': opts.build_disk, 'mode': 'rw'})
        if opts.creds_disk:
          disks.append({'name': opts.creds_disk, 'mode': 'ro'})
        config['disks'] = disks

      gcctx.CreateInstance(opts.instance, address=opts.address, **config)
    elif opts.operation == 'delete':
      if not opts.instance:
        cros_build_lib.Die('Please specify the instance name (--instance)')
      gcctx.DeleteInstance(opts.instance, quiet=opts.quiet)
    elif opts.operation == 'list':
      gcctx.ListInstances()
    elif opts.operation == 'ssh':
      if not opts.instance:
        cros_build_lib.Die('Please specify the instance name (--instance)')
      gcctx.SSH(opts.instance)

  elif opts.target == 'disks':
    if opts.operation == 'list':
      gcctx.ListDisks()
diff --git a/compute/gcloud.py b/compute/gcloud.py
new file mode 100644
index 0000000..eb0efaa
--- /dev/null
+++ b/compute/gcloud.py
@@ -0,0 +1,326 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Library for gcloud operations."""
+
+from __future__ import print_function
+
+import re
+
+from chromite.compute import compute_configs
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import timeout_util
+
+
class GCContextException(Exception):
  """Base exception for this module; all gcloud wrapper errors derive from it."""
+
+
# Wrap RunCommandError exceptions to distinguish the exceptions raised
# by commands we invoke from those that could be thrown by others who
# use RunCommand.
class GCCommandError(GCContextException, cros_build_lib.RunCommandError):
  """Thrown when a gcloud invocation failed in a way we couldn't decode."""
+
+
class ZoneNotSpecifiedError(GCContextException):
  """Raised when no zone (explicit or default) is given for a zone-specific command."""
+
+
class GCContext(object):
  """A wrapper around the gcloud commandline tool.

  Currently supports only `gcloud compute`.
  """
  GCLOUD_BASE_COMMAND = 'gcloud'
  GCLOUD_COMPUTE_COMMAND = 'compute'

  def __init__(self, project, zone=None, quiet=False, dry_run=False):
    """Initializes GCContext.

    Args:
      project: The Google Cloud project to use.
      zone: The default zone to operate on when zone is not given for a
            zone-specific command.
      quiet: If set True, skip any user prompts and use the default value.
      dry_run: If True, don't actually interact with the GCE project.
    """
    self.project = project
    self.zone = zone
    self.quiet = quiet
    self.dry_run = dry_run

  @classmethod
  def _GetBaseComputeCommand(cls):
    """Returns the base Google Compute Engine command."""
    return [cls.GCLOUD_BASE_COMMAND, cls.GCLOUD_COMPUTE_COMMAND]

  def DoCommand(self, cmd, **kwargs):
    """Runs |cmd| under `gcloud compute`.

    Args:
      cmd: The command to run (list of gcloud compute arguments).
      project: The project to use. Defaults to using self.project.
      quiet: Suppress user prompts from the gcloud cli.
      zone: The zone to use. Elided by default.
      kwargs: See cros_build_lib.RunCommand.

    Returns:
      The RunCommand result, or None in dry-run mode.

    Raises:
      GCCommandError: If the gcloud invocation fails.
    """
    cmd = self._GetBaseComputeCommand() + cmd
    cmd += ['--project', kwargs.pop('project', self.project)]
    if kwargs.pop('quiet', self.quiet):
      cmd += ['--quiet']
    zone = kwargs.pop('zone', None)
    if zone:
      cmd += ['--zone', zone]

    if self.dry_run:
      logging.debug('%s: Would have run: %s',
                    self.__class__.__name__, cmd)
      return

    try:
      return cros_build_lib.RunCommand(cmd, **kwargs)
    except cros_build_lib.RunCommandError as e:
      # We do not get a consistent error if the gcloud SDK is not installed, so
      # we must always check.
      if osutils.Which(self.GCLOUD_BASE_COMMAND) is None:
        logging.error('Could not find command "%s". '
                      'Have you installed the google cloud SDK?',
                      self.GCLOUD_BASE_COMMAND)
      raise GCCommandError(e.msg, e.result, e.exception)

  def DoZoneSpecificCommand(self, cmd, **kwargs):
    """Runs the zone-specific |cmd|.

    Args:
      cmd: The command to run.
      zone: The zone to use. Defaults to using self.zone.
      kwargs: See DoCommand.

    Raises:
      ZoneNotSpecifiedError: If neither |zone| nor self.zone is set.
    """
    kwargs.setdefault('zone', self.zone)
    if not kwargs.get('zone'):
      # To avoid ambiguity, force user to specify a zone (or a default
      # zone) when accessing zone-specific resources.
      raise ZoneNotSpecifiedError()
    return self.DoCommand(cmd, **kwargs)

  def CopyFilesToInstance(self, instance, src, dest, **kwargs):
    """Copies files from |src| to |dest| on |instance|.

    Args:
      instance: Name of the instance.
      src: The source path.
      dest: The destination path.
      kwargs: See DoCommand.
    """
    return self._CopyFiles(src, '%s:%s' % (instance, dest), **kwargs)

  def CopyFilesFromInstance(self, instance, src, dest, **kwargs):
    """Copies files from |src| on |instance| to local |dest|.

    Args:
      instance: Name of the instance.
      src: The source path.
      dest: The destination path.
      kwargs: See DoCommand.
    """
    return self._CopyFiles('%s:%s' % (instance, src), dest, **kwargs)

  def _CopyFiles(self, src, dest, ssh_key_file=None, user=None, **kwargs):
    """Copies files from |src| to |dest| with `gcloud compute copy-files`.

    Args:
      src: The source path (a local path or instance:path).
      dest: The destination path (a local path or instance:path).
      ssh_key_file: File containing private key for SSH.
      user: User to SSH as.
      kwargs: See DoCommand.
    """
    command = ['copy-files']

    if ssh_key_file:
      command += ['--ssh-key-file', ssh_key_file]
    if user:
      # NOTE(review): the user prefix is always applied to |dest|, which is
      # only correct when the instance is on the destination side (i.e.
      # CopyFilesToInstance). For CopyFilesFromInstance it should prefix
      # |src| instead -- confirm before relying on |user| for copy-from.
      dest = '%s@%s' % (user, dest)

    command += [src, dest]

    return self.DoZoneSpecificCommand(command, **kwargs)

  def SSH(self, instance, user=None, cmd=None, ssh_key_file=None, **kwargs):
    """SSH into |instance|. Run |cmd| if it is provided.

    Args:
      instance: Name of the instance.
      user: User to SSH as.
      cmd: Command to run on |instance|.
      ssh_key_file: File containing private key for SSH.
      kwargs: See DoZoneSpecificCommand.
    """
    ssh_cmd = ['ssh']
    if user:
      ssh_cmd += ['%s@%s' % (user, instance)]
    else:
      ssh_cmd += [instance]
    if cmd:
      ssh_cmd += ['--command', cmd]
    if ssh_key_file:
      ssh_cmd += ['--ssh-key-file', ssh_key_file]
    return self.DoZoneSpecificCommand(ssh_cmd, **kwargs)

  def ListInstances(self, **kwargs):
    """Lists all instances."""
    return self.DoCommand(['instances', 'list'], **kwargs)

  def ListDisks(self, **kwargs):
    """Lists all disks."""
    return self.DoCommand(['disks', 'list'], **kwargs)

  def ListImages(self, **kwargs):
    """Lists all images."""
    return self.DoCommand(['images', 'list'], **kwargs)

  def CreateImage(self, image, source_uri=None, disk=None, **kwargs):
    """Creates an image from |source_uri| or |disk|.

    Args:
      image: The name of image to create.
      source_uri: The tar.gz image file (e.g. gs://foo/bar/image.tar.gz)
      disk: The source disk to create the image from. One and only one of
         |source_uri| and |disk| should be set.
      kwargs: See DoCommand.

    Raises:
      GCContextException: If both |source_uri| and |disk| are given.
    """
    if source_uri and disk:
      raise GCContextException('Cannot specify both source uri and disk.')

    cmd = ['images', 'create', image]
    if disk:
      cmd += ['--source-disk', disk]
      zone = kwargs.get('zone', self.zone)
      if zone:
        # Disks are zone-specific resources.
        cmd += ['--source-disk-zone', zone]

    if source_uri:
      cmd += ['--source-uri', source_uri]

    return self.DoCommand(cmd, **kwargs)

  def GetInstanceIP(self, instance, **kwargs):
    """Returns an instance's ephemeral external IP address.

    May not work with all network configurations.

    Raises:
      GCContextException: If the IP cannot be parsed from the describe output.
    """
    # It sure would be nice if there were an easier way to fetch
    # the instance's ephemeral IP address...
    description = self.DoZoneSpecificCommand(
        ['instances', 'describe', instance, '--format', 'text'],
        redirect_stdout=True, **kwargs).output
    match = re.search('natIP: ([.0-9]+)', description)
    if match:
      return match.group(1)
    else:
      raise GCContextException('Unable to parse instance IP.  '
                               '"gcloud compute instances describe %s": %s'
                               % (instance, description))

  def CreateInstance(self, instance, image=None, machine_type=None,
                     network=None, address=None, wait_until_sshable=True,
                     scopes=None, disks=None, **kwargs):
    """Creates an |instance|.

    Additionally, if an image is provided, adds a custom metadata pair to
    identify the image used to create the instance.

    Args:
      instance: The name of the instance to create.
      image: The source image to create |instance| from.
      machine_type: The machine type to use.
      network: An existing network to create the instance in.
      address: The external IP address to assign to |instance|.
      wait_until_sshable: After creating |instance|, wait until
        we can ssh into |instance|.
      scopes: The list (or tuple) of service account scopes.
      disks: A list of disks to attach. Each entry in the list is a dict of
          properties to use for the disk.
      kwargs: See DoZoneSpecificCommand.

    Raises:
      GCContextException: If |wait_until_sshable| is set and the instance
        never becomes reachable (the instance is deleted first).
    """
    cmd = ['instances', 'create', instance]
    if image:
      cmd += ['--image', image]
      cmd += ['--metadata',
              '%s="%s"' % (compute_configs.METADATA_IMAGE_NAME, image)]
    if network is not None:
      cmd += ['--network', network]
    if address is not None:
      cmd += ['--address', address]
    if machine_type is not None:
      cmd += ['--machine-type', machine_type]
    if scopes is not None:
      cmd += ['--scopes', ','.join(list(scopes))]
    if disks is not None:
      for disk in disks:
        # items() instead of iteritems() keeps this working on Python 3 too.
        properties = ['%s=%s' % (key, val)
                      for key, val in disk.items()]
        cmd += ['--disk', ','.join(properties)]

    ret = self.DoZoneSpecificCommand(cmd, **kwargs)
    if wait_until_sshable:
      def _IsUp():
        # Probe port 22 with netcat; any RunCommandError means "not up yet".
        try:
          instance_ip = self.GetInstanceIP(instance, **kwargs)
          command = ['nc', '-zv', instance_ip, '22']
          cros_build_lib.RunCommand(command, capture_output=True)
        except cros_build_lib.RunCommandError:
          return False
        else:
          return True

      try:
        logging.info('Waiting for the instance to be sshable...')
        timeout = 60 * 5
        timeout_util.WaitForReturnTrue(_IsUp, timeout, period=5)
      except timeout_util.TimeoutError:
        self.DeleteInstance(instance)
        raise GCContextException('Timed out waiting to ssh into the instance')

    return ret

  def DeleteInstance(self, instance, quiet=False, keep_disks=None, **kwargs):
    """Deletes |instance|.

    Args:
      instance: Name of the instance.
      quiet: Silent delete instance without prompting the user.
      keep_disks: Keep the type of the disk; valid types are
        'boot', 'data', and 'all'.
      kwargs: See DoCommand.
    """
    cmd = ['instances', 'delete', instance]
    if quiet:
      cmd += ['-q']
    if keep_disks:
      cmd += ['--keep-disks', keep_disks]

    return self.DoZoneSpecificCommand(cmd, **kwargs)

  def DeleteImage(self, image, **kwargs):
    """Deletes |image|. User will be prompted to confirm.

    Args:
      image: Name of the image.
      kwargs: See DoCommand.
    """
    return self.DoCommand(['images', 'delete', image], **kwargs)

  def DeleteDisk(self, disk, **kwargs):
    """Deletes |disk|.

    Args:
      disk: Name of the disk.
      kwargs: See DoCommand.
    """
    cmd = ['disks', 'delete', disk]
    return self.DoZoneSpecificCommand(cmd, **kwargs)
diff --git a/compute/setup_bot b/compute/setup_bot
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/compute/setup_bot
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/compute/setup_bot.py b/compute/setup_bot.py
new file mode 100644
index 0000000..ff4e923
--- /dev/null
+++ b/compute/setup_bot.py
@@ -0,0 +1,237 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Should be run on a GCE instance to set up the build environment."""
+
+from __future__ import print_function
+
+import getpass
+import os
+import shutil
+
+from chromite.compute import compute_configs
+from chromite.compute import bot_constants
+from chromite.lib import cros_build_lib
+from chromite.lib import osutils
+
+
# Make the script more readable.
RunCommand = cros_build_lib.RunCommand
SudoRunCommand = cros_build_lib.SudoRunCommand


# Staging path for bot credentials; presumably populated before this script
# runs (see bot_constants.BOT_CREDS_TMP_PATH) -- confirm against the launcher.
BOT_CREDS_PATH = bot_constants.BOT_CREDS_TMP_PATH
# Most credentials are stored in the home directory.
HOME_DIR = osutils.ExpandPath('~')
+
+
def SetupPrerequisites():
  """Installs packages required for Chrome OS build."""
  SudoRunCommand(['apt-get', 'update'])
  SudoRunCommand(['apt-get', '-y', '--force-yes', 'upgrade'])
  packages = [
      # Chrome OS pre-requisite packages.
      'git', 'curl', 'pbzip2', 'gawk', 'gitk', 'subversion',
      # Required for CIDB.
      'python-sqlalchemy', 'python-mysqldb',
      # Required for payload generation outside of the chroot.
      'python-protobuf',
      # Required to install python packages only available via pip.
      'python-pip',
      # Packages to monitor system performance and usage.
      'sysstat',
  ]
  SudoRunCommand(['apt-get', '-y', 'install'] + packages)
  SetupPipPrerequisites()
+
+
def SetupPipPrerequisites():
  """Installs python packages via pip.

  This assumes that pip itself is installed already.
  """
  # Maps package name to required version; None means "any version".
  packages = {'python-statsd': '1.7.0', 'google-api-python-client': '1.4.0'}

  # Sort for a deterministic install order. Using .items() instead of the
  # Python-2-only .iteritems() behaves identically here and keeps the loop
  # forward-compatible with Python 3.
  for package, version in sorted(packages.items()):
    install_atom = package
    if version is not None:
      install_atom += '==' + version
    SudoRunCommand(['pip', 'install', install_atom])
+
+
def InstallChromeDependencies():
  """Installs packages required to build Chromium."""
  # install-build-deps.sh needs a few packages that the base image lacks;
  # install those up front so the script can run.
  prereqs = [
      'gcc-arm-linux-gnueabihf',
      'g++-4.8-multilib-arm-linux-gnueabihf',
      'gcc-4.8-multilib-arm-linux-gnueabihf',
      'realpath',
  ]
  SudoRunCommand(['apt-get', '-y', 'install'] + prereqs)

  with osutils.TempDir(prefix='tmp-chrome-deps') as tempdir:
    RunCommand(['git', 'clone', bot_constants.CHROMIUM_BUILD_URL], cwd=tempdir)
    RunCommand([os.path.join(tempdir, 'build', 'install-build-deps.sh'),
                '--syms', '--no-prompt'])
+
+
def SetMountCount():
  """Sets the max-mount count before fsck to a large number.

  Raises tune2fs's max-mount-counts on each build drive so routine reboots
  do not trigger a slow forced fsck.
  """
  for drive in compute_configs.DRIVES:
    # BUG FIX: build an absolute /dev path. The original joined 'dev'
    # relative to the current directory, which tune2fs could not find.
    SudoRunCommand(['tune2fs', '-c', '150',
                    os.path.join(os.path.sep, 'dev', drive)],
                   redirect_stdout=True)
+
+
def _SetupSVN():
  """Sets up the chromium svn username/password."""
  # `svn ls` creates the ~/.subversion directory on first use.
  RunCommand(['svn', 'ls', 'http://src.chromium.org/svn'], redirect_stdout=True)
  # Flip the config option so svn is willing to store the password.
  servers_path = osutils.ExpandPath(
      os.path.join('~', '.subversion', 'servers'))
  substitution = ('s/# store-plaintext-passwords = '
                  'no/store-plaintext-passwords = yes/g')
  RunCommand(['sed', '-i', '-e', substitution, servers_path])

  password_path = osutils.ExpandPath(
      os.path.join(BOT_CREDS_PATH, bot_constants.SVN_PASSWORD_FILE))
  password = osutils.ReadFile(password_path).strip()
  # `svn ls` each repository so the password gets cached in ~/.subversion.
  for host in bot_constants.CHROMIUM_SVN_HOSTS:
    for repo in bot_constants.CHROMIUM_SVN_REPOS:
      RunCommand(['svn', 'ls', '--username', bot_constants.BUILDBOT_SVN_USER,
                  '--password', password, 'svn://%s/%s' % (host, repo)],
                 redirect_stdout=True)
+
def _SetupGoB():
  """Sets up GoB credentials.

  Configures the git identity, clones gcompute-tools, and arranges for
  git-cookie-authdaemon to run both at boot (via /etc/rc.local) and now.
  """
  RunCommand(['git', 'config', '--global', 'user.email',
              bot_constants.GIT_USER_EMAIL])
  RunCommand(['git', 'config', '--global', 'user.name',
              bot_constants.GIT_USER_NAME])

  RunCommand(['git', 'clone', bot_constants.GCOMPUTE_TOOLS_URL],
             cwd=HOME_DIR, redirect_stdout=True)

  # Run git-cookie-authdaemon at boot time by adding it to /etc/rc.local.
  rc_local_path = os.path.join(os.path.sep, 'etc', 'rc.local')
  daemon_path = os.path.join(HOME_DIR, 'gcompute-tools',
                             'git-cookie-authdaemon')
  daemon_cmd = ['su', bot_constants.BUILDBOT_USER, '-c', daemon_path]
  # Re-insert 'exit 0' at the end so the daemon line runs before it.
  content = osutils.ReadFile(rc_local_path).replace('exit 0', '')
  content += (' '.join(daemon_cmd) + '\n')
  content += 'exit 0\n'

  with osutils.TempDir() as tempdir:
    tmp_file = os.path.join(tempdir, 'rc.local')
    osutils.WriteFile(tmp_file, content)
    # BUG FIX: the mode must be octal. The original decimal literal 755
    # (== 0o1363) set rc.local to --wxrw--wt instead of rwxr-xr-x.
    os.chmod(tmp_file, 0o755)
    SudoRunCommand(['mv', tmp_file, rc_local_path])
  # Also run the daemon now so that subsequent setup steps get credentials.
  # NB: It's important to redirect all pipes because the daemonize code here
  # is broken, it leaves open fds behind, causing ssh to hang.
  SudoRunCommand(daemon_cmd,
                 mute_output=True, combine_stdout_stderr=True)
+
+
def _SetupCIDB():
  """Copies cidb credentials into the home directory."""
  src = os.path.join(BOT_CREDS_PATH, bot_constants.CIDB_CREDS_DIR)
  dst = os.path.join(HOME_DIR, bot_constants.CIDB_CREDS_DIR)
  shutil.copytree(src, dst)
+
+
def _SetupTreeStatus():
  """Copies credentials for updating tree status."""
  password_file = os.path.join(BOT_CREDS_PATH,
                               bot_constants.TREE_STATUS_PASSWORD_FILE)
  shutil.copy(password_file, HOME_DIR)
+
+
def _SetupGmail():
  """Copies credentials for accessing gmail API."""
  creds_file = os.path.join(BOT_CREDS_PATH,
                            bot_constants.GMAIL_CREDENTIALS_FILE)
  shutil.copy(creds_file, HOME_DIR)
+
+
def SetupCredentials():
  """Sets up various credentials (svn, GoB, cidb, tree status, gmail)."""
  for setup_step in (_SetupSVN, _SetupGoB, _SetupCIDB, _SetupTreeStatus,
                     _SetupGmail):
    setup_step()
+
+
def SetupBuildbotEnvironment():
  """Sets up the buildbot environment.

  Adds host entries, checks out the buildbot source via gclient, installs
  the buildbot password, puts depot_tools on PATH, and arranges for the
  slave to start at boot.
  """
  # Append host entries to /etc/hosts. This includes the buildbot
  # master IP address.
  host_entries = RunCommand(
      ['cat', os.path.join(BOT_CREDS_PATH, bot_constants.HOST_ENTRIES)],
      capture_output=True).output
  SudoRunCommand(['tee', '-a', '/etc/hosts'], input=host_entries)

  # Create the buildbot directory.
  SudoRunCommand(['mkdir', '-p', bot_constants.BUILDBOT_DIR])
  SudoRunCommand(['chown', '-R', '%s:%s' % (bot_constants.BUILDBOT_USER,
                                            bot_constants.BUILDBOT_USER),
                  bot_constants.BUILDBOT_DIR])

  with osutils.TempDir() as tempdir:
    # Download depot tools to a temp directory to bootstrap. `gclient
    # sync` will create depot_tools in BUILDBOT_DIR later.
    tmp_depot_tools_path = os.path.join(tempdir, 'depot_tools')
    RunCommand(['git', 'clone', bot_constants.DEPOT_TOOLS_URL],
               cwd=tempdir, redirect_stdout=True)
    # `gclient` relies on depot_tools in $PATH; pass the extra
    # environment variable.  (Also fixes the 'envinornment' typo.)
    path_env = '%s:%s' % (os.getenv('PATH'), tmp_depot_tools_path)
    RunCommand(['gclient', 'config', bot_constants.BUILDBOT_GIT_REPO],
               cwd=bot_constants.BUILDBOT_DIR, extra_env={'PATH': path_env})
    RunCommand(['gclient', 'sync', '--jobs', '5'],
               cwd=bot_constants.BUILDBOT_DIR,
               redirect_stdout=True, extra_env={'PATH': path_env})

  # Set up buildbot password.
  config_dir = os.path.join(bot_constants.BUILDBOT_DIR, 'build', 'site_config')
  shutil.copy(
      os.path.join(BOT_CREDS_PATH, bot_constants.BUILDBOT_PASSWORD_FILE),
      config_dir)

  # Make depot_tools available in future shells.
  depot_tools_path = os.path.join(bot_constants.BUILDBOT_DIR, 'depot_tools')
  RunCommand(['bash', '-c', r'echo export PATH=\$PATH:%s >> ~/.bashrc'
              % depot_tools_path])

  # Start buildbot slave at startup. (Removed a dead `crontab_content = ''`
  # pre-assignment; the value is fully determined below.)
  result = RunCommand(
      ['crontab', '-l'], capture_output=True, error_code_ok=True)
  crontab_content = result.output if result.returncode == 0 else ''
  crontab_content += ('SHELL=/bin/bash\nUSER=chrome-bot\n'
                      '@reboot cd /b/build/slave && make start\n')
  RunCommand(['crontab', '-'], input=crontab_content)
+
+
def TuneSystemSettings():
  """Tune the system settings for our build environment."""
  # Raise the user-level open-file limits; builds hold many fds at once.
  limits = ('*       soft    nofile  65536\n'
            '*       hard    nofile  65536\n')
  SudoRunCommand(['tee', '-a', '/etc/security/limits.conf'], input=limits)
+
+
def main(_argv):
  """Runs all setup steps; must be invoked as the buildbot user."""
  # Use an explicit check rather than `assert`, which is silently stripped
  # when python runs with -O.
  current_user = getpass.getuser()
  if current_user != bot_constants.BUILDBOT_USER:
    raise SystemExit('This script should be run by %s instead of %s!' % (
        bot_constants.BUILDBOT_USER, current_user))
  SetupPrerequisites()
  InstallChromeDependencies()
  SetupCredentials()
  SetupBuildbotEnvironment()
  TuneSystemSettings()
diff --git a/contrib/README b/contrib/README
new file mode 100644
index 0000000..0f7243f
--- /dev/null
+++ b/contrib/README
@@ -0,0 +1,7 @@
The contents of this directory are scripts that are unmaintained, but
potentially still of use.
+
+Nothing in this directory may be relied on by any other non-contrib scripts;
+no production reliance, no tests, etc.  If a script needs to be relied on,
+then it needs to be promoted out of contrib to an appropriate place and
+properly maintained.
diff --git a/contrib/__init__.py b/contrib/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/contrib/__init__.py
diff --git a/contrib/compute/__init__.py b/contrib/compute/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/contrib/compute/__init__.py
diff --git a/contrib/compute/relaunch-precq-bots b/contrib/compute/relaunch-precq-bots
new file mode 100755
index 0000000..5b7a7da
--- /dev/null
+++ b/contrib/compute/relaunch-precq-bots
@@ -0,0 +1,65 @@
+#!/bin/bash
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
# Populate BOTS with the names of the five testing bots.
testing_bots() {
  for i in 1 2 3 4 5; do
    BOTS[i]="cros-testing${i}-c2"
  done
}
+
# Populate BOTS with the production bot names (cros1..cros50, 101 and 102).
prod_bots() {
  local bot_ids=( {1..50} 101 102 )
  for i in "${bot_ids[@]}"; do
    BOTS[i]="cros${i}-c2"
  done
}
+
+# Change this to work on different BOT groups.
+# declare -a BOTS
+# testing_bots
+# **********YOU ARE RELAUNCHING PROD BOTS! REALLY! *************
+# prod_bots
+
# Run the given command on instance $1 via gcloud compute ssh.
ssh_command() {
  local -r instance_id="$1"
  shift
  gcloud compute ssh --project chromeos-bot --zone us-east1-a -q \
    "${instance_id}" --ssh-flag=-q --command "$*"
}
+
# Print the remote host's hostname and kernel info.
# BUG FIX: the command substitution was unquoted, so $(hostname && uname -a)
# expanded on the *local* machine before ssh ever ran. Single-quote it so it
# is evaluated by the remote shell.
remote_uname() {
  ssh_command "$1" 'echo $(hostname && uname -a)'
}
+
# Delete instance $1 and recreate it with the same external IP, using the
# cros-bot config.
relaunch_instance() {
  local -r instance="$1"
  local -r ip=$(gcloud compute instances describe --project chromeos-bot \
    --zone us-east1-a "${instance}" | awk '{if ($1 == "natIP:") print $2}')
  ../../compute/cros_compute instances delete -q --instance "${instance}"
  ../../compute/cros_compute instances create --instance "${instance}" \
    --address "${ip}" --config cros-bot
}
+
# Print the image tag of the given instance.
# BUG FIX: use the function argument $1 rather than relying on the caller's
# global loop variable $i (which only worked by accident via for_all_bots).
print_image_tag() {
  local -r instance="$1"
  local image
  image=$(gcloud compute instances describe --project chromeos-bot \
    --zone us-east1-a "${instance}" | grep value)
  echo "${instance}: ${image}"
}
+
# Run "$* <bot>" in the background for every bot in BOTS, then wait for all
# of the parallel jobs to finish.
# NOTE: the loop variable is deliberately a *global* named `i`;
# print_image_tag reads $i, so do not make it local or rename it.
for_all_bots() {
  for i in "${BOTS[@]}"; do
    echo "parallel: $* $i"
    $* $i &
  done
  echo "Waiting for parallel processes to finish"
  wait
  echo "Done!"
}
+
# Relaunch the bots named on the command line, then print each bot's image
# tag so the operator can confirm the new image took effect.
BOTS=( "$@" )
for_all_bots relaunch_instance

echo
echo
echo "Please verify that all instances were launched with the new image:"
for_all_bots print_image_tag
+
diff --git a/contrib/compute/upload_gce_image b/contrib/compute/upload_gce_image
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/contrib/compute/upload_gce_image
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/contrib/compute/upload_gce_image.py b/contrib/compute/upload_gce_image.py
new file mode 100644
index 0000000..79e7e02
--- /dev/null
+++ b/contrib/compute/upload_gce_image.py
@@ -0,0 +1,107 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module to upload your homebrew ChromeOS image as a GCE image.
+
+Use this script if you have a GCE targeted CrOS image (currently,
+lakitu_mobbuild) that you built locally, and want to upload it to a GCE project
+to be used to launch a builder instance. This script will take care of
+converting the image to a GCE friendly format and creating a GCE image from it.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+
+from chromite.cbuildbot import commands
+from chromite.compute import gcloud
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import osutils
+
+
+_DEFAULT_TMP_GS_PATH = 'gs://chromeos-throw-away-bucket/gce_tmp/'
+
+
class UploadGceImageRuntimError(RuntimeError):
  """RuntimeError raised explicitly from this module."""
  # NOTE(review): the class name is missing the 'e' in "Runtime". It is kept
  # as-is because callers may already catch it by this (public) name.
+
+
def _GetParser():
  """Create a parser for this script."""
  parser = commandline.ArgumentParser(description=__doc__)
  parser.add_argument('source_image', type='path',
                      help='Path to the image to upload.')
  parser.add_argument('target_name',
                      help='Name of the final image created in the project.')
  parser.add_argument('--project', default='chromeos-bot',
                      help='The GCE project to target: (default: chromeos-bot)')
  parser.add_argument('--temp-gcs-path', type='gs_path',
                      default=_DEFAULT_TMP_GS_PATH,
                      help=('GCS bucket used as temporary storage '
                            '(default: %s).' % _DEFAULT_TMP_GS_PATH))
  parser.add_argument('--dry-run', action='store_true',
                      help='Skip actually uploading stuff')
  return parser
+
+
def main(argv):
  """Converts a local CrOS image to GCE format and registers it as an image.

  Args:
    argv: Command line arguments (excluding the program name).
  """
  parser = _GetParser()
  opts = parser.parse_args(argv)
  opts.Freeze()

  if opts.dry_run:
    logging.getLogger().setLevel(logging.DEBUG)

  if not os.path.isfile(opts.source_image):
    # BUG FIX: the original left the '%s' placeholder unfilled.
    raise UploadGceImageRuntimError('%s is not a valid file.'
                                    % opts.source_image)

  source_dir, source_image_name = os.path.split(opts.source_image)
  with osutils.TempDir() as tempdir:
    logging.info('Generating tarball from %s', opts.source_image)
    tarball_name = commands.BuildGceTarball(tempdir, source_dir,
                                            source_image_name)
    # We must generate a uuid when uploading the tarball because repeated
    # uploads are likely to be named similarly. We'll just use tempdir to keep
    # files separate.
    temp_tarball_dir = os.path.join(opts.temp_gcs_path,
                                    os.path.basename(tempdir))
    gs_context = gs.GSContext(init_boto=True, retries=5, acl='private',
                              dry_run=opts.dry_run)
    gc_context = gcloud.GCContext(opts.project, dry_run=opts.dry_run)
    try:
      logging.info('Uploading tarball %s to %s',
                   tarball_name, temp_tarball_dir)
      gs_context.CopyInto(os.path.join(tempdir, tarball_name), temp_tarball_dir)
      logging.info('Creating image %s', opts.target_name)
      gc_context.CreateImage(opts.target_name,
                             source_uri=os.path.join(temp_tarball_dir,
                                                     tarball_name))
    except:
      # Bare except is deliberate: clean up, then re-raise whatever happened
      # (including KeyboardInterrupt) via the bare `raise` below.
      logging.error('Oops! Something went wonky.')
      logging.error('Trying to clean up temporary artifacts...')
      try:
        with cros_build_lib.OutputCapturer() as output_capturer:
          gc_context.ListImages()
        if opts.target_name in ''.join(output_capturer.GetStdoutLines()):
          logging.info('Removing image %s', opts.target_name)
          gc_context.DeleteImage(opts.target_name, quiet=True)
      except gcloud.GCContextException:
        # Gobble up this error so external error is visible.
        logging.error('Failed to clean up image %s', opts.target_name)

      raise
    finally:
      logging.info('Removing GS tempdir %s', temp_tarball_dir)
      gs_context.Remove(temp_tarball_dir, ignore_missing=True)

  logging.info('All done!')
+
+
if __name__ == '__main__':
  # BUG FIX: pass only the arguments. sys.argv[0] is the program name and
  # would have been parsed as the `source_image` positional.
  main(sys.argv[1:])
diff --git a/contrib/cros_tree_map b/contrib/cros_tree_map
new file mode 100755
index 0000000..0d379d5
--- /dev/null
+++ b/contrib/cros_tree_map
@@ -0,0 +1,360 @@
+#!/usr/bin/python2
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(petkov): Integrate this utility into the build system in a more
+# consistent way -- e.g., create an ebuild that pulls the utility from a
+# mirrored upstream repo with a patch or upstream the patch.
+
+from __future__ import print_function
+
+import optparse
+import os
+import re
+import sys
+import json
+
def format_bytes(bytes):
    """Pretty-print a number of bytes.

    Values above 1e6 are shown in megabytes ('m'), above 1e3 in kilobytes
    ('k'); smaller values are printed verbatim. Thresholds are decimal,
    matching `du -B 1` output.
    """
    if bytes > 1e6:
        return '%.1fm' % (bytes / 1.0e6)
    if bytes > 1e3:
        return '%.1fk' % (bytes / 1.0e3)
    return str(bytes)
+
+
def symbol_type_to_human(type):
    """Convert a symbol type as printed by nm into a human-readable name.

    Raises KeyError for unknown type characters (same as the lookup table
    it replaces).
    """
    names = {
        'b': 'bss',
        'd': 'data',
        'r': 'read-only data',
        't': 'code',
        'w': 'weak symbol',
        'v': 'weak symbol',
    }
    return names[type]
+
+
def parse_du(input):
    """Parse du output.

    Argument: an iterable over lines of 'du -B 1' output.

    Yields: (size, path); lines that do not look like "<size> <path>"
    are silently skipped.
    """
    line_re = re.compile(r'^([0-9]+)\s+(.*)$')
    for line in input:
        match = line_re.match(line.rstrip())
        if match:
            yield int(match.group(1)), match.group(2)
+
+
def parse_nm(input):
    """Parse nm output.

    Argument: an iterable over lines of nm output.

    Yields: (symbol name, symbol type, symbol size, source file path).
    Path may be None if nm couldn't figure out the source file. BSS
    symbols are skipped; 'v' weak symbols are reported as 'w'. Lines that
    match no known shape are reported on stderr.
    """
    # Sized symbol: addr size type name [\tfile:line]
    sym_re = re.compile(r'^[0-9a-f]+ ([0-9a-f]+) (.) ([^\t]+)(?:\t(.*):\d+)?$')
    # Address but no size -- not interesting.
    addr_re = re.compile(r'^[0-9a-f]+ (.) ([^\t]+)(?:\t.*)?$')
    # No address at all -- typically external symbols.
    noaddr_re = re.compile(r'^ + (.) (.*)$')

    for raw in input:
        line = raw.rstrip()

        match = sym_re.match(line)
        if match:
            size, type, sym = match.groups()[0:3]
            type = type.lower()
            if type == 'v':
                type = 'w'  # just call them all weak
            if type == 'b':
                continue  # skip all BSS for now
            yield sym, type, int(size, 16), match.group(4)
            continue

        if addr_re.match(line):
            # No size == we don't care.
            continue

        match = noaddr_re.match(line)
        if match and match.group(1) in ('U', 'w'):
            # external or weak symbol
            continue

        print('unparsed:', repr(line), file=sys.stderr)
+
+
def treeify_du(dulines, strip_prefix=None):
    """Arrange (size, path) pairs from parse_du into a nested dict by path.

    du reports a total for each directory; when a path is both a file entry
    and a directory we have descended into, the directory's own total is
    stored under the None key of its dict.
    """
    root = {}
    for size, path in dulines:
        if strip_prefix and path.startswith(strip_prefix):
            path = path[len(strip_prefix):]
        elif path.startswith('/'):
            path = path[1:]
        components = path.split('/')
        leaf = components.pop()
        node = root
        for part in components:
            node = node.setdefault(part, {})
        if leaf not in node:
            node[leaf] = size
        else:
            # du reports the total for each directory (which may include
            # files contained in the directory itself).
            node[leaf][None] = size
    return root
+
+
def filter_syms(types, symbols):
    """Yield only the (sym, type, size, path) tuples whose type is in |types|."""
    for entry in symbols:
        if entry[1] in types:
            yield entry
+
+
def treeify_syms(symbols, strip_prefix=None):
    """Arrange (sym, type, size, path) tuples into a nested dict by path.

    Args:
      symbols: iterable of (symbol name, type char, size, path-or-None),
        as produced by parse_nm.
      strip_prefix: optional path prefix to remove before bucketing.

    Returns:
      Nested dict: directory name -> subtree dict, leaf name -> accumulated
      size. Symbols with no usable path end up under the
      'symbols without paths' bucket, sub-bucketed by a coarse guess at
      their origin.
    """
    dirs = {}
    for sym, type, size, path in symbols:
        if path:
            path = os.path.normpath(path)
            if strip_prefix and path.startswith(strip_prefix):
                path = path[len(strip_prefix):]
            elif path.startswith('/usr/include'):
                path = path.replace('/usr/include', 'usrinclude')
            elif path.startswith('/'):
                path = path[1:]

        parts = None
        # TODO: make segmenting by namespace work.
        # (`if False` deliberately parks this namespace-segmentation code
        # until the TODO above is resolved.)
        if False and '::' in sym:
            if sym.startswith('vtable for '):
                sym = sym[len('vtable for '):]
                parts = sym.split('::')
                parts.append('[vtable]')
            else:
                parts = sym.split('::')
            parts[0] = '::' + parts[0]
        elif path and '/' in path:
            parts = path.split('/')

        if parts:
            key = parts.pop()
            tree = dirs
            try:
                for part in parts:
                    assert part != '', path
                    if part not in tree:
                        tree[part] = {}
                    tree = tree[part]
                # Multiple symbols may map to the same file; accumulate.
                tree[key] = tree.get(key, 0) + size
            except:
                print(sym, parts, key, file=sys.stderr)
                raise
        else:
            key = 'symbols without paths'
            if key not in dirs:
                dirs[key] = {}
            tree = dirs[key]
            subkey = 'misc'
            if (sym.endswith('::__FUNCTION__') or
                sym.endswith('::__PRETTY_FUNCTION__')):
                subkey = '__FUNCTION__'
            elif sym.startswith('CSWTCH.'):
                subkey = 'CSWTCH'
            elif '::' in sym:
                subkey = sym[0:sym.find('::') + 2]
            else:
                print('unbucketed (no path?):', sym, type, size, path,
                      file=sys.stderr)
            tree[subkey] = tree.get(subkey, 0) + size
    return dirs
+
+
def jsonify_tree(tree, name):
    """Convert a nested size dict into the JSON treemap node structure.

    Args:
      tree: dict mapping name -> size (int) or subtree (dict); the special
        key None holds a du-reported total for the directory itself.
      name: label for this node.

    Returns:
      Dict with 'name', 'data' {'$area': total} and 'children' sorted by
      descending area.
    """
    children = []
    total = 0
    subtree_total = None

    # .items() (not the Python-2-only .iteritems()) behaves identically
    # under Python 2 and keeps the script runnable under Python 3.
    for key, val in tree.items():
        if key is None:
            subtree_total = val
            continue
        if isinstance(val, dict):
            subtree = jsonify_tree(val, key)
            total += subtree['data']['$area']
            children.append(subtree)
        else:
            total += val
            children.append({
                    'name': key + ' ' + format_bytes(val),
                    'data': { '$area': val }
                    })

    # du's directory total may exceed the sum of the entries we saw; surface
    # the difference as a '.' child so the areas still add up.
    if subtree_total:
        dot_total = subtree_total - total
        if dot_total > 0:
            children.append({'name': '. ' + format_bytes(dot_total),
                             'data': { '$area': dot_total }})
            total = subtree_total

    children.sort(key=lambda child: -child['data']['$area'])

    return {
        'name': name + ' ' + format_bytes(total),
        'data': {
            '$area': total,
            },
        'children': children,
        }
+
+
def dump_du(dufile, strip_prefix):
    """Print a treemap JS variable built from `du -B 1` output."""
    tree = treeify_du(parse_du(dufile), strip_prefix)
    print('var kTree =', json.dumps(jsonify_tree(tree, '/'), indent=2))
+
+
def dump_nm(nmfile, strip_prefix):
    """Print a treemap JS variable built from nm output."""
    tree = treeify_syms(parse_nm(nmfile), strip_prefix)
    print('var kTree =', json.dumps(jsonify_tree(tree, '/'), indent=2))
+
+
def parse_objdump(input):
    """Parse `objdump -h` output.

    Returns:
      (sections, debug_sections): two lists of (name, size) pairs. Names
      lose their leading '.'; .debug_* sections go into the second list
      with the 'debug_' prefix removed.
    """
    sec_re = re.compile(r'^\d+ (\S+) +([0-9a-z]+)')
    sections = []
    debug_sections = []

    for raw in input:
        match = sec_re.match(raw.strip())
        if not match:
            continue
        name, size = match.groups()
        if name.startswith('.'):
            name = name[1:]
        size = int(size, 16)
        if name.startswith('debug_'):
            debug_sections.append((name[len('debug_'):], size))
        else:
            sections.append((name, size))
    return sections, debug_sections
+
+
def jsonify_sections(name, sections):
    """Build a treemap node whose children are the given (section, size) pairs."""
    children = [{'name': section + ' ' + format_bytes(size),
                 'data': { '$area': size }}
                for section, size in sections]
    total = sum(size for _, size in sections)

    children.sort(key=lambda child: -child['data']['$area'])

    return {
        'name': name + ' ' + format_bytes(total),
        'data': { '$area': total },
        'children': children
        }
+
+
def dump_sections():
    """Print a treemap JS variable for objdump output read from objdump.out."""
    sections, debug_sections = parse_objdump(open('objdump.out'))
    sections = jsonify_sections('sections', sections)
    debug_sections = jsonify_sections('debug', debug_sections)
    total = sections['data']['$area'] + debug_sections['data']['$area']
    print('var kTree =', json.dumps({
            'name': 'top',
            'data': { '$area': total },
            'children': [ debug_sections, sections ]}))
+
usage = """%prog [options] MODE

Modes are:
  du: output 'du' json suitable for a treemap
  syms: output symbols json suitable for a treemap
  dump: print symbols sorted by size (pipe to head for best output)
  sections: output binary sections json suitable for a treemap

du output passed to --du-output should be from running a command
like the following:
  du -B 1 /path/to/root > du.out

nm output passed to --nm-output should be from running a command
like the following (note, can take a long time -- 30 minutes):
  nm -C -S -l /path/to/binary > nm.out

objdump output passed to --objdump-output should be from a command
like:
  objdump -h /path/to/binary > objdump.out"""
parser = optparse.OptionParser(usage=usage)
# BUG FIX: this help text previously described --nm-output (copy-paste).
parser.add_option('--du-output', action='store', dest='dupath',
                  metavar='PATH', default='du.out',
                  help='path to du output [default=du.out]')
parser.add_option('--nm-output', action='store', dest='nmpath',
                  metavar='PATH', default='nm.out',
                  help='path to nm output [default=nm.out]')
parser.add_option('--objdump-output', action='store', dest='objdump',
                  metavar='PATH', default='objdump.out',
                  help='path to objdump output [default=objdump.out]')
parser.add_option('--strip-prefix', metavar='PATH', action='store',
                  help='strip PATH prefix from paths; e.g. /path/to/src/root')
parser.add_option('--filter', action='store',
                  help='include only symbols/files matching FILTER')
opts, args = parser.parse_args()

if len(args) != 1:
    parser.print_usage()
    sys.exit(1)

mode = args[0]
if mode == 'du':
    dump_du(open(opts.dupath, 'r'), strip_prefix=opts.strip_prefix)
elif mode == 'syms':
    dump_nm(open(opts.nmpath, 'r'), strip_prefix=opts.strip_prefix)
elif mode == 'sections':
    dump_sections()
elif mode == 'dump':
    # A list of (sym, type, size, path); sort by size, largest first.
    syms = sorted(parse_nm(open(opts.nmpath, 'r')), key=lambda x: -x[2])
    total = 0
    for sym, type, size, path in syms:
        if type in ('b', 'w'):
            continue  # skip bss and weak symbols
        if path is None:
            path = ''
        if opts.filter and not (opts.filter in sym or opts.filter in path):
            continue
        print('%6s %s (%s) %s' % (format_bytes(size), sym,
                                  symbol_type_to_human(type), path))
        total += size
    print('%6s %s' % (format_bytes(total), 'total'), end='')
else:
    print('unknown mode')
    parser.print_usage()
new file mode 100755
index 0000000..6887501
--- /dev/null
+++ b/contrib/eval_workstation
@@ -0,0 +1,225 @@
+#!/bin/bash
+
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# The purpose of this script is to run through a few steps of a typical
+# chromeos development flow, tracking the runtime of each step, in order
+# to evaluate a development machine.
+
CROS_SDK="./chromite/bin/cros_sdk"  # cros_sdk entry point, relative to the checkout root
BOARD="link"                        # board evaluated by every build step below
BRANCH="stabilize-4886.B"           # manifest branch to sync
+
# Run a given command, printing a concise start and stop line (with elapsed
# seconds) for the given description. Output goes to a per-step log file
# under ${TEMPDIR}; the script exits if the command fails.
function run() {
  local description="$1"
  local cmd="$2"

  # Log file name is the description with spaces replaced by underscores.
  local log_basename="${description// /_}"
  local logfile="${TEMPDIR}/${log_basename}"

  local start_time=$(date +%s)

  echo "Running '${description}' now, output to ${logfile}"
  ${cmd} >& "${logfile}"
  local result=$?

  local end_time=$(date +%s)
  echo "Running '${description}' took $(( end_time - start_time ))s"

  if [[ "${result}" -ne 0 ]]; then
    echo "  Failed '${description}'.  Check log."
    exit 1
  fi
}
+
# Completely delete the given directory; exits the script if the delete
# did not take effect.
function nuke_dir() {
  local dir="$1"
  [[ -d "${dir}" ]] || return 0
  echo "Deleting existing ${dir}."
  rm -rf "${dir}" >& /dev/null
  if [[ -d "${dir}" ]]; then
    echo "Failed to delete ${dir}."
    exit 1
  fi
}
+
# TODO(mtennant): I am sure there is a clever way to combine this
# function with the non-sudo one above.
# Completely delete the given directory using sudo privileges; exits the
# script if the delete did not take effect.
function sudo_nuke_dir() {
  local dir="$1"
  [[ -d "${dir}" ]] || return 0
  echo "Deleting existing ${dir}, using sudo."
  sudo rm -rf "${dir}" >& /dev/null
  if [[ -d "${dir}" ]]; then
    echo "Failed to delete ${dir}, using sudo!"
    exit 1
  fi
}
+
# Delete the repo client metadata directory.
function nuke_repo() {
  nuke_dir ".repo"
}
+
# Delete any file or directory other than .repo.
# The assumption is that any chroot should have been already deleted.
function nuke_source() {
  echo "Deleting all contents except .repo"
  # -maxdepth 1 keeps find at the top level; the two ! -regex clauses spare
  # '.' itself and the .repo directory.
  find . -maxdepth 1 ! -regex './.repo' ! -regex '.' | xargs rm -rf
}
+
# Delete all caches (the common cache and the in-chroot chromeos-cache).
function nuke_cache() {
  sudo_nuke_dir ".cache"
  sudo_nuke_dir "chroot/var/cache/chromeos-cache"
}
+
# Delete the build root for $BOARD.
function nuke_build() {
  # `local` keeps `build` from leaking into the global scope (the original
  # assignment was an unintended global).
  local build="chroot/build/${BOARD}"
  sudo_nuke_dir "${build}"
}
+
# Delete the chroot for $BOARD (via cros_sdk --delete), then the caches.
function nuke_chroot() {
  if [[ -d "chroot" ]]; then
    echo "Deleting existing chroot"
    ${CROS_SDK} --delete >& /dev/null
  fi
  nuke_cache
}
+
# Delete everything to start with a clean slate. Order matters: chroot
# first (needs cros_sdk from the source tree), then source, then .repo.
function nuke_all() {
  local step
  for step in nuke_chroot nuke_source nuke_repo; do
    ${step}
  done
}
+
# Initialize the repo client on ${BRANCH}, removing any old .repo first.
function repo_init() {
  echo
  nuke_repo

  local cmd="repo init"
  cmd+=" --repo-url https://chromium.googlesource.com/external/repo"
  cmd+=" --manifest-url https://chrome-internal-review.googlesource.com/chromeos/manifest-internal"
  cmd+=" --manifest-name default.xml"
  cmd+=" --manifest-branch ${BRANCH}"
  run "repo init" "${cmd}"
}
+
# Sync the source tree from scratch; everything but .repo is deleted first.
function repo_sync() {
  echo
  nuke_source

  local cmd="repo sync --jobs=16"
  run "repo sync" "${cmd}"
}
+
# Create the chroot from scratch.  "-- exit" enters the SDK and leaves
# immediately, which is enough to trigger the chroot creation.
function make_chroot() {
  echo
  nuke_chroot

  local cmd="${CROS_SDK} -- exit"
  run "cros_sdk" "${cmd}"
}
+
# Build packages for ${BOARD} with default settings (contrast with the
# --nousepkg variant below); caches and the board build root are cleared first.
function build_packages_plain() {
  echo
  nuke_cache
  nuke_build

  local cmd="${CROS_SDK} -- ./build_packages --board=${BOARD}"
  run "build_packages plain" "${cmd}"
}
+
# Build packages for ${BOARD} with --nousepkg, i.e. without reusing
# prebuilt binary packages; caches and the board build root are cleared first.
function build_packages_source() {
  echo
  nuke_cache
  nuke_build

  local cmd="${CROS_SDK} -- ./build_packages --board=${BOARD} --nousepkg"
  run "build_packages source" "${cmd}"
}
+
# Build the image for ${BOARD} from the previously built packages.
function build_image() {
  echo

  local cmd="${CROS_SDK} -- ./build_image --board=${BOARD}"
  run "build_image" "${cmd}"
}
+
# Print usage text.  Called with no arguments it exits 0; called with
# arguments they are printed to stderr as an error and the script exits 1.
usage() {
  cat <<EOF
Usage: eval_workstation [--help] run_count

Run through the full ChromeOS workstation evaluation flow run_count
times.

To adjust which steps are run you must edit the main loop code.

Options:
  -h, --help    This help output
EOF

  if [[ $# -gt 0 ]]; then
    printf '\nerror: %s\n' "$*" >&2
    exit 1
  else
    exit 0
  fi
}
+
main() {
  # Parse arguments: a single positional run count plus -h/--help.
  run_count=0
  while [[ $# -gt 0 ]]; do
    case $1 in
    -h|--help)    usage;;
    -*)           usage "unknown option $1";;
    *)            run_count="$1";;
    esac
    shift
  done

  if [[ "${run_count}" -eq 0 ]]; then
    usage "missing run count argument"
  fi

  # Prime sudo credentials up front so later sudo calls do not prompt.
  sudo true

  # Offer the user a chance to kill.
  # TODO(mtennant): A yes/no prompt would be a better solution here.
  echo "This script will run through a few ChromeOS build steps."
  echo "Doing so will NUKE THE CONTENTS OF CURRENT DIRECTORY."
  echo "If that is not okay then kill this script now."
  sleep 10

  for (( i=1; i<=${run_count}; i++ )); do
    echo
    echo "*** Starting run ${i} of ${run_count} now. ***"
    echo

    # Per-run scratch directory; presumably consumed by the "run" helper
    # for step logs -- TODO confirm against the run() definition above.
    TEMPDIR=$(mktemp -d /tmp/workstation_eval.XXXX)
    echo "Using temporary dir: ${TEMPDIR}"

    # Refresh sudo credentials before the long-running steps.
    sudo true
    nuke_all
    repo_init
    repo_sync
    make_chroot
    build_packages_plain
    build_packages_source
    build_image

    echo
    echo "Any logs created for run ${i} are under ${TEMPDIR}"
  done
}
+
# Script entry point.
main "$@"
diff --git a/contrib/get_hash_for_release b/contrib/get_hash_for_release
new file mode 100755
index 0000000..9b3497c
--- /dev/null
+++ b/contrib/get_hash_for_release
@@ -0,0 +1,173 @@
+#!/bin/bash
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# The "get_hash_for_release" command can be used in one of two forms:
+#  - It can be used to return the hash associated with the project in
+#    the current directory:
+#
+#      ~/chromiumos/chromite$ get_hash_for_release 6822.0.0
+#      0b325183cf2eee7f93d7d631a8639c089f4a2d4f
+#      # This is the hash of the "chromite" package for release 6822.0.0
+#
+#  - It can indicate what is the earliest release where a given hash
+#    appeared:
+#
+#     ~/chromiumos/chromite$ get_hash_for_release R41 \
+#         8eba257003357ca945528fb93dadd2c4d16b7d11
+#     Oldest release with 8eba257003357ca945528fb93dadd2c4d16b7d11 is 6674.0.0
+
+
# Clone (or update, if already present) the internal manifest-versions
# repository into a fixed temp directory.  Note: temp_dir is deliberately
# not "local" -- the other functions in this script read it.
fetch_manifests() {
  temp_dir=/tmp/chrome_manifest_temp
  if [[ ! -d "${temp_dir}" ]] ; then
    mkdir -p "${temp_dir}"
    local git_server="https://chrome-internal.googlesource.com"
    git clone -q --depth 1 --single-branch \
        "${git_server}/chromeos/manifest-versions" "${temp_dir}"
  else
    (cd "${temp_dir}" && git pull -q)
  fi
}
+
# Remove the temporary manifest checkout, if any.
cleanup_manifests() {
  [[ -d "${temp_dir}" ]] && rm -rf "${temp_dir}"
  return 0
}
+
# Return 0 iff the release in $1 is a child (descendant) of the release in $2.
is_child_release() {
  # Release numbers are stated in a set of dotted integers.
  # 2.0.0.0 is a child of 1.0.0.0 (later release)
  # 2.1.0.0 is a child of 2.0.0.0 (new branch)
  # 3.0.0.0 is a child of 2.0.0.0 but not 2.1.0.0 since 2.1 branched from 2.0.
  # 3.0.1.0 isn't really valid (branches are left aligned) but we ignore this.

  local child
  local parent
  IFS='.' read -a child <<< "$1"
  IFS='.' read -a parent <<< "$2"
  if [[ ${#child[@]} -ne ${#parent[@]} ]] ; then
    return 1
  fi

  local must_be_root
  for idx in $(seq 0 $((${#child[@]} - 1))); do
    local parent_num="${parent[idx]}"
    [[ -n "${must_be_root}" && "$parent_num" != "0" ]] && return 1
    local child_num="${child[idx]}"
    # Compare components numerically.  The previous [[ a > b ]] form is a
    # *lexicographic* comparison, which mis-orders multi-digit components
    # (e.g. "9" > "10" is true as strings).
    [[ "${parent_num}" -gt "${child_num}" ]] && return 1
    [[ "${parent_num}" -lt "${child_num}" ]] && must_be_root=1
  done
  return 0
}
+
# Print the repo project name and its current revision, one per line.
get_project_info() {
  repo info . 2>/dev/null | grep -E '^(Project|Current revision):' | cut -f2 -d:
}
+
# Print the pinned revision hash for ${project} from the buildspec file
# ${temp_dir}/buildspecs/<release>/<revision>.xml, filtered to the current
# ${branch}.  When the release argument is a glob, the second sed
# expression prefixes each match with its release number ("<rel> : <hash>").
get_revision() {
  local release="$1"
  local revision="$2"
  grep "${project}.*upstream=.*${branch}" \
      ${temp_dir}/buildspecs/$release/${revision}.xml |
      sed -e 's/ *<project.*revision="\([^"]*\)".*/\1/' \
          -e 's/^.*\/\([0-9.]*\)\.xml:/\1 : /'
}
+
# Print the hash recorded for ${project} in the buildspec named
# ${revision}.xml, searching every release directory ('*').
get_hash_for_release() {
  get_revision '*' "${revision}"
}
+
# Print the earliest buildspec of the milestone in ${revision} (e.g. "R41",
# leading letter stripped) whose pinned ${project} revision contains ${hash}.
get_release_for_hash() {
  local release="${revision:1}"
  # All buildspec versions for this milestone, newest first.
  local -a all_revs=($(ls "${temp_dir}/buildspecs/${release}" |
                       sort -Vr | sed -e 's/\.xml$//'))
  # Most recent buildspec that was actually released, per the git log.
  latest_rev=$(basename $(cd "${temp_dir}/buildspecs/${release}" &&
                          git log -n1 --name-only --oneline --grep release . |
                          grep -v ' ' | sort -V | tail -1 |
                          sed -e 's/\.xml$//'))
  # Keep only buildspecs on the same branch line as latest_rev.
  revs=()
  for rev in "${all_revs[@]}"; do
    is_child_release "${latest_rev}" "${rev}" && revs+=("${rev}")
  done
  last_released_hash=$(get_revision "${release}" "${latest_rev}" |
                       awk '{print $1}')
  if [[ -z "${last_released_hash}" ]] ; then
    echo "Huh?  Can't find hash for ${latest_rev}"
    return
  fi
  # Walk back through the project's history in growing windows until
  # ${hash} shows up (or give up after 10000 commits).
  local -a hashes
  found=''
  for search_size in 10 100 1000 10000; do
    hashes=($(git log --pretty=format:%H -n "${search_size}" \
              "${last_released_hash}"))
    if echo "${hashes[*]}" | grep -q $hash; then
      found=1
      break
    fi
  done
  if [[ -z "${found}" ]] ; then
    echo "Perhaps ${hash} was never in release ${release}?"
    return
  fi
  # Pop all hashes after the one we care about.
  # (Negative array indexing requires bash 4.3+.)
  while [[ "${hashes[-1]}" != "${hash}" ]] ; do
    unset hashes[${#hashes[@]}-1]
  done
  # Step from the newest released buildspec toward older ones while each
  # buildspec's pinned hash still lies within the truncated history.
  oldest_release="${latest_rev}"
  idx=1
  while [[ $idx -lt ${#revs[@]} ]]; do
    rev="${revs[idx]}"
    hash_for_rev=$(get_revision "${release}" "${rev}" |
                   awk '{print $1}')
    if ! echo "${hashes[*]}" | grep -q $hash_for_rev; then
      break
    fi
    oldest_release="${rev}"
    idx=$(( idx + 1 ))
  done
  echo "Oldest release with ${hash} is ${oldest_release}"
}
+
main() {
  revision="${1}"
  if [[ -z "${revision}"  ]] ; then
    echo "Usage: $0 <revision> [hash]"
    exit 0
  fi

  # A leading M or R means a milestone (e.g. "R41") was given, in which
  # case the second argument is the hash to locate within that release.
  if expr "${revision}" : '[MR]' > /dev/null; then
    hash="${2}"
    shift
  fi

  project_info=($(get_project_info))

  if [[ "${#project_info[@]}" < 2 ]] ; then
    echo "Couldn't get project info (are you in a repo directory?)"
    exit 0
  fi

  project="${project_info[0]}"
  # Branch is the third component of a ref such as "refs/heads/<branch>".
  branch="$(echo ${project_info[1]} | cut -d/ -f3)"
  if [[ "${branch}" == "master" ]] ; then
    branch=""
  elif [[ "${branch}" == "chromeos-3.4" ]] ; then
    # Hack for old releases of chromeos-kernel.
    branch='\.[4B]"'
  fi

  fetch_manifests

  if [[ -z "${hash}" ]] ; then
    get_hash_for_release
  else
    get_release_for_hash
  fi

  # Left disabled, presumably so repeat runs reuse the checkout -- confirm
  # before re-enabling.
  #cleanup_manifests
}
+
# Script entry point.
main "$@"
diff --git a/cros/__init__.py b/cros/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cros/__init__.py
diff --git a/cros/test/__init__.py b/cros/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/cros/test/__init__.py
diff --git a/cros/test/image_test.py b/cros/test/image_test.py
new file mode 100644
index 0000000..9016da1
--- /dev/null
+++ b/cros/test/image_test.py
@@ -0,0 +1,377 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Collection of tests to run on the rootfs of a built image.
+
+This module should only be imported inside the chroot.
+"""
+
+from __future__ import print_function
+
+import cStringIO
+import collections
+import itertools
+import lddtree
+import magic
+import mimetypes
+import os
+import re
+import stat
+
+from elftools.elf import elffile
+from elftools.common import exceptions
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import filetype
+from chromite.lib import image_test_lib
+from chromite.lib import osutils
+from chromite.lib import parseelf
+
+
class LocaltimeTest(image_test_lib.NonForgivingImageTestCase):
  """Check that /etc/localtime points at /var/lib/timezone/localtime.

  Serves as a template for new image tests: the image is already mounted
  when the test runs, and the rootfs is reachable via the ROOT_A constant.
  """

  def TestLocaltimeIsSymlink(self):
    path = os.path.join(image_test_lib.ROOT_A, 'etc', 'localtime')
    self.assertTrue(os.path.islink(path))

  def TestLocaltimeLinkIsCorrect(self):
    path = os.path.join(image_test_lib.ROOT_A, 'etc', 'localtime')
    link_target = os.readlink(path)
    self.assertEqual('/var/lib/timezone/localtime', link_target)
+
+
+def _GuessMimeType(magic_obj, file_name):
+  """Guess a file's mimetype base on its extension and content.
+
+  File extension is favored over file content to reduce noise.
+
+  Args:
+    magic_obj: A loaded magic instance.
+    file_name: A path to the file.
+
+  Returns:
+    A mime type of |file_name|.
+  """
+  mime_type, _ = mimetypes.guess_type(file_name)
+  if not mime_type:
+    mime_type = magic_obj.file(file_name)
+  return mime_type
+
+
class BlacklistTest(image_test_lib.NonForgivingImageTestCase):
  """Verify that rootfs does not contain blacklisted items."""

  def TestBlacklistedDirectories(self):
    """Fail if any blacklisted directory exists in the rootfs."""
    dirs = [os.path.join(image_test_lib.ROOT_A, 'usr', 'share', 'locale')]
    for d in dirs:
      self.assertFalse(os.path.isdir(d), 'Directory %s is blacklisted.' % d)

  def TestBlacklistedFileTypes(self):
    """Fail if there are files of prohibited types (such as C++ source code).

    The whitelist has higher precedence than the blacklist.
    """
    blacklisted_patterns = [re.compile(x) for x in [
        r'text/x-c\+\+',
        r'text/x-c',
    ]]
    whitelisted_patterns = [re.compile(x) for x in [
        r'.*/braille/.*',
        r'.*/brltty/.*',
        r'.*/etc/sudoers$',
        r'.*/dump_vpd_log$',
        r'.*\.conf$',
        r'.*/libnl/classid$',
        r'.*/locale/',
        r'.*/X11/xkb/',
        r'.*/chromeos-assets/',
        r'.*/udev/rules.d/',
        r'.*/firmware/ar3k/.*pst$',
        r'.*/etc/services',
        r'.*/usr/share/dev-install/portage',
    ]]

    failures = []

    magic_obj = magic.open(magic.MAGIC_MIME_TYPE)
    magic_obj.load()
    for root, _, file_names in os.walk(image_test_lib.ROOT_A):
      for file_name in file_names:
        full_name = os.path.join(root, file_name)
        # Symlinks and non-regular files cannot be typed meaningfully.
        if os.path.islink(full_name) or not os.path.isfile(full_name):
          continue

        mime_type = _GuessMimeType(magic_obj, full_name)
        if (any(x.match(mime_type) for x in blacklisted_patterns) and not
            any(x.match(full_name) for x in whitelisted_patterns)):
          failures.append('File %s has blacklisted type %s.' %
                          (full_name, mime_type))
    magic_obj.close()

    self.assertFalse(failures, '\n'.join(failures))

  def TestValidInterpreter(self):
    """Fail if a script's interpreter is not found, or not executable.

    A script interpreter is anything after the #! sign, up to the end of line
    or the first space.
    """
    failures = []

    for root, _, file_names in os.walk(image_test_lib.ROOT_A):
      for file_name in file_names:
        full_name = os.path.join(root, file_name)
        file_stat = os.lstat(full_name)
        # Only regular files with at least one execute bit can be scripts.
        if (not stat.S_ISREG(file_stat.st_mode) or
            (file_stat.st_mode & 0o111) == 0):
          continue

        with open(full_name, 'rb') as f:
          if f.read(2) != '#!':
            continue
          line = '#!' + f.readline().strip()

        try:
          # Ignore arguments to the interpreter.
          interp, _ = filetype.SplitShebang(line)
        except ValueError:
          failures.append('File %s has an invalid interpreter path: "%s".' %
                          (full_name, line))
          # |interp| is unbound when SplitShebang raises; without this
          # continue the checks below would crash with a NameError.
          continue

        # Absolute path to the interpreter.
        interp = os.path.join(image_test_lib.ROOT_A, interp.lstrip('/'))
        # Interpreter could be a symlink. Resolve it.
        interp = osutils.ResolveSymlink(interp, image_test_lib.ROOT_A)
        if not os.path.isfile(interp):
          failures.append('File %s uses non-existing interpreter %s.' %
                          (full_name, interp))
        elif (os.stat(interp).st_mode & 0o111) == 0:
          failures.append('Interpreter %s is not executable.' % interp)

    self.assertFalse(failures, '\n'.join(failures))
+
+
class LinkageTest(image_test_lib.NonForgivingImageTestCase):
  """Verify that all binaries and libraries have proper linkage."""

  def setUp(self):
    # Bind-mount the stateful var overlay into the rootfs so files that
    # normally live under /var are visible to the checks below.
    osutils.MountDir(
        os.path.join(image_test_lib.STATEFUL, 'var_overlay'),
        os.path.join(image_test_lib.ROOT_A, 'var'),
        mount_opts=('bind', ),
    )

  def tearDown(self):
    osutils.UmountDir(
        os.path.join(image_test_lib.ROOT_A, 'var'),
        cleanup=False,
    )

  def _IsPackageMerged(self, package_name):
    """Return True if |package_name| is installed in the image (ROOT_A)."""
    cmd = [
        'portageq',
        'has_version',
        image_test_lib.ROOT_A,
        package_name
    ]
    # error_code_ok: a non-zero exit simply means "not installed", not an
    # error in running the test.
    ret = cros_build_lib.RunCommand(cmd, error_code_ok=True,
                                    combine_stdout_stderr=True,
                                    extra_env={'ROOT': image_test_lib.ROOT_A})
    if ret.returncode == 0:
      logging.info('Package is available: %s', package_name)
    else:
      logging.info('Package is not available: %s', package_name)
    return ret.returncode == 0

  def TestLinkage(self):
    """Find main executable binaries and check their linkage."""
    binaries = [
        'boot/vmlinuz',
        'bin/sed',
    ]

    if self._IsPackageMerged('chromeos-base/chromeos-login'):
      binaries.append('sbin/session_manager')

    if self._IsPackageMerged('x11-base/xorg-server'):
      binaries.append('usr/bin/Xorg')

    # When chrome is built with USE="pgo_generate", rootfs chrome is actually a
    # symlink to a real binary which is in the stateful partition. So we do not
    # check for a valid chrome binary in that case.
    if not self._IsPackageMerged('chromeos-base/chromeos-chrome[pgo_generate]'):
      if self._IsPackageMerged('chromeos-base/chromeos-chrome[app_shell]'):
        binaries.append('opt/google/chrome/app_shell')
      elif self._IsPackageMerged('chromeos-base/chromeos-chrome[envoy]'):
        binaries.append('opt/google/chrome/envoy_shell')
      elif self._IsPackageMerged('chromeos-base/chromeos-chrome'):
        binaries.append('opt/google/chrome/chrome')

    binaries = [os.path.join(image_test_lib.ROOT_A, x) for x in binaries]

    # Grab all .so files
    libraries = []
    for root, _, files in os.walk(image_test_lib.ROOT_A):
      for name in files:
        filename = os.path.join(root, name)
        if '.so' in filename:
          libraries.append(filename)

    ldpaths = lddtree.LoadLdpaths(image_test_lib.ROOT_A)
    for to_test in itertools.chain(binaries, libraries):
      # to_test could be a symlink, we need to resolve it relative to ROOT_A.
      while os.path.islink(to_test):
        link = os.readlink(to_test)
        if link.startswith('/'):
          to_test = os.path.join(image_test_lib.ROOT_A, link[1:])
        else:
          to_test = os.path.join(os.path.dirname(to_test), link)
      try:
        lddtree.ParseELF(to_test, root=image_test_lib.ROOT_A, ldpaths=ldpaths)
      except lddtree.exceptions.ELFError:
        # Not an ELF file after all (the '.so' name match is heuristic).
        continue
      except IOError as e:
        self.fail('Fail linkage test for %s: %s' % (to_test, e))
+
+
class FileSystemMetaDataTest(image_test_lib.ForgivingImageTestCase):
  """A test class to gather file system stats such as free inodes, blocks."""

  def TestStats(self):
    """Collect inodes and blocks usage."""
    # Find the loopback device that was mounted to ROOT_A.
    loop_device = None
    root_path = os.path.abspath(os.readlink(image_test_lib.ROOT_A))
    for mtab in osutils.IterateMountPoints():
      if mtab.destination == root_path:
        loop_device = mtab.source
        break
    self.assertTrue(loop_device, 'Cannot find loopback device for ROOT_A.')

    # Gather file system stats with tune2fs.
    cmd = [
        'tune2fs',
        '-l',
        loop_device
    ]
    # tune2fs produces output like this:
    #
    # tune2fs 1.42 (29-Nov-2011)
    # Filesystem volume name:   ROOT-A
    # Last mounted on:          <not available>
    # Filesystem UUID:          <none>
    # Filesystem magic number:  0xEF53
    # Filesystem revision #:    1 (dynamic)
    # ...
    #
    # So we need to ignore the first line.
    # LC_ALL=C pins the English field names parsed below.
    ret = cros_build_lib.SudoRunCommand(cmd, capture_output=True,
                                        extra_env={'LC_ALL': 'C'})
    # Build a field-name -> value map; lines without a colon (such as the
    # version banner) are skipped by the filter.
    fs_stat = dict(line.split(':', 1) for line in ret.output.splitlines()
                   if ':' in line)
    free_inodes = int(fs_stat['Free inodes'])
    free_blocks = int(fs_stat['Free blocks'])
    inode_count = int(fs_stat['Inode count'])
    block_count = int(fs_stat['Block count'])
    block_size = int(fs_stat['Block size'])

    # Sum the apparent sizes of everything in the rootfs (lstat, so
    # symlinks are counted as the links themselves).
    sum_file_size = 0
    for root, _, filenames in os.walk(image_test_lib.ROOT_A):
      for file_name in filenames:
        full_name = os.path.join(root, file_name)
        file_stat = os.lstat(full_name)
        sum_file_size += file_stat.st_size

    # Whatever the used blocks hold beyond the files' own bytes is
    # attributed to filesystem metadata.
    metadata_size = (block_count - free_blocks) * block_size - sum_file_size

    self.OutputPerfValue('free_inodes_over_inode_count',
                         free_inodes * 100.0 / inode_count, 'percent',
                         graph='free_over_used_ratio')
    self.OutputPerfValue('free_blocks_over_block_count',
                         free_blocks * 100.0 / block_count, 'percent',
                         graph='free_over_used_ratio')
    self.OutputPerfValue('apparent_size', sum_file_size, 'bytes',
                         higher_is_better=False, graph='filesystem_stats')
    self.OutputPerfValue('metadata_size', metadata_size, 'bytes',
                         higher_is_better=False, graph='filesystem_stats')
+
+
class SymbolsTest(image_test_lib.NonForgivingImageTestCase):
  """Tests related to symbols in ELF files."""

  def setUp(self):
    # Mapping of file name --> 2-tuple (import, export).
    self._known_symtabs = {}

  def _GetSymbols(self, file_name):
    """Return a 2-tuple (import, export) of an ELF file |file_name|.

    Import and export in the returned tuple are sets of strings (symbol names).

    Raises:
      ValueError: if |file_name| is not an ELF file.
    """
    # Results are memoized per file in self._known_symtabs.
    if file_name in self._known_symtabs:
      return self._known_symtabs[file_name]

    # We use cstringio here to obviate fseek/fread time in pyelftools.
    stream = cStringIO.StringIO(osutils.ReadFile(file_name))

    try:
      elf = elffile.ELFFile(stream)
    except exceptions.ELFError:
      raise ValueError('%s is not an ELF file.' % file_name)

    imp, exp = parseelf.ParseELFSymbols(elf)
    # Only the exported names matter here; drop their associated values.
    exp = set(exp.keys())

    self._known_symtabs[file_name] = imp, exp
    return imp, exp

  def TestImportedSymbolsAreAvailable(self):
    """Ensure all ELF files' imported symbols are available in ROOT-A.

    In this test, we find all imported symbols and exported symbols from all
    ELF files on the system. This test will fail if the set of imported symbols
    is not a subset of exported symbols.

    This test DOES NOT simulate ELF loading. "TestLinkage" does that with
    `lddtree`.
    """
    # Import tables of files, keyed by file names.
    importeds = collections.defaultdict(set)
    # All exported symbols.
    exported = set()

    for root, _, filenames in os.walk(image_test_lib.ROOT_A):
      for filename in filenames:
        full_name = os.path.join(root, filename)
        if os.path.islink(full_name) or not os.path.isfile(full_name):
          continue

        try:
          imp, exp = self._GetSymbols(full_name)
        except (ValueError, IOError):
          # Not an ELF file, or unreadable; irrelevant to this test.
          continue
        else:
          importeds[full_name] = imp
          exported.update(exp)

    # Symbols allowed to stay unresolved; the ps_* callbacks are presumably
    # supplied by the debugger that loads libthread_db -- confirm before
    # extending this list.
    known_unsatisfieds = {
        'libthread_db-1.0.so': set([
            'ps_pdwrite', 'ps_pdread', 'ps_lgetfpregs', 'ps_lsetregs',
            'ps_lgetregs', 'ps_lsetfpregs', 'ps_pglobal_lookup', 'ps_getpid']),
    }

    failures = []
    for full_name, imported in importeds.iteritems():
      file_name = os.path.basename(full_name)
      missing = imported - exported - known_unsatisfieds.get(file_name, set())
      if missing:
        failures.append('File %s contains unsatisfied symbols: %r' %
                        (full_name, missing))
    self.assertFalse(failures, '\n'.join(failures))
diff --git a/inherit-review-settings-ok b/inherit-review-settings-ok
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/inherit-review-settings-ok
diff --git a/lib/__init__.py b/lib/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/__init__.py
diff --git a/lib/accounts_lib.py b/lib/accounts_lib.py
new file mode 100644
index 0000000..fd7dfef
--- /dev/null
+++ b/lib/accounts_lib.py
@@ -0,0 +1,241 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Logic to parse and merge account databases in overlay stacks."""
+
+from __future__ import print_function
+
+import collections
+
+from chromite.lib import json_lib
+from chromite.lib import user_db
+
+
# Top-level keys of an accounts database JSON document.
GROUPS_KEY = 'groups'
USERS_KEY = 'users'

# Keys understood inside a user spec dict.
USER_COMMENT_KEY = 'gecos'
USER_DEFUNCT_KEY = 'defunct'
USER_FIXED_ID_KEY = 'fixed_id'
USER_GROUP_KEY = 'group_name'
USER_HOME_KEY = 'home'
USER_ID_KEY = 'uid'
USER_NAME_KEY = 'user'
USER_PASSWORD_KEY = 'password'
USER_SHELL_KEY = 'shell'

# Keys understood inside a group spec dict.
GROUP_DEFUNCT_KEY = 'defunct'
GROUP_FIXED_ID_KEY = 'fixed_id'
GROUP_ID_KEY = 'gid'
GROUP_NAME_KEY = 'group'
GROUP_PASSWORD_KEY = 'password'
GROUP_USERS_KEY = 'users'

# Fully parsed and validated user entry (see AccountDatabase._AddUser).
User = collections.namedtuple(
    'User', ('name', 'password', 'uid', 'group_name', 'description', 'home',
             'shell', 'is_fixed_id', 'is_defunct'))

# Fully parsed and validated group entry (see AccountDatabase._AddGroup).
Group = collections.namedtuple(
    'Group', ('name', 'password', 'gid', 'users', 'is_fixed_id', 'is_defunct'))
+
+
class AccountDatabase(object):
  """Parses, validates, and combines account databases from overlays."""

  def __init__(self):
    """Construct an empty instance."""
    # Maps of name -> Group / name -> User for every parsed spec.
    self.groups = {}
    self.users = {}

  def AddAccountsFromDatabase(self, account_db_path):
    """Add accounts from the database at |account_db_path| to self.

    Overrides previously loaded accounts.

    Args:
      account_db_path: path to file containing an account database.

    Raises:
      ValueError: if the database contains unknown top-level fields.
    """
    raw_db = json_lib.ParseJsonFileWithComments(account_db_path)
    json_lib.AssertIsInstance(raw_db, dict, 'accounts database')

    # We don't mandate that an accounts database specify either field.
    raw_db.setdefault(USERS_KEY, [])
    raw_db.setdefault(GROUPS_KEY, [])
    user_list = json_lib.PopValueOfType(raw_db, USERS_KEY, list,
                                        'list of users in accounts database')
    group_list = json_lib.PopValueOfType(raw_db, GROUPS_KEY, list,
                                         'list of groups in accounts database')

    # We do mandate that the database contain only fields we know about.
    if raw_db:
      raise ValueError('Accounts database include unknown fields: %r' %
                       raw_db.keys())

    for user in user_list:
      json_lib.AssertIsInstance(
          user, dict, 'user specification in accounts database')
      self._AddUser(user)

    for group in group_list:
      json_lib.AssertIsInstance(
          group, dict, 'group specification in accounts database')
      self._AddGroup(group)

  def _AddUser(self, user_spec):
    """Add a user to this account database based on |user_spec|.

    Args:
      user_spec: dict of information from an accounts database.
          This fragment is expected to have been parsed from
          developer supplied JSON and will be type checked.

    Raises:
      ValueError: if |user_spec| carries keys beyond the known set.
    """
    # By default, user accounts are locked and cannot be logged into.
    user_spec.setdefault(USER_PASSWORD_KEY, u'!')
    # By default, users don't get a shell.
    user_spec.setdefault(USER_SHELL_KEY, u'/bin/false')
    # By default, users don't get a home directory.
    user_spec.setdefault(USER_HOME_KEY, u'/dev/null')
    # By default, users don't get a fixed UID.
    user_spec.setdefault(USER_FIXED_ID_KEY, False)
    # By default, users don't need a comment.
    user_spec.setdefault(USER_COMMENT_KEY, u'')
    # By default, users are not defunct.
    user_spec.setdefault(USER_DEFUNCT_KEY, False)

    name = json_lib.PopValueOfType(user_spec, USER_NAME_KEY, unicode,
                                   'username from user spec')
    password = json_lib.PopValueOfType(user_spec, USER_PASSWORD_KEY, unicode,
                                       'password for user %s' % name)
    uid = json_lib.PopValueOfType(user_spec, USER_ID_KEY, int,
                                  'default uid for user %s' % name)
    group_name = json_lib.PopValueOfType(user_spec, USER_GROUP_KEY, unicode,
                                         'primary group for user %s' % name)
    description = json_lib.PopValueOfType(user_spec, USER_COMMENT_KEY, unicode,
                                          'description for user %s' % name)
    home = json_lib.PopValueOfType(user_spec, USER_HOME_KEY, unicode,
                                   'home directory for user %s' % name)
    shell = json_lib.PopValueOfType(user_spec, USER_SHELL_KEY, unicode,
                                    'shell for user %s' % name)
    is_fixed_id = json_lib.PopValueOfType(user_spec, USER_FIXED_ID_KEY, bool,
                                          'whether UID for user %s is fixed' %
                                          name)
    is_defunct = json_lib.PopValueOfType(user_spec, USER_DEFUNCT_KEY, bool,
                                         'whether user %s is defunct.' % name)

    # Every known key has been popped; anything left is a typo or an
    # unsupported field.
    if user_spec:
      raise ValueError('Unexpected keys in user spec for user %s: %r' %
                       (name, user_spec.keys()))

    self.users[name] = User(name=name, password=password, uid=uid,
                            group_name=group_name, description=description,
                            home=home, shell=shell, is_fixed_id=is_fixed_id,
                            is_defunct=is_defunct)

  def _AddGroup(self, group_spec):
    """Add a group to this account database based on |group_spec|.

    Args:
      group_spec: dict of information from an accounts database.
          This fragment is expected to have been parsed from
          developer supplied JSON and will be type checked.

    Raises:
      ValueError: if |group_spec| carries keys beyond the known set.
    """
    # By default, groups don't get a fixed GID.
    group_spec.setdefault(GROUP_FIXED_ID_KEY, False)
    # By default, groups don't get a password.
    group_spec.setdefault(GROUP_PASSWORD_KEY, u'!')
    # By default, groups are not defunct.
    group_spec.setdefault(GROUP_DEFUNCT_KEY, False)

    name = json_lib.PopValueOfType(group_spec, GROUP_NAME_KEY, unicode,
                                   'groupname from group spec')
    password = json_lib.PopValueOfType(group_spec, GROUP_PASSWORD_KEY, unicode,
                                       'password for group %s' % name)
    gid = json_lib.PopValueOfType(group_spec, GROUP_ID_KEY, int,
                                  'gid for group %s' % name)
    users = json_lib.PopValueOfType(group_spec, GROUP_USERS_KEY, list,
                                    'users in group %s' % name)
    is_fixed_id = json_lib.PopValueOfType(group_spec, GROUP_FIXED_ID_KEY, bool,
                                          'whether GID for group %s is fixed' %
                                          name)
    is_defunct = json_lib.PopValueOfType(group_spec, GROUP_DEFUNCT_KEY, bool,
                                         'whether group %s is defunct' % name)

    for username in users:
      json_lib.AssertIsInstance(username, unicode, 'user in group %s' % name)

    if group_spec:
      raise ValueError('Unexpected keys in group spec for group %s: %r' %
                       (name, group_spec.keys()))

    self.groups[name] = Group(name=name, password=password, gid=gid,
                              users=users, is_fixed_id=is_fixed_id,
                              is_defunct=is_defunct)

  def InstallUser(self, username, sysroot_user_db,
                  uid=None, shell=None, homedir=None, primary_group=None):
    """Install a user in |sysroot_user_db|.

    Args:
      username: name of user to install.
      sysroot_user_db: user_db.UserDB instance representing the installed users
          of a particular sysroot.
      uid: ebuild specified uid.
      shell: ebuild specified shell.
      homedir: ebuild specified home directory.
      primary_group: ebuild specified primary group for user.

    Raises:
      ValueError: if the user is unknown, defunct, references an unknown
          group, or an ebuild-specified value conflicts with the database.
    """
    if username not in self.users:
      raise ValueError('Cannot add unknown user "%s"' % username)
    user = self.users[username]

    if user.is_defunct:
      raise ValueError('Refusing to install defunct user: "%s"' % username)

    def RaiseIfNotCompatible(user_specified, db_specified, fieldname):
      # None means "not specified by the ebuild"; any explicit value must
      # match what the accounts database records.
      if user_specified is not None and user_specified != db_specified:
        raise ValueError('Accounts database %s (%s) for user %s differs from '
                         'requested %s (%s)' %
                         (fieldname, db_specified, user.name, fieldname,
                          user_specified))

    RaiseIfNotCompatible(uid, user.uid, 'UID')
    RaiseIfNotCompatible(shell, user.shell, 'shell')
    RaiseIfNotCompatible(homedir, user.home, 'homedir')
    RaiseIfNotCompatible(primary_group, user.group_name, 'group')

    if user.group_name not in self.groups:
      raise ValueError('Refusing to install user %s with unknown group %s' %
                       (user.name, user.group_name))

    installable_user = user_db.User(
        user=user.name, password=user.password, uid=user.uid,
        gid=self.groups[user.group_name].gid, gecos=user.description,
        home=user.home, shell=user.shell)
    sysroot_user_db.AddUser(installable_user)

  def InstallGroup(self, groupname, sysroot_user_db, gid=None):
    """Install a group in |sysroot_user_db|.

    Args:
      groupname: name of group to install.
      sysroot_user_db: user_db.UserDB instance representing the installed
          groups.
      gid: ebuild specified gid.

    Raises:
      ValueError: if the group is unknown, defunct, or |gid| conflicts
          with the gid recorded in the accounts database.
    """
    if groupname not in self.groups:
      raise ValueError('Cannot add unknown group "%s"' % groupname)
    group = self.groups[groupname]

    if group.is_defunct:
      raise ValueError('Refusing to install defunct group: "%s"' % groupname)

    # Compare against None explicitly (mirroring InstallUser): gid 0 is a
    # legal value and must still be validated, not treated as "unspecified".
    if gid is not None and gid != group.gid:
      raise ValueError('Refusing to install group %s with gid=%d while account '
                       'database indicates gid=%d' %
                       (groupname, gid, group.gid))

    installable_group = user_db.Group(
        group=group.name, password=group.password,
        gid=group.gid, users=group.users)
    sysroot_user_db.AddGroup(installable_group)
diff --git a/lib/accounts_lib_unittest b/lib/accounts_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/accounts_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/accounts_lib_unittest.py b/lib/accounts_lib_unittest.py
new file mode 100644
index 0000000..6b3fc58
--- /dev/null
+++ b/lib/accounts_lib_unittest.py
@@ -0,0 +1,223 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for accounts_lib."""
+
+from __future__ import print_function
+
+import json
+import mock
+
+from chromite.lib import accounts_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import user_db
+
+
+# An accounts database defining no users or groups.  Note the format allows
+# '#' comments, unlike strict JSON (testParsesDbWithComments relies on this).
+EMPTY_ACCOUNTS_DB_WITH_COMMENTS = """
+{
+  # This accounts spec is empty.
+  "users": [
+  ],
+  "groups": [
+  ]
+}
+"""
+
+# Expected parse results for MINIMAL_ACCOUNTS_DB below.  The spec omits
+# password/description/home/shell/is_fixed_id/is_defunct, so these values
+# document the defaults accounts_lib fills in (asserted by
+# testParsesMinimalDb).
+MINIMAL_DB_USER = accounts_lib.User(
+    name='minimal', password='!', uid=1000, group_name='minimal',
+    description='', home='/dev/null', shell='/bin/false',
+    is_fixed_id=False, is_defunct=False)
+MINIMAL_DB_GROUP = accounts_lib.Group(
+    name='minimal', password='!', gid=1000, users=['minimal'],
+    is_fixed_id=False, is_defunct=False)
+
+# The smallest valid database that defines one user and its primary group.
+MINIMAL_ACCOUNTS_DB = """
+{
+  "users": [
+    {
+      # Minimal user.
+      "user": "minimal",
+      "uid": 1000,
+      "group_name": "minimal"
+    }
+  ],
+  "groups": [
+    {
+      # Minimal group.
+      "group": "minimal",
+      "gid": 1000,
+      "users": [ "minimal" ]
+    }
+  ]
+}
+"""
+
+# A user spec carrying a field ("extra") that the parser should reject.
+EXTRA_USER_SPEC_FIELD_DB = """
+{
+  "users": [
+    {
+      "user": "minimal",
+      "uid": 1000,
+      "group_name": "minimal",
+      "gecos": "minimal user spec",
+      "extra": "This field is not expected."
+    }
+  ]
+}
+"""
+
+class AccountDatabaseTest(cros_test_lib.MockTestCase):
+  """Tests for chromite.lib.accounts_lib.AccountDatabase."""
+
+  def _ParseSpec(self, contents, db=None):
+    """Return a AccountDatabase that has read a file with |contents|.
+
+    Args:
+      contents: desired contents of accounts database to parse.
+      db: existing account db to override with new definitions.
+
+    Returns:
+      an instance of AccountDatabase.
+    """
+    if db is None:
+      db = accounts_lib.AccountDatabase()
+    # osutils.ReadFile is patched to return |contents|, so the path passed to
+    # AddAccountsFromDatabase below is arbitrary.
+    with self.PatchObject(osutils, 'ReadFile', return_value=contents):
+      db.AddAccountsFromDatabase('ignored')
+    return db
+
+  def _ParseSpecs(self, specs):
+    """Return a AccountDatabase based on the account database stack in |specs|.
+
+    Args:
+      specs: list of json fragments (encoded as strings) to compose into a
+          consistent account database.  This list is assumed to be in
+          increasing priority order so that later entries override earlier
+          entries.
+
+    Returns:
+      an instance of AccountDatabase.
+    """
+    db = accounts_lib.AccountDatabase()
+    for spec in specs:
+      self._ParseSpec(spec, db=db)
+    return db
+
+  def testParsesEmptyDb(self):
+    """Test that we can parse an empty database."""
+    self._ParseSpec(json.dumps({}))
+
+  def testParsesDbWithComments(self):
+    """Test that we handle comments properly."""
+    self._ParseSpec(EMPTY_ACCOUNTS_DB_WITH_COMMENTS)
+
+  def testRejectsUnkownDbKeys(self):
+    """Test that we check the set of keys specified in the account database."""
+    self.assertRaises(ValueError,
+                      self._ParseSpec,
+                      json.dumps({'foo': 'This is not a valid field.'}))
+
+  def testRejectsBadKeyValues(self):
+    """Check that we type-check user/group specs."""
+    self.assertRaises(ValueError,
+                      self._ParseSpec,
+                      json.dumps({'users': 'This should be a list'}))
+    self.assertRaises(ValueError,
+                      self._ParseSpec,
+                      json.dumps({'groups': 'This should be a list'}))
+
+  def testRejectsExtraUserSpecFields(self):
+    """Test that we check for extra user spec fields."""
+    self.assertRaises(ValueError, self._ParseSpec, EXTRA_USER_SPEC_FIELD_DB)
+
+  def testParsesMinimalDb(self):
+    """Test that we can parse a basic database."""
+    db = self._ParseSpec(MINIMAL_ACCOUNTS_DB)
+    self.assertEqual(1, len(db.users.keys()))
+    self.assertEqual(1, len(db.groups.keys()))
+    self.assertIn(MINIMAL_DB_USER.name, db.users)
+    self.assertIn(MINIMAL_DB_GROUP.name, db.groups)
+    self.assertEqual(db.users[MINIMAL_DB_USER.name], MINIMAL_DB_USER)
+    self.assertEqual(db.groups[MINIMAL_DB_GROUP.name], MINIMAL_DB_GROUP)
+
+  def testComposesDbs(self):
+    """Test that we can compose databases from multiple overlays."""
+    BASE_ID = 1000
+    OVERRIDE_ID = 2000
+    BASE_NAME = 'base'
+    OVERRIDE_NAME = 'override'
+    EXTRA_USER = 'extra.user'
+    # The base db deliberately records stale ids (OVERRIDE_ID - 1) for the
+    # override entries; the higher-priority db below must replace them.
+    base_db = json.dumps({
+        'users': [
+            {'user': BASE_NAME,
+             'uid': BASE_ID,
+             'group_name': 'base.group',
+            },
+            {'user': OVERRIDE_NAME,
+             'uid': OVERRIDE_ID - 1,
+             'group_name': 'override.group',
+            },
+        ],
+        'groups': [
+            {'group': BASE_NAME,
+             'gid': BASE_ID,
+             'users': ['base.user']
+            },
+            {'group': OVERRIDE_NAME,
+             'gid': OVERRIDE_ID - 1,
+             'users': ['override.user']
+            },
+        ],
+    })
+    override_db = json.dumps({
+        'users': [
+            {'user': OVERRIDE_NAME,
+             'uid': OVERRIDE_ID,
+             'group_name': 'override.group',
+            },
+            {'user': EXTRA_USER,
+             'uid': 3000,
+             'group_name': OVERRIDE_NAME,
+            },
+        ],
+        'groups': [
+            {'group': OVERRIDE_NAME,
+             'gid': OVERRIDE_ID,
+             'users': [OVERRIDE_NAME, EXTRA_USER],
+            },
+        ],
+    })
+    # Later specs win: override's uid/gid/membership should replace base's,
+    # and the user defined only in the override should be added.
+    db = self._ParseSpecs([base_db, override_db])
+    self.assertEqual(3, len(db.users))
+    self.assertEqual(2, len(db.groups))
+    self.assertEqual(BASE_ID, db.users[BASE_NAME].uid)
+    self.assertEqual(BASE_ID, db.groups[BASE_NAME].gid)
+    self.assertEqual(OVERRIDE_ID, db.users[OVERRIDE_NAME].uid)
+    self.assertEqual(OVERRIDE_ID, db.groups[OVERRIDE_NAME].gid)
+    self.assertEqual(sorted([OVERRIDE_NAME, EXTRA_USER]),
+                     sorted(db.groups[OVERRIDE_NAME].users))
+
+  def testInstallUser(self):
+    """Test that we can install a user correctly."""
+    db = self._ParseSpec(MINIMAL_ACCOUNTS_DB)
+    mock_user_db = mock.MagicMock()
+    db.InstallUser(MINIMAL_DB_USER.name, mock_user_db)
+    installed_user = user_db.User(
+        user=MINIMAL_DB_USER.name, password=MINIMAL_DB_USER.password,
+        uid=MINIMAL_DB_USER.uid, gid=MINIMAL_DB_GROUP.gid,
+        gecos=MINIMAL_DB_USER.description, home=MINIMAL_DB_USER.home,
+        shell=MINIMAL_DB_USER.shell)
+    # The only interaction with the sysroot db should be a single AddUser.
+    self.assertEqual([mock.call.AddUser(installed_user)],
+                     mock_user_db.mock_calls)
+
+  def testInstallGroup(self):
+    """Test that we can install a group correctly."""
+    db = self._ParseSpec(MINIMAL_ACCOUNTS_DB)
+    mock_user_db = mock.MagicMock()
+    db.InstallGroup(MINIMAL_DB_GROUP.name, mock_user_db)
+    installed_group = user_db.Group(
+        group=MINIMAL_DB_GROUP.name, password=MINIMAL_DB_GROUP.password,
+        gid=MINIMAL_DB_GROUP.gid, users=MINIMAL_DB_GROUP.users)
+    # The only interaction with the sysroot db should be a single AddGroup.
+    self.assertEqual([mock.call.AddGroup(installed_group)],
+                     mock_user_db.mock_calls)
diff --git a/lib/alerts.py b/lib/alerts.py
new file mode 100644
index 0000000..3f6432d
--- /dev/null
+++ b/lib/alerts.py
@@ -0,0 +1,330 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Chromite email utility functions."""
+
+from __future__ import print_function
+
+import base64
+import collections
+import cStringIO
+import gzip
+import json
+import os
+import smtplib
+import socket
+import sys
+import traceback
+
+from email.mime.application import MIMEApplication
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
+
+from chromite.lib import cros_logging as logging
+from chromite.lib import retry_util
+
+# TODO(fdeng): Cleanup the try-catch once crbug.com/482063 is fixed.
+try:
+  import httplib2
+  from apiclient.discovery import build as apiclient_build
+  from apiclient import errors as apiclient_errors
+  from oauth2client import file as oauth_client_fileio
+  from oauth2client import client
+except ImportError as e:
+  apiclient_build = None
+  oauth_client_fileio = None
+
+
+class MailServer(object):
+  """Base class for servers."""
+
+  def Send(self, message):
+    """Send the message.
+
+    Override by sub-classes.
+
+    Args:
+      message: A MIMEMultipart() object containing the body of the message.
+
+    Returns:
+      True if the email was sent, else False.
+    """
+    raise NotImplementedError('Should be implemented by sub-classes.')
+
+
+class AuthenticationError(Exception):
+  """Error raised when authenticating via oauth2."""
+
+
+# Represent token in oauth2 token file.
+RefreshToken = collections.namedtuple('RefreshToken', (
+    'client_id',
+    'client_secret',
+    'refresh_token',
+))
+
+
+def ReadRefreshTokenJson(path):
+  """Returns RefreshToken by reading it from the JSON file.
+
+  Args:
+    path: Path to the json file that contains the credential tokens.
+
+  Returns:
+    A RefreshToken object.
+
+  Raises:
+    AuthenticationError if failed to read from json file.
+  """
+  try:
+    with open(path, 'r') as f:
+      data = json.load(f)
+      return RefreshToken(
+          client_id=str(data['client_id']),
+          client_secret=str(data['client_secret']),
+          refresh_token=str(data['refresh_token']))
+  except (IOError, ValueError) as e:
+    raise AuthenticationError(
+        'Failed to read refresh token from %s: %s' % (path, e))
+  except KeyError as e:
+    raise AuthenticationError(
+        'Failed to read refresh token from %s: missing key %s' % (path, e))
+
+
+class GmailServer(MailServer):
+  """Gmail server."""
+
+  # OAuth2 endpoint used to redeem the refresh token for access tokens.
+  TOKEN_URI = 'https://accounts.google.com/o/oauth2/token'
+
+
+  def __init__(self, token_cache_file, token_json_file=None):
+    """Initialize GmailServer.
+
+    If token_cache_file contains valid credentials, it will be used.
+    If not or the file doesn't exist, will try to load tokens
+    from token_json_file. The loaded credentials will be stored to the
+    cache file.
+
+    Args:
+      token_cache_file: Absolute path to gmail credentials cache file.
+      token_json_file: Absolute path to a json file that contains
+                       refresh token for gmail.
+    """
+    self._token_cache_file = token_cache_file
+    self._token_json_file = token_json_file
+
+  def _GetCachedCredentials(self):
+    """Get credentials from cached file or json file.
+
+    Returns:
+      OAuth2Credentials object.
+
+    Raises:
+      AuthenticationError on failure to read json file.
+    """
+    storage = oauth_client_fileio.Storage(self._token_cache_file)
+    # Try loading credentials from existing token cache file.
+    if os.path.isfile(self._token_cache_file):
+      credentials = storage.get()
+      if credentials and not credentials.invalid:
+        return credentials
+
+    # No usable cache; without a refresh-token file we cannot proceed.
+    if self._token_json_file is None:
+      raise AuthenticationError('Gmail token file path is not provided.')
+
+    # Create new credentials if cache file doesn't exist or not valid.
+    refresh_token_json = ReadRefreshTokenJson(self._token_json_file)
+    credentials = client.OAuth2Credentials(
+        access_token=None,
+        client_id=refresh_token_json.client_id,
+        client_secret=refresh_token_json.client_secret,
+        refresh_token=refresh_token_json.refresh_token,
+        token_expiry=None,
+        token_uri=self.TOKEN_URI,
+        user_agent=None,
+        revoke_uri=None)
+    # Seed the cache file now; set_store lets refreshed tokens be written
+    # back to it.  NOTE(review): persistence of auto-refreshed tokens relies
+    # on oauth2client behavior — confirm against its docs.
+    credentials.set_store(storage)
+    storage.put(credentials)
+    return credentials
+
+  def Send(self, message):
+    """Send an e-mail via Gmail API.
+
+    Args:
+      message: A MIMEMultipart() object containing the body of the message.
+
+    Returns:
+      True if the email was sent, else False.
+    """
+    # apiclient_build is None when the Google API client failed to import.
+    if not apiclient_build:
+      logging.warning('Could not send email: Google API client not installed.')
+      return False
+
+    try:
+      credentials = self._GetCachedCredentials()
+    except AuthenticationError as e:
+      logging.warning('Could not get gmail credentials: %s', e)
+      return False
+
+    http = credentials.authorize(httplib2.Http())
+    service = apiclient_build('gmail', 'v1', http=http)
+    try:
+      # 'me' represents the default authorized user.
+      # The 'raw' field carries the whole serialized message, URL-safe
+      # base64-encoded.
+      payload = {'raw': base64.urlsafe_b64encode(message.as_string())}
+      service.users().messages().send(userId='me', body=payload).execute()
+      return True
+    except apiclient_errors.HttpError as error:
+      logging.warning('Could not send email: %s', error)
+      return False
+
+
+class SmtpServer(MailServer):
+  """Smtp server."""
+
+  # Note: When importing this module from cbuildbot code that will run on
+  # a builder in the golo, set this to constants.GOLO_SMTP_SERVER
+  DEFAULT_SERVER = 'localhost'
+  # Retry parameters for the actual smtp connection.
+  SMTP_RETRY_COUNT = 3
+  SMTP_RETRY_DELAY = 30
+
+  def __init__(self, smtp_server=None):
+    """Initialize SmtpServer.
+
+    Args:
+      smtp_server: The server with which to send the message.
+    """
+    self._smtp_server = smtp_server or self.DEFAULT_SERVER
+
+  def Send(self, message):
+    """Send an email via SMTP
+
+    If we get a socket error (e.g. the SMTP server is not listening or
+    timesout), we will retry a few times.  All socket errors will be
+    caught here.
+
+    Args:
+      message: A MIMEMultipart() object containing the body of the message.
+
+    Returns:
+      True if the email was sent, else False.
+    """
+    def _Send():
+      smtp_client = smtplib.SMTP(self._smtp_server)
+      recipients = [s.strip() for s in message['To'].split(',')]
+      smtp_client.sendmail(message['From'], recipients, message.as_string())
+      smtp_client.quit()
+
+    try:
+      retry_util.RetryException(socket.error, self.SMTP_RETRY_COUNT, _Send,
+                                sleep=self.SMTP_RETRY_DELAY)
+      return True
+    except socket.error as e:
+      logging.warning('Could not send e-mail from %s to %s via %r: %s',
+                      message['From'], message['To'], self._smtp_server, e)
+      return False
+
+
+def CreateEmail(subject, recipients, message='', attachment=None,
+                extra_fields=None):
+  """Create an email message object.
+
+  Args:
+    subject: E-mail subject.
+    recipients: List of e-mail recipients.
+    message: (optional) Message to put in the e-mail body.
+    attachment: (optional) text to attach.
+    extra_fields: (optional) A dictionary of additional message header fields
+                  to be added to the message. Custom field names should begin
+                  with the prefix 'X-'.
+
+  Returns:
+    A MIMEMultipart object, or None if recipients is empty.
+  """
+  # Ignore if the list of recipients is empty.
+  if not recipients:
+    logging.warning('Could not create email: recipient list is emtpy.')
+    return None
+
+  extra_fields = extra_fields or {}
+  sender = socket.getfqdn()
+  msg = MIMEMultipart()
+  for key, val in extra_fields.iteritems():
+    msg[key] = val
+  msg['From'] = sender
+  msg['Subject'] = subject
+  msg['To'] = ', '.join(recipients)
+
+  msg.attach(MIMEText(message))
+  if attachment:
+    s = cStringIO.StringIO()
+    with gzip.GzipFile(fileobj=s, mode='w') as f:
+      f.write(attachment)
+    part = MIMEApplication(s.getvalue(), _subtype='x-gzip')
+    s.close()
+    part.add_header('Content-Disposition', 'attachment', filename='logs.txt.gz')
+    msg.attach(part)
+
+  return msg
+
+
+def SendEmail(subject, recipients, server=None, message='',
+              attachment=None, extra_fields=None):
+  """Send an e-mail job notification with the given message in the body.
+
+  Args:
+    subject: E-mail subject.
+    recipients: List of e-mail recipients.
+    server: A MailServer instance. Default to local SmtpServer.
+    message: Message to put in the e-mail body.
+    attachment: Text to attach.
+    extra_fields: A dictionary of additional message header fields
+                  to be added to the message. Custom field names should begin
+                  with the prefix 'X-'.
+  """
+  if server is None:
+    server = SmtpServer()
+  msg = CreateEmail(subject, recipients, message, attachment, extra_fields)
+  if not msg:
+    return
+  server.Send(msg)
+
+
+def SendEmailLog(subject, recipients, server=None, message='',
+                 inc_trace=True, log=None, extra_fields=None):
+  """Send an e-mail with a stack trace and log snippets.
+
+  Args:
+    subject: E-mail subject.
+    recipients: list of e-mail recipients.
+    server: A MailServer instance. Default to local SmtpServer.
+    inc_trace: Append a backtrace of the current stack.
+    message: Message to put at the top of the e-mail body.
+    log: List of lines (log data) to include in the notice.
+    extra_fields: (optional) A dictionary of additional message header fields
+                  to be added to the message. Custom fields names should begin
+                  with the prefix 'X-'.
+  """
+  if server is None:
+    server = SmtpServer()
+  if not message:
+    message = subject
+  message = message[:]
+
+  if inc_trace:
+    if sys.exc_info() != (None, None, None):
+      trace = traceback.format_exc()
+      message += '\n\n' + trace
+
+  attachment = None
+  if log:
+    message += ('\n\n' +
+                '***************************\n' +
+                'Last log messages:\n' +
+                '***************************\n' +
+                ''.join(log[-50:]))
+    attachment = ''.join(log)
+
+  SendEmail(subject, recipients, server, message=message,
+            attachment=attachment, extra_fields=extra_fields)
diff --git a/lib/alerts_unittest b/lib/alerts_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/alerts_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/alerts_unittest.py b/lib/alerts_unittest.py
new file mode 100644
index 0000000..02ba43c
--- /dev/null
+++ b/lib/alerts_unittest.py
@@ -0,0 +1,173 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the alerts.py module."""
+
+from __future__ import print_function
+
+import json
+import os
+import smtplib
+import socket
+
+from chromite.lib import alerts
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+
+
+class SmtpServerTest(cros_test_lib.MockTestCase):
+  """Tests for Smtp server."""
+
+  # pylint: disable=protected-access
+
+  def setUp(self):
+    # Patch out smtplib.SMTP so no real connection is attempted.
+    self.smtp_mock = self.PatchObject(smtplib, 'SMTP')
+
+  def testBasic(self):
+    """Basic sanity check."""
+    msg = alerts.CreateEmail('fake subject', 'fake@localhost', 'fake message')
+    server = alerts.SmtpServer(('localhost', 1))
+    ret = server.Send(msg)
+    self.assertTrue(ret)
+    self.assertEqual(self.smtp_mock.call_count, 1)
+
+  def testRetryException(self):
+    """Verify we try sending multiple times & don't abort socket.error."""
+    self.smtp_mock.side_effect = socket.error('test fail')
+    msg = alerts.CreateEmail('fake subject', 'fake@localhost', 'fake message')
+    server = alerts.SmtpServer(('localhost', 1))
+    ret = server.Send(msg)
+    self.assertFalse(ret)
+    # 4 attempts == the initial try plus SMTP_RETRY_COUNT (3) retries.
+    self.assertEqual(self.smtp_mock.call_count, 4)
+
+
+class GmailServerTest(cros_test_lib.MockTempDirTestCase):
+  """Tests for Gmail server."""
+
+  # Matches the refresh-token JSON schema read by alerts.ReadRefreshTokenJson.
+  FAKE_TOKEN_JSON = {
+      'client_id': 'fake_client_id',
+      'client_secret': 'fake_client_secret',
+      'email': 'fake_email@fake.com',
+      'refresh_token': 'fake_token',
+      'scope': 'https://fake_scope/auth/fake.modify'
+  }
+  # Mimics the credential blob oauth2client's file Storage writes to disk.
+  FAKE_CACHE = {
+      '_module': 'oauth2client.client',
+      'token_expiry': '2014-04-28T19:30:42Z',
+      'access_token': 'fake_access_token',
+      'token_uri': 'https://accounts.google.com/o/oauth2/token',
+      'invalid': False,
+      'token_response': {
+          'access_token': 'fake_access_token_2',
+          'token_type': 'Bearer',
+          'expires_in': 3600
+      },
+      'client_id': 'fake_client_id',
+      'id_token': None,
+      'client_secret': 'fake_client_secret',
+      'revoke_uri': None,
+      '_class': 'OAuth2Credentials',
+      'refresh_token': 'fake_refresh_token',
+      'user_agent': None,
+  }
+
+  def setUp(self):
+    # Patch the API client builder so Send() never talks to the network.
+    self.PatchObject(alerts, 'apiclient_build')
+    self.token_cache_file = os.path.join(self.tempdir, 'fake_cache')
+    self.token_json_file = os.path.join(self.tempdir, 'fake_json')
+
+  def testValidCache(self):
+    """Test valid cache."""
+    osutils.WriteFile(self.token_cache_file, json.dumps(self.FAKE_CACHE))
+    msg = alerts.CreateEmail('fake subject', 'fake@localhost', 'fake msg')
+    server = alerts.GmailServer(token_cache_file=self.token_cache_file)
+    ret = server.Send(msg)
+    self.assertTrue(ret)
+
+  def testCacheNotExistsTokenExists(self):
+    """Test cache not exists, token exists"""
+    osutils.WriteFile(self.token_json_file, json.dumps(self.FAKE_TOKEN_JSON))
+    msg = alerts.CreateEmail('fake subject', 'fake@localhost', 'fake msg')
+    server = alerts.GmailServer(token_cache_file=self.token_cache_file,
+                                token_json_file=self.token_json_file)
+    ret = server.Send(msg)
+    self.assertTrue(ret)
+    # Cache file should be auto-generated.
+    self.assertExists(self.token_cache_file)
+
+  def testCacheNotExistsTokenNotExists(self):
+    """Test cache not exists, token not exists."""
+    msg = alerts.CreateEmail('fake subject', 'fake@localhost', 'fake msg')
+    server = alerts.GmailServer(token_cache_file=self.token_cache_file,
+                                token_json_file=self.token_json_file)
+    ret = server.Send(msg)
+    self.assertFalse(ret)
+
+  def testCacheInvalidTokenExists(self):
+    """Test cache exists but invalid, token exists."""
+    invalid_cache = self.FAKE_CACHE.copy()
+    invalid_cache['invalid'] = True
+    osutils.WriteFile(self.token_cache_file, json.dumps(invalid_cache))
+    osutils.WriteFile(self.token_json_file, json.dumps(self.FAKE_TOKEN_JSON))
+    msg = alerts.CreateEmail('fake subject', 'fake@localhost', 'fake msg')
+    server = alerts.GmailServer(token_cache_file=self.token_cache_file,
+                                token_json_file=self.token_json_file)
+    ret = server.Send(msg)
+    self.assertTrue(ret)
+    # The rebuilt credentials should have replaced the invalid cache.
+    valid_cache = json.loads(osutils.ReadFile(self.token_cache_file))
+    self.assertFalse(valid_cache['invalid'])
+
+  def testCacheInvalidTokenNotExists(self):
+    """Test cache exists but invalid, token not exists."""
+    invalid_cache = self.FAKE_CACHE.copy()
+    invalid_cache['invalid'] = True
+    osutils.WriteFile(self.token_cache_file, json.dumps(invalid_cache))
+    msg = alerts.CreateEmail('fake subject', 'fake@localhost', 'fake msg')
+    server = alerts.GmailServer(token_cache_file=self.token_cache_file,
+                                token_json_file=self.token_json_file)
+    ret = server.Send(msg)
+    self.assertFalse(ret)
+    # With no refresh token available, the invalid cache stays untouched.
+    invalid_cache = json.loads(osutils.ReadFile(self.token_cache_file))
+    self.assertTrue(invalid_cache['invalid'])
+
+
+class SendEmailTest(cros_test_lib.MockTestCase):
+  """Tests for SendEmail."""
+
+  def testSmtp(self):
+    """Smtp sanity check."""
+    send_mock = self.PatchObject(alerts.SmtpServer, 'Send')
+    # With no |server| argument, SendEmail should fall back to SmtpServer.
+    alerts.SendEmail('mail', 'root@localhost')
+    self.assertEqual(send_mock.call_count, 1)
+
+  def testGmail(self):
+    """Gmail sanity check."""
+    send_mock = self.PatchObject(alerts.GmailServer, 'Send')
+    alerts.SendEmail('mail', 'root@localhost',
+                     server=alerts.GmailServer(token_cache_file='fakefile'))
+    self.assertEqual(send_mock.call_count, 1)
+
+
+class SendEmailLogTest(cros_test_lib.MockTestCase):
+  """Tests for SendEmailLog()."""
+
+  def testSmtp(self):
+    """Smtp sanity check."""
+    send_mock = self.PatchObject(alerts.SmtpServer, 'Send')
+    # With no |server| argument, SendEmailLog should fall back to SmtpServer.
+    alerts.SendEmailLog('mail', 'root@localhost')
+    self.assertEqual(send_mock.call_count, 1)
+
+  def testGmail(self):
+    """Gmail sanity check."""
+    send_mock = self.PatchObject(alerts.GmailServer, 'Send')
+    alerts.SendEmailLog('mail', 'root@localhost',
+                        server=alerts.GmailServer(token_cache_file='fakefile'))
+    self.assertEqual(send_mock.call_count, 1)
+
+
+def main(_argv):
+  # No need to make unittests sleep.
+  alerts.SmtpServer.SMTP_RETRY_DELAY = 0
+
+  cros_test_lib.main(module=__name__)
diff --git a/lib/binpkg.py b/lib/binpkg.py
new file mode 100644
index 0000000..e8a4604
--- /dev/null
+++ b/lib/binpkg.py
@@ -0,0 +1,422 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Adapted from portage/getbinpkg.py -- Portage binary-package helper functions
+# Copyright 2003-2004 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+"""Helpers dealing with binpkg Packages index files"""
+
+from __future__ import print_function
+
+import collections
+import cStringIO
+import operator
+import os
+import tempfile
+import time
+import urllib2
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import parallel
+
+
+# Two weeks, in seconds; prebuilts older than this are treated as expired
+# when resolving duplicate uploads (see _PopulateDuplicateDB).
+TWO_WEEKS = 60 * 60 * 24 * 7 * 2
+# HTTP status groups: access denied vs. missing.  Presumably consumed by
+# fetch helpers elsewhere in this module — not used in the code shown here.
+HTTP_FORBIDDEN_CODES = (401, 403)
+HTTP_NOT_FOUND_CODES = (404, 410)
+
+# Cached metadata for one uploaded prebuilt: modification time, its URI, and
+# whether debug symbols were uploaded alongside it.
+_Package = collections.namedtuple('_Package', ['mtime', 'uri', 'debug_symbols'])
+
+class PackageIndex(object):
+  """A parser for the Portage Packages index file.
+
+  The Portage Packages index file serves to keep track of what packages are
+  included in a tree. It contains the following sections:
+    1) The header. The header tracks general key/value pairs that don't apply
+       to any specific package. E.g., it tracks the base URL of the packages
+       file, and the number of packages included in the file. The header is
+       terminated by a blank line.
+    2) The body. The body is a list of packages. Each package contains a list
+       of key/value pairs. Packages are either terminated by a blank line or
+       by the end of the file. Every package has a CPV entry, which serves as
+       a unique identifier for the package.
+   """
+
+  def __init__(self):
+    """Constructor."""
+
+    # The header tracks general key/value pairs that don't apply to any
+    # specific package. E.g., it tracks the base URL of the packages.
+    self.header = {}
+
+    # A list of packages (stored as a list of dictionaries).
+    self.packages = []
+
+    # Whether or not the PackageIndex has been modified since the last time it
+    # was written.
+    self.modified = False
+
+  def _PopulateDuplicateDB(self, db, expires):
+    """Populate db with SHA1 -> URL mapping for packages.
+
+    Args:
+      db: Dictionary to populate with SHA1 -> URL mapping for packages.
+      expires: The time at which prebuilts expire from the binhost.
+    """
+
+    uri = gs.CanonicalizeURL(self.header['URI'])
+    for pkg in self.packages:
+      cpv, sha1, mtime = pkg['CPV'], pkg.get('SHA1'), pkg.get('MTIME')
+      # Keep only the newest non-expired entry per SHA1; packages missing
+      # SHA1 or MTIME are skipped entirely.
+      oldpkg = db.get(sha1, _Package(0, None, False))
+      if sha1 and mtime and int(mtime) > max(expires, oldpkg.mtime):
+        # PATH is optional; default to the conventional <CPV>.tbz2 layout.
+        path = pkg.get('PATH', cpv + '.tbz2')
+        db[sha1] = _Package(int(mtime),
+                            '%s/%s' % (uri.rstrip('/'), path),
+                            pkg.get('DEBUG_SYMBOLS') == 'yes')
+
+  def _ReadPkgIndex(self, pkgfile):
+    """Read a list of key/value pairs from the Packages file into a dictionary.
+
+    Both header entries and package entries are lists of key/value pairs, so
+    they can both be read by this function. Entries can be terminated by empty
+    lines or by the end of the file.
+
+    This function will read lines from the specified file until it encounters
+    a blank line or the end of the file.
+
+    Keys and values in the Packages file are separated by a colon and a space.
+    Keys may contain capital letters, numbers, and underscores, but may not
+    contain colons. Values may contain any character except a newline. In
+    particular, it is normal for values to contain colons.
+
+    Lines that have content, and do not contain a valid key/value pair, are
+    ignored. This is for compatibility with the Portage package parser, and
+    to allow for future extensions to the Packages file format.
+
+    All entries must contain at least one key/value pair. If the end of the
+    file is reached, an empty dictionary is returned.
+
+    Args:
+      pkgfile: A python file object.
+
+    Returns:
+      The dictionary of key-value pairs that was read from the file.
+    """
+    d = {}
+    for line in pkgfile:
+      line = line.rstrip('\n')
+      if not line:
+        # A blank line terminates an entry; hitting one before any pair was
+        # parsed means the file is malformed.
+        assert d, 'Packages entry must contain at least one key/value pair'
+        break
+      # Split on the first ': ' only — values may themselves contain colons.
+      line = line.split(': ', 1)
+      if len(line) == 2:
+        k, v = line
+        d[k] = v
+    return d
+
+  def _WritePkgIndex(self, pkgfile, entry):
+    """Write header entry or package entry to packages file.
+
+    The keys and values will be separated by a colon and a space. The entry
+    will be terminated by a blank line.
+
+    Args:
+      pkgfile: A python file object.
+      entry: A dictionary of the key/value pairs to write.
+    """
+    lines = ['%s: %s' % (k, v) for k, v in sorted(entry.items()) if v]
+    pkgfile.write('%s\n\n' % '\n'.join(lines))
+
+  def _ReadHeader(self, pkgfile):
+    """Read header of packages file.
+
+    The header is the first blank-line-terminated section of the file.
+
+    Args:
+      pkgfile: A python file object.
+    """
+    assert not self.header, 'Should only read header once.'
+    self.header = self._ReadPkgIndex(pkgfile)
+
+  def _ReadBody(self, pkgfile):
+    """Read body of packages file.
+
+    Before calling this function, you must first read the header (using
+    _ReadHeader).
+
+    Args:
+      pkgfile: A python file object.
+    """
+    assert self.header, 'Should read header first.'
+    assert not self.packages, 'Should only read body once.'
+
+    # Read all of the sections in the body by looping until we reach the end
+    # of the file.
+    while True:
+      d = self._ReadPkgIndex(pkgfile)
+      if not d:
+        break
+      # Only well-formed package entries (identified by CPV) are kept;
+      # anything else is silently dropped.
+      if 'CPV' in d:
+        self.packages.append(d)
+
+  def Read(self, pkgfile):
+    """Read the entire packages file.
+
+    Populates self.header and self.packages from |pkgfile|.
+
+    Args:
+      pkgfile: A python file object.
+    """
+    self._ReadHeader(pkgfile)
+    self._ReadBody(pkgfile)
+
+  def RemoveFilteredPackages(self, filter_fn):
+    """Remove packages which match filter_fn.
+
+    Args:
+      filter_fn: A function which operates on packages. If it returns True,
+                 the package should be removed.
+    """
+
+    filtered = [p for p in self.packages if not filter_fn(p)]
+    if filtered != self.packages:
+      self.modified = True
+      self.packages = filtered
+
+  def ResolveDuplicateUploads(self, pkgindexes):
+    """Point packages at files that have already been uploaded.
+
+    For each package in our index, check if there is an existing package that
+    has already been uploaded to the same base URI, and that is no older than
+    two weeks. If so, point that package at the existing file, so that we don't
+    have to upload the file.
+
+    Args:
+      pkgindexes: A list of PackageIndex objects containing info about packages
+        that have already been uploaded.
+
+    Returns:
+      A list of the packages that still need to be uploaded.
+    """
+    db = {}
+    now = int(time.time())
+    expires = now - TWO_WEEKS
+    base_uri = gs.CanonicalizeURL(self.header['URI'])
+    for pkgindex in pkgindexes:
+      if gs.CanonicalizeURL(pkgindex.header['URI']) == base_uri:
+        # pylint: disable=W0212
+        pkgindex._PopulateDuplicateDB(db, expires)
+
+    uploads = []
+    # NOTE(review): dup.uri entries were built from the *canonicalized* URI,
+    # but the startswith() check below uses the raw header URI; these must
+    # agree for duplicates to be detected — confirm CanonicalizeURL is a
+    # no-op for this binhost's URIs.
+    base_uri = self.header['URI']
+    for pkg in self.packages:
+      sha1 = pkg.get('SHA1')
+      dup = db.get(sha1)
+
+      # If the debug symbols are available locally but are not available in the
+      # remote binhost, re-upload them.
+      # Note: this should never happen as we would have pulled the debug symbols
+      # from said binhost.
+      if (sha1 and dup and dup.uri.startswith(base_uri)
+          and (pkg.get('DEBUG_SYMBOLS') != 'yes' or dup.debug_symbols)):
+        pkg['PATH'] = dup.uri[len(base_uri):].lstrip('/')
+        pkg['MTIME'] = str(dup.mtime)
+
+        if dup.debug_symbols:
+          pkg['DEBUG_SYMBOLS'] = 'yes'
+      else:
+        pkg['MTIME'] = str(now)
+        uploads.append(pkg)
+    return uploads
+
+  def SetUploadLocation(self, base_uri, path_prefix):
+    """Set upload location to base_uri + path_prefix.
+
+    Args:
+      base_uri: Base URI for all packages in the file. We set
+        self.header['URI'] to this value, so all packages must live under
+        this directory.
+      path_prefix: Path prefix to use for all current packages in the file.
+        This will be added to the beginning of the path for every package.
+    """
+    self.header['URI'] = base_uri
+    for pkg in self.packages:
+      path = pkg['CPV'] + '.tbz2'
+      pkg['PATH'] = '%s/%s' % (path_prefix.rstrip('/'), path)
+
+  def Write(self, pkgfile):
+    """Write a packages file to disk.
+
+    If 'modified' flag is set, the TIMESTAMP and PACKAGES fields in the header
+    will be updated before writing to disk.
+
+    Args:
+      pkgfile: A python file object.
+    """
+    if self.modified:
+      self.header['TIMESTAMP'] = str(long(time.time()))
+      self.header['PACKAGES'] = str(len(self.packages))
+      self.modified = False
+    self._WritePkgIndex(pkgfile, self.header)
+    for metadata in sorted(self.packages, key=operator.itemgetter('CPV')):
+      self._WritePkgIndex(pkgfile, metadata)
+
  def WriteToNamedTemporaryFile(self):
    """Write this package index to a temporary file.

    The caller owns the returned file; the underlying file is deleted when
    it is closed or garbage collected.

    Returns:
      A temporary file containing the packages from this index, rewound to
      the beginning so it is ready for reading.
    """
    f = tempfile.NamedTemporaryFile(prefix='chromite.binpkg.pkgidx.')
    self.Write(f)
    f.flush()
    f.seek(0)
    return f
+
+
def _RetryUrlOpen(url, tries=3):
  """Open the specified url, retrying if we run into temporary errors.

  We retry for both network errors and 5xx Server Errors. We do not retry
  for HTTP errors with a non-5xx code.

  Args:
    url: The specified url.
    tries: The number of times to try.

  Returns:
    The result of urllib2.urlopen(url).

  Raises:
    urllib2.HTTPError: On a non-5xx code, or when the final attempt fails.
    urllib2.URLError: When the final attempt fails with a network error.
  """
  for i in range(tries):
    try:
      return urllib2.urlopen(url)
    except urllib2.HTTPError as e:
      # Give up on the last attempt, or immediately for non-server (e.g.
      # 4xx) errors, annotating the exception with the URL that failed.
      if i + 1 >= tries or e.code < 500:
        e.msg += ('\nwhile processing %s' % url)
        raise
      else:
        print('Cannot GET %s: %s' % (url, str(e)))
    except urllib2.URLError as e:
      if i + 1 >= tries:
        raise
      else:
        print('Cannot GET %s: %s' % (url, str(e)))
    # Only reached when a retryable error was swallowed above.
    print('Sleeping for 10 seconds before retrying...')
    time.sleep(10)
+
+
def GrabRemotePackageIndex(binhost_url):
  """Grab the latest binary package database from the specified URL.

  Args:
    binhost_url: Base URL of remote packages (PORTAGE_BINHOST). Supported
      schemes are http(s):// and gs://; any other scheme returns None.

  Returns:
    A PackageIndex object, if the Packages file can be retrieved. If the
    packages file cannot be retrieved, then None is returned.
  """
  url = '%s/Packages' % binhost_url.rstrip('/')
  pkgindex = PackageIndex()
  if binhost_url.startswith('http'):
    try:
      f = _RetryUrlOpen(url)
    except urllib2.HTTPError as e:
      if e.code in HTTP_FORBIDDEN_CODES:
        logging.PrintBuildbotStepWarnings()
        logging.error('Cannot GET %s: %s' % (url, str(e)))
        return None
      # Not found errors are normal if old prebuilts were cleaned out.
      if e.code in HTTP_NOT_FOUND_CODES:
        return None
      raise
  elif binhost_url.startswith('gs://'):
    try:
      gs_context = gs.GSContext()
      output = gs_context.Cat(url)
    except (cros_build_lib.RunCommandError, gs.GSNoSuchKey) as e:
      logging.PrintBuildbotStepWarnings()
      logging.error('Cannot GET %s: %s' % (url, str(e)))
      return None
    # Wrap the fetched content in a file-like object for PackageIndex.Read.
    f = cStringIO.StringIO(output)
  else:
    return None
  pkgindex.Read(f)
  # Record where the index came from if the file itself did not say.
  pkgindex.header.setdefault('URI', binhost_url)
  f.close()
  return pkgindex
+
+
def GrabLocalPackageIndex(package_path):
  """Read a local packages file from disk into a PackageIndex() object.

  Also recomputes each package's DEBUG_SYMBOLS flag from the .debug.tbz2
  files actually present under |package_path|.

  Args:
    package_path: Directory containing Packages file.

  Returns:
    A PackageIndex object.
  """
  pkgindex = PackageIndex()
  # Use open() in a context manager instead of the Python-2-only file()
  # builtin, so the file is closed even if parsing raises.
  with open(os.path.join(package_path, 'Packages')) as packages_file:
    pkgindex.Read(packages_file)

  # Collect the CPV names of all debug-symbol tarballs (*.debug.tbz2)
  # available under |package_path|.
  symbols = set()
  for f in cros_build_lib.ListFiles(package_path):
    if f.endswith('.debug.tbz2'):
      symbols.add(os.path.relpath(f, package_path)[:-len('.debug.tbz2')])

  for p in pkgindex.packages:
    # Recompute DEBUG_SYMBOLS from what is on disk: drop any stale value
    # from the Packages file, then set it only when the matching
    # .debug.tbz2 actually exists.
    p.pop('DEBUG_SYMBOLS', None)
    if p['CPV'] in symbols:
      p['DEBUG_SYMBOLS'] = 'yes'

  return pkgindex
+
+
def _DownloadURLs(urls, dest_dir):
  """Copy URLs into the specified |dest_dir|.

  Issues a single 'cp' command through GSContext; the copies run in
  parallel when more than one URL is given.

  Args:
    urls: List of URLs to fetch.
    dest_dir: Destination directory.
  """
  gs_ctx = gs.GSContext()
  cmd = ['cp'] + urls + [dest_dir]
  gs_ctx.DoCommand(cmd, parallel=len(urls) > 1)
+
+
def FetchTarballs(binhost_urls, pkgdir):
  """Prefetch the specified |binhost_urls| to the specified |pkgdir|.

  This function fetches the tarballs from the specified list of binhost
  URLs to disk. It does not populate the Packages file -- we leave that
  to Portage.

  Args:
    binhost_urls: List of binhost URLs to fetch.
    pkgdir: Location to store the fetched packages.
  """
  # Map category -> {CPV: download URI}; a later binhost overrides an
  # earlier one for the same CPV.
  categories = {}
  for binhost_url in binhost_urls:
    # NOTE(review): GrabRemotePackageIndex returns None for unreachable or
    # unsupported URLs, which would raise AttributeError below -- confirm
    # callers only pass valid gs:// or http(s):// binhosts.
    pkgindex = GrabRemotePackageIndex(binhost_url)
    base_uri = pkgindex.header['URI']
    for pkg in pkgindex.packages:
      cpv = pkg['CPV']
      # PATH may be absent; fall back to the conventional <CPV>.tbz2 layout.
      path = pkg.get('PATH', '%s.tbz2' % cpv)
      uri = '/'.join([base_uri, path])
      category = cpv.partition('/')[0]
      fetches = categories.setdefault(category, {})
      fetches[cpv] = uri

  # Queue one download batch per category so each invocation writes into a
  # single destination directory.
  with parallel.BackgroundTaskRunner(_DownloadURLs) as queue:
    for category, urls in categories.iteritems():
      category_dir = os.path.join(pkgdir, category)
      if not os.path.exists(category_dir):
        os.makedirs(category_dir)
      queue.put((urls.values(), category_dir))
diff --git a/lib/binpkg_unittest b/lib/binpkg_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/binpkg_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/binpkg_unittest.py b/lib/binpkg_unittest.py
new file mode 100644
index 0000000..bde909a
--- /dev/null
+++ b/lib/binpkg_unittest.py
@@ -0,0 +1,64 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the binpkg.py module."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import binpkg
+from chromite.lib import cros_test_lib
+from chromite.lib import gs_unittest
+from chromite.lib import osutils
+
+
# Sample Packages index: a header stanza (USE) followed by two package
# stanzas; only the second package claims DEBUG_SYMBOLS.
PACKAGES_CONTENT = """USE: test

CPV: chromeos-base/shill-0.0.1-r1

CPV: chromeos-base/test-0.0.1-r1
DEBUG_SYMBOLS: yes
"""
+
class FetchTarballsTest(cros_test_lib.MockTempDirTestCase):
  """Tests for binpkg.FetchTarballs."""

  def testFetchFakePackages(self):
    """Pretend to fetch binary packages."""
    gs_mock = self.StartPatcher(gs_unittest.GSContextMock())
    gs_mock.SetDefaultCmdResult()
    uri = 'gs://foo/bar'
    packages_uri = '{}/Packages'.format(uri)
    # NOTE(review): the stanza below reads 'PATH boo/baz.tbz2' with no colon,
    # so PATH is never parsed as a key and FetchTarballs falls back to the
    # <CPV>.tbz2 layout -- confirm whether the missing ':' is intentional.
    packages_file = '''URI: gs://foo

CPV: boo/baz
PATH boo/baz.tbz2
'''
    gs_mock.AddCmdResult(['cat', packages_uri], output=packages_file)

    binpkg.FetchTarballs([uri], self.tempdir)

  @cros_test_lib.NetworkTest()
  def testFetchRealPackages(self):
    """Actually fetch a real binhost from the network."""
    uri = 'gs://chromeos-prebuilt/board/lumpy/paladin-R37-5905.0.0-rc2/packages'
    binpkg.FetchTarballs([uri], self.tempdir)
+
+
class DebugSymbolsTest(cros_test_lib.TempDirTestCase):
  """Tests for the debug symbols handling in binpkg."""

  def testDebugSymbolsDetected(self):
    """When generating the Packages file, DEBUG_SYMBOLS is updated."""
    # Only shill gets a .debug.tbz2 on disk, so GrabLocalPackageIndex should
    # set DEBUG_SYMBOLS for shill and clear the (stale) flag on the second
    # package, which claims DEBUG_SYMBOLS: yes without a symbols file.
    osutils.WriteFile(os.path.join(self.tempdir,
                                   'chromeos-base/shill-0.0.1-r1.debug.tbz2'),
                      'hello', makedirs=True)
    osutils.WriteFile(os.path.join(self.tempdir, 'Packages'),
                      PACKAGES_CONTENT)

    index = binpkg.GrabLocalPackageIndex(self.tempdir)
    self.assertEquals(index.packages[0]['CPV'], 'chromeos-base/shill-0.0.1-r1')
    self.assertEquals(index.packages[0].get('DEBUG_SYMBOLS'), 'yes')
    self.assertFalse('DEBUG_SYMBOLS' in index.packages[1])
diff --git a/lib/blueprint_lib.py b/lib/blueprint_lib.py
new file mode 100644
index 0000000..befe94c
--- /dev/null
+++ b/lib/blueprint_lib.py
@@ -0,0 +1,159 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities to work with blueprints."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import brick_lib
+from chromite.lib import workspace_lib
+
+
# Field names used in a blueprint's configuration file.
APP_ID_FIELD = 'buildTargetId'
BRICKS_FIELD = 'bricks'
BSP_FIELD = 'bsp'

# These packages are implicitly built for all blueprints.
# - target-os is needed to build any image.
# - target-os-dev and target-os-test are needed to build a developer friendly
#   image. They should not be included in any production images.
_IMPLICIT_PACKAGES = (
    'virtual/target-os',
    'virtual/target-os-dev',
    'virtual/target-os-test',
)
+
+
class BlueprintNotFoundError(Exception):
  """No blueprint configuration file exists at the requested location."""
+
+
class BlueprintCreationError(Exception):
  """Blueprint creation failed (bad brick/BSP, or file already exists)."""
+
+
class Blueprint(object):
  """Encapsulates the interaction with a blueprint.

  A blueprint is a workspace configuration file describing a build target:
  the bricks it is composed of, its BSP, and its app id.
  """

  def __init__(self, blueprint_loc, initial_config=None):
    """Instantiates a blueprint object.

    Args:
      blueprint_loc: blueprint locator.  This can be a relative path to CWD, an
        absolute path, or a relative path to the root of the workspace prefixed
        with '//'.
      initial_config: A dictionary of key-value pairs to seed a new blueprint
        with if the specified blueprint doesn't already exist.

    Raises:
      BlueprintNotFoundError: No blueprint exists at |blueprint_loc| and no
        |initial_config| was given to create a new one.
      BlueprintCreationError: |initial_config| was specified but a file
        already exists at |blueprint_loc|.
    """
    # Keep both forms of the location: a plain file path for I/O and the
    # workspace ('//...') locator for naming.
    self._path = (workspace_lib.LocatorToPath(blueprint_loc)
                  if workspace_lib.IsLocator(blueprint_loc) else blueprint_loc)
    self._locator = workspace_lib.PathToLocator(self._path)

    if initial_config is not None:
      self._CreateBlueprintConfig(initial_config)

    try:
      self.config = workspace_lib.ReadConfigFile(self._path)
    except IOError:
      raise BlueprintNotFoundError('Blueprint %s not found.' % self._path)

  @property
  def path(self):
    """File path of the blueprint configuration file."""
    return self._path

  @property
  def locator(self):
    """Workspace ('//...') locator of the blueprint."""
    return self._locator

  def _CreateBlueprintConfig(self, config):
    """Create an initial blueprint config file.

    Converts all brick paths in |config| into locators then saves the
    configuration file to |self._path|.

    Currently fails if |self._path| already exists, but could be
    generalized to allow re-writing config files if needed.

    Args:
      config: configuration dictionary.

    Raises:
      BlueprintCreationError: A brick in |config| doesn't exist or an
        error occurred while saving the config file.
    """
    if os.path.exists(self._path):
      raise BlueprintCreationError('File already exists at %s.' % self._path)

    try:
      # Turn brick specifications into locators. If bricks or BSPs are
      # unspecified, assign default values so the config file has the proper
      # structure for easy manual editing.
      if config.get(BRICKS_FIELD):
        config[BRICKS_FIELD] = [brick_lib.Brick(b).brick_locator
                                for b in config[BRICKS_FIELD]]
      else:
        config[BRICKS_FIELD] = []
      if config.get(BSP_FIELD):
        config[BSP_FIELD] = brick_lib.Brick(config[BSP_FIELD]).brick_locator
      else:
        config[BSP_FIELD] = None

      # Create the config file.
      workspace_lib.WriteConfigFile(self._path, config)
    except (brick_lib.BrickNotFound, workspace_lib.ConfigFileError) as e:
      raise BlueprintCreationError('Blueprint creation failed. %s' % e)

  def GetBricks(self):
    """Returns the bricks field of a blueprint ([] when unset)."""
    return self.config.get(BRICKS_FIELD, [])

  def GetBSP(self):
    """Returns the BSP field of a blueprint (None when unset)."""
    return self.config.get(BSP_FIELD)

  def GetAppId(self):
    """Returns the APP_ID from a blueprint (None when unset)."""
    app_id = self.config.get(APP_ID_FIELD)
    return app_id

  def FriendlyName(self):
    """Returns the friendly name for this blueprint."""
    return workspace_lib.LocatorToFriendlyName(self._locator)

  def GetUsedBricks(self):
    """Returns the set of bricks used by this blueprint.

    Includes the BSP and the transitive dependency stack of every listed
    brick, deduplicated by brick locator.
    """
    brick_map = {}
    # NOTE(review): if no BSP is configured, GetBSP() returns None and
    # brick_lib.Brick(None) is constructed -- confirm callers only use this
    # on blueprints with a BSP.
    for top_brick in self.GetBricks() + [self.GetBSP()]:
      for b in brick_lib.Brick(top_brick).BrickStack():
        brick_map[b.brick_locator] = b

    return brick_map.values()

  def GetPackages(self, with_implicit=True):
    """Returns the list of packages needed by this blueprint.

    This includes the main packages for the bricks and the bsp of this
    blueprint. We don't add the main packages of the bricks dependencies to
    allow inheriting a brick without inheriting its required packages.

    Args:
      with_implicit: If True, include packages that are implicitly required by
        the core system.
    """
    packages = []
    for locator in self.GetBricks() + [self.GetBSP()]:
      packages.extend(brick_lib.Brick(locator).MainPackages())

    if with_implicit:
      packages.extend(_IMPLICIT_PACKAGES)
    return packages
diff --git a/lib/blueprint_lib_unittest b/lib/blueprint_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/blueprint_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/blueprint_lib_unittest.py b/lib/blueprint_lib_unittest.py
new file mode 100644
index 0000000..d4c2387
--- /dev/null
+++ b/lib/blueprint_lib_unittest.py
@@ -0,0 +1,121 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for the blueprint library."""
+
+from __future__ import print_function
+
+from chromite.lib import blueprint_lib
+from chromite.lib import brick_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import workspace_lib
+
+
+class BlueprintLibTest(cros_test_lib.WorkspaceTestCase):
+  """Unittest for blueprint_lib.py"""
+
+  def setUp(self):
+    self.CreateWorkspace()
+
+  def testBlueprint(self):
+    """Tests getting the basic blueprint getters."""
+    bricks = ['//foo', '//bar', '//baz']
+    for brick in bricks:
+      self.CreateBrick(brick)
+    self.CreateBrick('//bsp')
+    blueprint = self.CreateBlueprint(bricks=bricks, bsp='//bsp')
+    self.assertEqual(blueprint.GetBricks(), bricks)
+    self.assertEqual(blueprint.GetBSP(), '//bsp')
+
+  def testBlueprintNoBricks(self):
+    """Tests that blueprints without bricks return reasonable defaults."""
+    self.CreateBrick('//bsp2')
+    blueprint = self.CreateBlueprint(bsp='//bsp2')
+    self.assertEqual(blueprint.GetBricks(), [])
+    self.assertEqual(blueprint.GetBSP(), '//bsp2')
+
+  def testEmptyBlueprintFile(self):
+    """Tests that empty blueprints create the basic file structure."""
+    blueprint = self.CreateBlueprint()
+    file_contents = workspace_lib.ReadConfigFile(blueprint.path)
+
+    self.assertIn(blueprint_lib.BRICKS_FIELD, file_contents)
+    self.assertIn(blueprint_lib.BSP_FIELD, file_contents)
+
+  def testGetUsedBricks(self):
+    """Tests that we can list all the bricks used."""
+    brick_lib.Brick('//a', initial_config={'name':'a'})
+    brick_b = brick_lib.Brick('//b', initial_config={'name':'b'})
+    brick_c = brick_lib.Brick('//c',
+                              initial_config={'name':'c',
+                                              'dependencies': ['//b']})
+
+    blueprint = self.CreateBlueprint(name='foo.json',
+                                     bsp='//a', bricks=[brick_c.brick_locator])
+    self.assertEqual(3, len(blueprint.GetUsedBricks()))
+
+    # We sort out duplicates: c depends on b and b is explicitly listed in
+    # bricks too.
+    blueprint = self.CreateBlueprint(name='bar.json',
+                                     bsp='//a', bricks=[brick_c.brick_locator,
+                                                        brick_b.brick_locator])
+    self.assertEqual(3, len(blueprint.GetUsedBricks()))
+
+  def testGetPackages(self):
+    """Tests that we can get the needed packages for a given blueprint."""
+    self.CreateBrick('foo', main_package='app-misc/foopkg')
+    self.CreateBrick('bar', main_package='app-misc/barpkg')
+    self.CreateBrick('foobar', main_package='app-misc/foobarpkg',
+                     dependencies=['//foo', '//bar'])
+
+    self.CreateBrick('hello', main_package='app-misc/hello')
+
+    self.CreateBrick('mybsp', main_package='app-misc/bspbonjour')
+
+    blueprint = self.CreateBlueprint(name='//myblueprint',
+                                     bricks=['//hello', '//foobar'],
+                                     bsp='//mybsp')
+    packages = blueprint.GetPackages(with_implicit=False)
+    self.assertEqual(
+        set(('app-misc/foobarpkg', 'app-misc/hello', 'app-misc/bspbonjour')),
+        set(packages))
+
+    packages = blueprint.GetPackages(with_implicit=True)
+    self.assertTrue('virtual/target-os' in packages)
+    self.assertTrue('virtual/target-os-dev' in packages)
+    self.assertTrue('virtual/target-os-test' in packages)
+
+  def testBlueprintAlreadyExists(self):
+    """Tests creating a blueprint where one already exists."""
+    self.CreateBrick('//foo')
+    self.CreateBrick('//bar')
+    self.CreateBlueprint(name='//my_blueprint', bricks=['//foo'])
+    with self.assertRaises(blueprint_lib.BlueprintCreationError):
+      self.CreateBlueprint(name='//my_blueprint', bricks=['//bar'])
+    # Make sure the original blueprint is untouched.
+    self.assertEqual(['//foo'],
+                     blueprint_lib.Blueprint('//my_blueprint').GetBricks())
+
+  def testBlueprintBrickNotFound(self):
+    """Tests creating a blueprint with a non-existent brick fails."""
+    with self.assertRaises(blueprint_lib.BlueprintCreationError):
+      self.CreateBlueprint(name='//my_blueprint', bricks=['//none'])
+
+  def testBlueprintBSPNotFound(self):
+    """Tests creating a blueprint with a non-existent BSP fails."""
+    with self.assertRaises(blueprint_lib.BlueprintCreationError):
+      self.CreateBlueprint(name='//my_blueprint', bsp='//none')
+
+  def testBlueprintNotFound(self):
+    """Tests loading a non-existent blueprint file."""
+    with self.assertRaises(blueprint_lib.BlueprintNotFoundError):
+      blueprint_lib.Blueprint('//not/a/blueprint')
+
+  def testInvalidBlueprint(self):
+    """Tests loading an invalid blueprint file."""
+    path = workspace_lib.LocatorToPath('//invalid_file')
+    osutils.WriteFile(path, 'invalid contents')
+    with self.assertRaises(workspace_lib.ConfigFileError):
+      blueprint_lib.Blueprint(path)
diff --git a/lib/boolparse_lib.py b/lib/boolparse_lib.py
new file mode 100644
index 0000000..13bdcdb
--- /dev/null
+++ b/lib/boolparse_lib.py
@@ -0,0 +1,155 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Parser used to parse the boolean expression."""
+
+from __future__ import print_function
+
+import ast
+import pyparsing
+import re
+
+
class BoolParseError(Exception):
  """Base exception for boolean-expression parsing errors in this module."""
+
+
+class _BoolOperand(object):
+  """Read pyparsing.Keyword as operand and evaluate its boolean value."""
+
+  def __init__(self, t):
+    """Initialize the object.
+
+    Read boolean operands from pyparsing.Keyword and evaluate into the
+    corresponding boolean values.
+
+    Args:
+      t: t[0] is pyparsing.Keyword corresponding to False or True.
+    """
+    self.label = t[0]
+    self.value = ast.literal_eval(t[0])
+
+  def __bool__(self):
+    return self.value
+
+  def __str__(self):
+    return self.label
+
+  __nonzero__ = __bool__
+
+
+class _BoolBinOp(object):
+  """General class for binary operation."""
+
+  def __init__(self, t):
+    """Initialize object.
+
+    Extract the operand from the input. The operand is the pyparsing.Keyword.
+
+    Args:
+      t: A list containing a list of operand and operator, such as
+      [[True, 'and', False]]. t[0] is [True, 'and', False]. t[0][0::2] are the
+      two operands.
+    """
+    self.args = t[0][0::2]
+
+  def __bool__(self):
+    """Evaluate the boolean value of the binary boolean expression.
+
+    evalop is the method used to evaluate, which is overwritten in the children
+    class of _BoolBinOp.
+
+    Returns:
+      boolean result.
+    """
+    return self.evalop(bool(a) for a in self.args)
+
+  __nonzero__ = __bool__
+
+
class _BoolAnd(_BoolBinOp):
  """And boolean binary operation."""

  # True only when every operand is True.
  evalop = all
+
+
class _BoolOr(_BoolBinOp):
  """Or boolean binary operation."""

  # True when at least one operand is True.
  evalop = any
+
+
+class _BoolNot(object):
+  """Not operation."""
+
+  def __init__(self, t):
+    self.arg = t[0][1]
+
+  def __bool__(self):
+    v = bool(self.arg)
+    return not v
+
+  __nonzero__ = __bool__
+
+
+def _ExprOverwrite(expr, true_variables):
+  """Overwrite variables in |expr| based on |true_variables|.
+
+  Overwrite variables in |expr| with 'True' if they occur in |true_variables|,
+  'False' otherwise.
+
+  Args:
+    expr: The orginal boolean expression, like 'A and B'.
+    true_variables: Collection of variable names to be considered True, e.g.
+    {'A'}.
+
+  Returns:
+    A boolean expression string with pyparsing.Keyword, like 'True and False'.
+  """
+  # When true_variables is None, replace it with empty collection ()
+  target_set = set(true_variables or ())
+  items = {
+      x.strip() for x in re.split(r'(?i) and | or |not |\(|\)', expr)
+      if x.strip()}
+  boolstr = expr
+  for item in items:
+    boolstr = boolstr.replace(
+        item, 'True' if item in target_set else 'False')
+
+  return boolstr
+
def BoolstrResult(expr, true_variables):
  """Determine if a boolean expression is satisfied.

  BoolstrResult('A and B and not C', {'A', 'C'}) -> False

  Args:
    expr: The original boolean expression, like 'A and B'.
    true_variables: Collection to be checked whether satisfy the boolean expr.

  Returns:
    True if the given |true_variables| cause the boolean expression |expr| to
    be satisfied, False otherwise.

  Raises:
    BoolParseError: |expr| cannot be parsed as a boolean expression.
  """
  # Rewrite every variable in |expr| to the literal 'True' or 'False'.
  boolstr = _ExprOverwrite(expr, true_variables)

  # Define the boolean logic
  TRUE = pyparsing.Keyword('True')
  FALSE = pyparsing.Keyword('False')
  boolOperand = TRUE | FALSE
  boolOperand.setParseAction(_BoolOperand)

  # Define expression, based on expression operand and list of operations in
  # precedence order.
  boolExpr = pyparsing.infixNotation(
      boolOperand, [('not', 1, pyparsing.opAssoc.RIGHT, _BoolNot),
                    ('and', 2, pyparsing.opAssoc.LEFT, _BoolAnd),
                    ('or', 2, pyparsing.opAssoc.LEFT, _BoolOr),])

  try:
    # bool() on the parse tree's root evaluates the expression recursively.
    res = boolExpr.parseString(boolstr)[0]
    return bool(res)
  except (AttributeError, pyparsing.ParseException):
    raise BoolParseError('Cannot parse the boolean expression string "%s".'
                         % expr)
diff --git a/lib/boolparse_lib_unittest b/lib/boolparse_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/boolparse_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/boolparse_lib_unittest.py b/lib/boolparse_lib_unittest.py
new file mode 100644
index 0000000..fd591b5
--- /dev/null
+++ b/lib/boolparse_lib_unittest.py
@@ -0,0 +1,72 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for boolparse_lib methods."""
+
+from __future__ import print_function
+
+import boolparse_lib
+import cros_test_lib
+
+
class ParserTest(cros_test_lib.TestCase):
  """Unittest for boolean expression parser."""

  def testSingleItem(self):
    """A bare variable is satisfied iff it is in the collection."""
    self.assertFalse(boolparse_lib.BoolstrResult('A', {}))
    self.assertTrue(boolparse_lib.BoolstrResult('A', {'A', 'B'}))
    self.assertFalse(boolparse_lib.BoolstrResult('A', {'B'}))

  def testAndlogic(self):
    """'and' requires every operand to be satisfied."""
    self.assertFalse(boolparse_lib.BoolstrResult('A and B', {}))
    # This previously duplicated the empty-set assertion above; cover the
    # partially-satisfied case instead.
    self.assertFalse(boolparse_lib.BoolstrResult('A and B', {'A'}))
    self.assertTrue(boolparse_lib.BoolstrResult('A and B', {'A', 'B', 'C'}))

  def testOrlogic(self):
    """'or' requires at least one operand to be satisfied."""
    self.assertFalse(boolparse_lib.BoolstrResult('A or B', {}))
    self.assertTrue(boolparse_lib.BoolstrResult('A or B', {'B'}))
    self.assertTrue(boolparse_lib.BoolstrResult('A or B', {'A', 'B'}))
    self.assertTrue(boolparse_lib.BoolstrResult('A or B', {'A', 'C'}))
    self.assertFalse(boolparse_lib.BoolstrResult('A or B', {'C'}))

  def testNotlogic(self):
    """'not' inverts the operand; double negation cancels."""
    self.assertTrue(boolparse_lib.BoolstrResult('not A', {}))
    self.assertFalse(boolparse_lib.BoolstrResult('not A', {'A'}))
    self.assertFalse(boolparse_lib.BoolstrResult('not A', {'A', 'B'}))
    self.assertTrue(boolparse_lib.BoolstrResult('not A', {'B'}))
    self.assertTrue(boolparse_lib.BoolstrResult('not not A', {'A'}))

  def testComplexBoolExpr(self):
    """Combinations of and/or/not with parentheses."""
    self.assertFalse(boolparse_lib.BoolstrResult('A and not B', {}))
    self.assertTrue(boolparse_lib.BoolstrResult('A and not B', {'A'}))
    self.assertFalse(boolparse_lib.BoolstrResult('A and not B', {'A', 'B'}))
    self.assertTrue(boolparse_lib.BoolstrResult('not (A and B)', {'A'}))
    self.assertFalse(boolparse_lib.BoolstrResult('not (A and B)', {'A', 'B'}))
    self.assertTrue(boolparse_lib.BoolstrResult('A or not B', {'A'}))
    self.assertTrue(boolparse_lib.BoolstrResult('A or not B', {'A', 'B'}))
    self.assertFalse(boolparse_lib.BoolstrResult('A or not B', {'B'}))
    self.assertTrue(boolparse_lib.BoolstrResult('A or not B', {'C'}))

  def testInvalidBoolExprInput(self):
    """Test invalid boolean expression.

    Test whether captures the exceptions caused by invalid boolean expression
    input. Note that only the lowercase and, or, not are allowed.
    """

    with self.assertRaises(boolparse_lib.BoolParseError):
      boolparse_lib.BoolstrResult('', {'A'})
    with self.assertRaises(TypeError):
      boolparse_lib.BoolstrResult(None, {'A'})

  def testCollectionInput(self):
    """Test whether handle different types of collection input."""

    self.assertFalse(boolparse_lib.BoolstrResult('A', None))
    self.assertTrue(boolparse_lib.BoolstrResult('A', ['A', 'A']))
    self.assertFalse(boolparse_lib.BoolstrResult('not A', ('A', 'B', 'B')))

  def testVariousOperand(self):
    """Test on various operand format."""
    self.assertFalse(boolparse_lib.BoolstrResult('A:foo and B:foo', {'A:foo'}))
diff --git a/lib/bootstrap_lib.py b/lib/bootstrap_lib.py
new file mode 100644
index 0000000..9734f2e
--- /dev/null
+++ b/lib/bootstrap_lib.py
@@ -0,0 +1,101 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common utilities used by the chromium/bootstrap scripts."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import cros_build_lib
+from chromite.lib import project_sdk
+from chromite.lib import workspace_lib
+
+
# Subdirectory of the bootstrap checkout where per-version SDKs are stored.
SDK_CHECKOUTS = 'sdk_checkouts'


# Environment variable used to remember the bootstrap path in child processes.
BOOTSTRAP_PATH_ENV = 'BRILLO_BOOTSTRAP_PATH'
+
+
def FindBootstrapPath(save_to_env=False):
  """Find the bootstrap directory.

  This is only possible, if the process was initially launched from a bootstrap
  environment, and isn't inside a chroot.

  Args:
    save_to_env: If true, preserve the bootstrap path in an ENV for child
                 processes. Only intended for the brillo bootstrap wrapper.

  Returns:
   Path to root of bootstrap, or None.
  """
  # We never have access to bootstrap if we are inside the chroot.
  if cros_build_lib.IsInsideChroot():
    return None

  # See if the path has already been determined, especially in a parent wrapper
  # process.
  env_path = os.environ.get(BOOTSTRAP_PATH_ENV)
  if env_path:
    return env_path

  # Base the bootstrap location on our current location, and remember it:
  # two directory levels up from this file (lib/<this file> -> checkout root).
  new_path = os.path.realpath(os.path.join(
      os.path.abspath(__file__), '..', '..'))

  # No repo checkout is a valid bootstrap environment, because the bootstrap
  # environment holds repo checkouts inside SDK_CHECKOUTS, and repos cannot
  # exist inside other repos.
  if project_sdk.FindRepoRoot(new_path):
    return None

  if save_to_env:
    os.environ[BOOTSTRAP_PATH_ENV] = new_path

  return new_path
+
+
def ComputeSdkPath(bootstrap_path, version):
  """What directory should an SDK be in.

  Args:
    bootstrap_path: Bootstrap root directory, or None.
    version: Version of the SDK.

  Returns:
    Path in which SDK version should be stored, or None.
  """
  # Without a bootstrap root there is no place to store SDKs.
  return (None if bootstrap_path is None
          else os.path.join(bootstrap_path, SDK_CHECKOUTS, version))
+
+
def GetActiveSdkPath(bootstrap_path, workspace_path):
  """Find the SDK Path associated with a given workspace.

  Most code should use constants.SOURCE_ROOT instead.

  Args:
    bootstrap_path: Path directory of the bootstrap dir (FindBootstrapPath()).
    workspace_path: Path directory of the workspace (FindWorkspacePath()).

  Returns:
    Path to root directory of SDK, if there is an active one, and it exists.
    None otherwise.
  """
  if bootstrap_path is None:
    return None

  # The workspace records which SDK version it is pinned to.
  version = workspace_lib.GetActiveSdkVersion(workspace_path)
  if version is None:
    return None

  sdk_root = ComputeSdkPath(bootstrap_path, version)

  # Returns None if there is no active SDK version, or if it's not installed.
  return sdk_root if os.path.isdir(sdk_root) else None
diff --git a/lib/bootstrap_lib_unittest b/lib/bootstrap_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/bootstrap_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/bootstrap_lib_unittest.py b/lib/bootstrap_lib_unittest.py
new file mode 100644
index 0000000..a913533
--- /dev/null
+++ b/lib/bootstrap_lib_unittest.py
@@ -0,0 +1,128 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for the bootstrap_lib library."""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import bootstrap_lib
+from chromite.lib import project_sdk
+from chromite.lib import workspace_lib
+
+# pylint: disable=protected-access
+
+class ProjectSdkTest(cros_test_lib.WorkspaceTestCase):
+  """Unittest for bootstrap_lib.py"""
+
+  # NOTE(review): the class name says ProjectSdkTest but these tests exercise
+  # bootstrap_lib; consider renaming in a follow-up change.
+
+  def setUp(self):
+    self.version = '1.2.3'
+
+    # Don't use "CreateBootstrap" since it mocks out the method we are testing.
+    self.bootstrap_path = os.path.join(self.tempdir, 'bootstrap')
+    self.CreateWorkspace()
+
+
+  @mock.patch.object(project_sdk, 'FindRepoRoot')
+  @mock.patch.object(cros_build_lib, 'IsInsideChroot')
+  def _RunFindBootstrapPath(self, env, repo, chroot,
+                            expected_path, expected_env,
+                            mock_chroot, mock_repo):
+    """Runs FindBootstrapPath under a controlled environment and mocks.
+
+    Args:
+      env: Value to place in BOOTSTRAP_PATH_ENV, or None to unset it.
+      repo: Return value for the mocked project_sdk.FindRepoRoot.
+      chroot: Return value for the mocked cros_build_lib.IsInsideChroot.
+      expected_path: Expected result of FindBootstrapPath().
+      expected_env: Expected BOOTSTRAP_PATH_ENV value after a saving call.
+      mock_chroot: Mock injected by the IsInsideChroot patch decorator.
+      mock_repo: Mock injected by the FindRepoRoot patch decorator.
+    """
+    orig_env = os.environ.copy()
+
+    try:
+      # Setup the ENV as requested.
+      if env is not None:
+        os.environ[bootstrap_lib.BOOTSTRAP_PATH_ENV] = env
+      else:
+        os.environ.pop(bootstrap_lib.BOOTSTRAP_PATH_ENV, None)
+
+      # Setup mocks, as requested.
+      mock_repo.return_value = repo
+      mock_chroot.return_value = chroot
+
+      # Verify that ENV is not modified when save_to_env is left False.
+      self.assertEqual(bootstrap_lib.FindBootstrapPath(), expected_path)
+      self.assertEqual(os.environ.get(bootstrap_lib.BOOTSTRAP_PATH_ENV), env)
+
+      # The test environment is fully setup, run the test.
+      self.assertEqual(bootstrap_lib.FindBootstrapPath(True), expected_path)
+      self.assertEqual(os.environ.get(bootstrap_lib.BOOTSTRAP_PATH_ENV),
+                       expected_env)
+
+    finally:
+      # Restore the ENV.
+      osutils.SetEnvironment(orig_env)
+
+
+  def testFindBootstrapPath(self):
+    real_result = constants.CHROMITE_DIR
+
+    # Test first call in a bootstrap env. Exact results not verified.
+    self._RunFindBootstrapPath(None, None, False,
+                               real_result, real_result)
+
+    # Test first call after bootstrap outside an SDK. Not an expected env.
+    self._RunFindBootstrapPath('/foo', None, False,
+                               '/foo', '/foo')
+
+    # Test first call after bootstrap inside an SDK.
+    self._RunFindBootstrapPath('/foo', '/', False,
+                               '/foo', '/foo')
+
+    # Test first call without bootstrap inside an SDK. Error Case.
+    self._RunFindBootstrapPath(None, '/', False,
+                               None, None)
+
+    # Test all InsideChroot Cases.
+    self._RunFindBootstrapPath(None, None, True,
+                               None, None)
+    self._RunFindBootstrapPath('/foo', None, True,
+                               None, '/foo')
+    self._RunFindBootstrapPath('/foo', '/', True,
+                               None, '/foo')
+    self._RunFindBootstrapPath(None, '/', True,
+                               None, None)
+
+  def testComputeSdkPath(self):
+    # Try to compute path, with no valid bootstrap path.
+    self.assertEqual(None, bootstrap_lib.ComputeSdkPath(None, '1.2.3'))
+
+    self.assertEqual(
+        '/foo/bootstrap/sdk_checkouts/1.2.3',
+        bootstrap_lib.ComputeSdkPath('/foo/bootstrap', '1.2.3'))
+
+  def testGetActiveSdkPath(self):
+    # Try to find SDK Path with no valid bootstrap path.
+    sdk_dir = bootstrap_lib.GetActiveSdkPath(None,
+                                             self.workspace_path)
+    self.assertEqual(None, sdk_dir)
+
+    # Try to find SDK Path of workspace with no active SDK.
+    sdk_dir = bootstrap_lib.GetActiveSdkPath(self.bootstrap_path,
+                                             self.workspace_path)
+    self.assertEqual(None, sdk_dir)
+
+    # Try to find SDK Path of workspace with active SDK, but SDK doesn't exist.
+    workspace_lib.SetActiveSdkVersion(self.workspace_path, self.version)
+    sdk_dir = bootstrap_lib.GetActiveSdkPath(self.bootstrap_path,
+                                             self.workspace_path)
+    self.assertEqual(None, sdk_dir)
+
+    # 'Create' the active SDK.
+    expected_sdk_root = bootstrap_lib.ComputeSdkPath(self.bootstrap_path,
+                                                     self.version)
+    osutils.SafeMakedirs(expected_sdk_root)
+
+    # Verify that we can Find it now.
+    sdk_dir = bootstrap_lib.GetActiveSdkPath(self.bootstrap_path,
+                                             self.workspace_path)
+    self.assertEqual(expected_sdk_root, sdk_dir)
diff --git a/lib/brick_lib.py b/lib/brick_lib.py
new file mode 100644
index 0000000..0925688
--- /dev/null
+++ b/lib/brick_lib.py
@@ -0,0 +1,273 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common brick related utilities."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import osutils
+from chromite.lib import workspace_lib
+
+# Defaults for the generated 'metadata/layout.conf'; any key a brick does not
+# set explicitly falls back to these values (see Brick._WriteLayoutConf).
+_DEFAULT_LAYOUT_CONF = {'profile_eapi_when_unspecified': '5-progress',
+                        'profile-formats': 'portage-2 profile-default-eapi',
+                        'thin-manifests': 'true',
+                        'use-manifests': 'true'}
+
+# Name of the brick configuration file, stored at the brick's root directory.
+_CONFIG_FILE = 'config.json'
+
+# Overlays handled separately by the build system and therefore excluded from
+# the dependency list synthesized for legacy bricks.
+_IGNORED_OVERLAYS = ('portage-stable', 'chromiumos', 'eclass-overlay')
+
+
+class BrickCreationFailed(Exception):
+  """Raised when a brick could not be created successfully."""
+
+
+class BrickNotFound(Exception):
+  """Raised when the requested brick does not exist."""
+
+
+class BrickFeatureNotSupported(Exception):
+  """Raised when a feature is attempted that this brick does not support."""
+
+
+class Brick(object):
+  """Encapsulates the interaction with a brick."""
+
+  def __init__(self, brick_loc, initial_config=None, allow_legacy=True):
+    """Instantiates a brick object.
+
+    Args:
+      brick_loc: brick locator. This can be a relative path to CWD, an absolute
+        path, a public board name prefix with 'board:' or a relative path to the
+        root of the workspace, prefixed with '//').
+      initial_config: The initial configuration as a python dictionary.
+        If not None, creates a brick with this configuration.
+      allow_legacy: Allow board overlays, simulating a basic read-only config.
+        Ignored if |initial_config| is not None.
+
+    Raises:
+      ValueError: If |brick_loc| is invalid.
+      LocatorNotResolved: |brick_loc| is valid but could not be resolved.
+      BrickNotFound: If |brick_loc| does not point to a brick and no initial
+        config was provided.
+      BrickCreationFailed: when the brick could not be created successfully.
+    """
+    # Normalize so we always hold both the absolute path and the workspace
+    # locator for this brick.
+    if workspace_lib.IsLocator(brick_loc):
+      self.brick_dir = workspace_lib.LocatorToPath(brick_loc)
+      self.brick_locator = brick_loc
+    else:
+      self.brick_dir = brick_loc
+      self.brick_locator = workspace_lib.PathToLocator(brick_loc)
+
+    self.config = None
+    self.legacy = False
+    config_json = os.path.join(self.brick_dir, _CONFIG_FILE)
+
+    if not os.path.exists(config_json):
+      if initial_config:
+        # Creation path: the directory must not pre-exist.
+        if os.path.exists(self.brick_dir):
+          raise BrickCreationFailed('directory %s already exists.'
+                                    % self.brick_dir)
+        success = False
+        try:
+          self.UpdateConfig(initial_config)
+          osutils.SafeMakedirs(self.OverlayDir())
+          osutils.SafeMakedirs(self.SourceDir())
+          success = True
+        except BrickNotFound as e:
+          # If BrickNotFound was raised, the dependencies contain a missing
+          # brick.
+          raise BrickCreationFailed('dependency not found %s' % e)
+        finally:
+          if not success:
+            # If the brick creation failed for any reason, cleanup the partially
+            # created brick.
+            osutils.RmDir(self.brick_dir, ignore_missing=True)
+
+      elif allow_legacy:
+        # Fall back to treating a plain board overlay as a read-only brick,
+        # deriving a synthetic config from its layout.conf.
+        self.legacy = True
+        try:
+          masters = self._ReadLayoutConf().get('masters')
+          masters_list = masters.split() if masters else []
+
+          # Keep general Chromium OS overlays out of this list as they are
+          # handled separately by the build system.
+          deps = ['board:' + d for d in masters_list
+                  if d not in _IGNORED_OVERLAYS]
+          self.config = {'name': self._ReadLayoutConf()['repo-name'],
+                         'dependencies': deps}
+        except (IOError, KeyError):
+          # Missing or malformed layout.conf: not a usable legacy overlay;
+          # self.config stays None and we raise BrickNotFound below.
+          pass
+
+      if self.config is None:
+        raise BrickNotFound('Brick not found at %s' % self.brick_dir)
+    elif initial_config is None:
+      # Existing brick: just load its stored configuration.
+      self.config = workspace_lib.ReadConfigFile(config_json)
+    else:
+      raise BrickCreationFailed('brick %s already exists.' % self.brick_dir)
+
+    # Legacy bricks have no workspace locator, hence no friendly name.
+    self.friendly_name = None
+    if not self.legacy:
+      self.friendly_name = workspace_lib.LocatorToFriendlyName(
+          self.brick_locator)
+
+  def _LayoutConfPath(self):
+    """Returns the path to the layout.conf file."""
+    return os.path.join(self.OverlayDir(), 'metadata', 'layout.conf')
+
+  def _WriteLayoutConf(self, content):
+    """Writes layout.conf.
+
+    Sets unset fields to a sensible default and write |content| in layout.conf
+    in the right format.
+
+    Args:
+      content: dictionary containing the set fields in layout.conf.
+    """
+    for k, v in _DEFAULT_LAYOUT_CONF.iteritems():
+      content.setdefault(k, v)
+
+    content_str = ''.join(['%s = %s\n' % (k, v)
+                           for k, v in content.iteritems()])
+    osutils.WriteFile(self._LayoutConfPath(), content_str, makedirs=True)
+
+  def _ReadLayoutConf(self):
+    """Returns the content of layout.conf as a Python dictionary."""
+    def ParseConfLine(line):
+      # 'key = value' -> ('key', 'value'); a line without '=' maps to None.
+      k, _, v = line.partition('=')
+      return k.strip(), v.strip() or None
+
+    content_str = osutils.ReadFile(self._LayoutConfPath())
+    return dict(ParseConfLine(line) for line in content_str.splitlines())
+
+  def UpdateConfig(self, config, regenerate=True):
+    """Updates the brick's configuration.
+
+    Writes |config| to the configuration file.
+    If |regenerate| is true, regenerate the portage configuration files in
+    this brick to match the new configuration.
+
+    Args:
+      config: brick configuration as a python dict.
+      regenerate: if True, regenerate autogenerated brick files.
+
+    Raises:
+      BrickFeatureNotSupported: if this is a legacy (read-only) brick.
+    """
+    if self.legacy:
+      raise BrickFeatureNotSupported(
+          'Cannot update configuration of legacy brick %s' % self.brick_dir)
+
+    self.config = config
+    # All objects must be unambiguously referenced. Normalize all the
+    # dependencies according to the workspace.
+    self.config['dependencies'] = [d if workspace_lib.IsLocator(d)
+                                   else workspace_lib.PathToLocator(d)
+                                   for d in self.config.get('dependencies', [])]
+
+    workspace_lib.WriteConfigFile(os.path.join(self.brick_dir, _CONFIG_FILE),
+                                  config)
+
+    if regenerate:
+      self.GeneratePortageConfig()
+
+  def GeneratePortageConfig(self):
+    """Generates all autogenerated brick files."""
+    # We don't generate anything in legacy brick so everything is up-to-date.
+    if self.legacy:
+      return
+
+    deps = [b.config['name'] for b in self.Dependencies()]
+
+    self._WriteLayoutConf(
+        {'masters': ' '.join(
+            ['eclass-overlay', 'portage-stable', 'chromiumos'] + deps),
+         'repo-name': self.config['name']})
+
+  def Dependencies(self):
+    """Returns the dependent bricks."""
+    return [Brick(d) for d in self.config.get('dependencies', [])]
+
+  def Inherits(self, brick_name):
+    """Checks whether this brick contains |brick_name|.
+
+    Args:
+      brick_name: The name of the brick to check containment.
+
+    Returns:
+      Whether |brick_name| is contained in this brick.
+    """
+    return brick_name in [b.config['name'] for b in self.BrickStack()]
+
+  def MainPackages(self):
+    """Returns the brick's main package(s).
+
+    This finds the 'main_package' property.  It nevertheless returns a (single
+    element) list as it is easier to work with.
+
+    Returns:
+      A list of main packages; empty if no main package configured.
+    """
+    main_package = self.config.get('main_package')
+    return [main_package] if main_package else []
+
+  def OverlayDir(self):
+    """Returns the brick's overlay directory."""
+    # A legacy brick *is* its overlay; a real brick keeps it in 'packages'.
+    if self.legacy:
+      return self.brick_dir
+
+    return os.path.join(self.brick_dir, 'packages')
+
+  def SourceDir(self):
+    """Returns the project's source directory."""
+    return os.path.join(self.brick_dir, 'src')
+
+  def FriendlyName(self):
+    """Return the friendly name for this brick.
+
+    This name is used as the board name for legacy commands (--board).
+
+    Raises:
+      BrickFeatureNotSupported: for legacy bricks, which have no locator.
+    """
+    if self.friendly_name is None:
+      raise BrickFeatureNotSupported()
+    return self.friendly_name
+
+  def BrickStack(self):
+    """Returns the brick stack for this brick.
+
+    Returns:
+      A list of bricks, respecting the partial ordering of bricks as defined by
+      dependencies, ordered from the lowest priority to the highest priority.
+    """
+    seen = set()
+    def _stack(brick):
+      # Post-order DFS so each dependency appears before its dependents;
+      # |seen| is keyed by brick_dir to visit each brick only once.
+      seen.add(brick.brick_dir)
+      l = []
+      for dep in brick.Dependencies():
+        if dep.brick_dir not in seen:
+          l.extend(_stack(dep))
+      l.append(brick)
+      return l
+
+    return _stack(self)
+
+
+def FindBrickInPath(path=None):
+  """Returns the brick whose root directory contains a path.
+
+  Return a Brick for the first parent directory of |path| that is the root of
+  a brick.  This method is used for brick auto-detection and does not consider
+  legacy.
+
+  Args:
+    path: path to a directory. If |path| is None, |path| will be set to CWD.
+
+  Returns:
+    The Brick object rooted at the first parent that is a brick directory if
+    one exists. Otherwise return None.
+  """
+  for p in osutils.IteratePathParents(path or os.getcwd()):
+    try:
+      return Brick(p, allow_legacy=False)
+    except BrickNotFound:
+      # Not a brick root; keep walking up toward the filesystem root.
+      pass
+
+  return None
diff --git a/lib/brick_lib_unittest b/lib/brick_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/brick_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/brick_lib_unittest.py b/lib/brick_lib_unittest.py
new file mode 100644
index 0000000..9eff5b2
--- /dev/null
+++ b/lib/brick_lib_unittest.py
@@ -0,0 +1,217 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for the brick library."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import brick_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import workspace_lib
+
+
+class BrickLibTest(cros_test_lib.WorkspaceTestCase):
+  """Unittest for brick.py"""
+
+  # pylint: disable=protected-access
+
+  def setUp(self):
+    self.CreateWorkspace()
+
+  def SetupLegacyBrick(self, brick_dir=None, brick_name='foo'):
+    """Sets up a legacy brick layout (a bare metadata/layout.conf)."""
+    if brick_dir is None:
+      brick_dir = self.workspace_path
+    layout_conf = 'repo-name = %s\n' % brick_name
+    osutils.WriteFile(os.path.join(brick_dir, 'metadata', 'layout.conf'),
+                      layout_conf, makedirs=True)
+
+  def testLayoutFormat(self):
+    """Test that layout.conf is correctly formatted."""
+    brick = self.CreateBrick()
+    content = {'repo-name': 'hello',
+               'bar': 'foo'}
+    brick._WriteLayoutConf(content)
+
+    path = os.path.join(brick.OverlayDir(), 'metadata', 'layout.conf')
+    layout_conf = osutils.ReadFile(path).split('\n')
+
+    # Explicit keys plus a default injected by _DEFAULT_LAYOUT_CONF.
+    expected_lines = ['repo-name = hello',
+                      'bar = foo',
+                      'profile-formats = portage-2 profile-default-eapi']
+    for line in expected_lines:
+      self.assertTrue(line in layout_conf)
+
+  def testConfigurationGenerated(self):
+    """Test that portage's files are generated when the config file changes."""
+    brick = self.CreateBrick()
+    sample_config = {'name': 'hello',
+                     'dependencies': []}
+
+    brick.UpdateConfig(sample_config)
+
+    self.assertExists(brick._LayoutConfPath())
+
+  def testFindBrickInPath(self):
+    """Test that we can infer the current brick from the current directory."""
+    brick = self.CreateBrick()
+    # Remove the config so the default brick is no longer detectable.
+    os.remove(os.path.join(brick.brick_dir, brick_lib._CONFIG_FILE))
+    brick_dir = os.path.join(self.workspace_path, 'foo', 'bar', 'project')
+    expected_name = 'hello'
+    brick_lib.Brick(brick_dir, initial_config={'name': 'hello'})
+
+    with osutils.ChdirContext(self.workspace_path):
+      self.assertEqual(None, brick_lib.FindBrickInPath())
+
+    with osutils.ChdirContext(brick_dir):
+      self.assertEqual(expected_name,
+                       brick_lib.FindBrickInPath().config['name'])
+
+    # Detection must also work from nested subdirectories of the brick.
+    subdir = os.path.join(brick_dir, 'sub', 'directory')
+    osutils.SafeMakedirs(subdir)
+    with osutils.ChdirContext(subdir):
+      self.assertEqual(expected_name,
+                       brick_lib.FindBrickInPath().config['name'])
+
+  def testBrickCreation(self):
+    """Test that brick initialization throws the right errors."""
+    brick = self.CreateBrick()
+    with self.assertRaises(brick_lib.BrickCreationFailed):
+      brick_lib.Brick(brick.brick_dir, initial_config={})
+
+    nonexistingbrick = os.path.join(self.workspace_path, 'foo')
+    with self.assertRaises(brick_lib.BrickNotFound):
+      brick_lib.Brick(nonexistingbrick)
+
+  def testLoadNonExistingBrickFails(self):
+    """Tests that trying to load a non-existing brick fails."""
+    self.assertRaises(brick_lib.BrickNotFound, brick_lib.Brick,
+                      self.workspace_path)
+
+  def testLoadExistingNormalBrickSucceeds(self):
+    """Tests that loading an existing brick works."""
+    brick = self.CreateBrick(name='my_brick')
+    brick = brick_lib.Brick(brick.brick_dir, allow_legacy=False)
+    self.assertEquals('my_brick', brick.config.get('name'))
+
+  def testLoadExistingLegacyBrickFailsIfNotAllowed(self):
+    """Tests that loading a legacy brick fails when not allowed."""
+    self.SetupLegacyBrick()
+    with self.assertRaises(brick_lib.BrickNotFound):
+      brick_lib.Brick(self.workspace_path, allow_legacy=False)
+
+  def testLoadExistingLegacyBrickSucceeds(self):
+    """Tests that loading a legacy brick succeeds when allowed."""
+    self.SetupLegacyBrick()
+    brick = brick_lib.Brick(self.workspace_path)
+    self.assertEquals('foo', brick.config.get('name'))
+
+  def testLegacyBrickUpdateConfigFails(self):
+    """Tests that a legacy brick config cannot be updated."""
+    self.SetupLegacyBrick()
+    brick = brick_lib.Brick(self.workspace_path)
+    with self.assertRaises(brick_lib.BrickFeatureNotSupported):
+      brick.UpdateConfig({'name': 'bar'})
+
+  def testInherits(self):
+    """Tests the containment checking works as intended."""
+    saved_root = constants.SOURCE_ROOT
+
+    try:
+      # Mock the source root so that we can create fake legacy overlay.
+      constants.SOURCE_ROOT = self.workspace_path
+      legacy = os.path.join(self.workspace_path, 'src', 'overlays',
+                            'overlay-foobar')
+      self.SetupLegacyBrick(brick_dir=legacy, brick_name='foobar')
+
+      bar_brick = brick_lib.Brick('//bar', initial_config={'name': 'bar'})
+      foo_brick = brick_lib.Brick(
+          '//foo', initial_config={'name': 'foo',
+                                   'dependencies': ['//bar', 'board:foobar']})
+
+      self.assertTrue(bar_brick.Inherits('bar'))
+      self.assertTrue(foo_brick.Inherits('bar'))
+      self.assertFalse(bar_brick.Inherits('foo'))
+      self.assertTrue(foo_brick.Inherits('foobar'))
+      self.assertFalse(foo_brick.Inherits('dontexist'))
+
+    finally:
+      constants.SOURCE_ROOT = saved_root
+
+  def testOverlayDir(self):
+    """Tests that overlay directory is returned correctly."""
+    self.assertExists(self.CreateBrick().OverlayDir())
+
+  def testOpenUsingLocator(self):
+    """Tests that we can open a brick given a locator."""
+    brick_lib.Brick(os.path.join(self.workspace_path, 'foo'),
+                    initial_config={'name': 'foo'})
+
+    brick_lib.Brick('//foo')
+
+    with self.assertRaises(brick_lib.BrickNotFound):
+      brick_lib.Brick('//doesnotexist')
+
+  def testCreateUsingLocator(self):
+    """Tests that we can create a brick using a locator."""
+    brick_lib.Brick('//foobar', initial_config={'name': 'foobar'})
+    brick_lib.Brick('//bricks/some/path', initial_config={'name': 'path'})
+
+    # Reopen by locator and by absolute path; both must resolve.
+    brick_lib.Brick('//foobar')
+    brick_lib.Brick('//bricks/some/path')
+
+    brick_lib.Brick(os.path.join(self.workspace_path, 'foobar'))
+    brick_lib.Brick(os.path.join(self.workspace_path, 'bricks', 'some', 'path'))
+
+  def testFriendlyName(self):
+    """Tests that the friendly name generation works."""
+    first = brick_lib.Brick('//foo/bar/test', initial_config={'name': 'test'})
+    self.assertEqual('foo.bar.test', first.FriendlyName())
+
+    second = brick_lib.Brick(os.path.join(self.workspace_path, 'test', 'foo'),
+                             initial_config={'name': 'foo'})
+    self.assertEqual('test.foo', second.FriendlyName())
+
+  def testMissingDependency(self):
+    """Tests that the brick creation fails when a dependency is missing."""
+    with self.assertRaises(brick_lib.BrickCreationFailed):
+      brick_lib.Brick('//bar',
+                      initial_config={'name':'bar',
+                                      'dependencies':['//dont/exist']})
+
+    # If the creation failed, the directory is removed cleanly.
+    self.assertFalse(os.path.exists(workspace_lib.LocatorToPath('//bar')))
+
+  def testNormalizedDependencies(self):
+    """Tests that dependencies are normalized during brick creation."""
+    brick_lib.Brick('//foo/bar', initial_config={'name': 'bar'})
+    with osutils.ChdirContext(os.path.join(self.workspace_path, 'foo')):
+      brick_lib.Brick('//baz', initial_config={'name': 'baz',
+                                               'dependencies': ['bar']})
+
+    # The relative path 'bar' must have been converted to a locator.
+    deps = brick_lib.Brick('//baz').config['dependencies']
+    self.assertEqual(1, len(deps))
+    self.assertEqual('//foo/bar', deps[0])
+
+  def testBrickStack(self):
+    """Tests that the brick stacking is correct."""
+    def brick_dep(name, deps):
+      config = {'name': os.path.basename(name),
+                'dependencies': deps}
+      return brick_lib.Brick(name, initial_config=config)
+
+    brick_dep('//first', [])
+    brick_dep('//second', ['//first'])
+    third = brick_dep('//third', ['//first', '//second'])
+    fourth = brick_dep('//fourth', ['//second', '//first'])
+
+    self.assertEqual(['//first', '//second', '//third'],
+                     [b.brick_locator for b in third.BrickStack()])
+
+    self.assertEqual(['//first', '//second', '//fourth'],
+                     [b.brick_locator for b in fourth.BrickStack()])
diff --git a/lib/cache.py b/lib/cache.py
new file mode 100644
index 0000000..50711e3
--- /dev/null
+++ b/lib/cache.py
@@ -0,0 +1,320 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Contains on-disk caching functionality."""
+
+from __future__ import print_function
+
+import errno
+import os
+import shutil
+import tempfile
+import urlparse
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import locking
+from chromite.lib import osutils
+from chromite.lib import retry_util
+
+
+# pylint: disable=protected-access
+
+
+def EntryLock(f):
+  """Decorator that provides monitor access control.
+
+  Serializes calls to the decorated CacheReference method through the
+  per-entry monitor lock, refusing entry while a read lock is held.
+  """
+
+  def new_f(self, *args, **kwargs):
+    # Ensure we don't have a read lock before potentially blocking while trying
+    # to access the monitor.
+    if self.read_locked:
+      raise AssertionError(
+          'Cannot call %s while holding a read lock.' % f.__name__)
+
+    with self._entry_lock:
+      self._entry_lock.write_lock()
+      return f(self, *args, **kwargs)
+  return new_f
+
+
+def WriteLock(f):
+  """Decorator that runs the method while holding the entry's write lock."""
+
+  def new_f(self, *args, **kwargs):
+    with self._lock.write_lock():
+      return f(self, *args, **kwargs)
+  return new_f
+
+
+class CacheReference(object):
+  """Encapsulates operations on a cache key reference.
+
+  CacheReferences are returned by the DiskCache.Lookup() function.  They are
+  used to read from and insert into the cache.
+
+  A typical example of using a CacheReference:
+
+  @contextlib.contextmanager
+  def FetchFromCache()
+    with cache.Lookup(key) as ref:
+       # If entry doesn't exist in cache already, generate it ourselves, and
+       # insert it into the cache, acquiring a read lock on it in the process.
+       # If the entry does exist, we grab a read lock on it.
+      if not ref.Exists(lock=True):
+        path = PrepareItem()
+        ref.SetDefault(path, lock=True)
+
+      # yield the path to the cached entry to consuming code.
+      yield ref.path
+  """
+
+  def __init__(self, cache, key):
+    self._cache = cache
+    self.key = key
+    self.acquired = False
+    self.read_locked = False
+    # Main lock guarding the entry's contents, plus a separate monitor lock
+    # used by the EntryLock decorator to serialize public operations.
+    self._lock = cache._LockForKey(key)
+    self._entry_lock = cache._LockForKey(key, suffix='.entry_lock')
+
+  @property
+  def path(self):
+    """Returns on-disk path to the cached item."""
+    return self._cache._GetKeyPath(self.key)
+
+  def Acquire(self):
+    """Prepare the cache reference for operation.
+
+    This must be called (either explicitly or through entering a 'with'
+    context) before calling any methods that acquire locks, or mutates
+    reference.
+    """
+    if self.acquired:
+      raise AssertionError(
+          'Attempting to acquire an already acquired reference.')
+
+    self.acquired = True
+    self._lock.__enter__()
+
+  def Release(self):
+    """Release the cache reference.  Causes any held locks to be released."""
+    if not self.acquired:
+      raise AssertionError(
+          'Attempting to release an unacquired reference.')
+
+    self.acquired = False
+    self._lock.__exit__(None, None, None)
+
+  def __enter__(self):
+    self.Acquire()
+    return self
+
+  def __exit__(self, *args):
+    self.Release()
+
+  def _ReadLock(self):
+    # Remember that a read lock is held so EntryLock can refuse re-entry.
+    self._lock.read_lock()
+    self.read_locked = True
+
+  @WriteLock
+  def _Assign(self, path):
+    self._cache._Insert(self.key, path)
+
+  @WriteLock
+  def _AssignText(self, text):
+    self._cache._InsertText(self.key, text)
+
+  @WriteLock
+  def _Remove(self):
+    self._cache._Remove(self.key)
+
+  def _Exists(self):
+    return self._cache._KeyExists(self.key)
+
+  @EntryLock
+  def Assign(self, path):
+    """Insert a file or a directory into the cache at the referenced key."""
+    self._Assign(path)
+
+  @EntryLock
+  def AssignText(self, text):
+    """Create a file containing |text| and assign it to the key.
+
+    Args:
+      text: Can be a string or an iterable.
+    """
+    self._AssignText(text)
+
+  @EntryLock
+  def Remove(self):
+    """Removes the entry from the cache."""
+    self._Remove()
+
+  @EntryLock
+  def Exists(self, lock=False):
+    """Tests for existence of entry.
+
+    Args:
+      lock: If the entry exists, acquire and maintain a read lock on it.
+    """
+    if self._Exists():
+      if lock:
+        self._ReadLock()
+      return True
+    return False
+
+  @EntryLock
+  def SetDefault(self, default_path, lock=False):
+    """Assigns default_path if the entry doesn't exist.
+
+    Args:
+      default_path: The path to assign if the entry doesn't exist.
+      lock: Acquire and maintain a read lock on the entry.
+    """
+    if not self._Exists():
+      self._Assign(default_path)
+    if lock:
+      self._ReadLock()
+
+
+class DiskCache(object):
+  """Locked file system cache keyed by tuples.
+
+  Key entries can be files or directories.  Access to the cache is provided
+  through CacheReferences, which are retrieved by using the cache Lookup()
+  method.
+  """
+  # TODO(rcui): Add LRU cleanup functionality.
+
+  _STAGING_DIR = 'staging'
+
+  def __init__(self, cache_dir):
+    self._cache_dir = cache_dir
+    # Staging lives inside the cache dir so moves into place stay in-tree.
+    self.staging_dir = os.path.join(cache_dir, self._STAGING_DIR)
+
+    osutils.SafeMakedirsNonRoot(self._cache_dir)
+    osutils.SafeMakedirsNonRoot(self.staging_dir)
+
+  def _KeyExists(self, key):
+    """Returns whether an entry for |key| exists on disk."""
+    return os.path.exists(self._GetKeyPath(key))
+
+  def _GetKeyPath(self, key):
+    """Get the on-disk path of a key."""
+    # Key tuples map to a single path component joined by '+'.
+    return os.path.join(self._cache_dir, '+'.join(key))
+
+  def _LockForKey(self, key, suffix='.lock'):
+    """Returns an unacquired lock associated with a key."""
+    key_path = self._GetKeyPath(key)
+    osutils.SafeMakedirsNonRoot(os.path.dirname(key_path))
+    lock_path = os.path.join(self._cache_dir, os.path.dirname(key_path),
+                             os.path.basename(key_path) + suffix)
+    return locking.FileLock(lock_path)
+
+  def _TempDirContext(self):
+    """Returns a TempDir context rooted in the staging directory."""
+    return osutils.TempDir(base_dir=self.staging_dir)
+
+  def _Insert(self, key, path):
+    """Insert a file or a directory into the cache at a given key."""
+    # Evict any previous entry before moving the new one into place.
+    self._Remove(key)
+    key_path = self._GetKeyPath(key)
+    osutils.SafeMakedirsNonRoot(os.path.dirname(key_path))
+    shutil.move(path, key_path)
+
+  def _InsertText(self, key, text):
+    """Inserts a file containing |text| into the cache."""
+    with self._TempDirContext() as tempdir:
+      file_path = os.path.join(tempdir, 'tempfile')
+      osutils.WriteFile(file_path, text)
+      self._Insert(key, file_path)
+
+  def _Remove(self, key):
+    """Remove a key from the cache."""
+    if self._KeyExists(key):
+      # Move into a temp dir which is deleted on context exit.
+      with self._TempDirContext() as tempdir:
+        shutil.move(self._GetKeyPath(key), tempdir)
+
+  def Lookup(self, key):
+    """Get a reference to a given key."""
+    return CacheReference(self, key)
+
+
+class RemoteCache(DiskCache):
+  """Supports caching of remote objects via URI."""
+
+  def _Fetch(self, url, local_path):
+    """Fetch a remote file."""
+    # We have to nest the import because gs.GSContext uses us to cache its own
+    # gsutil tarball.  We know we won't get into a recursive loop though as it
+    # only fetches files via non-gs URIs.
+    from chromite.lib import gs
+
+    if gs.PathIsGs(url):
+      ctx = gs.GSContext()
+      ctx.Copy(url, local_path)
+    else:
+      # Note: unittests assume local_path is at the end.
+      retry_util.RunCurl([url, '-o', local_path], debug_level=logging.DEBUG)
+
+  def _Insert(self, key, url):
+    """Insert a remote file into the cache."""
+    o = urlparse.urlparse(url)
+    if o.scheme in ('file', ''):
+      # Local path or file:// URI: insert directly, no download needed.
+      DiskCache._Insert(self, key, o.path)
+      return
+
+    # Download into the staging dir first; the move into the cache then stays
+    # within the cache directory tree.
+    with tempfile.NamedTemporaryFile(dir=self.staging_dir,
+                                     delete=False) as local_path:
+      self._Fetch(url, local_path.name)
+      DiskCache._Insert(self, key, local_path.name)
+
+
+def Untar(path, cwd, sudo=False):
+  """Untar a tarball.
+
+  Args:
+    path: Path to the tarball to extract.
+    cwd: Directory to extract into.
+    sudo: If True, run tar with root privileges.
+  """
+  functor = cros_build_lib.SudoRunCommand if sudo else cros_build_lib.RunCommand
+  functor(['tar', '-xpf', path], cwd=cwd, debug_level=logging.DEBUG)
+
+
+class TarballCache(RemoteCache):
+  """Supports caching of extracted tarball contents."""
+
+  def _Insert(self, key, tarball_path):
+    """Insert a tarball and its extracted contents into the cache.
+
+    Download the tarball first if a URL is provided as tarball_path.
+    """
+    with osutils.TempDir(prefix='tarball-cache',
+                         base_dir=self.staging_dir) as tempdir:
+
+      o = urlparse.urlsplit(tarball_path)
+      if o.scheme == 'file':
+        tarball_path = o.path
+      elif o.scheme:
+        # Any other scheme: fetch the tarball into the temp dir first.
+        url = tarball_path
+        tarball_path = os.path.join(tempdir, os.path.basename(o.path))
+        self._Fetch(url, tarball_path)
+
+      # Cache the extracted contents rather than the tarball itself.
+      extract_path = os.path.join(tempdir, 'extract')
+      os.mkdir(extract_path)
+      Untar(tarball_path, extract_path)
+      DiskCache._Insert(self, key, extract_path)
+
+  def _KeyExists(self, key):
+    """Specialized DiskCache._KeyExists that ignores empty directories.
+
+    The normal _KeyExists just checks to see if the key path exists in the cache
+    directory. Many tests mock out RunCommand then fetch a tarball. The mock
+    blocks untarring into it. This leaves behind an empty dir which blocks
+    future untarring in non-test scripts.
+
+    See crbug.com/468838
+    """
+    # Wipe out empty directories before testing for existence.
+    key_path = self._GetKeyPath(key)
+
+    try:
+      os.rmdir(key_path)
+    except OSError as ex:
+      # ENOTEMPTY: real cached contents exist; ENOENT: nothing cached at all.
+      if ex.errno not in (errno.ENOTEMPTY, errno.ENOENT):
+        raise
+
+    return os.path.exists(key_path)
diff --git a/lib/cache_unittest b/lib/cache_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/cache_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/cache_unittest.py b/lib/cache_unittest.py
new file mode 100644
index 0000000..a0796be
--- /dev/null
+++ b/lib/cache_unittest.py
@@ -0,0 +1,260 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the cache.py module."""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from chromite.lib import gs_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import cache
+from chromite.lib import osutils
+from chromite.lib import partial_mock
+from chromite.lib import retry_util
+
+
+class CacheReferenceTest(cros_test_lib.TestCase):
+  """Tests for CacheReference.
+
+  Largely focused on exercising the API other objects expect from it.
+  """
+
+  # pylint: disable=protected-access
+
+  def setUp(self):
+    # These are the funcs CacheReference expects the cache object to have.
+    spec = (
+        '_GetKeyPath',
+        '_Insert',
+        '_InsertText',
+        '_KeyExists',
+        '_LockForKey',
+        '_Remove',
+    )
+    self.cache = mock.Mock(spec=spec)
+    # The lock is a MagicMock so __enter__/__exit__ are auto-mocked for
+    # context-manager use.
+    self.lock = mock.MagicMock()
+    self.cache._LockForKey.return_value = self.lock
+
+  def testContext(self):
+    """Verify we can use it as a context manager."""
+    # We should set the acquire member and grab/release the lock.
+    ref = cache.CacheReference(self.cache, 'key')
+    self.assertFalse(ref.acquired)
+    self.assertFalse(self.lock.__enter__.called)
+    with ref as newref:
+      self.assertEqual(ref, newref)
+      self.assertTrue(ref.acquired)
+      self.assertTrue(self.lock.__enter__.called)
+      self.assertFalse(self.lock.__exit__.called)
+    self.assertFalse(ref.acquired)
+    self.assertTrue(self.lock.__exit__.called)
+
+  def testPath(self):
+    """Verify we get a file path for the ref."""
+    self.cache._GetKeyPath.return_value = '/foo/bar'
+
+    ref = cache.CacheReference(self.cache, 'key')
+    self.assertEqual(ref.path, '/foo/bar')
+
+    self.cache._GetKeyPath.assert_called_once_with('key')
+
+  def testLocking(self):
+    """Verify Acquire & Release work as expected."""
+    ref = cache.CacheReference(self.cache, 'key')
+
+    # Check behavior when the lock is free.
+    self.assertRaises(AssertionError, ref.Release)
+    self.assertFalse(ref.acquired)
+
+    # Check behavior when the lock is held.
+    self.assertEqual(ref.Acquire(), None)
+    self.assertRaises(AssertionError, ref.Acquire)
+    self.assertTrue(ref.acquired)
+
+    # Check behavior after the lock is freed.
+    self.assertEqual(ref.Release(), None)
+    self.assertFalse(ref.acquired)
+
+  # NOTE(review): the names of testExists/testExistsMissing appear swapped
+  # relative to their docstrings and bodies -- confirm intent before renaming.
+  def testExists(self):
+    """Verify Exists works when the entry is not in the cache."""
+    ref = cache.CacheReference(self.cache, 'key')
+    self.cache._KeyExists.return_value = False
+    self.assertFalse(ref.Exists())
+
+  def testExistsMissing(self):
+    """Verify Exists works when the entry is in the cache."""
+    ref = cache.CacheReference(self.cache, 'key')
+    self.cache._KeyExists.return_value = True
+    self.assertTrue(ref.Exists())
+
+  def testAssign(self):
+    """Verify Assign works as expected."""
+    ref = cache.CacheReference(self.cache, 'key')
+    ref.Assign('/foo')
+    self.cache._Insert.assert_called_once_with('key', '/foo')
+
+  def testAssignText(self):
+    """Verify AssignText works as expected."""
+    ref = cache.CacheReference(self.cache, 'key')
+    ref.AssignText('text!')
+    self.cache._InsertText.assert_called_once_with('key', 'text!')
+
+  def testRemove(self):
+    """Verify Remove works as expected."""
+    ref = cache.CacheReference(self.cache, 'key')
+    ref.Remove()
+    self.cache._Remove.assert_called_once_with('key')
+
+  def testSetDefault(self):
+    """Verify SetDefault works when the entry is not in the cache."""
+    ref = cache.CacheReference(self.cache, 'key')
+    self.cache._KeyExists.return_value = False
+    ref.SetDefault('/foo')
+    self.cache._Insert.assert_called_once_with('key', '/foo')
+
+  def testSetDefaultExists(self):
+    """Verify SetDefault works when the entry is in the cache."""
+    ref = cache.CacheReference(self.cache, 'key')
+    self.cache._KeyExists.return_value = True
+    ref.SetDefault('/foo')
+    self.assertFalse(self.cache._Insert.called)
+
+
+class CacheTestCase(cros_test_lib.MockTempDirTestCase):
+  """Tests for any type of Cache object.
+
+  The _test* methods below are shared scenarios; concrete subclasses bind
+  them to real test names (e.g. testAssign = CacheTestCase._testAssign).
+  """
+
+  def setUp(self):
+    # Mock out GSContext so subclasses that fetch gs:// URLs never touch
+    # real Google Storage.
+    self.gs_mock = self.StartPatcher(gs_unittest.GSContextMock())
+
+  def _testAssign(self):
+    """Verify we can assign a file to the cache and get it back out."""
+    key = ('foo', 'bar')
+    data = r'text!\nthere'
+
+    path = os.path.join(self.tempdir, 'test-file')
+    osutils.WriteFile(path, data)
+
+    with self.cache.Lookup(key) as ref:
+      self.assertFalse(ref.Exists())
+      ref.Assign(path)
+      self.assertTrue(ref.Exists())
+      self.assertEqual(osutils.ReadFile(ref.path), data)
+
+    # A second lookup must see the previously inserted content.
+    with self.cache.Lookup(key) as ref:
+      self.assertTrue(ref.Exists())
+      self.assertEqual(osutils.ReadFile(ref.path), data)
+
+  def _testAssignData(self):
+    """Verify we can assign data to the cache and get it back out."""
+    key = ('foo', 'bar')
+    data = r'text!\nthere'
+
+    with self.cache.Lookup(key) as ref:
+      self.assertFalse(ref.Exists())
+      ref.AssignText(data)
+      self.assertTrue(ref.Exists())
+      self.assertEqual(osutils.ReadFile(ref.path), data)
+
+    with self.cache.Lookup(key) as ref:
+      self.assertTrue(ref.Exists())
+      self.assertEqual(osutils.ReadFile(ref.path), data)
+
+  def _testRemove(self):
+    """Verify we can remove entries from the cache."""
+    key = ('foo', 'bar')
+    data = r'text!\nthere'
+
+    with self.cache.Lookup(key) as ref:
+      self.assertFalse(ref.Exists())
+      ref.AssignText(data)
+      self.assertTrue(ref.Exists())
+      ref.Remove()
+      self.assertFalse(ref.Exists())
+
+
+class DiskCacheTest(CacheTestCase):
+  """Tests for DiskCache."""
+
+  def setUp(self):
+    self.cache = cache.DiskCache(self.tempdir)
+
+  # Bind the shared scenarios from CacheTestCase as real tests.
+  testAssign = CacheTestCase._testAssign
+  testAssignData = CacheTestCase._testAssignData
+  testRemove = CacheTestCase._testRemove
+
+
+class RemoteCacheTest(CacheTestCase):
+  """Tests for RemoteCache."""
+
+  def setUp(self):
+    self.cache = cache.RemoteCache(self.tempdir)
+
+  # Bind the shared scenarios from CacheTestCase as real tests.
+  testAssign = CacheTestCase._testAssign
+  testAssignData = CacheTestCase._testAssignData
+  testRemove = CacheTestCase._testRemove
+
+  def testFetchFile(self):
+    """Verify we handle file:// URLs."""
+    key = ('file', 'foo')
+    data = 'daaaaata'
+
+    path = os.path.join(self.tempdir, 'test-file')
+    url = 'file://%s' % path
+    osutils.WriteFile(path, data)
+
+    with self.cache.Lookup(key) as ref:
+      self.assertFalse(ref.Exists())
+      ref.Assign(url)
+      self.assertTrue(ref.Exists())
+      self.assertEqual(osutils.ReadFile(ref.path), data)
+
+  def testFetchNonGs(self):
+    """Verify we fetch remote URLs and save the result."""
+    # Stub out curl so the "fetch" just creates the target file locally.
+    def _Fetch(*args, **_kwargs):
+      # Probably shouldn't assume this ordering, but best way for now.
+      cmd = args[0]
+      local_path = cmd[-1]
+      osutils.Touch(local_path)
+    self.PatchObject(retry_util, 'RunCurl', side_effect=_Fetch)
+
+    schemes = ('ftp', 'http', 'https')
+    for scheme in schemes:
+      key = (scheme, 'foo')
+      url = '%s://some.site.localdomain/file_go_boom' % scheme
+      with self.cache.Lookup(key) as ref:
+        self.assertFalse(ref.Exists())
+        ref.Assign(url)
+        self.assertTrue(ref.Exists())
+
+  def testFetchGs(self):
+    """Verify we fetch from Google Storage and save the result."""
+    # pylint: disable=unused-argument
+    def _Fetch(_ctx, cmd, capture_output):
+      # Touch the file we tried to copy to.
+      osutils.Touch(cmd[-1])
+
+    self.gs_mock.AddCmdResult(
+        ['cp', '-v', '--', partial_mock.Ignore(), partial_mock.Ignore()],
+        side_effect=_Fetch)
+
+    key = ('gs',)
+    url = 'gs://some.site.localdomain/file_go_boom'
+    with self.cache.Lookup(key) as ref:
+      self.assertFalse(ref.Exists())
+      ref.Assign(url)
+      self.assertTrue(ref.Exists())
+
+
+class TarballCacheTest(CacheTestCase):
+  """Tests for TarballCache."""
+
+  def setUp(self):
+    # NOTE(review): this instantiates RemoteCache, not cache.TarballCache, so
+    # tarball-specific behavior (_Insert untarring, the _KeyExists override)
+    # is never exercised here -- confirm whether that's intentional.
+    self.cache = cache.RemoteCache(self.tempdir)
+
+  # Bind the shared scenarios from CacheTestCase as real tests.
+  testAssign = CacheTestCase._testAssign
+  testAssignData = CacheTestCase._testAssignData
+  testRemove = CacheTestCase._testRemove
diff --git a/lib/cgroups.py b/lib/cgroups.py
new file mode 100644
index 0000000..334b825
--- /dev/null
+++ b/lib/cgroups.py
@@ -0,0 +1,766 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A class for managing the Linux cgroup subsystem."""
+
+from __future__ import print_function
+
+import errno
+import os
+import signal
+import time
+
+from chromite.lib import cros_build_lib
+from chromite.lib import locking
+from chromite.lib import osutils
+from chromite.lib import signals
+from chromite.lib import sudo
+
+
+# Rough hierarchy sketch:
+# - all cgroup aware cros code should nest here.
+# - No cros code should modify this namespace- this is user/system configurable
+# - only.  A release_agent can be specified, although we won't use it.
+# cros/
+#
+# - cbuildbot instances land here only when they're cleaning their task pool.
+# - this root namespace is *not* auto-removed; it's left so that user/system
+# - configuration is persistent.
+# cros/%(process-name)s/
+# cros/cbuildbot/
+#
+# - a cbuildbot job pool, owned by pid.  These are autocleaned.
+# cros/cbuildbot/%(pid)i/
+#
+# - a job pool using process that was invoked by cbuildbot.
+# - for example, cros/cbuildbot/42/cros_sdk:34
+# - this pattern continues arbitrarily deep, and is autocleaned.
+# cros/cbuildbot/%(pid1)i/%(basename_of_pid2)s:%(pid2)i/
+#
+# An example for cros_sdk (pid 552) would be:
+# cros/cros_sdk/552/
+# and its children would be accessible in 552/tasks, or
+# would create their own namespace w/in and assign themselves to it.
+
+
+class _GroupWasRemoved(Exception):
+  """Exception representing when a group was unexpectedly removed.
+
+  By design, this should only be possible when instantiating a new
+  pool, but the parent pool has been removed- this means effectively that
+  we're supposed to shutdown (either we've been sigterm'd and ignored it,
+  or it's imminent).
+  """
+
+
+def _FileContains(filename, strings):
+  """Greps a group of expressions, returns whether all were found."""
+  contents = osutils.ReadFile(filename)
+  return all(s in contents for s in strings)
+
+
+def EnsureInitialized(functor):
+  """Decorator for Cgroup methods to ensure the method is ran only if inited"""
+
+  def f(self, *args, **kwargs):
+    # pylint: disable=W0212
+    self.Instantiate()
+    return functor(self, *args, **kwargs)
+
+  # Dummy up our wrapper to make it look like what we're wrapping,
+  # and expose the underlying docstrings.
+  f.__name__ = functor.__name__
+  f.__doc__ = functor.__doc__
+  f.__module__ = functor.__module__
+  return f
+
+
+class Cgroup(object):
+
+  """Class representing a group in cgroups hierarchy.
+
+  Note the instance may not exist on disk; it will be created as necessary.
+  Additionally, because cgroups is kernel maintained (and mutated on the fly
+  by processes using it), chunks of this class are /explicitly/ designed to
+  always go back to disk and recalculate values.
+
+  Attributes:
+    path: Absolute on disk pathway to the cgroup directory.
+    tasks: Pids contained in this immediate cgroup, and the owning pids of
+      any first level groups nested w/in us.
+    all_tasks: All Pids, and owners of nested groups w/in this point in
+      the hierarchy.
+    nested_groups: The immediate cgroups nested w/in this one.  If this
+      cgroup is 'cbuildbot/buildbot', 'cbuildbot' would have a nested_groups
+      of [Cgroup('cbuildbot/buildbot')] for example.
+    all_nested_groups: All cgroups nested w/in this one, regardless of depth.
+    pid_owner: Which pid owns this cgroup, if the cgroup is following cros
+      conventions for group naming.
+  """
+
+  # Subsystems that must be available before cgroups are considered usable.
+  NEEDED_SUBSYSTEMS = ('cpuset',)
+  PROC_PATH = '/proc/cgroups'
+  # Candidate mount locations, probed in order by IsSupported().
+  _MOUNT_ROOT_POTENTIALS = ('/sys/fs/cgroup/cpuset', '/sys/fs/cgroup')
+  _MOUNT_ROOT_FALLBACK = '/dev/cgroup'
+  # Filled in by IsSupported(); None until support has been probed.
+  CGROUP_ROOT = None
+  MOUNT_ROOT = None
+  # Whether or not the cgroup implementation does auto inheritance via
+  # cgroup.clone_children
+  _SUPPORTS_AUTOINHERIT = False
+
+  @classmethod
+  @cros_build_lib.MemoizedSingleCall
+  def InitSystem(cls):
+    """If cgroups are supported, initialize the system state.
+
+    Returns:
+      True if the needed mounts are in place; False if unsupported.
+    """
+    if not cls.IsSupported():
+      return False
+
+    def _EnsureMounted(mnt, args):
+      # Fast path: already mounted at the expected destination.
+      for mtab in osutils.IterateMountPoints():
+        if mtab.destination == mnt:
+          return True
+
+      # Grab a lock so in the off chance we have multiple programs (like two
+      # cros_sdk launched in parallel) running this init logic, we don't end
+      # up mounting multiple times.
+      lock_path = '/tmp/.chromite.cgroups.lock'
+      with locking.FileLock(lock_path, 'cgroup lock') as lock:
+        lock.write_lock()
+        # Re-check under the lock; another process may have mounted it while
+        # we were waiting.
+        for mtab in osutils.IterateMountPoints():
+          if mtab.destination == mnt:
+            return True
+
+        # Not all distros mount cgroup_root to sysfs.
+        osutils.SafeMakedirs(mnt, sudo=True)
+        cros_build_lib.SudoRunCommand(['mount'] + args + [mnt], print_cmd=False)
+
+      return True
+
+    mount_root_args = ['-t', 'tmpfs', 'cgroup_root']
+
+    opts = ','.join(cls.NEEDED_SUBSYSTEMS)
+    cgroup_root_args = ['-t', 'cgroup', '-o', opts, 'cros']
+
+    return _EnsureMounted(cls.MOUNT_ROOT, mount_root_args) and \
+        _EnsureMounted(cls.CGROUP_ROOT, cgroup_root_args)
+
+  @classmethod
+  @cros_build_lib.MemoizedSingleCall
+  def IsUsable(cls):
+    """Function to sanity check if everything is setup to use cgroups"""
+    if not cls.InitSystem():
+      return False
+    # Record whether this kernel auto-copies parent settings into new child
+    # groups; Instantiate() uses this to decide whether to copy manually.
+    cls._SUPPORTS_AUTOINHERIT = os.path.exists(
+        os.path.join(cls.CGROUP_ROOT, 'cgroup.clone_children'))
+    return True
+
+  @classmethod
+  @cros_build_lib.MemoizedSingleCall
+  def IsSupported(cls):
+    """Sanity check as to whether or not cgroups are supported.
+
+    As a side effect, sets cls.MOUNT_ROOT and cls.CGROUP_ROOT on success.
+    """
+    # Is the cgroup subsystem even enabled?
+
+    if not os.path.exists(cls.PROC_PATH):
+      return False
+
+    # Does it support the subsystems we want?
+    if not _FileContains(cls.PROC_PATH, cls.NEEDED_SUBSYSTEMS):
+      return False
+
+    # Use the first known mount location that exists, else the fallback.
+    for potential in cls._MOUNT_ROOT_POTENTIALS:
+      if os.path.exists(potential):
+        cls.MOUNT_ROOT = potential
+        break
+    else:
+      cls.MOUNT_ROOT = cls._MOUNT_ROOT_FALLBACK
+    cls.MOUNT_ROOT = os.path.realpath(cls.MOUNT_ROOT)
+
+    cls.CGROUP_ROOT = os.path.join(cls.MOUNT_ROOT, 'cros')
+    return True
+
+  def __init__(self, namespace, autoclean=True, lazy_init=False, parent=None,
+               _is_root=False, _overwrite=True):
+    """Initialize a cgroup instance.
+
+    Args:
+      namespace: What cgroup namespace is this in?  cbuildbot/1823 for example.
+      autoclean: Should this cgroup be removed once unused?
+      lazy_init: Should we create the cgroup immediately, or when needed?
+      parent: A Cgroup instance; if the namespace is cbuildbot/1823, then the
+        parent *must* be the cgroup instance for namespace cbuildbot.
+      _is_root:  Internal option, shouldn't be used by consuming code.
+      _overwrite: Internal option, shouldn't be used by consuming code.
+
+    Raises:
+      ValueError: If parent is None for a non-root group, or if the
+        namespace normalizes to an empty value.
+    """
+    self._inited = None
+    self._overwrite = bool(_overwrite)
+    if _is_root:
+      namespace = '.'
+      self._inited = True
+    else:
+      namespace = os.path.normpath(namespace)
+      if parent is None:
+        raise ValueError("Either _is_root must be set to True, or parent must "
+                         "be non null")
+      if namespace in ('.', ''):
+        raise ValueError("Invalid namespace %r was given" % (namespace,))
+
+    self.namespace = namespace
+    self.autoclean = autoclean
+    self.parent = parent
+
+    if not lazy_init:
+      self.Instantiate()
+
+  def _LimitName(self, name, for_path=False, multilevel=False):
+    """Translation function doing sanity checks on derivative namespaces
+
+    If you're extending this class, you should be using this for any namespace
+    operations that pass through a nested group.
+
+    Args:
+      name: The nested name to validate against this namespace.
+      for_path: If True, return the absolute on-disk path (requires
+        CGROUP_ROOT, thus potentially sudo) rather than the bare name.
+      multilevel: If True, allow names like 'a/b' that span levels.
+
+    Returns:
+      The normalized name, or its absolute path if for_path is set.
+
+    Raises:
+      ValueError: If the name escapes this namespace, is empty, or is
+        multilevel when multilevel=False.
+    """
+    # We use a fake pathway here, and this code must do so.  To calculate the
+    # real pathway requires knowing CGROUP_ROOT, which requires sudo
+    # potentially.  Since this code may be invoked just by loading the module,
+    # no execution/sudo should occur.  However, if for_path is set, we *do*
+    # require CGROUP_ROOT- which is fine, since we sort that on the way out.
+    fake_path = os.path.normpath(os.path.join('/fake-path', self.namespace))
+    path = os.path.normpath(os.path.join(fake_path, name))
+
+    # Ensure that the requested pathway isn't trying to sidestep what we
+    # expect, and in the process it does internal validation checks.
+    if not path.startswith(fake_path + '/'):
+      raise ValueError("Name %s tried descending through this namespace into"
+                       " another; this isn't allowed." % (name,))
+    elif path == self.namespace:
+      raise ValueError("Empty name %s" % (name,))
+    elif os.path.dirname(path) != fake_path and not multilevel:
+      raise ValueError("Name %s is multilevel, but disallowed." % (name,))
+
+    # Get the validated/normalized name.
+    name = path[len(fake_path):].strip('/')
+    if for_path:
+      return os.path.join(self.path, name)
+    return name
+
+  @property
+  def path(self):
+    # Absolute on-disk location of this cgroup's directory.
+    return os.path.abspath(os.path.join(self.CGROUP_ROOT, self.namespace))
+
+  @property
+  def tasks(self):
+    # Pids (as strings) listed directly in this group's tasks file, plus the
+    # owning pids of immediate child groups that follow the cros naming
+    # convention.
+    s = set(x.strip() for x in self.GetValue('tasks', '').splitlines())
+    s.update(x.pid_owner for x in self.nested_groups)
+    # pid_owner is None for groups outside the cros naming scheme.
+    s.discard(None)
+    return s
+
+  @property
+  def all_tasks(self):
+    # Union of our own tasks and those of every group nested below us.
+    s = self.tasks
+    for group in self.all_nested_groups:
+      s.update(group.tasks)
+    return s
+
+  @property
+  def nested_groups(self):
+    # Discover child groups from on-disk subdirectories; ENOENT simply means
+    # this group has no children (or doesn't exist yet).
+    targets = []
+    path = self.path
+    try:
+      targets = [x for x in os.listdir(path)
+                 if os.path.isdir(os.path.join(path, x))]
+    except EnvironmentError as e:
+      if e.errno != errno.ENOENT:
+        raise
+
+    targets = [self.AddGroup(x, lazy_init=True, _overwrite=False)
+               for x in targets]
+
+    # Suppress initialization checks- if it exists on disk, we know it
+    # is already initialized.
+    for x in targets:
+      # pylint: disable=protected-access
+      x._inited = True
+    return targets
+
+  @property
+  def all_nested_groups(self):
+    # Do a depth first traversal.
+    def walk(groups):
+      for group in groups:
+        for subgroup in walk(group.nested_groups):
+          yield subgroup
+        # Children are yielded before their parent group.
+        yield group
+    return list(walk(self.nested_groups))
+
+  @property
+  @cros_build_lib.MemoizedSingleCall
+  def pid_owner(self):
+    # Ensure it's in cros namespace- if it is outside of the cros namespace,
+    # we shouldn't make assumptions about the naming convention used.
+    if not self.GroupIsAParent(_cros_node):
+      return None
+    # See documentation at the top of the file for the naming scheme.
+    # It's basically "%(program_name)s:%(owning_pid)i" if the group
+    # is nested.
+    # Note: the pid is returned as a string, matching the tasks property.
+    return os.path.basename(self.namespace).rsplit(':', 1)[-1]
+
+  def GroupIsAParent(self, node):
+    """Is the given node a parent of us?"""
+    # Compare with a trailing slash so /cros/foo isn't mistaken for a parent
+    # of /cros/foobar.
+    parent_path = node.path + '/'
+    return self.path.startswith(parent_path)
+
+  def GetValue(self, key, default=None):
+    """Query a cgroup configuration key from disk.
+
+    If the file doesn't exist, return the given default.
+
+    Args:
+      key: Name of the cgroup control file to read, e.g. 'tasks'.
+      default: Value returned when the file is missing.
+
+    Returns:
+      The file contents as a string, or |default| if it doesn't exist.
+    """
+    try:
+      return osutils.ReadFile(os.path.join(self.path, key))
+    except EnvironmentError as e:
+      if e.errno != errno.ENOENT:
+        raise
+      return default
+
+  def _AddSingleGroup(self, name, **kwargs):
+    """Method for creating a node nested within this one.
+
+    Derivative classes should override this method rather than AddGroup;
+    see __init__ for the supported keywords.
+
+    Returns:
+      A new instance of this class for the nested namespace.
+    """
+    return self.__class__(os.path.join(self.namespace, name), **kwargs)
+
+  def AddGroup(self, name, **kwargs):
+    """Add and return a cgroup nested in this one.
+
+    See __init__ for the supported keywords.  If this isn't a direct child
+    (for example this instance is cbuildbot, and the name is 1823/x), it'll
+    create the intermediate groups as lazy_init=True, setting autoclean to
+    via the logic described for autoclean_parents below.
+
+    Args:
+      name: Name of group to add.
+      autoclean_parents: Optional keyword argument; if unspecified, it takes
+        the value of autoclean (or True if autoclean isn't specified).  This
+        controls whether any intermediate nodes that must be created for
+        multilevel groups are autocleaned.
+
+    Returns:
+      The Cgroup instance for the innermost (leaf) group added.
+    """
+    name = self._LimitName(name, multilevel=True)
+
+    autoclean = kwargs.pop('autoclean', True)
+    autoclean_parents = kwargs.pop('autoclean_parents', autoclean)
+    chunks = name.split('/', 1)
+    node = self
+    # pylint: disable=W0212
+    # Create each intermediate level, then the leaf with the caller's
+    # autoclean setting.
+    for chunk in chunks[:-1]:
+      node = node._AddSingleGroup(chunk, parent=node,
+                                  autoclean=autoclean_parents, **kwargs)
+    return node._AddSingleGroup(chunks[-1], parent=node,
+                                autoclean=autoclean, **kwargs)
+
+  @cros_build_lib.MemoizedSingleCall
+  def Instantiate(self):
+    """Ensure this group exists on disk in the cgroup hierarchy
+
+    Returns:
+      True once the group exists (or if this is the hierarchy root).
+    """
+
+    if self.namespace == '.':
+      # If it's the root of the hierarchy, leave it alone.
+      return True
+
+    if self.parent is not None:
+      self.parent.Instantiate()
+    osutils.SafeMakedirs(self.path, sudo=True)
+
+    # If the kernel already cloned the parent's settings into the new group,
+    # there's nothing to copy by hand.
+    force_inheritance = True
+    if self.parent.GetValue('cgroup.clone_children', '').strip() == '1':
+      force_inheritance = False
+
+    if force_inheritance:
+      if self._SUPPORTS_AUTOINHERIT:
+        # If the cgroup version supports it, flip the auto-inheritance setting
+        # on so that cgroups nested here don't have to manually transfer
+        # settings
+        self._SudoSet('cgroup.clone_children', '1')
+
+      try:
+        # TODO(ferringb): sort out an appropriate filter/list for using:
+        # for name in os.listdir(parent):
+        # rather than just transfering these two values.
+        for name in ('cpuset.cpus', 'cpuset.mems'):
+          if not self._overwrite:
+            # Top level nodes like cros/cbuildbot we don't want to overwrite-
+            # users/system may've leveled configuration.  If it's empty,
+            # overwrite it in those cases.
+            val = self.GetValue(name, '').strip()
+            if val:
+              continue
+          self._SudoSet(name, self.parent.GetValue(name, ''))
+      except (EnvironmentError, cros_build_lib.RunCommandError):
+        # Do not leave half created cgroups hanging around-
+        # it makes compatibility a pain since we have to rewrite
+        # the cgroup each time.  If instantiation fails, we know
+        # the group is screwed up, or the instantiaton code is-
+        # either way, no reason to leave it alive.
+        self.RemoveThisGroup()
+        raise
+
+    return True
+
+  # Since some of this code needs to check/reset this function to be ran,
+  # we use a more developer friendly variable name.
+  Instantiate._cache_key = '_inited'  # pylint: disable=protected-access
+
+  def _SudoSet(self, key, value):
+    """Set a cgroup file in this namespace to a specific value.
+
+    Raises:
+      _GroupWasRemoved: If the target file vanished (ENOENT) before the sudo
+        helper could exec, i.e. the group was removed out from under us.
+    """
+    name = self._LimitName(key, True)
+    try:
+      return sudo.SetFileContents(name, value, cwd=os.path.dirname(name))
+    except cros_build_lib.RunCommandError as e:
+      if e.exception is not None:
+        # Command failed before the exec itself; convert ENOENT
+        # appropriately.
+        exc = e.exception
+        if isinstance(exc, EnvironmentError) and exc.errno == errno.ENOENT:
+          raise _GroupWasRemoved(self.namespace, e)
+      raise
+
+  def RemoveThisGroup(self, strict=False):
+    """Remove this specific cgroup
+
+    If strict is True, then we must be removed.
+
+    Returns:
+      True if the group was removed; False otherwise.
+    """
+    if self._RemoveGroupOnDisk(self.path, strict=strict):
+      # Reset the memoized Instantiate() state so the group can be recreated.
+      self._inited = None
+      return True
+    return False
+
+  @classmethod
+  def _RemoveGroupOnDisk(cls, path, strict, sudo_strict=True):
+    """Perform the actual group removal.
+
+    Args:
+      path: The cgroup's location on disk.
+      strict: Boolean; if true, then it's an error if the group can't be
+        removed.  This can occur if there are still processes in it, or in
+        a nested group.
+      sudo_strict: See SudoRunCommand's strict option.
+
+    Returns:
+      True if the group is gone (removed, or never existed); False otherwise.
+    """
+    # Depth first recursively remove our children cgroups, then ourselves.
+    # Allow this to fail since currently it's possible for the cleanup code
+    # to not fully kill the hierarchy.  Note that we must do just rmdirs,
+    # rm -rf cannot be used- it tries to remove files which are unlinkable
+    # in cgroup (only namespaces can be removed via rmdir).
+    # See Documentation/cgroups/ for further details.
+    path = os.path.normpath(path) + '/'
+    # Do a sanity check to ensure that we're not touching anything we
+    # shouldn't.
+    if not path.startswith(cls.CGROUP_ROOT):
+      raise RuntimeError("cgroups.py: Was asked to wipe path %s, refusing. "
+                         "strict was %r, sudo_strict was %r"
+                         % (path, strict, sudo_strict))
+
+    result = cros_build_lib.SudoRunCommand(
+        ['find', path, '-depth', '-type', 'd', '-exec', 'rmdir', '{}', '+'],
+        redirect_stderr=True, error_code_ok=not strict,
+        print_cmd=False, strict=sudo_strict)
+    if result.returncode == 0:
+      return True
+    elif not os.path.isdir(path):
+      # We were invoked against a nonexistent path.
+      return True
+    return False
+
+  def TransferCurrentProcess(self, threads=True):
+    """Move the current process into this cgroup.
+
+    If threads is True, we move our threads into the group in addition.
+    Note this must be called in a threadsafe manner; it primarily exists
+    as a helpful default since python stdlib generates some background
+    threads (even when the code is operated synchronously).  While we
+    try to handle that scenario, it's implicitly racy since python
+    gives no clean/sane way to control/stop thread creation; thus it's
+    on the invokers head to ensure no new threads are being generated
+    while this is run.
+    """
+    if not threads:
+      return self.TransferPid(os.getpid())
+
+    # Keep transferring until two consecutive scans see the same thread set
+    # with no transfer failures.
+    seen = set()
+    while True:
+      force_run = False
+      threads = set(self._GetCurrentProcessThreads())
+      for tid in threads:
+        # Track any failures; a failure means the thread died under
+        # feet, implying we shouldn't trust the current state.
+        force_run |= not self.TransferPid(tid, True)
+      if not force_run and threads == seen:
+        # We got two runs of this code seeing the same threads; assume
+        # we got them all since the first run moved those threads into
+        # our cgroup, and the second didn't see any new threads.  While
+        # there may have been new threads between run1/run2, we do run2
+        # purely to snag threads we missed in run1; anything split by
+        # a thread from run1 would auto inherit our cgroup.
+        return
+      seen = threads
+
+  def _GetCurrentProcessThreads(self):
+    """Lookup the given tasks (pids fundamentally) for our process."""
+    # Note that while we could try doing tricks like threading.enumerate,
+    # that's not guaranteed to pick up background c/ffi threads; generally
+    # that's ultra rare, but the potential exists thus we ask the kernel
+    # instead.  What sucks however is that python releases the GIL; thus
+    # consuming code has to know of this, and protect against it.
+    return map(int, os.listdir('/proc/self/task'))
+
+  @EnsureInitialized
+  def TransferPid(self, pid, allow_missing=False):
+    """Assigns a given process to this cgroup.
+
+    Args:
+      pid: The pid (or tid) to add to this group's tasks file.
+      allow_missing: If True, return False instead of raising when the
+        write fails (e.g. the task already exited).
+
+    Returns:
+      True on success; False if allow_missing is set and the write failed.
+    """
+    # Assign this root process to the new cgroup.
+    try:
+      self._SudoSet('tasks', '%d' % int(pid))
+      return True
+    except cros_build_lib.RunCommandError:
+      if not allow_missing:
+        raise
+      return False
+
+  # TODO(ferringb): convert to snakeoil.weakref.WeakRefFinalizer
+  def __del__(self):
+    # Best-effort removal of autocleaned groups when the object is collected;
+    # skipped if the group was never instantiated or cgroups aren't set up.
+    if self.autoclean and self._inited and self.CGROUP_ROOT:
+      # Suppress any sudo_strict behaviour, since we may be invoked
+      # during interpreter shutdown.
+      self._RemoveGroupOnDisk(self.path, False, sudo_strict=False)
+
+  def KillProcesses(self, poll_interval=0.05, remove=False, sigterm_timeout=10):
+    """Kill all processes in this namespace."""
+
+    my_pids = set(map(str, self._GetCurrentProcessThreads()))
+
+    def _SignalPids(pids, signum):
+      cros_build_lib.SudoRunCommand(
+          ['kill', '-%i' % signum] + sorted(pids),
+          print_cmd=False, error_code_ok=True, redirect_stdout=True,
+          combine_stdout_stderr=True)
+
+    # First sigterm what we can, exiting after 2 runs w/out seeing pids.
+    # Let this phase run for a max of 10 seconds; afterwards, switch to
+    # sigkilling.
+    time_end = time.time() + sigterm_timeout
+    saw_pids, pids = True, set()
+    while time.time() < time_end:
+      previous_pids = pids
+      pids = self.tasks
+
+      self_kill = my_pids.intersection(pids)
+      if self_kill:
+        raise Exception("Bad API usage: asked to kill cgroup %s, but "
+                        "current pid %s is in that group.  Effectively "
+                        "asked to kill ourselves."
+                        % (self.namespace, self_kill))
+
+      if not pids:
+        if not saw_pids:
+          break
+        saw_pids = False
+      else:
+        saw_pids = True
+        new_pids = pids.difference(previous_pids)
+        if new_pids:
+          _SignalPids(new_pids, signal.SIGTERM)
+          # As long as new pids keep popping up, skip sleeping and just keep
+          # stomping them as quickly as possible (whack-a-mole is a good visual
+          # analogy of this).  We do this to ensure that fast moving spawns
+          # are dealt with as quickly as possible.  When considering this code,
+          # it's best to think about forkbomb scenarios- shouldn't occur, but
+          # synthetic fork-bombs can occur, thus this code being aggressive.
+          continue
+
+      time.sleep(poll_interval)
+
+    # Next do a sigkill scan.  Again, exit only after no pids have been seen
+    # for two scans, and all groups are removed.
+    groups_existed = True
+    while True:
+      pids = self.all_tasks
+
+      if pids:
+        self_kill = my_pids.intersection(pids)
+        if self_kill:
+          raise Exception("Bad API usage: asked to kill cgroup %s, but "
+                          "current pid %i is in that group.  Effectively "
+                          "asked to kill ourselves."
+                          % (self.namespace, self_kill))
+
+        _SignalPids(pids, signal.SIGKILL)
+        saw_pids = True
+      elif not (saw_pids or groups_existed):
+        break
+      else:
+        saw_pids = False
+
+      time.sleep(poll_interval)
+
+      # Note this is done after the sleep; try to give the kernel time to
+      # shutdown the processes.  They may still be transitioning to defunct
+      # kernel side by when we hit this scan, but that's fine- the next will
+      # get it.
+      # This needs to be nonstrict; it's possible the kernel is currently
+      # killing the pids we've just sigkill'd, thus the group isn't removable
+      # yet.  Additionally, it's possible a child got forked we didn't see.
+      # Ultimately via our killing/removal attempts, it will be removed,
+      # just not necessarily on the first run.
+      if remove:
+        if self.RemoveThisGroup(strict=False):
+          # If we successfully removed this group, then there can be no pids,
+          # sub groups, etc, within it.  No need to scan further.
+          return True
+        groups_existed = True
+      else:
+        groups_existed = [group.RemoveThisGroup(strict=False)
+                          for group in self.nested_groups]
+        groups_existed = not all(groups_existed)
+
+
+
  @classmethod
  def _FindCurrentCrosGroup(cls, pid=None):
    """Find and return the cros namespace a pid is currently in.

    If no pid is given, os.getpid() is substituted.

    Args:
      pid: Integer process id to inspect, or None for the current process.

    Returns:
      The namespace path relative to /cros/ (with surrounding slashes
      stripped), or None if the process is not under a /cros/ cgroup.

    Raises:
      ValueError: If |pid| is neither None nor an integer.
    """
    if pid is None:
      # procfs aliases the current process as /proc/self.
      pid = 'self'
    elif not isinstance(pid, (long, int)):
      raise ValueError("pid must be None, or an integer/long.  Got %r" % (pid,))

    cpuset = None
    try:
      # See the kernels Documentation/filesystems/proc.txt if you're unfamiliar
      # w/ procfs, and keep in mind that we have to work across multiple kernel
      # versions.
      cpuset = osutils.ReadFile('/proc/%s/cpuset' % (pid,)).rstrip('\n')
    except EnvironmentError as e:
      if e.errno != errno.ENOENT:
        raise
      # Kernels without /proc/<pid>/cpuset: fall back to parsing the
      # colon-separated /proc/<pid>/cgroup records instead.
      with open('/proc/%s/cgroup' % pid) as f:
        for line in f:
          # First digit is the hierarchy index, 2nd is subsystem, 3rd is space.
          # 2:cpuset:/
          # 2:cpuset:/cros/cbuildbot/1234

          line = line.rstrip('\n')
          if not line:
            continue
          line = line.split(':', 2)
          if line[1] == 'cpuset':
            cpuset = line[2]
            break

    # Only namespaces rooted under /cros/ belong to this module.
    if not cpuset or not cpuset.startswith("/cros/"):
      return None
    return cpuset[len("/cros/"):].strip("/")
+
+  @classmethod
+  def FindStartingGroup(cls, process_name, nesting=True):
+    """Create and return the starting cgroup for ourselves nesting if allowed.
+
+    Note that the node returned is either a generic process pool (e.g.
+    cros/cbuildbot), or the parent pool we're nested within; processes
+    generated in this group are the responsibility of this process to
+    deal with- nor should this process ever try triggering a kill w/in this
+    portion of the tree since they don't truly own it.
+
+    Args:
+      process_name: See the hierarchy comments at the start of this module.
+        This should basically be the process name- cros_sdk for example,
+        cbuildbot, etc.
+      nesting: If we're invoked by another cros cgroup aware process,
+        should we nest ourselves in their hierarchy?  Generally speaking,
+        client code should never have a reason to disable nesting.
+    """
+    if not cls.IsUsable():
+      return None
+
+    target = None
+    if nesting:
+      target = cls._FindCurrentCrosGroup()
+    if target is None:
+      target = process_name
+
+    return _cros_node.AddGroup(target, autoclean=False)
+
+
class ContainChildren(cros_build_lib.MasterPidContextManager):
  """Context manager for containing children processes.

  This manager creates a job pool derived from the specified Cgroup |node|
  and transfers the current process into it upon __enter__.

  Any children processes created at that point will inherit our cgroup;
  they can only escape the group if they're running as root and move
  themselves out of this hierarchy.

  Upon __exit__, transfer the current process back to this group, then
  SIGTERM (progressing to SIGKILL) any immediate children in the pool,
  finally removing the pool if possible. After sending SIGTERM, we wait
  |sigterm_timeout| seconds before sending SIGKILL.

  If |pool_name| is given, that name is used rather than os.getpid() for
  the job pool created.

  Finally, note that during cleanup this will suppress all signals
  to ensure that it cleanses any children before returning.
  """

  def __init__(self, node, pool_name=None, sigterm_timeout=10):
    """Initialize.

    Args:
      node: Parent Cgroup node to derive the job pool from.
      pool_name: Name for the created pool; defaults to our pid at entry.
      sigterm_timeout: Seconds to wait after SIGTERM before using SIGKILL.
    """
    super(ContainChildren, self).__init__()
    self.node = node
    # The child cgroup; created lazily in _enter().
    self.child = None
    # Our pid, recorded at _enter() time.
    self.pid = None
    self.pool_name = pool_name
    self.sigterm_timeout = sigterm_timeout
    # Only set once we've successfully transferred into the child group,
    # so _exit() knows whether a kill pass is warranted.
    self.run_kill = False

  def _enter(self):
    # Create the pool and move the current process into it.
    self.pid = os.getpid()

    # Note: We use lazy init here so that we cannot trigger a
    # _GroupWasRemoved -- we want that to be contained.
    pool_name = str(self.pid) if self.pool_name is None else self.pool_name
    self.child = self.node.AddGroup(pool_name, autoclean=True, lazy_init=True)
    try:
      self.child.TransferCurrentProcess()
    except _GroupWasRemoved:
      raise SystemExit(
          "Group %s was removed under our feet; pool shutdown is underway"
          % self.child.namespace)
    self.run_kill = True

  def _exit(self, *_args, **_kwargs):
    # Defer signals so cleanup cannot be interrupted partway through.
    with signals.DeferSignals():
      self.node.TransferCurrentProcess()
      if self.run_kill:
        self.child.KillProcesses(remove=True,
                                 sigterm_timeout=self.sigterm_timeout)
      else:
        # Non-strict since the group may have failed to be created.
        self.child.RemoveThisGroup(strict=False)
+
+
def SimpleContainChildren(process_name, nesting=True, pid=None, **kwargs):
  """Convenience context manager to create a cgroup for children containment

  See Cgroup.FindStartingGroup and Cgroup.ContainChildren for specifics.
  If Cgroups aren't supported on this system, this is a noop context manager.
  """
  node = Cgroup.FindStartingGroup(process_name, nesting=nesting)
  if node is None:
    # Cgroups unusable here; hand back a do-nothing manager.
    return cros_build_lib.NoOpContextManager()
  pool = '%s:%i' % (process_name, os.getpid() if pid is None else pid)
  return ContainChildren(node, pool, **kwargs)
+
# This is a generic group, not associated with any specific process id, so
# we shouldn't autoclean it on exit; doing so would delete the group from
# under the feet of any other processes interested in using the group.
_root_node = Cgroup(None, _is_root=True, autoclean=False, lazy_init=True)
# Shared parent of all cros-managed groups; never overwritten or autocleaned
# for the same reason as the root node above.
_cros_node = _root_node.AddGroup('cros', autoclean=False, lazy_init=True,
                                 _overwrite=False)
diff --git a/lib/cgroups_unittest b/lib/cgroups_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/cgroups_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/cgroups_unittest.py b/lib/cgroups_unittest.py
new file mode 100644
index 0000000..825fb79
--- /dev/null
+++ b/lib/cgroups_unittest.py
@@ -0,0 +1,28 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for cgroups.py."""
+
+from __future__ import print_function
+
+from chromite.lib import cgroups
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import parallel
+from chromite.lib import sudo
+
+
class TestCreateGroups(cros_test_lib.TestCase):
  """Unittests for creating groups."""

  def _CrosSdk(self):
    """Run one short-lived cros_sdk invocation (creates/uses cgroups)."""
    cmd = ['cros_sdk', '--', 'sleep', '0.001']
    cros_build_lib.RunCommand(cmd)

  def testCreateGroups(self):
    """Run many cros_sdk processes in parallel to test for race conditions."""
    with sudo.SudoKeepAlive():
      with cgroups.SimpleContainChildren('example', sigterm_timeout=5):
        parallel.RunTasksInProcessPool(self._CrosSdk, [[]] * 20,
                                       processes=10)
diff --git a/lib/chrome_util.py b/lib/chrome_util.py
new file mode 100644
index 0000000..1772db6
--- /dev/null
+++ b/lib/chrome_util.py
@@ -0,0 +1,459 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Library containing utility functions used for Chrome-specific build tasks."""
+
+from __future__ import print_function
+
+import functools
+import glob
+import os
+import re
+import shlex
+import shutil
+
+from chromite.cbuildbot import failures_lib
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+
+
+# Taken from external/gyp.git/pylib.
+def _NameValueListToDict(name_value_list):
+  """Converts Name-Value list to dictionary.
+
+  Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
+  of the pairs.  If a string is simply NAME, then the value in the dictionary
+  is set to True.  If VALUE can be converted to an integer, it is.
+  """
+  result = {}
+  for item in name_value_list:
+    tokens = item.split('=', 1)
+    if len(tokens) == 2:
+      # If we can make it an int, use that, otherwise, use the string.
+      try:
+        token_value = int(tokens[1])
+      except ValueError:
+        token_value = tokens[1]
+      # Set the variable to the supplied value.
+      result[tokens[0]] = token_value
+    else:
+      # No value supplied, treat it as a boolean and set it.
+      result[tokens[0]] = True
+  return result
+
+
def ProcessGypDefines(defines):
  """Validate and convert a string containing GYP_DEFINES to dictionary.

  Args:
    defines: A shell-quoted GYP_DEFINES string; must not be None.

  Returns:
    Dict mapping each define name to its value (see _NameValueListToDict).
  """
  assert defines is not None
  tokens = shlex.split(defines)
  return _NameValueListToDict(tokens)
+
+
def DictToGypDefines(def_dict):
  """Convert a dict to GYP_DEFINES format.

  Args:
    def_dict: Dictionary mapping gyp variable names to values.

  Returns:
    A single space-separated string of "NAME='VALUE'" entries.
  """
  # Use items() rather than the Python 2-only iteritems() so this module
  # keeps working as the codebase migrates to Python 3.
  return ' '.join("%s='%s'" % (k, v) for k, v in def_dict.items())
+
+
class Conditions(object):
  """Functions that return conditions used to construct Path objects.

  Condition functions returned by the public methods have signature
  f(gyp_defines, staging_flags).  For description of gyp_defines and
  staging_flags see docstring for StageChromeFromBuildDir().
  """

  @classmethod
  def _GypSet(cls, flag, value, gyp_defines, _staging_flags):
    # With no expected value, any truthy setting counts; otherwise require
    # an exact match.
    current = gyp_defines.get(flag)
    if value is None:
      return bool(current)
    return current == value

  @classmethod
  def _GypNotSet(cls, flag, gyp_defines, staging_flags):
    return not cls._GypSet(flag, None, gyp_defines, staging_flags)

  @classmethod
  def _StagingFlagSet(cls, flag, _gyp_defines, staging_flags):
    return flag in staging_flags

  @classmethod
  def _StagingFlagNotSet(cls, flag, _gyp_defines, staging_flags):
    return flag not in staging_flags

  @classmethod
  def GypSet(cls, flag, value=None):
    """Returns condition that tests a gyp flag is set (possibly to a value)."""
    return functools.partial(cls._GypSet, flag, value)

  @classmethod
  def GypNotSet(cls, flag):
    """Returns condition that tests a gyp flag is not set."""
    return functools.partial(cls._GypNotSet, flag)

  @classmethod
  def StagingFlagSet(cls, flag):
    """Returns condition that tests a staging_flag is set."""
    return functools.partial(cls._StagingFlagSet, flag)

  @classmethod
  def StagingFlagNotSet(cls, flag):
    """Returns condition that tests a staging_flag is not set."""
    return functools.partial(cls._StagingFlagNotSet, flag)
+
+
class MultipleMatchError(failures_lib.StepFailure):
  """A glob pattern matches multiple files but a non-dir dest was specified."""


class MissingPathError(failures_lib.StepFailure):
  """An expected path is non-existent."""


class MustNotBeDirError(failures_lib.StepFailure):
  """The specified path should not be a directory, but is."""
+
+
class Copier(object):
  """File/directory copier.

  Provides destination stripping and permission setting functionality.
  """

  # Paths matching any of these regexps are skipped during directory copies.
  DEFAULT_BLACKLIST = (r'(^|.*/)\.svn($|/.*)',)

  def __init__(self, strip_bin=None, strip_flags=None, default_mode=0o644,
               dir_mode=0o755, exe_mode=0o755, blacklist=None):
    """Initialization.

    Args:
      strip_bin: Path to the program used to strip binaries.  If set to None,
                 binaries will not be stripped.
      strip_flags: A list of flags to pass to the |strip_bin| executable.
      default_mode: Default permissions to set on files.
      dir_mode: Mode to set for directories.
      exe_mode: Permissions to set on executables.
      blacklist: A list of path patterns to ignore during the copy.
    """
    self.strip_bin = strip_bin
    self.strip_flags = strip_flags
    self.default_mode = default_mode
    self.dir_mode = dir_mode
    self.exe_mode = exe_mode
    self.blacklist = blacklist
    if self.blacklist is None:
      self.blacklist = self.DEFAULT_BLACKLIST

  @staticmethod
  def Log(src, dest, directory):
    """Log one copy at debug level; |directory| marks a directory copy."""
    sep = ' [d] -> ' if directory else ' -> '
    logging.debug('%s %s %s', src, sep, dest)

  def _PathIsBlacklisted(self, path):
    """Returns True if |path| matches any blacklist pattern."""
    for pattern in self.blacklist:
      if re.match(pattern, path):
        return True
    return False

  def _CopyFile(self, src, dest, path):
    """Perform the copy.

    Args:
      src: The path of the file/directory to copy.
      dest: The exact path of the destination.  Should not already exist.
      path: The Path instance containing copy operation modifiers (such as
            Path.exe, Path.strip, etc.)
    """
    assert not os.path.isdir(src), '%s: Not expecting a directory!' % src
    osutils.SafeMakedirs(os.path.dirname(dest), mode=self.dir_mode)
    # Only strip non-empty executables; stripping writes straight to |dest|.
    if path.exe and self.strip_bin and path.strip and os.path.getsize(src) > 0:
      strip_flags = (['--strip-unneeded'] if self.strip_flags is None else
                     self.strip_flags)
      cros_build_lib.DebugRunCommand(
          [self.strip_bin] + strip_flags + ['-o', dest, src])
      # Carry the source's stat info (times/mode) over to the stripped copy.
      shutil.copystat(src, dest)
    else:
      shutil.copy2(src, dest)

    # Explicit mode on the Path wins; otherwise fall back to exe/default.
    mode = path.mode
    if mode is None:
      mode = self.exe_mode if path.exe else self.default_mode
    os.chmod(dest, mode)

  def Copy(self, src_base, dest_base, path, strict=False, sloppy=False):
    """Copy artifact(s) from source directory to destination.

    Args:
      src_base: The directory to apply the src glob pattern match in.
      dest_base: The directory to copy matched files to.  |Path.dest|.
      path: A Path instance that specifies what is to be copied.
      strict: If set, enforce that all optional files are copied.
      sloppy: If set, ignore when mandatory artifacts are missing.

    Returns:
      A list of the artifacts copied.
    """
    assert not (strict and sloppy), 'strict and sloppy are not compatible.'
    copied_paths = []
    src = os.path.join(src_base, path.src)
    # A directory source must be spelled with a trailing '/'.
    if not src.endswith('/') and os.path.isdir(src):
      raise MustNotBeDirError('%s must not be a directory\n'
                              'Aborting copy...' % (src,))
    paths = glob.glob(src)
    if not paths:
      # Nothing matched: error, skip quietly, or warn depending on the
      # optional/cond/strict/sloppy combination.
      if ((strict and not path.optional) or
          (not strict and not (path.optional or path.cond) and not sloppy)):
        msg = ('%s does not exist and is required.\n'
               'You can bypass this error with --sloppy.\n'
               'Aborting copy...' % src)
        raise MissingPathError(msg)
      elif path.optional or (not strict and path.cond):
        logging.debug('%s does not exist and is optional.  Skipping.', src)
      else:
        logging.warning('%s does not exist and is required.  Skipping anyway.',
                        src)
    elif len(paths) > 1 and path.dest and not path.dest.endswith('/'):
      raise MultipleMatchError(
          'Glob pattern %r has multiple matches, but dest %s '
          'is not a directory.\n'
          'Aborting copy...' % (path.src, path.dest))
    else:
      for p in paths:
        rel_src = os.path.relpath(p, src_base)
        # Resolve the relative destination: keep the source name, drop into
        # a dest directory, or rename outright.
        if path.dest is None:
          rel_dest = rel_src
        elif path.dest.endswith('/'):
          rel_dest = os.path.join(path.dest, os.path.basename(p))
        else:
          rel_dest = path.dest
        assert not rel_dest.endswith('/')
        dest = os.path.join(dest_base, rel_dest)

        copied_paths.append(p)
        self.Log(p, dest, os.path.isdir(p))
        if os.path.isdir(p):
          # Walk the directory, skipping blacklisted entries.
          for sub_path in osutils.DirectoryIterator(p):
            rel_path = os.path.relpath(sub_path, p)
            sub_dest = os.path.join(dest, rel_path)
            if self._PathIsBlacklisted(rel_path):
              continue
            if sub_path.endswith('/'):
              osutils.SafeMakedirs(sub_dest, mode=self.dir_mode)
            else:
              self._CopyFile(sub_path, sub_dest, path)
        else:
          self._CopyFile(p, dest, path)

    return copied_paths
+
+
class Path(object):
  """Represents an artifact to be copied from build dir to staging dir."""

  def __init__(self, src, exe=False, cond=None, dest=None, mode=None,
               optional=False, strip=True):
    """Initializes the object.

    Args:
      src: Relative path of the artifact; may be a file, a directory, or a
           glob pattern.
      exe: Marks the path as an executable (or as containing executables).
           Executables may be stripped during copy and receive executable
           permissions.  Stripping applies only to named files and glob
           patterns matching files; contents of matched directories are
           never stripped.
      cond: A condition (see Conditions class) consulted when deciding
            whether to process this artifact.  A conditional artifact is
            treated as optional unless --strict is supplied.
      dest: Target name for the copied file/directory; defaults to the
            source name.
      mode: Permission bits applied to matched files and to the contents of
            matched directories.
      optional: If true, a missing artifact is tolerated; otherwise the copy
                errors out (except in 'sloppy' mode, where everything is
                treated as optional).
      strip: Whether to strip the executable when |exe| is set.
    """
    self.src = src
    self.exe = exe
    self.cond = cond
    self.dest = dest
    self.mode = mode
    self.optional = optional
    self.strip = strip

  def ShouldProcess(self, gyp_defines, staging_flags):
    """Tests whether this artifact should be copied."""
    if not self.cond:
      # No condition attached: always process.
      return True
    return self.cond(gyp_defines, staging_flags)
+
+
# Gyp define controlling whether NaCl-related artifacts are staged.
_DISABLE_NACL = 'disable_nacl'

_CHROME_INTERNAL_FLAG = 'chrome_internal'
_HIGHDPI_FLAG = 'highdpi'
# Staging flags recognized by StageChromeFromBuildDir().
STAGING_FLAGS = (
    _CHROME_INTERNAL_FLAG,
    _HIGHDPI_FLAG,
)

_CHROME_SANDBOX_DEST = 'chrome-sandbox'
# Shorthand alias used when building the Path conditions below.
C = Conditions

# Files shared between all deployment types.
_COPY_PATHS_COMMON = (
    Path('chrome_sandbox', mode=0o4755, dest=_CHROME_SANDBOX_DEST),
    Path('icudtl.dat'),
    # Set as optional for backwards compatibility.
    Path('lib/libpeerconnection.so',
         exe=True,
         cond=C.StagingFlagSet(_CHROME_INTERNAL_FLAG),
         optional=True),
    Path('libffmpegsumo.so', exe=True, optional=True),
    Path('libosmesa.so', exe=True, optional=True),
    Path('libpdf.so', exe=True, optional=True),
    Path('libppGoogleNaClPluginChrome.so',
         exe=True,
         cond=C.GypNotSet(_DISABLE_NACL),
         optional=True),
    Path('mojo_shell', exe=True, optional=True),
    # Do not strip the nacl_helper_bootstrap binary because the binutils
    # objcopy/strip mangles the ELF program headers.
    Path('nacl_helper_bootstrap',
         exe=True,
         strip=False,
         cond=C.GypNotSet(_DISABLE_NACL)),
    Path('nacl_irt_*.nexe', cond=C.GypNotSet(_DISABLE_NACL)),
    Path('nacl_helper',
         exe=True,
         optional=True,
         cond=C.GypNotSet(_DISABLE_NACL)),
    Path('nacl_helper_nonsfi',
         exe=True,
         optional=True,
         cond=C.GypNotSet(_DISABLE_NACL)),
    Path('natives_blob.bin', optional=True),
    Path('pnacl/', cond=C.GypNotSet(_DISABLE_NACL)),
    Path('snapshot_blob.bin', optional=True),
)

# app_shell deployments: the shell binary plus the common set.
_COPY_PATHS_APP_SHELL = (
    Path('app_shell', exe=True),
    Path('extensions_shell_and_test.pak'),
) + _COPY_PATHS_COMMON

# Full Chrome deployments.
_COPY_PATHS_CHROME = (
    Path('chrome', exe=True),
    Path('chrome-wrapper'),
    Path('chrome_100_percent.pak'),
    Path('chrome_200_percent.pak', cond=C.StagingFlagSet(_HIGHDPI_FLAG)),
    Path('chrome_material_100_percent.pak', optional=True),
    Path('chrome_material_200_percent.pak',
         optional=True,
         cond=C.StagingFlagSet(_HIGHDPI_FLAG)),
    Path('keyboard_resources.pak'),
    Path('lib/*.so',
         exe=True,
         cond=C.GypSet('component', value='shared_library')),
    # Set as optional for backwards compatibility.
    Path('libexif.so', exe=True, optional=True),
    # Widevine binaries are already pre-stripped.  In addition, they don't
    # play well with the binutils stripping tools, so skip stripping.
    Path('libwidevinecdmadapter.so',
         exe=True,
         strip=False,
         cond=C.StagingFlagSet(_CHROME_INTERNAL_FLAG)),
    Path('libwidevinecdm.so',
         exe=True,
         strip=False,
         cond=C.StagingFlagSet(_CHROME_INTERNAL_FLAG)),
    Path('locales/'),
    Path('resources/'),
    Path('resources.pak'),
    Path('xdg-settings'),
    Path('*.png'),
) + _COPY_PATHS_COMMON

# Envoy deployments.
_COPY_PATHS_ENVOY = (
    Path('envoy_shell', exe=True),
    Path('envoy_shell.pak'),
) + _COPY_PATHS_COMMON

# Deployment-type name -> Path tuple; consulted by GetCopyPaths().
_COPY_PATHS_MAP = {
    'app_shell': _COPY_PATHS_APP_SHELL,
    'chrome': _COPY_PATHS_CHROME,
    'envoy': _COPY_PATHS_ENVOY,
}
+
+
def _FixPermissions(dest_base):
  """Last minute permission fixes."""
  # Make everything under |dest_base| world-readable.
  cros_build_lib.DebugRunCommand(['chmod', '-R', 'a+r', dest_base])
  # Any file executable by its owner or group (-perm /110) becomes
  # executable by everyone.
  cros_build_lib.DebugRunCommand(
      ['find', dest_base, '-perm', '/110', '-exec', 'chmod', 'a+x', '{}', '+'])
+
+
def GetCopyPaths(deployment_type='chrome'):
  """Returns the list of copy paths used as a filter for staging files.

  Args:
    deployment_type: String describing the deployment type. Either "app_shell",
                     "chrome", or "envoy".

  Returns:
    The list of paths to use as a filter for staging files.

  Raises:
    RuntimeError: If |deployment_type| is not a recognized deployment type.
  """
  try:
    return _COPY_PATHS_MAP[deployment_type]
  except KeyError:
    raise RuntimeError('Invalid deployment type "%s"' % deployment_type)
+
def StageChromeFromBuildDir(staging_dir, build_dir, strip_bin, strict=False,
                            sloppy=False, gyp_defines=None, staging_flags=None,
                            strip_flags=None, copy_paths=_COPY_PATHS_CHROME):
  """Populates a staging directory with necessary build artifacts.

  If |strict| is set, then we decide what to stage based on the |gyp_defines|
  and |staging_flags| passed in.  Otherwise, we stage everything that we know
  about, that we can find.

  Args:
    staging_dir: Path to an empty staging directory.
    build_dir: Path to location of Chrome build artifacts.
    strip_bin: Path to executable used for stripping binaries.
    strict: If set, decide what to stage based on the |gyp_defines| and
            |staging_flags| passed in, and enforce that all optional files
            are copied.  Otherwise, we stage optional files if they are
            there, but we don't complain if they're not.
    sloppy: Ignore when mandatory artifacts are missing.
    gyp_defines: A dictionary (i.e., one returned by ProcessGypDefines)
      containing GYP_DEFINES Chrome was built with.
    staging_flags: A list of extra staging flags.  Valid flags are specified in
      STAGING_FLAGS.
    strip_flags: A list of flags to pass to the tool used to strip binaries.
    copy_paths: The list of paths to use as a filter for staging files.

  Raises:
    MissingPathError: If no artifact at all could be copied.
  """
  # The staging dir is expected to be empty; create the plugins subdir that
  # deployments expect to exist.
  os.mkdir(os.path.join(staging_dir, 'plugins'), 0o755)

  if gyp_defines is None:
    gyp_defines = {}
  if staging_flags is None:
    staging_flags = []

  copier = Copier(strip_bin=strip_bin, strip_flags=strip_flags)
  copied_paths = []
  for p in copy_paths:
    # Outside strict mode every known path is attempted; conditions only
    # gate staging when |strict| is set.
    if not strict or p.ShouldProcess(gyp_defines, staging_flags):
      copied_paths += copier.Copy(build_dir, staging_dir, p, strict=strict,
                                  sloppy=sloppy)

  if not copied_paths:
    raise MissingPathError('Couldn\'t find anything to copy!\n'
                           'Are you looking in the right directory?\n'
                           'Aborting copy...')

  _FixPermissions(staging_dir)
diff --git a/lib/chrome_util_unittest b/lib/chrome_util_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/chrome_util_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/chrome_util_unittest.py b/lib/chrome_util_unittest.py
new file mode 100644
index 0000000..b9feff8
--- /dev/null
+++ b/lib/chrome_util_unittest.py
@@ -0,0 +1,234 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for chrome_util."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import cros_test_lib
+from chromite.lib import chrome_util
+
+# pylint: disable=W0212,W0233
+
+# Convenience alias
+Dir = cros_test_lib.Directory
+
+
class CopyTest(cros_test_lib.TempDirTestCase):
  """Unittests for chrome_util Copy."""
  def setUp(self):
    """Create fresh src/dest roots and a default Copier for each test."""
    self.src_base = os.path.join(self.tempdir, 'src_base')
    self.dest_base = os.path.join(self.tempdir, 'dest_base')
    os.mkdir(self.src_base)
    os.mkdir(self.dest_base)
    self.copier = chrome_util.Copier()

  def _CopyAndVerify(self, path, src_struct, dest_struct, error=None,
                     strict=False, sloppy=False):
    """Materialize |src_struct|, run Copy, and verify |dest_struct|.

    If |error| is given, assert that Copy raises it instead of verifying
    the destination layout.
    """
    cros_test_lib.CreateOnDiskHierarchy(self.src_base, src_struct)
    if error:
      self.assertRaises(error, self.copier.Copy, self.src_base, self.dest_base,
                        path, strict=strict, sloppy=sloppy)
      return

    self.copier.Copy(self.src_base, self.dest_base, path, strict=strict,
                     sloppy=sloppy)
    cros_test_lib.VerifyOnDiskHierarchy(self.dest_base, dest_struct)
+
+
class FileCopyTest(CopyTest):
  """Testing the file copying/globbing/renaming functionality of Path class."""

  # Source-side fixture names.
  ELEMENT_SRC_NAME = 'file1'
  ELEMENT_SRC = ELEMENT_SRC_NAME
  ELEMENTS_SRC = ['file1', 'file2', 'file3', 'monkey1', 'monkey2', 'monkey3']
  ELEMENTS_GLOB = 'file*'
  DIR_SRC_NAME = 'dir_src'

  # Destination-side fixture names.
  ELEMENT_DEST_NAME = 'file_dest'
  ELEMENT_DEST = ELEMENT_DEST_NAME
  ELEMENTS_DEST = ['file1', 'file2', 'file3']
  DIR_DEST_NAME = 'dir_dest'

  # Fixtures that intentionally match nothing.
  MATCH_NOTHING_GLOB = 'match_nothing'
  BAD_ELEMENTS = ['wont match1', 'wont match2']

  def testSurfaceCopy(self):
    """Copying an element from the root."""
    src_struct = self.ELEMENTS_SRC
    dest_struct = [self.ELEMENT_SRC]
    path = chrome_util.Path(self.ELEMENT_SRC_NAME)
    self._CopyAndVerify(path, src_struct, dest_struct)

  def testStrictAndSloppy(self):
    """Test that strict and sloppy copies fail with an AssertionError."""
    src_struct = self.ELEMENTS_SRC
    dest_struct = [self.ELEMENT_SRC]
    path = chrome_util.Path(self.ELEMENT_SRC_NAME)
    self._CopyAndVerify(path, src_struct, dest_struct, error=AssertionError,
                        sloppy=True, strict=True)

  def testSurfaceRename(self):
    """Renaming of an element from the root."""
    src_struct = self.ELEMENTS_SRC
    dest_struct = [self.ELEMENT_DEST]
    path = chrome_util.Path(self.ELEMENT_SRC_NAME, dest=self.ELEMENT_DEST_NAME)
    self._CopyAndVerify(path, src_struct, dest_struct)

  def testOneLevelDeepCopy(self):
    """Copying an element inside a directory."""
    src_struct = [Dir(self.DIR_SRC_NAME, self.ELEMENTS_SRC)]
    dest_struct = [Dir(self.DIR_SRC_NAME, [self.ELEMENT_SRC])]
    path = chrome_util.Path(
        os.path.join(self.DIR_SRC_NAME, self.ELEMENT_SRC_NAME))
    self._CopyAndVerify(path, src_struct, dest_struct)

  def testOneLevelDeepRename(self):
    """Renaming of an element inside a directory."""
    src_struct = [Dir(self.DIR_SRC_NAME, self.ELEMENTS_SRC)]
    dest_struct = [Dir(self.DIR_SRC_NAME, [self.ELEMENT_DEST])]

    path = chrome_util.Path(
        os.path.join(self.DIR_SRC_NAME, self.ELEMENT_SRC_NAME),
        dest=os.path.join(self.DIR_SRC_NAME, self.ELEMENT_DEST_NAME))
    self._CopyAndVerify(path, src_struct, dest_struct)

  def testOneLevelDeepDirRename(self):
    """Renaming of an element and its containing directory."""
    src_struct = [Dir(self.DIR_SRC_NAME, self.ELEMENTS_SRC)]
    dest_struct = [Dir(self.DIR_DEST_NAME, [self.ELEMENT_DEST])]

    path = chrome_util.Path(
        os.path.join(self.DIR_SRC_NAME, self.ELEMENT_SRC_NAME),
        dest=os.path.join(self.DIR_DEST_NAME, self.ELEMENT_DEST_NAME))
    self._CopyAndVerify(path, src_struct, dest_struct)

  def testSingleGlob(self):
    """Glob matching one element."""
    src_struct = dest_struct = [Dir(self.DIR_SRC_NAME, [self.ELEMENT_SRC])]
    path = chrome_util.Path(os.path.join(self.DIR_SRC_NAME, self.ELEMENTS_GLOB))
    self._CopyAndVerify(path, src_struct, dest_struct)

  def testSingleGlobToDirectory(self):
    """Glob matching one element and dest directory provided."""
    src_struct = [Dir(self.DIR_SRC_NAME, [self.ELEMENT_SRC])]
    dest_struct = [Dir(self.DIR_DEST_NAME, [self.ELEMENT_SRC])]
    path = chrome_util.Path(os.path.join(self.DIR_SRC_NAME, self.ELEMENTS_GLOB),
                            dest=(self.DIR_DEST_NAME + os.sep))
    self._CopyAndVerify(path, src_struct, dest_struct)

  def testMultiGlob(self):
    """Glob matching one file and dest directory provided."""
    src_struct = [Dir(self.DIR_SRC_NAME, self.ELEMENTS_SRC)]
    dest_struct = [Dir(self.DIR_SRC_NAME, self.ELEMENTS_DEST)]

    path = chrome_util.Path(os.path.join(self.DIR_SRC_NAME, self.ELEMENTS_GLOB))
    self._CopyAndVerify(path, src_struct, dest_struct)

  def testMultiGlobToDirectory(self):
    """Glob matching multiple elements and dest directory provided."""
    src_struct = [Dir(self.DIR_SRC_NAME, self.ELEMENTS_SRC)]
    dest_struct = [Dir(self.DIR_DEST_NAME, self.ELEMENTS_DEST)]
    path = chrome_util.Path(os.path.join(self.DIR_SRC_NAME, self.ELEMENTS_GLOB),
                            dest=(self.DIR_DEST_NAME + os.sep))
    self._CopyAndVerify(path, src_struct, dest_struct)

  def testGlobReturnsMultipleError(self):
    """Glob returns multiple results but dest does not end with '/'."""
    src_struct = self.ELEMENTS_SRC
    path = chrome_util.Path(self.ELEMENTS_GLOB, dest=self.DIR_DEST_NAME)
    self._CopyAndVerify(
        path, src_struct, None, error=chrome_util.MultipleMatchError)

  def testNoElementError(self):
    """A path that is not optional cannot be found."""
    src_struct = self.BAD_ELEMENTS
    path = chrome_util.Path(self.ELEMENT_SRC_NAME)
    self._CopyAndVerify(
        path, src_struct, [], error=chrome_util.MissingPathError)

  def testNoGlobError(self):
    """A glob that is not optional matches nothing."""
    src_struct = self.ELEMENTS_SRC
    path = chrome_util.Path(self.MATCH_NOTHING_GLOB)
    self._CopyAndVerify(
        path, src_struct, [], error=chrome_util.MissingPathError)

  def testNonDirError(self):
    """Test case where a file pattern matches a directory."""
    src_struct = ['file1/']
    dest_struct = []
    path = chrome_util.Path('file1')
    self._CopyAndVerify(path, src_struct, dest_struct,
                        error=chrome_util.MustNotBeDirError)

  def testElementOptional(self, cond=None, strict=False, error=None,
                          optional=True):
    """A path cannot be found but is optional."""
    # Also reused as a parameterized helper by the tests below.
    src_struct = self.BAD_ELEMENTS
    dest_struct = []
    path = chrome_util.Path(self.ELEMENT_SRC_NAME, cond=cond, optional=optional)
    self._CopyAndVerify(path, src_struct, dest_struct, error=error,
                        strict=strict)

  def testElementOptionalStrict(self):
    """A path cannot be found but is optional, with --strict."""
    self.testElementOptional(strict=True)

  def testElementConditionalOK(self):
    """A path cannot be found but has a condition, no --strict."""
    self.testElementOptional(cond=lambda *args: True, optional=False)

  def testElementConditionalFail(self):
    """A path cannot be found but has a condition, with --strict."""
    self.testElementOptional(cond=lambda *args: True, strict=True,
                             optional=False, error=chrome_util.MissingPathError)

  def testOptionalGlob(self):
    """A glob matches nothing but is optional."""
    src_struct = self.ELEMENTS_SRC
    dest_struct = []
    path = chrome_util.Path(self.MATCH_NOTHING_GLOB, optional=True)
    self._CopyAndVerify(path, src_struct, dest_struct)
+
+
class SloppyFileCopyTest(FileCopyTest):
  """Test file copies with sloppy=True"""

  def _CopyAndVerify(self, path, src_struct, dest_struct, **kwargs):
    """Like CopyTest._CopyAndVerify, but forces sloppy (non-strict) mode.

    MissingPathError expectations are dropped because sloppy copies do not
    raise for missing mandatory artifacts.
    """
    if not kwargs.get('sloppy'):
      kwargs['strict'] = False
      kwargs['sloppy'] = True

    if kwargs.get('error') is chrome_util.MissingPathError:
      kwargs['error'] = None
    CopyTest._CopyAndVerify(self, path, src_struct, dest_struct, **kwargs)
+
+
class DirCopyTest(FileCopyTest):
  """Testing directory copying/globbing/renaming functionality of Path class."""

  # Directory fixtures mirroring FileCopyTest's file fixtures.
  FILES = ['file1', 'file2', 'file3']
  ELEMENT_SRC_NAME = 'monkey1/'
  ELEMENT_SRC = Dir(ELEMENT_SRC_NAME, FILES)
  ELEMENTS_SRC = [
      # Add .svn directory to test black list functionality.
      Dir('monkey1', FILES + [Dir('.svn', FILES)]), Dir('monkey2', FILES),
      Dir('monkey3', FILES),
      Dir('foon1', []), Dir('foon2', []), Dir('foon3', [])
  ]
  ELEMENTS_GLOB = 'monkey*'
  DIR_SRC_NAME = 'dir_src'

  ELEMENT_DEST_NAME = 'monkey_dest'
  ELEMENT_DEST = Dir(ELEMENT_DEST_NAME, FILES)
  ELEMENTS_DEST = [
      Dir('monkey1', FILES), Dir('monkey2', FILES), Dir('monkey3', FILES)]
  DIR_DEST_NAME = 'dir_dest'
+
+
class SloppyDirCopyTest(SloppyFileCopyTest, DirCopyTest):
  """Test directory copies with sloppy=True"""
diff --git a/lib/chroot_util.py b/lib/chroot_util.py
new file mode 100644
index 0000000..742e5b6
--- /dev/null
+++ b/lib/chroot_util.py
@@ -0,0 +1,211 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for updating and building in the chroot environment."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import brick_lib
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import sysroot_lib
+
+if cros_build_lib.IsInsideChroot():
+  # These import libraries outside chromite. See brbug.com/472.
+  from chromite.scripts import cros_list_modified_packages as workon
+  from chromite.scripts import cros_setup_toolchains as toolchain
+
+
+_HOST_PKGS = ('virtual/target-sdk', 'world',)
+
+
def _GetToolchainPackages():
  """Get a list of host toolchain packages."""
  # Load crossdev cache first for faster performance.
  toolchain.Crossdev.Load(False)
  return [toolchain.GetPortagePackage('host', pkg)
          for pkg in toolchain.GetTargetPackages('host')]
+
+
def GetEmergeCommand(sysroot=None):
  """Returns the emerge command to use for |sysroot| (host if None)."""
  emerge = os.path.join(constants.CHROMITE_BIN_DIR, 'parallel_emerge')
  cmd = [emerge]
  # The host sysroot ('/' or None) needs no explicit --sysroot flag.
  if sysroot and sysroot != '/':
    cmd.append('--sysroot=%s' % sysroot)
  return cmd
+
+
def Emerge(packages, sysroot, with_deps=True, rebuild_deps=True,
           use_binary=True, jobs=None, debug_output=False):
  """Emerge the specified |packages|.

  Args:
    packages: List of packages to emerge.
    sysroot: Path to the sysroot in which to emerge.
    with_deps: Whether to include dependencies.
    rebuild_deps: Whether to rebuild dependencies.
    use_binary: Whether to use binary packages.
    jobs: Number of jobs to run in parallel. Falsy values (None, 0) omit
      the --jobs flag entirely.
    debug_output: Emit debug level output.

  Raises:
    cros_build_lib.RunCommandError: If emerge returns an error.
    ValueError: If |packages| is empty.
  """
  cros_build_lib.AssertInsideChroot()
  if not packages:
    raise ValueError('No packages provided')

  cmd = GetEmergeCommand(sysroot)
  cmd.append('-uNv')

  # Packages with local (workon) modifications must be rebuilt from source,
  # so force their reinstall and exclude them from binary-package usage.
  modified_packages = workon.ListModifiedWorkonPackages(
      sysroot_lib.Sysroot(sysroot))
  if modified_packages:
    mod_pkg_list = ' '.join(modified_packages)
    cmd += ['--reinstall-atoms=' + mod_pkg_list,
            '--usepkg-exclude=' + mod_pkg_list]

  cmd.append('--deep' if with_deps else '--nodeps')
  if use_binary:
    cmd += ['-g', '--with-bdeps=y']
    if sysroot == '/':
      # Only update toolchains in the chroot when binpkgs are available. The
      # toolchain rollout process only takes place when the chromiumos sdk
      # builder finishes a successful build and pushes out binpkgs.
      cmd += ['--useoldpkg-atoms=%s' % ' '.join(_GetToolchainPackages())]

  if rebuild_deps:
    cmd.append('--rebuild-if-unbuilt')
  if jobs:
    cmd.append('--jobs=%d' % jobs)
  if debug_output:
    cmd.append('--show-output')

  cros_build_lib.SudoRunCommand(cmd + packages)
+
+
def UpdateChroot(brick=None, board=None, update_host_packages=True):
  """Update the chroot."""
  # Run chroot update hooks.
  logging.notice('Updating the chroot. This may take several minutes.')
  cros_build_lib.RunCommand(
      [os.path.join(constants.CROSUTILS_DIR, 'run_chroot_version_hooks')],
      debug_level=logging.DEBUG)

  # Update toolchains.
  toolchain_cmd = [os.path.join(constants.CHROMITE_BIN_DIR,
                                'cros_setup_toolchains')]
  if brick:
    toolchain_cmd.append('--targets=bricks')
    toolchain_cmd.append('--include-bricks=%s' % brick.brick_locator)
  elif board:
    toolchain_cmd.append('--targets=boards')
    toolchain_cmd.append('--include-boards=%s' % board)
  cros_build_lib.SudoRunCommand(toolchain_cmd, debug_level=logging.DEBUG)

  # Update the host before updating the board.
  if update_host_packages:
    Emerge(list(_HOST_PKGS), '/', rebuild_deps=False)

  # Automatically discard all CONFIG_PROTECT'ed files. Those that are
  # protected should not be overwritten until the variable is changed.
  # Autodiscard is option "-9" followed by the "YES" confirmation.
  cros_build_lib.SudoRunCommand(['etc-update'], input='-9\nYES\n',
                                debug_level=logging.DEBUG)
+
+
def SetupBoard(brick=None, board=None, update_chroot=True,
               update_host_packages=True, use_binary=True):
  """Set up a sysroot for |brick| or |board| (either must be provided).

  This invokes UpdateChroot() with the given brick/board values, unless
  otherwise instructed.

  Args:
    brick: Brick object we need to set up a sysroot for.
    board: Board name to set up a sysroot for. Ignored if |brick| is provided.
    update_chroot: Whether we should update the chroot first.
    update_host_packages: Whether to update host packages in the chroot.
    use_binary: If okay to use binary packages during the update.

  Raises:
    ValueError: If neither |brick| nor |board| is provided.
  """
  # Fail fast: validate the arguments before running the (potentially very
  # slow) chroot update. Previously the ValueError was only raised after
  # UpdateChroot() had already completed.
  if not brick and not board:
    raise ValueError('Either brick or board must be provided')

  if update_chroot:
    UpdateChroot(brick=brick, board=board,
                 update_host_packages=update_host_packages)

  cmd = [os.path.join(constants.CROSUTILS_DIR, 'setup_board'),
         '--skip_toolchain_update', '--skip_chroot_upgrade']
  if brick:
    brick.GeneratePortageConfig()
    cmd.append('--brick=%s' % brick.brick_locator)
  else:
    cmd.append('--board=%s' % board)

  if not use_binary:
    cmd.append('--nousepkg')

  cros_build_lib.RunCommand(cmd)
+
+
def InitializeSysroots(blueprint):
  """Initialize the sysroots needed by |blueprint|.

  Creates the sysroot skeleton, writes its brick configuration, and
  generates the portage configuration for every brick the blueprint uses.

  Args:
    blueprint: a blueprint_lib.Blueprint object.
  """
  bsp = brick_lib.Brick(blueprint.GetBSP())

  # Create the brick stack.
  # Removing duplicates while preserving a sane behaviour is hard:
  # brbug.com/1029.
  brick_stack = []
  for brick_locator in blueprint.GetBricks():
    brick_stack.extend(brick_lib.Brick(brick_locator).BrickStack())

  # Regenerate the portage configuration for all bricks used by this blueprint.
  for b in blueprint.GetUsedBricks():
    b.GeneratePortageConfig()

  # The sysroot is keyed off the blueprint's friendly name.
  sysroot_path = cros_build_lib.GetSysroot(blueprint.FriendlyName())

  sysroot = sysroot_lib.Sysroot(sysroot_path)
  sysroot.CreateSkeleton()
  sysroot.WriteConfig(sysroot.GenerateBrickConfig(brick_stack, bsp))
  sysroot.GeneratePortageConfig()
  sysroot.UpdateToolchain()
+
+
def RunUnittests(sysroot, packages, extra_env=None, verbose=False,
                 retries=None):
  """Runs the unit tests for |packages|.

  Args:
    sysroot: Path to the sysroot to build the tests in.
    packages: List of packages to test.
    extra_env: Python dictionary containing the extra environment variable to
      pass to the build command.
    verbose: If True, show the output from emerge, even when the tests succeed.
    retries: Number of time we should retry a failed packages. If None, use
      parallel_emerge's default.

  Raises:
    RunCommandError if the unit tests failed.
  """
  # FEATURES=test makes portage run each package's src_test phase; built
  # test packages are kept in a dedicated PKGDIR so they do not pollute
  # the regular binary package cache.
  test_env = dict(extra_env) if extra_env else {}
  test_env['FEATURES'] = 'test'
  test_env['PKGDIR'] = os.path.join(sysroot, 'test-packages')

  cmd = [os.path.join(constants.CHROMITE_BIN_DIR, 'parallel_emerge'),
         '--sysroot=%s' % sysroot, '--nodeps', '--buildpkgonly']
  if verbose:
    cmd.append('--show-output')
  if retries is not None:
    cmd.append('--retries=%s' % retries)
  cmd.extend(packages)

  cros_build_lib.SudoRunCommand(cmd, extra_env=test_env, mute_output=False)
diff --git a/lib/chroot_util_unittest b/lib/chroot_util_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/chroot_util_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/chroot_util_unittest.py b/lib/chroot_util_unittest.py
new file mode 100644
index 0000000..d721871
--- /dev/null
+++ b/lib/chroot_util_unittest.py
@@ -0,0 +1,66 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests the chroot_util module."""
+
+from __future__ import print_function
+
+import itertools
+
+from chromite.lib import chroot_util
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+
+if cros_build_lib.IsInsideChroot():
+  from chromite.scripts import cros_list_modified_packages
+
+# pylint: disable=protected-access
+
+
class ChrootUtilTest(cros_build_lib_unittest.RunCommandTempDirTestCase):
  """Test class for the chroot_util functions."""

  def testEmerge(self):
    """Tests correct invocation of emerge."""
    packages = ['foo-app/bar', 'sys-baz/clap']
    # Pretend the first package has local modifications so Emerge's
    # --reinstall-atoms / --usepkg-exclude plumbing is exercised.
    self.PatchObject(cros_list_modified_packages, 'ListModifiedWorkonPackages',
                     return_value=[packages[0]])

    toolchain_packages = [
        'sys-devel/binutils',
        'sys-devel/gcc',
        'sys-kernel/linux-headers',
        'sys-libs/glibc',
        'sys-devel/gdb'
    ]
    self.PatchObject(chroot_util, '_GetToolchainPackages',
                     return_value=toolchain_packages)
    toolchain_package_list = ' '.join(toolchain_packages)

    # Exercise every combination of the Emerge() knobs.
    input_values = [
        ['/', '/build/thesysrootname'],  # sysroot
        [True, False],  # with_deps
        [True, False],  # rebuild_deps
        [True, False],  # use_binary
        [0, 1, 2, 3],   # jobs
        [True, False],  # debug_output
    ]
    inputs = itertools.product(*input_values)
    for (sysroot, with_deps, rebuild_deps, use_binary,
         jobs, debug_output) in inputs:
      chroot_util.Emerge(packages, sysroot=sysroot, with_deps=with_deps,
                         rebuild_deps=rebuild_deps, use_binary=use_binary,
                         jobs=jobs, debug_output=debug_output)
      # The emerge argv is the last positional argument of the most recent
      # mocked RunCommand invocation.
      cmd = self.rc.call_args_list[-1][0][-1]
      # Note: assertEqual, not the deprecated assertEquals alias.
      self.assertEqual(sysroot != '/',
                       any(p.startswith('--sysroot') for p in cmd))
      self.assertEqual(with_deps, '--deep' in cmd)
      self.assertEqual(not with_deps, '--nodeps' in cmd)
      self.assertEqual(rebuild_deps, '--rebuild-if-unbuilt' in cmd)
      self.assertEqual(use_binary, '-g' in cmd)
      self.assertEqual(use_binary, '--with-bdeps=y' in cmd)
      self.assertEqual(use_binary and sysroot == '/',
                       '--useoldpkg-atoms=%s' % toolchain_package_list in cmd)
      # jobs=0 is falsy, so Emerge omits the --jobs flag in that case.
      self.assertEqual(bool(jobs), '--jobs=%d' % jobs in cmd)
      self.assertEqual(debug_output, '--show-output' in cmd)
diff --git a/lib/cidb.py b/lib/cidb.py
new file mode 100644
index 0000000..b064572
--- /dev/null
+++ b/lib/cidb.py
@@ -0,0 +1,1281 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Continuous Integration Database Library."""
+
+from __future__ import print_function
+
import collections
import datetime
import functools
import glob
import os
import re
+try:
+  import sqlalchemy
+  import sqlalchemy.exc
+  import sqlalchemy.interfaces
+  from sqlalchemy import MetaData
+except ImportError:
+  raise AssertionError(
+      'Unable to import sqlalchemy. Please install this package by running '
+      '`sudo apt-get install python-sqlalchemy` or similar.')
+
+from chromite.cbuildbot import constants
+from chromite.lib import clactions
+from chromite.lib import cros_logging as logging
+from chromite.lib import factory
+from chromite.lib import graphite
+from chromite.lib import osutils
+from chromite.lib import retry_stats
+
+
+CIDB_MIGRATIONS_DIR = os.path.join(constants.CHROMITE_DIR, 'cidb',
+                                   'migrations')
+
+_RETRYABLE_OPERATIONAL_ERROR_CODES = (
+    1053,   # 'Server shutdown in progress'
+    2003,   # 'Can't connect to MySQL server'
+    2006,   # Error code 2006 'MySQL server has gone away' indicates that
+            # the connection used was closed or dropped
+    2013,   # 'Lost connection to MySQL server during query'
+            # TODO(akeshet): consider only retrying UPDATE queries against
+            # this error code, not INSERT queries, since we don't know
+            # whether the query completed before or after the connection
+            # lost.
+    2026,   # 'SSL connection error: unknown error number'
+)
+
+
+def _IsRetryableException(e):
+  """Determine whether a query should be retried based on exception.
+
+  Intended for use as a handler for retry_util.
+
+  Args:
+    e: The exception to be filtered.
+
+  Returns:
+    True if the query should be retried, False otherwise.
+  """
+  # Exceptions usually are raised as sqlalchemy.exc.OperationalError, but
+  # occasionally also escape as MySQLdb.OperationalError. Neither of those
+  # exception types inherit from one another, so we fall back to string matching
+  # on the exception name. See crbug.com/483654
+  if 'OperationalError' in str(type(e)):
+    # Unwrap the error till we get to the error raised by the DB backend.
+    e_orig = e
+    while hasattr(e_orig, 'orig'):
+      e_orig = e_orig.orig
+    error_code = e_orig.args[0] if len(e_orig.args) > 0 else -1
+    if error_code in _RETRYABLE_OPERATIONAL_ERROR_CODES:
+      if error_code != 2006:
+        logging.info('RETRYING cidb query due to %s.', e)
+      return True
+
+  return False
+
+
+def _RetrySuccessHandler(attempt):
+  """If a query succeeded after retry, log it."""
+  if attempt > 1:
+    logging.info('cidb query succeeded after %s retries', attempt - 1)
+
+
class DBException(Exception):
  """General exception class for this module.

  Base class for database errors raised by this module.
  """
+
+
class UnsupportedMethodException(DBException):
  """Raised when a call is made that the database does not support.

  Specifically, raised by the minimum_schema decorator when the connected
  database's schema version is lower than a method requires.
  """
+
+
def minimum_schema(min_version):
  """Generate a decorator to specify a minimum schema version for a method.

  This decorator should be applied only to instance methods of
  SchemaVersionedMySQLConnection objects.

  Args:
    min_version: The lowest schema version the decorated method supports.

  Returns:
    A decorator that raises UnsupportedMethodException when the decorated
    method is invoked on a connection whose schema_version is below
    |min_version|.
  """

  def decorator(f):
    # functools.wraps preserves the wrapped method's name and docstring so
    # introspection, logging, and debugging output stay meaningful.
    @functools.wraps(f)
    def wrapper(self, *args, **kwargs):
      if self.schema_version < min_version:
        raise UnsupportedMethodException()
      return f(self, *args, **kwargs)
    return wrapper
  return decorator
+
+
class StrictModeListener(sqlalchemy.interfaces.PoolListener):
  """This listener ensures that STRICT_ALL_TABLES for all connections."""

  def connect(self, dbapi_con, *_args, **_kwargs):
    # Force strict SQL mode so invalid data causes errors rather than
    # being silently coerced/truncated by MySQL.
    cursor = dbapi_con.cursor()
    cursor.execute("SET SESSION sql_mode='STRICT_ALL_TABLES'")
    cursor.close()
+
+
+# Tuple to keep arguments that modify SQL query retry behaviour of
+# SchemaVersionedMySQLConnection.
SqlConnectionRetryArgs = collections.namedtuple(
    'SqlConnectionRetryArgs', ['max_retry', 'sleep', 'backoff_factor'])
+
+
class SchemaVersionedMySQLConnection(object):
  """Connection to a database that is aware of its schema version."""

  SCHEMA_VERSION_TABLE_NAME = 'schemaVersionTable'
  SCHEMA_VERSION_COL = 'schemaVersion'

  def _UpdateConnectUrlArgs(self, key, db_credentials_dir, filename):
    """Read an argument for the sql connection from the given file.

    side effect: store argument in self._connect_url_args

    Args:
      key: Name of the argument to read.
      db_credentials_dir: The directory containing the credentials.
      filename: Name of the file to read.
    """
    file_path = os.path.join(db_credentials_dir, filename)
    if os.path.exists(file_path):
      self._connect_url_args[key] = osutils.ReadFile(file_path).strip()

  def _UpdateSslArgs(self, key, db_credentials_dir, filename):
    """Read an ssl argument for the sql connection from the given file.

    side effect: store argument in self._ssl_args

    Args:
      key: Name of the ssl argument to read.
      db_credentials_dir: The directory containing the credentials.
      filename: Name of the file to read.
    """
    file_path = os.path.join(db_credentials_dir, filename)
    if os.path.exists(file_path):
      if 'ssl' not in self._ssl_args:
        self._ssl_args['ssl'] = {}
      self._ssl_args['ssl'][key] = file_path

  def _UpdateConnectArgs(self, db_credentials_dir):
    """Update all connection args from |db_credentials_dir|."""
    self._UpdateConnectUrlArgs('host', db_credentials_dir, 'host.txt')
    self._UpdateConnectUrlArgs('port', db_credentials_dir, 'port.txt')
    self._UpdateConnectUrlArgs('username', db_credentials_dir, 'user.txt')
    self._UpdateConnectUrlArgs('password', db_credentials_dir, 'password.txt')

    self._UpdateSslArgs('cert', db_credentials_dir, 'client-cert.pem')
    self._UpdateSslArgs('key', db_credentials_dir, 'client-key.pem')
    self._UpdateSslArgs('ca', db_credentials_dir, 'server-ca.pem')

  def __init__(self, db_name, db_migrations_dir, db_credentials_dir,
               query_retry_args=SqlConnectionRetryArgs(8, 4, 2)):
    """SchemaVersionedMySQLConnection constructor.

    Args:
      db_name: Name of the database to connect to.
      db_migrations_dir: Absolute path to directory of migration scripts
                         for this database.
      db_credentials_dir: Absolute path to directory containing connection
                          information to the database. Specifically, this
                          directory may contain files named user.txt,
                          password.txt, host.txt, port.txt, client-cert.pem,
                          client-key.pem, and server-ca.pem. This object will
                          silently drop the relevant mysql commandline flags
                          for missing files in the directory.
      query_retry_args: An optional SqlConnectionRetryArgs tuple to tweak the
                        retry behaviour of SQL queries.
    """
    # None, or a sqlalchemy.MetaData instance
    self._meta = None

    # pid of process on which _engine was created
    self._engine_pid = None

    self._engine = None

    self.db_migrations_dir = db_migrations_dir
    self.db_credentials_dir = db_credentials_dir
    self.db_name = db_name
    self.query_retry_args = query_retry_args

    # mysql args that are optionally provided by files in db_credentials_dir
    self._connect_url_args = {}
    self._ssl_args = {}

    self._UpdateConnectArgs(db_credentials_dir)

    connect_url = sqlalchemy.engine.url.URL('mysql', **self._connect_url_args)

    # Create a temporary engine to connect to the mysql instance, and check if
    # a database named |db_name| exists. If not, create one. We use a temporary
    # engine here because the real engine will be opened with a default
    # database name given by |db_name|.
    temp_engine = sqlalchemy.create_engine(connect_url,
                                           connect_args=self._ssl_args,
                                           listeners=[StrictModeListener()])
    databases = self._ExecuteWithEngine('SHOW DATABASES',
                                        temp_engine).fetchall()
    if (db_name,) not in databases:
      self._ExecuteWithEngine('CREATE DATABASE %s' % db_name, temp_engine)
      logging.info('Created database %s', db_name)

    temp_engine.dispose()

    # Now create the persistent connection to the database named |db_name|.
    # If there is a schema version table, read the current schema version
    # from it. Otherwise, assume schema_version 0.
    self._connect_url_args['database'] = db_name
    self._connect_url = sqlalchemy.engine.url.URL('mysql',
                                                  **self._connect_url_args)

    self.schema_version = self.QuerySchemaVersion()

    logging.info('Created a SchemaVersionedMySQLConnection, '
                 'sqlalchemy version %s', sqlalchemy.__version__)

  def DropDatabase(self):
    """Delete all data and tables from database, and drop database.

    Use with caution. All data in database will be deleted. Invalidates
    this database connection instance.
    """
    self._meta = None
    self._Execute('DROP DATABASE %s' % self.db_name)
    self._InvalidateEngine()

  def QuerySchemaVersion(self):
    """Query the database for its current schema version number.

    Returns:
      The current schema version from the database's schema version table,
      as an integer, or 0 if the table is empty or nonexistent.
    """
    tables = self._Execute('SHOW TABLES').fetchall()
    if (self.SCHEMA_VERSION_TABLE_NAME,) in tables:
      r = self._Execute('SELECT MAX(%s) from %s' % (
          self.SCHEMA_VERSION_COL, self.SCHEMA_VERSION_TABLE_NAME))
      return r.fetchone()[0] or 0
    else:
      return 0

  def _GetMigrationScripts(self):
    """Look for migration scripts and return their versions and paths.

    Returns:
      A list of (schema_version, script_path) tuples of the migration
      scripts for this database, sorted in ascending schema_version order.
    """
    # Look for migration script files in the migration script directory,
    # with names of the form [number]*.sql, and sort these by number.
    migration_scripts = glob.glob(os.path.join(self.db_migrations_dir, '*.sql'))
    migrations = []
    for script in migration_scripts:
      match = re.match(r'([0-9]*).*', os.path.basename(script))
      if match:
        migrations.append((int(match.group(1)), script))

    migrations.sort()
    return migrations

  def ApplySchemaMigrations(self, maxVersion=None):
    """Apply pending migration scripts to database, in order.

    Args:
      maxVersion: The highest version migration script to apply. If
                  unspecified, all migrations found will be applied.

    Raises:
      DBException: If a migration script does not update the schema version
                   to its own number.
    """
    migrations = self._GetMigrationScripts()

    # Execute the migration scripts in order, asserting that each one
    # updates the schema version to the expected number. If maxVersion
    # is specified stop early.
    for (number, script) in migrations:
      if maxVersion is not None and number > maxVersion:
        break

      if number > self.schema_version:
        # Invalidate self._meta, then run script and ensure that schema
        # version was increased.
        self._meta = None
        logging.info('Running migration script %s', script)
        self.RunQueryScript(script)
        self.schema_version = self.QuerySchemaVersion()
        if self.schema_version != number:
          # Note: the format arguments are (script, number), matching the
          # "script ... version" order of the message. They were previously
          # swapped.
          raise DBException('Migration script %s did not update '
                            'schema version to %s as expected. ' % (script,
                                                                    number))

  def RunQueryScript(self, script_path):
    """Run a .sql script file located at |script_path| on the database."""
    with open(script_path, 'r') as f:
      script = f.read()
    queries = [q.strip() for q in script.split(';') if q.strip()]
    for q in queries:
      # This is intentionally not wrapped in retries.
      self._GetEngine().execute(q)

  def _ReflectToMetadata(self):
    """Use sqlalchemy reflection to construct MetaData model of database.

    If self._meta is already populated, this does nothing.
    """
    if self._meta is not None:
      return
    self._meta = MetaData()
    self._meta.reflect(bind=self._GetEngine())

  def _Insert(self, table, values):
    """Create and execute a one-row INSERT query.

    Args:
      table: Table name to insert to.
      values: Dictionary of column values to insert.

    Returns:
      Integer primary key of the inserted row.
    """
    self._ReflectToMetadata()
    ins = self._meta.tables[table].insert().values(values)
    r = self._Execute(ins)
    return r.inserted_primary_key[0]

  def _InsertMany(self, table, values):
    """Create and execute a multi-row INSERT query.

    Args:
      table: Table name to insert to.
      values: A list of value dictionaries to insert multiple rows.

    Returns:
      The number of inserted rows.
    """
    # sqlalchemy 0.7 and prior has a bug in which it does not always
    # correctly unpack a list of rows to multi-insert if the list contains
    # only one item.
    if len(values) == 1:
      self._Insert(table, values[0])
      return 1

    self._ReflectToMetadata()
    ins = self._meta.tables[table].insert()
    r = self._Execute(ins, *values)
    return r.rowcount

  def _GetPrimaryKey(self, table):
    """Gets the primary key column of |table|.

    This function requires that the given table have a 1-column primary key.

    Args:
      table: Name of table to get the primary key for.

    Returns:
      A sqlalchemy.sql.schema.Column representing the primary key column.

    Raises:
      DBException if the table does not have a single column primary key.
    """
    self._ReflectToMetadata()
    t = self._meta.tables[table]

    # TODO(akeshet): between sqlalchemy 0.7 and 0.8, a breaking change was
    # made to how t.columns and t.primary_key are stored, and in sqlalchemy
    # 0.7 t.columns does not have a .values() method. Hence this clumsy way
    # of extracting the primary key column. Currently, our builders have 0.7
    # installed. Once we drop support for 0.7, this code can be simply replaced
    # by:
    # key_columns = t.primary_key.columns.values()
    col_names = t.columns.keys()
    cols = [t.columns[n] for n in col_names]
    key_columns = [c for c in cols if c.primary_key]

    if len(key_columns) != 1:
      raise DBException('Table %s does not have a 1-column primary '
                        'key.' % table)
    return key_columns[0]

  def _Update(self, table, row_id, values):
    """Create and execute an UPDATE query by primary key.

    Args:
      table: Table name to update.
      row_id: Primary key value of row to update.
      values: Dictionary of column values to update.

    Returns:
      The number of rows that were updated (0 or 1).
    """
    self._ReflectToMetadata()
    primary_key = self._GetPrimaryKey(table)
    upd = self._meta.tables[table].update().where(
        primary_key == row_id).values(values)
    r = self._Execute(upd)
    return r.rowcount

  def _UpdateWhere(self, table, where, values):
    """Create and execute an update query with a custom where clause.

    Args:
      table: Table name to update.
      where: Raw SQL for the where clause, in string form, e.g.
             'build_id = 1 and board = "tomato"'
      values: dictionary of column values to update.

    Returns:
      The number of rows that were updated.
    """
    self._ReflectToMetadata()
    upd = self._meta.tables[table].update().where(where)
    r = self._Execute(upd, values)
    return r.rowcount

  def _Select(self, table, row_id, columns):
    """Create and execute a one-row one-table SELECT query by primary key.

    Args:
      table: Table name to select from.
      row_id: Primary key value of row to select.
      columns: List of column names to select.

    Returns:
      A column name to column value dict for the row found, if a row was found.
      None if no row was found.
    """
    self._ReflectToMetadata()
    primary_key = self._GetPrimaryKey(table)
    table_m = self._meta.tables[table]
    columns_m = [table_m.c[col_name] for col_name in columns]
    sel = sqlalchemy.sql.select(columns_m).where(primary_key == row_id)
    r = self._Execute(sel).fetchall()
    if r:
      assert len(r) == 1, 'Query by primary key returned more than 1 row.'
      return dict(zip(columns, r[0]))
    else:
      return None

  def _SelectWhere(self, table, where, columns):
    """Create and execute a one-table SELECT query with a custom where clause.

    Args:
      table: Table name to update.
      where: Raw SQL for the where clause, in string form, e.g.
             'build_id = 1 and board = "tomato"'
      columns: List of column names to select.

    Returns:
      A list of column name to column value dictionaries each representing
      a row that was selected.
    """
    self._ReflectToMetadata()
    table_m = self._meta.tables[table]
    columns_m = [table_m.c[col_name] for col_name in columns]
    sel = sqlalchemy.sql.select(columns_m).where(where)
    r = self._Execute(sel)
    return [dict(zip(columns, values)) for values in r.fetchall()]

  def _Execute(self, query, *args, **kwargs):
    """Execute a query with retries.

    This method executes a query using the engine credentials that
    were set up in the constructor for this object. If necessary,
    a new engine unique to this pid will be created.

    Args:
      query: Query to execute, of type string, or sqlalchemy.Executable,
             or other sqlalchemy-executable statement (see sqlalchemy
             docs).
      *args: Additional args passed along to .execute(...)
      **kwargs: Additional args passed along to .execute(...)

    Returns:
      The result of .execute(...)
    """
    return self._ExecuteWithEngine(query, self._GetEngine(),
                                   *args, **kwargs)

  def _ExecuteWithEngine(self, query, engine, *args, **kwargs):
    """Execute a query using |engine|, with retries.

    This method wraps execution of a query against an engine in retries.
    The engine will automatically create new connections if a prior connection
    was dropped.

    Args:
      query: Query to execute, of type string, or sqlalchemy.Executable,
             or other sqlalchemy-executable statement (see sqlalchemy
             docs).
      engine: sqlalchemy.engine to use.
      *args: Additional args passed along to .execute(...)
      **kwargs: Additional args passed along to .execute(...)

    Returns:
      The result of .execute(...)
    """
    f = lambda: engine.execute(query, *args, **kwargs)
    logging.info('Running cidb query on pid %s, repr(query) starts with %s',
                 os.getpid(), repr(query)[:100])
    return retry_stats.RetryWithStats(
        retry_stats.CIDB,
        handler=_IsRetryableException,
        max_retry=self.query_retry_args.max_retry,
        sleep=self.query_retry_args.sleep,
        backoff_factor=self.query_retry_args.backoff_factor,
        success_functor=_RetrySuccessHandler,
        raise_first_exception_on_failure=False,
        functor=f)

  def _GetEngine(self):
    """Get the sqlalchemy engine for this process.

    This method creates a new sqlalchemy engine if necessary, and
    returns an engine that is unique to this process.

    Returns:
      An sqlalchemy.engine instance for this database.
    """
    pid = os.getpid()
    if pid == self._engine_pid and self._engine:
      return self._engine
    else:
      e = sqlalchemy.create_engine(self._connect_url,
                                   connect_args=self._ssl_args,
                                   listeners=[StrictModeListener()])
      self._engine = e
      self._engine_pid = pid
      logging.info('Created cidb engine %s@%s for pid %s', e.url.username,
                   e.url.host, pid)
      return self._engine

  def _InvalidateEngine(self):
    """Dispose of an sqlalchemy engine."""
    try:
      pid = os.getpid()
      if pid == self._engine_pid and self._engine:
        self._engine.dispose()
    finally:
      self._engine = None
      self._meta = None
+
+
+class CIDBConnection(SchemaVersionedMySQLConnection):
+  """Connection to a Continuous Integration database."""
+
+  _SQL_FETCH_ACTIONS = (
+      'SELECT c.id, b.id, action, c.reason, build_config, '
+      'change_number, patch_number, change_source, timestamp FROM '
+      'clActionTable c JOIN buildTable b ON build_id = b.id ')
+  _DATE_FORMAT = '%Y-%m-%d'
+
+  NUM_RESULTS_NO_LIMIT = -1
+
  def __init__(self, db_credentials_dir, *args, **kwargs):
    """CIDBConnection constructor.

    Connects to the 'cidb' database using the standard cidb migrations
    directory.

    Args:
      db_credentials_dir: Directory containing cidb connection credentials.
      *args: Passed through to SchemaVersionedMySQLConnection.
      **kwargs: Passed through to SchemaVersionedMySQLConnection.
    """
    super(CIDBConnection, self).__init__('cidb', CIDB_MIGRATIONS_DIR,
                                         db_credentials_dir, *args, **kwargs)
+
+  def GetTime(self):
+    """Gets the current time, according to database.
+
+    Returns:
+      datetime.datetime instance.
+    """
+    return self._Execute('SELECT NOW()').fetchall()[0][0]
+
+  @minimum_schema(32)
+  def InsertBuild(self, builder_name, waterfall, build_number,
+                  build_config, bot_hostname, master_build_id=None,
+                  timeout_seconds=None):
+    """Insert a build row.
+
+    Args:
+      builder_name: buildbot builder name.
+      waterfall: buildbot waterfall name.
+      build_number: buildbot build number.
+      build_config: cbuildbot config of build
+      bot_hostname: hostname of bot running the build
+      master_build_id: (Optional) primary key of master build to this build.
+      timeout_seconds: (Optional) If provided, total time allocated for this
+          build. A deadline is recorded in cidb for the current build to end.
+    """
+    values = {
+        'builder_name': builder_name,
+        'buildbot_generation': constants.BUILDBOT_GENERATION,
+        'waterfall': waterfall,
+        'build_number': build_number,
+        'build_config': build_config,
+        'bot_hostname': bot_hostname,
+        'start_time': sqlalchemy.func.current_timestamp(),
+        'master_build_id': master_build_id}
+    if timeout_seconds is not None:
+      now = self.GetTime()
+      duration = datetime.timedelta(seconds=timeout_seconds)
+      values.update({'deadline': now + duration})
+
+    return self._Insert('buildTable', values)
+
+  @minimum_schema(3)
+  def InsertCLActions(self, build_id, cl_actions):
+    """Insert a list of |cl_actions|.
+
+    If |cl_actions| is empty, this function does nothing.
+
+    Args:
+      build_id: primary key of build that performed these actions.
+      cl_actions: A list of CLAction objects.
+
+    Returns:
+      Number of actions inserted.
+    """
+    if not cl_actions:
+      return 0
+
+    values = []
+    for cl_action in cl_actions:
+      change_number = cl_action.change_number
+      patch_number = cl_action.patch_number
+      change_source = cl_action.change_source
+      action = cl_action.action
+      reason = cl_action.reason
+      values.append({
+          'build_id': build_id,
+          'change_source': change_source,
+          'change_number': change_number,
+          'patch_number': patch_number,
+          'action': action,
+          'reason': reason})
+
+    retval = self._InsertMany('clActionTable', values)
+
+    stats = graphite.StatsFactory.GetInstance()
+    for cl_action in cl_actions:
+      r = cl_action.reason or 'no_reason'
+      # TODO(akeshet) This is a slightly hacky workaround for the fact that our
+      # strategy reasons contain a ':', but statsd considers that character to
+      # be a name terminator.
+      r = r.replace(':', '_')
+      stats.Counter('.'.join(['cl_actions', cl_action.action])).increment(r)
+
+    return retval
+
+  @minimum_schema(6)
+  def InsertBoardPerBuild(self, build_id, board):
+    """Inserts a board-per-build entry into database.
+
+    Args:
+      build_id: primary key of the build in the buildTable
+      board: String board name.
+    """
+    self._Insert('boardPerBuildTable', {'build_id': build_id,
+                                        'board': board})
+
+  @minimum_schema(7)
+  def InsertChildConfigPerBuild(self, build_id, child_config):
+    """Insert a child-config-per-build entry into database.
+
+    Args:
+      build_id: primary key of the build in the buildTable
+      child_config: String child_config name.
+    """
+    self._Insert('childConfigPerBuildTable', {'build_id': build_id,
+                                              'child_config': child_config})
+
+  @minimum_schema(28)
+  def InsertBuildStage(self, build_id, name, board=None,
+                       status=constants.BUILDER_STATUS_PLANNED):
+    """Insert a build stage entry into database.
+
+    Args:
+      build_id: primary key of the build in buildTable
+      name: Full name of build stage.
+      board: (Optional) board name, if this is a board-specific stage.
+      status: (Optional) stage status, one of constants.BUILDER_ALL_STATUSES.
+              Default constants.BUILDER_STATUS_PLANNED.
+
+    Returns:
+      Integer primary key of inserted stage, i.e. build_stage_id
+    """
+    return self._Insert('buildStageTable', {'build_id': build_id,
+                                            'name': name,
+                                            'board': board,
+                                            'status': status})
+
+  @minimum_schema(29)
+  def InsertFailure(self, build_stage_id, exception_type, exception_message,
+                    exception_category=constants.EXCEPTION_CATEGORY_UNKNOWN,
+                    outer_failure_id=None,
+                    extra_info=None):
+    """Insert a failure description into database.
+
+    Args:
+      build_stage_id: primary key, in buildStageTable, of the stage where
+                      failure occurred.
+      exception_type: str name of the exception class.
+      exception_message: str description of the failure.
+      exception_category: (Optional) one of
+                          constants.EXCEPTION_CATEGORY_ALL_CATEGORIES,
+                          Default: 'unknown'.
+      outer_failure_id: (Optional) primary key of outer failure which contains
+                        this failure. Used to store CompoundFailure
+                        relationship.
+      extra_info: (Optional) extra category-specific string description giving
+                  failure details. Used for programmatic triage.
+    """
+    if exception_message:
+      exception_message = exception_message[:240]
+    values = {'build_stage_id': build_stage_id,
+              'exception_type': exception_type,
+              'exception_message': exception_message,
+              'exception_category': exception_category,
+              'outer_failure_id': outer_failure_id,
+              'extra_info': extra_info}
+    return self._Insert('failureTable', values)
+
+  @minimum_schema(2)
+  def UpdateMetadata(self, build_id, metadata):
+    """Update the given metadata row in database.
+
+    Args:
+      build_id: id of row to update.
+      metadata: CBuildbotMetadata instance to update with.
+
+    Returns:
+      The number of build rows that were updated (0 or 1).
+    """
+    d = metadata.GetDict()
+    versions = d.get('version') or {}
+    return self._Update('buildTable', build_id,
+                        {'chrome_version': versions.get('chrome'),
+                         'milestone_version': versions.get('milestone'),
+                         'platform_version': versions.get('platform'),
+                         'full_version': versions.get('full'),
+                         'sdk_version': d.get('sdk-versions'),
+                         'toolchain_url': d.get('toolchain-url'),
+                         'build_type': d.get('build_type')})
+
+  @minimum_schema(32)
+  def ExtendDeadline(self, build_id, timeout_seconds):
+    """Extend the deadline for this build.
+
+    Args:
+      build_id: primary key, in buildTable, of the build for which deadline
+          should be extended.
+      timeout_seconds: Time remaining for the deadline from the current time.
+
+    Returns:
+      Number of rows updated (1 for success, 0 for failure)
+      Deadline extension can fail if
+        (1) The deadline is already past, or
+        (2) The new deadline requested is earlier than the original deadline.
+    """
+    return self._Execute(
+        'UPDATE buildTable SET deadline = NOW() + INTERVAL %d SECOND WHERE '
+        'id = %d AND '
+        '(deadline = 0 OR deadline > NOW()) AND '
+        'NOW() + INTERVAL %d SECOND > deadline'
+        % (timeout_seconds, build_id, timeout_seconds)
+        ).rowcount
+
+  @minimum_schema(6)
+  def UpdateBoardPerBuildMetadata(self, build_id, board, board_metadata):
+    """Update the given board-per-build metadata.
+
+    Args:
+      build_id: id of the build
+      board: board to update
+      board_metadata: per-board metadata dict for this board
+    """
+    update_dict = {
+        'main_firmware_version': board_metadata.get('main-firmware-version'),
+        'ec_firmware_version': board_metadata.get('ec-firmware-version'),
+    }
+    return self._UpdateWhere(
+        'boardPerBuildTable',
+        'build_id = %d and board = "%s"' % (build_id, board),
+        update_dict)
+
+  @minimum_schema(28)
+  def StartBuildStage(self, build_stage_id):
+    """Marks a build stage as inflight, in the database.
+
+    Args:
+      build_stage_id: primary key of the build stage in buildStageTable.
+    """
+    current_timestamp = sqlalchemy.func.current_timestamp()
+    return self._Update(
+        'buildStageTable',
+        build_stage_id,
+        {'status': constants.BUILDER_STATUS_INFLIGHT,
+         'start_time': current_timestamp})
+
+  @minimum_schema(28)
+  def FinishBuildStage(self, build_stage_id, status):
+    """Marks a build stage as finished, in the database.
+
+    Args:
+      build_stage_id: primary key of the build stage in buildStageTable.
+      status: one of constants.BUILDER_COMPLETED_STATUSES
+    """
+    current_timestamp = sqlalchemy.func.current_timestamp()
+    return self._Update(
+        'buildStageTable',
+        build_stage_id,
+        {'status': status,
+         'finish_time': current_timestamp,
+         'final': True})
+
+  @minimum_schema(25)
+  def FinishBuild(self, build_id, status=None, summary=None, metadata_url=None):
+    """Update the given build row, marking it as finished.
+
+    This should be called once per build, as the last update to the build.
+    This will also mark the row's final=True.
+
+    Args:
+      build_id: id of row to update.
+      status: Final build status, one of
+              constants.BUILDER_COMPLETED_STATUSES.
+      summary: A summary of the build (failures) collected from all slaves.
+      metadata_url: google storage url to metadata.json file for this build,
+                    e.g. ('gs://chromeos-image-archive/master-paladin/'
+                          'R39-6225.0.0-rc1/metadata.json')
+    """
+    self._ReflectToMetadata()
+    if summary:
+      summary = summary[:1024]
+    # The current timestamp is evaluated on the database, not locally.
+    current_timestamp = sqlalchemy.func.current_timestamp()
+    self._Update('buildTable', build_id, {'finish_time': current_timestamp,
+                                          'status': status,
+                                          'summary': summary,
+                                          'metadata_url': metadata_url,
+                                          'final': True})
+
+
+  @minimum_schema(16)
+  def FinishChildConfig(self, build_id, child_config, status=None):
+    """Marks the given child config as finished with |status|.
+
+    This should be called before FinishBuild, on all child configs that
+    were used in a build.
+
+    Args:
+      build_id: primary key of the build in the buildTable
+      child_config: String child_config name.
+      status: Final child_config status, one of
+              constants.BUILDER_COMPLETED_STATUSES or None
+              for default "inflight".
+    """
+    self._Execute(
+        'UPDATE childConfigPerBuildTable '
+        'SET status="%s", final=1 '
+        'WHERE (build_id, child_config) = (%d, "%s")' %
+        (status, build_id, child_config))
+
+
+  @minimum_schema(2)
+  def GetBuildStatus(self, build_id):
+    """Gets the status of the build.
+
+    Args:
+      build_id: build id to fetch.
+
+    Returns:
+      Dictionary for a single build (see GetBuildStatuses for keys) or
+      None if no build with this id was found.
+    """
+    statuses = self.GetBuildStatuses([build_id])
+    return statuses[0] if statuses else None
+
+  @minimum_schema(2)
+  def GetBuildStatuses(self, build_ids):
+    """Gets the statuses of the builds.
+
+    Args:
+      build_ids: A list of build id to fetch.
+
+    Returns:
+      A list of dictionaries, one per build found, with keys (id,
+      build_config, start_time, finish_time, status, waterfall,
+      build_number, builder_name, platform_version, full_version).
+      Builds whose ids are not found are omitted from the list.
+    """
+    return self._SelectWhere(
+        'buildTable',
+        'id IN (%s)' % ','.join(str(int(x)) for x in build_ids),
+        ['id', 'build_config', 'start_time', 'finish_time', 'status',
+         'waterfall', 'build_number', 'builder_name', 'platform_version',
+         'full_version'])
+
+  @minimum_schema(2)
+  def GetSlaveStatuses(self, master_build_id):
+    """Gets the statuses of slave builders to given build.
+
+    Args:
+      master_build_id: build id of the master build to fetch the slave
+                       statuses for.
+
+    Returns:
+      A list containing, for each slave build (row) found, a dictionary
+      with keys (id, build_config, start_time, finish_time, status).
+    """
+    return self._SelectWhere('buildTable',
+                             'master_build_id = %d' % master_build_id,
+                             ['id', 'build_config', 'start_time',
+                              'finish_time', 'status'])
+
+  @minimum_schema(30)
+  def GetSlaveStages(self, master_build_id):
+    """Gets all the stages of slave builds to given build.
+
+    Args:
+      master_build_id: build id of the master build to fetch the slave
+                       stages for.
+
+    Returns:
+      A list containing, for each stage of each slave build found,
+      a dictionary with keys (id, build_id, name, board, status, last_updated,
+      start_time, finish_time, final, build_config).
+    """
+    bs_table_columns = ['id', 'build_id', 'name', 'board', 'status',
+                        'last_updated', 'start_time', 'finish_time', 'final']
+    bs_prepended_columns = ['bs.' + x for x in bs_table_columns]
+    results = self._Execute(
+        'SELECT %s, b.build_config FROM buildStageTable bs JOIN buildTable b '
+        'ON build_id = b.id where b.master_build_id = %d' %
+        (', '.join(bs_prepended_columns), master_build_id)).fetchall()
+    columns = bs_table_columns + ['build_config']
+    return [dict(zip(columns, values)) for values in results]
+
+  @minimum_schema(32)
+  def GetTimeToDeadline(self, build_id):
+    """Gets the time remaining till the deadline for given build_id.
+
+    Always use this function to find time remaining to a deadline. This function
+    computes all times on the database. You run the risk of hitting timezone
+    issues if you compute remaining time locally.
+
+    Args:
+      build_id: The build_id of the build to query.
+
+    Returns:
+      The time remaining to the deadline in seconds.
+      0 if the deadline is already past.
+      None if no deadline is found.
+    """
+    # Sign information is lost in the timediff coercion into python
+    # datetime.timedelta type. So, we must find out if the deadline is past
+    # separately.
+    r = self._Execute(
+        'SELECT deadline >= NOW(), TIMEDIFF(deadline, NOW()) '
+        'from buildTable where id = %d' % build_id).fetchall()
+    if not r:
+      return None
+
+    time_remaining = r[0][1]
+    if time_remaining is None:
+      return None
+
+    deadline_past = (r[0][0] == 0)
+    return 0 if deadline_past else abs(time_remaining.total_seconds())
+
+  @minimum_schema(2)
+  def GetBuildHistory(self, build_config, num_results,
+                      ignore_build_id=None, start_date=None, end_date=None,
+                      starting_build_number=None):
+    """Returns basic information about most recent builds.
+
+    By default this function returns the most recent builds. Some arguments can
+    restrict the result to older builds.
+
+    Args:
+      build_config: config name of the build.
+      num_results: Number of builds to search back. Set this to
+          CIDBConnection.NUM_RESULTS_NO_LIMIT to request no limit on the number
+          of results.
+      ignore_build_id: (Optional) Ignore a specific build. This is most useful
+          to ignore the current build when querying recent past builds from a
+          build in flight.
+      start_date: (Optional, type: datetime.date) Get builds that occurred on
+          or after this date.
+      end_date: (Optional, type: datetime.date) Get builds that occurred on or
+          before this date.
+      starting_build_number: (Optional) The minimum build_number on the CQ
+          master for which data should be retrieved.
+
+    Returns:
+      A sorted list of up to |num_results| dicts containing build statuses
+      in descending order of build id. Valid keys in each dictionary are
+      [id, build_config, buildbot_generation, waterfall, build_number,
+      start_time, finish_time, platform_version, full_version, status].
+    """
+    columns = ['id', 'build_config', 'buildbot_generation', 'waterfall',
+               'build_number', 'start_time', 'finish_time', 'platform_version',
+               'full_version', 'status']
+
+    where_clauses = ['build_config = "%s"' % build_config]
+    if start_date is not None:
+      where_clauses.append('date(start_time) >= date("%s")' %
+                           start_date.strftime(self._DATE_FORMAT))
+    if end_date is not None:
+      where_clauses.append('date(start_time) <= date("%s")' %
+                           end_date.strftime(self._DATE_FORMAT))
+    if starting_build_number is not None:
+      where_clauses.append('build_number >= %d' % starting_build_number)
+    if ignore_build_id is not None:
+      where_clauses.append('id != %d' % ignore_build_id)
+    query = (
+        'SELECT %s'
+        ' FROM buildTable'
+        ' WHERE %s'
+        ' ORDER BY id DESC' %
+        (', '.join(columns), ' AND '.join(where_clauses)))
+    if num_results != self.NUM_RESULTS_NO_LIMIT:
+      query += ' LIMIT %d' % num_results
+
+    results = self._Execute(query).fetchall()
+    return [dict(zip(columns, values)) for values in results]
+
+  @minimum_schema(26)
+  def GetAnnotationsForBuilds(self, build_ids):
+    """Returns the annotations for given build_ids.
+
+    Args:
+      build_ids: list of build_ids for which annotations are requested.
+
+    Returns:
+      {id: annotations} where annotations is itself a list of dicts containing
+      annotations. Valid keys in annotations are [failure_category,
+      failure_message, blame_url, notes].
+    """
+    columns_to_report = ['failure_category', 'failure_message',
+                         'blame_url', 'notes']
+    where_or_clauses = []
+    for build_id in build_ids:
+      where_or_clauses.append('build_id = %d' % build_id)
+    annotations = self._SelectWhere('annotationsTable',
+                                    ' OR '.join(where_or_clauses),
+                                    ['build_id'] + columns_to_report)
+
+    results = {}
+    for annotation in annotations:
+      build_id = annotation['build_id']
+      if build_id not in results:
+        results[build_id] = []
+      results[build_id].append(annotation)
+    return results
+
+  @minimum_schema(11)
+  def GetActionsForChanges(self, changes):
+    """Gets all the actions for the given changes.
+
+    Note, this includes all patches of the given changes.
+
+    Args:
+      changes: A list of GerritChangeTuple, GerritPatchTuple or GerritPatch
+               specifying the changes to whose actions should be fetched.
+
+    Returns:
+      A list of CLAction instances, in action id order.
+    """
+    if not changes:
+      return []
+
+    clauses = []
+    # Note: We are using a string of OR statements rather than a 'WHERE IN'
+    # style clause, because 'WHERE IN' does not make use of multi-column
+    # indexes, and therefore has poor performance with a large table.
+    for change in changes:
+      change_number = int(change.gerrit_number)
+      change_source = 'internal' if change.internal else 'external'
+      clauses.append('(change_number, change_source) = (%d, "%s")' %
+                     (change_number, change_source))
+    clause = ' OR '.join(clauses)
+    results = self._Execute(
+        '%s WHERE %s' % (self._SQL_FETCH_ACTIONS, clause)).fetchall()
+    return [clactions.CLAction(*values) for values in results]
+
+  @minimum_schema(11)
+  def GetActionHistory(self, start_date, end_date=None):
+    """Get the action history of CLs in the specified range.
+
+    This will get the full action history of any patches that were touched
+    by the CQ or Pre-CQ during the specified time range. Note: Since this
+    includes the full action history of these patches, it may include actions
+    outside the time range.
+
+    Args:
+      start_date: (Type: datetime.date) The first date on which you want action
+          history.
+      end_date: (Optional, type: datetime.date) The last date on which you
+          want action history.
+    """
+    values = {'start_date': start_date.strftime(self._DATE_FORMAT),
+              'end_date': end_date.strftime(self._DATE_FORMAT)}
+
+    # Enforce start and end date.
+    conds = 'DATE(timestamp) >= %(start_date)s'
+    if end_date:
+      conds += ' AND DATE(timestamp) <= %(end_date)s'
+
+    changes = ('SELECT DISTINCT change_number, patch_number, change_source '
+               'FROM clActionTable WHERE %s' % conds)
+    query = '%s NATURAL JOIN (%s) as w' % (self._SQL_FETCH_ACTIONS, changes)
+    results = self._Execute(query, values).fetchall()
+    return clactions.CLActionHistory(clactions.CLAction(*values)
+                                     for values in results)
+
+  @minimum_schema(29)
+  def HasBuildStageFailed(self, build_stage_id):
+    """Determine whether a build stage has failed according to cidb.
+
+    Args:
+      build_stage_id: The id of the build_stage to query for.
+
+    Returns:
+      True if there is a failure reported for this build stage to cidb.
+    """
+    failures = self._SelectWhere('failureTable',
+                                 'build_stage_id = %d' % build_stage_id,
+                                 ['id'])
+    return bool(failures)
+
+  @minimum_schema(40)
+  def GetKeyVals(self):
+    """Get key-vals from keyvalTable.
+
+    Returns:
+      A dictionary of {key: value} strings (values may also be None).
+    """
+    results = self._Execute('SELECT k, v FROM keyvalTable').fetchall()
+    return dict(results)
+
+
+def _INV():
+  raise AssertionError('CIDB connection factory has been invalidated.')
+
+CONNECTION_TYPE_PROD = 'prod'   # production database
+CONNECTION_TYPE_DEBUG = 'debug' # debug database, used by --debug builds
+CONNECTION_TYPE_MOCK = 'mock'   # mock connection, not backed by database
+CONNECTION_TYPE_NONE = 'none'   # explicitly no connection
+CONNECTION_TYPE_INV = 'invalid' # invalidated connection
+
+class CIDBConnectionFactoryClass(factory.ObjectFactory):
+  """Factory class used by builders to fetch the appropriate cidb connection"""
+
+  _CIDB_CONNECTION_TYPES = {
+      CONNECTION_TYPE_PROD: factory.CachedFunctionCall(
+          lambda: CIDBConnection(constants.CIDB_PROD_BOT_CREDS)),
+      CONNECTION_TYPE_DEBUG: factory.CachedFunctionCall(
+          lambda: CIDBConnection(constants.CIDB_DEBUG_BOT_CREDS)),
+      CONNECTION_TYPE_MOCK: None,
+      CONNECTION_TYPE_NONE: lambda: None,
+      CONNECTION_TYPE_INV: _INV,
+      }
+
+  def _allowed_cidb_transition(self, from_setup, to_setup):
+    # Always allow factory to be invalidated.
+    if to_setup == CONNECTION_TYPE_INV:
+      return True
+
+    # Allow transition invalid -> mock.
+    if from_setup == CONNECTION_TYPE_INV and to_setup == CONNECTION_TYPE_MOCK:
+      return True
+
+    # Otherwise, only allow transitions between none -> none, mock -> mock, and
+    # mock -> none.
+    if from_setup == CONNECTION_TYPE_MOCK:
+      if to_setup in (CONNECTION_TYPE_NONE, CONNECTION_TYPE_MOCK):
+        return True
+    if from_setup == CONNECTION_TYPE_NONE and to_setup == from_setup:
+      return True
+    return False
+
+  def __init__(self):
+    super(CIDBConnectionFactoryClass, self).__init__(
+        'cidb connection', self._CIDB_CONNECTION_TYPES,
+        self._allowed_cidb_transition)
+
+  def IsCIDBSetup(self):
+    """Returns True iff GetCIDBConnectionForBuilder is ready to be called."""
+    return self.is_setup
+
+  def GetCIDBConnectionType(self):
+    """Returns the type of db connection that is set up.
+
+    Returns:
+      One of ('prod', 'debug', 'mock', 'none', 'invalid', None)
+    """
+    return self.setup_type
+
+  def InvalidateCIDBSetup(self):
+    """Invalidate the CIDB connection factory.
+
+    This method may be called at any time, even after a setup method. Once
+    this is called, future calls to GetCIDBConnectionForBuilder will raise
+    an assertion error.
+    """
+    self.Setup(CONNECTION_TYPE_INV)
+
+  def SetupProdCidb(self):
+    """Sets up CIDB to use the prod instance of the database.
+
+    May be called only once, and may not be called after any other CIDB Setup
+    method, otherwise it will raise an AssertionError.
+    """
+    self.Setup(CONNECTION_TYPE_PROD)
+
+
+  def SetupDebugCidb(self):
+    """Sets up CIDB to use the debug instance of the database.
+
+    May be called only once, and may not be called after any other CIDB Setup
+    method, otherwise it will raise an AssertionError.
+    """
+    self.Setup(CONNECTION_TYPE_DEBUG)
+
+  def SetupMockCidb(self, mock_cidb=None):
+    """Sets up CIDB to use a mock object. May be called more than once.
+
+    Args:
+      mock_cidb: (optional) The mock cidb object to be returned by
+                 GetCIDBConnection. If not supplied, then CIDB will be
+                 considered not set up, but future calls to set up a
+                 non-(mock or None) connection will fail.
+    """
+    self.Setup(CONNECTION_TYPE_MOCK, mock_cidb)
+
+  def SetupNoCidb(self):
+    """Sets up CIDB to use an explicit None connection.
+
+    May be called more than once, or after SetupMockCidb.
+    """
+    self.Setup(CONNECTION_TYPE_NONE)
+
+  def ClearMock(self):
+    """Clear a mock CIDB object.
+
+    This method clears a cidb mock object, but leaves the connection factory
+    in _CONNECTION_TYPE_MOCK, so that future calls to set up a non-mock
+    cidb will fail.
+    """
+    self.SetupMockCidb()
+
+  def GetCIDBConnectionForBuilder(self):
+    """Get a CIDBConnection.
+
+    A call to one of the CIDB Setup methods must have been made before calling
+    this factory method.
+
+    Returns:
+      A CIDBConnection instance connected to either the prod or debug
+      instance of the database, or a mock connection, depending on which
+      Setup method was called. Returns None if CIDB has been explicitly
+      set up with no connection via SetupNoCidb.
+    """
+    return self.GetInstance()
+
+  def _ClearCIDBSetup(self):
+    """Clears the CIDB Setup state. For testing purposes only."""
+    self._clear_setup()
+
+CIDBConnectionFactory = CIDBConnectionFactoryClass()
diff --git a/lib/cidb_integration_test b/lib/cidb_integration_test
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/cidb_integration_test
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/cidb_integration_test.py b/lib/cidb_integration_test.py
new file mode 100644
index 0000000..121a0d8
--- /dev/null
+++ b/lib/cidb_integration_test.py
@@ -0,0 +1,709 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Integration tests for cidb.py module."""
+
+from __future__ import print_function
+
+import datetime
+import glob
+import os
+import random
+import shutil
+import time
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import metadata_lib
+from chromite.lib import cidb
+from chromite.lib import clactions
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import parallel
+
+
+# pylint: disable=protected-access
+
+# Used to ensure that all build_number values we use are unique.
+def _random():
+  return random.randint(1, 1000000000)
+
+
+SERIES_0_TEST_DATA_PATH = os.path.join(
+    constants.CHROMITE_DIR, 'cidb', 'test_data', 'series_0')
+
+SERIES_1_TEST_DATA_PATH = os.path.join(
+    constants.CHROMITE_DIR, 'cidb', 'test_data', 'series_1')
+
+
+class CIDBIntegrationTest(cros_test_lib.LocalSqlServerTestCase):
+  """Base class for cidb tests that connect to a test MySQL instance."""
+
+  CIDB_USER_ROOT = 'root'
+  CIDB_USER_BOT = 'bot'
+  CIDB_USER_READONLY = 'readonly'
+
+  CIDB_CREDS_DIR = {
+      CIDB_USER_BOT: os.path.join(constants.SOURCE_ROOT, 'crostools', 'cidb',
+                                  'cidb_test_bot'),
+      CIDB_USER_READONLY: os.path.join(constants.SOURCE_ROOT, 'crostools',
+                                       'cidb', 'cidb_test_readonly'),
+  }
+
+  def LocalCIDBConnection(self, cidb_user):
+    """Create a CIDBConnection with the local mysqld instance.
+
+    Args:
+      cidb_user: The mysql user to connect as.
+
+    Returns:
+      The created CIDBConnection object.
+    """
+    creds_dir_path = os.path.join(self.tempdir, 'local_cidb_creds')
+    osutils.RmDir(creds_dir_path, ignore_missing=True)
+    osutils.SafeMakedirs(creds_dir_path)
+
+    osutils.WriteFile(os.path.join(creds_dir_path, 'host.txt'),
+                      self.mysqld_host)
+    osutils.WriteFile(os.path.join(creds_dir_path, 'port.txt'),
+                      str(self.mysqld_port))
+    osutils.WriteFile(os.path.join(creds_dir_path, 'user.txt'), cidb_user)
+
+    if cidb_user in self.CIDB_CREDS_DIR:
+      shutil.copy(os.path.join(self.CIDB_CREDS_DIR[cidb_user], 'password.txt'),
+                  creds_dir_path)
+
+    return cidb.CIDBConnection(
+        creds_dir_path,
+        query_retry_args=cidb.SqlConnectionRetryArgs(4, 1, 1.1))
+
+  def _PrepareFreshDatabase(self, max_schema_version=None):
+    """Create an empty database with migrations applied.
+
+    Args:
+      max_schema_version: The highest schema version migration to apply;
+          defaults to None, in which case all migrations will be applied.
+
+    Returns:
+      A CIDBConnection instance, connected to an empty database as the
+      root user.
+    """
+    # Note: We do not use the cidb.CIDBConnectionFactory
+    # in this module. That factory method is used only to construct
+    # connections as the bot user, which is how the builders will always
+    # connect to the database. In this module, however, we need to test
+    # database connections as other mysql users.
+
+    # Connect to database and drop its contents.
+    db = self.LocalCIDBConnection(self.CIDB_USER_ROOT)
+    db.DropDatabase()
+
+    # Connect to now fresh database and apply migrations.
+    db = self.LocalCIDBConnection(self.CIDB_USER_ROOT)
+    db.ApplySchemaMigrations(max_schema_version)
+
+    return db
+
+  def _PrepareDatabase(self):
+    """Prepares a database at the latest known schema version.
+
+    If database already exists, do not delete existing database. This
+    optimization can save a lot of time, when used by tests that do not
+    require an empty database.
+    """
+    # Connect to the database (which may already exist) and apply migrations.
+    db = self.LocalCIDBConnection(self.CIDB_USER_ROOT)
+    db.ApplySchemaMigrations()
+
+    return db
+
+
class CIDBMigrationsTest(CIDBIntegrationTest):
  """Test that all migrations apply correctly."""

  def testMigrations(self):
    """Test that all migrations apply in bulk correctly."""
    self._PrepareFreshDatabase()

  def testIncrementalMigrations(self):
    """Test that all migrations apply incrementally correctly."""
    db = self._PrepareFreshDatabase(0)
    # The migration list is ordered; its last entry carries the highest
    # schema version number.
    highest_version = db._GetMigrationScripts()[-1][0]
    for version in range(1, highest_version + 1):
      db.ApplySchemaMigrations(version)

  def testActions(self):
    """Test that InsertCLActions accepts 0-, 1-, and multi-item lists."""
    db = self._PrepareDatabase()
    build_id = db.InsertBuild('my builder', 'chromiumos', _random(),
                              'my config', 'my bot hostname')

    # Three identical picked-up actions against the same fake patch.
    picked_up = [
        clactions.CLAction.FromGerritPatchAndAction(
            metadata_lib.GerritPatchTuple(1, 1, True),
            constants.CL_ACTION_PICKED_UP)
        for _ in range(3)]

    # Empty, single-item and multi-item inserts must all be accepted.
    db.InsertCLActions(build_id, [])
    db.InsertCLActions(build_id, picked_up[:1])
    db.InsertCLActions(build_id, picked_up[1:])

    action_count = db._GetEngine().execute(
        'select count(*) from clActionTable').fetchall()[0][0]
    self.assertEqual(action_count, 3)

    # Test that all known CL action types can be inserted
    fake_patch = metadata_lib.GerritPatchTuple(1, 1, True)
    db.InsertCLActions(
        build_id,
        [clactions.CLAction.FromGerritPatchAndAction(fake_patch, action)
         for action in constants.CL_ACTIONS])

  def testWaterfallMigration(self):
    """Test that migrating waterfall from enum to varchar preserves value."""
    # Insert a build at schema 40, then migrate to 41 and verify the
    # waterfall value survived the column type change.
    db = self._PrepareFreshDatabase(40)
    build_id = db.InsertBuild('my builder', 'chromiumos', _random(),
                              'my config', 'my bot hostname')
    db.ApplySchemaMigrations(41)
    self.assertEqual('chromiumos', db.GetBuildStatus(build_id)['waterfall'])
+
+
class CIDBAPITest(CIDBIntegrationTest):
  """Tests of the CIDB API."""

  def testSchemaVersionTooLow(self):
    """The minimum_schema decorator rejects calls on a too-old schema."""
    db = self._PrepareFreshDatabase(2)
    with self.assertRaises(cidb.UnsupportedMethodException):
      db.InsertCLActions(0, [])

  def testSchemaVersionOK(self):
    """The minimum_schema decorator allows calls on a new-enough schema."""
    db = self._PrepareFreshDatabase(4)
    db.InsertCLActions(0, [])

  def testGetTime(self):
    """GetTime returns the current database time as a datetime."""
    db = self._PrepareFreshDatabase(1)
    self.assertEqual(type(db.GetTime()), datetime.datetime)

  def testGetKeyVals(self):
    """GetKeyVals returns the contents of keyvalTable as a dict."""
    db = self._PrepareFreshDatabase(40)
    # In production we would never insert into this table from a bot, but for
    # testing purposes here this is convenient.
    db._Execute('INSERT INTO keyvalTable(k, v) VALUES '
                '("/foo/bar", "baz"), ("/qux/norf", NULL)')
    self.assertEqual(db.GetKeyVals(), {'/foo/bar': 'baz', '/qux/norf': None})
+
+
def GetTestDataSeries(test_data_path):
  """Get metadata from json files at |test_data_path|.

  Args:
    test_data_path: Path to a directory containing *.json metadata files.

  Returns:
    A list of CBuildbotMetadata objects, sorted by their start time.
  """
  filenames = glob.glob(os.path.join(test_data_path, '*.json'))
  metadatas = [
      metadata_lib.CBuildbotMetadata.FromJSONString(osutils.ReadFile(fname))
      for fname in filenames]

  # PEP8 (E731): use a named helper rather than a lambda assigned to a name.
  def _StartTimestamp(metadata):
    # Start time values are stored in RFC 2822 string format; convert them
    # to seconds since epoch so they sort chronologically.
    return cros_build_lib.ParseUserDateTimeFormat(
        metadata.GetDict()['time']['start'])

  metadatas.sort(key=_StartTimestamp)
  return metadatas
+
+
class DataSeries0Test(CIDBIntegrationTest):
  """Simulate a set of 630 master/slave CQ builds."""

  def testCQWithSchema39(self):
    """Run the CQ test with schema version 39."""
    self._PrepareFreshDatabase(39)
    self._runCQTest()

  def _runCQTest(self):
    """Simulate a set of 630 master/slave CQ builds.

    Note: This test takes about 2.5 minutes to populate its 630 builds
    and their corresponding cl actions into the test database.
    """
    metadatas = GetTestDataSeries(SERIES_0_TEST_DATA_PATH)
    self.assertEqual(len(metadatas), 630, 'Did not load expected amount of '
                                          'test data')

    bot_db = self.LocalCIDBConnection(self.CIDB_USER_BOT)

    # Simulate the test builds, using a database connection as the
    # bot user.
    self.simulate_builds(bot_db, metadatas)

    # Perform some sanity check queries against the database, connected
    # as the readonly user.
    readonly_db = self.LocalCIDBConnection(self.CIDB_USER_READONLY)

    self._start_and_finish_time_checks(readonly_db)

    # Every build in this data series is expected to be a paladin build.
    build_types = readonly_db._GetEngine().execute(
        'select build_type from buildTable').fetchall()
    self.assertTrue(all(x == ('paladin',) for x in build_types))

    self._cl_action_checks(readonly_db)

    build_config_count = readonly_db._GetEngine().execute(
        'select COUNT(distinct build_config) from buildTable').fetchall()[0][0]
    self.assertEqual(build_config_count, 30)

    # Test the _Select method, and verify that the first inserted
    # build is a master-paladin build.
    first_row = readonly_db._Select('buildTable', 1, ['id', 'build_config'])
    self.assertEqual(first_row['build_config'], 'master-paladin')

    # First master build has 29 slaves. Build with id 2 is a slave
    # build with no slaves of its own.
    self.assertEqual(len(readonly_db.GetSlaveStatuses(1)), 29)
    self.assertEqual(len(readonly_db.GetSlaveStatuses(2)), 0)

    # Make sure we can get build status by build id.
    self.assertEqual(readonly_db.GetBuildStatus(2).get('id'), 2)

    # Make sure we can get build statuses by build ids.
    build_dicts = readonly_db.GetBuildStatuses([1, 2])
    self.assertEqual([x.get('id') for x in build_dicts], [1, 2])

    self._start_and_finish_time_checks(readonly_db)
    self._cl_action_checks(readonly_db)
    self._last_updated_time_checks(readonly_db)

    #| Test get build_status from -- here's the relevant data from
    # master-paladin
    #|          id | status |
    #|         601 | pass   |
    #|         571 | pass   |
    #|         541 | fail   |
    #|         511 | pass   |
    #|         481 | pass   |
    # From 1929 because we always go back one build first.
    last_status = readonly_db.GetBuildHistory('master-paladin', 1)
    self.assertEqual(len(last_status), 1)
    last_status = readonly_db.GetBuildHistory('master-paladin', 5)
    self.assertEqual(len(last_status), 5)
    # Make sure keys are sorted correctly.
    build_ids = []
    for index, status in enumerate(last_status):
      # Add these to list to confirm they are sorted afterwards correctly.
      # Should be descending.
      build_ids.append(status['id'])
      # Per the table above, the third-newest master build (index 2) is the
      # one expected to have failed.
      if index == 2:
        self.assertEqual(status['status'], 'fail')
      else:
        self.assertEqual(status['status'], 'pass')

    # Check the sort order.
    self.assertEqual(sorted(build_ids, reverse=True), build_ids)

  def _last_updated_time_checks(self, db):
    """Sanity checks on the last_updated column."""
    # We should have a diversity of last_updated times. Since the timestamp
    # resolution is only 1 second, and we have lots of parallelism in the test,
    # we won't have a distinct last_updated time per row.
    # As the test is now local, almost everything happens together, so we check
    # for a tiny number of distinct timestamps.
    distinct_last_updated = db._GetEngine().execute(
        'select count(distinct last_updated) from buildTable').fetchall()[0][0]
    self.assertTrue(distinct_last_updated > 3)

    ids_by_last_updated = db._GetEngine().execute(
        'select id from buildTable order by last_updated').fetchall()

    # Flatten the 1-tuples returned by the query into plain ids.
    ids_by_last_updated = [id_tuple[0] for id_tuple in ids_by_last_updated]

    # Build #1 should have been last updated before build # 200.
    self.assertLess(ids_by_last_updated.index(1),
                    ids_by_last_updated.index(200))

    # However, build #1 (which was a master build) should have been last updated
    # AFTER build #2 which was its slave.
    self.assertGreater(ids_by_last_updated.index(1),
                       ids_by_last_updated.index(2))

  def _cl_action_checks(self, db):
    """Sanity checks that correct cl actions were recorded."""
    submitted_cl_count = db._GetEngine().execute(
        'select count(*) from clActionTable where action="submitted"'
        ).fetchall()[0][0]
    rejected_cl_count = db._GetEngine().execute(
        'select count(*) from clActionTable where action="kicked_out"'
        ).fetchall()[0][0]
    total_actions = db._GetEngine().execute(
        'select count(*) from clActionTable').fetchall()[0][0]
    self.assertEqual(submitted_cl_count, 56)
    self.assertEqual(rejected_cl_count, 8)
    self.assertEqual(total_actions, 1877)

    actions_for_change = db.GetActionsForChanges(
        [metadata_lib.GerritChangeTuple(205535, False)])

    self.assertEqual(len(actions_for_change), 60)
    # Compare the most recent action, ignoring its autogenerated id and
    # timestamp columns.
    last_action_dict = dict(actions_for_change[-1]._asdict())
    last_action_dict.pop('timestamp')
    last_action_dict.pop('id')
    self.assertEqual(last_action_dict, {'action': 'submitted',
                                        'build_config': 'master-paladin',
                                        'build_id': 511L,
                                        'change_number': 205535L,
                                        'change_source': 'external',
                                        'patch_number': 1L,
                                        'reason': ''})

  def _start_and_finish_time_checks(self, db):
    """Sanity checks that correct data was recorded, and can be retrieved."""
    max_start_time = db._GetEngine().execute(
        'select max(start_time) from buildTable').fetchall()[0][0]
    min_start_time = db._GetEngine().execute(
        'select min(start_time) from buildTable').fetchall()[0][0]
    max_fin_time = db._GetEngine().execute(
        'select max(finish_time) from buildTable').fetchall()[0][0]
    min_fin_time = db._GetEngine().execute(
        'select min(finish_time) from buildTable').fetchall()[0][0]
    self.assertGreater(max_start_time, min_start_time)
    self.assertGreater(max_fin_time, min_fin_time)

    # For all builds, finish_time should equal last_updated.
    mismatching_times = db._GetEngine().execute(
        'select count(*) from buildTable where finish_time != last_updated'
        ).fetchall()[0][0]
    self.assertEqual(mismatching_times, 0)

  def simulate_builds(self, db, metadatas):
    """Simulate a series of Commit Queue master and slave builds.

    This method use the metadata objects in |metadatas| to simulate those
    builds insertions and updates to the cidb. All metadatas encountered
    after a particular master build will be assumed to be slaves of that build,
    until a new master build is encountered. Slave builds for a particular
    master will be simulated in parallel.

    The first element in |metadatas| must be a CQ master build.

    Args:
      db: A CIDBConnection instance.
      metadatas: A list of CBuildbotMetadata instances, sorted by start time.
    """
    m_iter = iter(metadatas)

    def is_master(m):
      return m.GetDict()['bot-config'] == 'master-paladin'

    # NOTE: iterator .next() is Python 2 only syntax.
    next_master = m_iter.next()

    while next_master:
      master = next_master
      next_master = None
      assert is_master(master)
      master_build_id = _SimulateBuildStart(db, master)

      def simulate_slave(slave_metadata):
        # Start and immediately finish one slave build.
        build_id = _SimulateBuildStart(db, slave_metadata,
                                       master_build_id)
        _SimulateCQBuildFinish(db, slave_metadata, build_id)
        logging.debug('Simulated slave build %s on pid %s', build_id,
                      os.getpid())
        return build_id

      # Collect this master's slaves: every metadata up to the next master.
      slave_metadatas = []
      for slave in m_iter:
        if is_master(slave):
          next_master = slave
          break
        slave_metadatas.append(slave)

      with parallel.BackgroundTaskRunner(simulate_slave, processes=15) as queue:
        for slave in slave_metadatas:
          queue.put([slave])

      # Yes, this introduces delay in the test. But this lets us do some basic
      # sanity tests on the |last_update| column later.
      time.sleep(1)
      _SimulateCQBuildFinish(db, master, master_build_id)
      logging.debug('Simulated master build %s', master_build_id)
+
+
class BuildStagesAndFailureTest(CIDBIntegrationTest):
  """Test buildStageTable functionality."""

  def runTest(self):
    """Test basic buildStageTable and failureTable functionality."""
    self._PrepareDatabase()
    bot_db = self.LocalCIDBConnection(self.CIDB_USER_BOT)

    # One master build with a single slave build hanging off it.
    master_build_id = bot_db.InsertBuild('master build',
                                         constants.WATERFALL_INTERNAL,
                                         _random(),
                                         'master_config',
                                         'master.hostname')
    slave_build_id = bot_db.InsertBuild('builder name',
                                        constants.WATERFALL_INTERNAL,
                                        _random(),
                                        'build_config',
                                        'bot_hostname',
                                        master_build_id=master_build_id)

    stage_id = bot_db.InsertBuildStage(slave_build_id,
                                       'My Stage',
                                       board='bunny')

    # A freshly inserted stage has no start time yet.
    row = bot_db._Select('buildStageTable', stage_id, ['start_time'])
    self.assertEqual(None, row['start_time'])

    # Starting the stage stamps a start time and marks it inflight.
    bot_db.StartBuildStage(stage_id)
    row = bot_db._Select('buildStageTable', stage_id,
                         ['start_time', 'status'])
    self.assertNotEqual(None, row['start_time'])
    self.assertEqual(constants.BUILDER_STATUS_INFLIGHT, row['status'])

    # Finishing the stage stamps a finish time and finalizes its status.
    bot_db.FinishBuildStage(stage_id, constants.BUILDER_STATUS_PASSED)
    row = bot_db._Select('buildStageTable', stage_id,
                         ['finish_time', 'status', 'final'])
    self.assertNotEqual(None, row['finish_time'])
    self.assertEqual(True, row['final'])
    self.assertEqual(constants.BUILDER_STATUS_PASSED, row['status'])

    # A failure of any category marks the stage as failed.
    self.assertFalse(bot_db.HasBuildStageFailed(stage_id))
    for category in constants.EXCEPTION_CATEGORY_ALL_CATEGORIES:
      error = ValueError('The value was erroneous.')
      bot_db.InsertFailure(stage_id, type(error).__name__, str(error),
                           category)
      self.assertTrue(bot_db.HasBuildStageFailed(stage_id))

    # The master should see exactly the one slave stage inserted above.
    slave_stages = bot_db.GetSlaveStages(master_build_id)
    self.assertEqual(len(slave_stages), 1)
    self.assertEqual(slave_stages[0]['status'], 'pass')
    self.assertEqual(slave_stages[0]['build_config'], 'build_config')
    self.assertEqual(slave_stages[0]['name'], 'My Stage')
+
class BuildTableTest(CIDBIntegrationTest):
  """Test buildTable functionality not tested by the DataSeries tests."""

  def testInsertWithDeadline(self):
    """Test deadline setting/querying API."""
    self._PrepareDatabase()
    bot_db = self.LocalCIDBConnection(self.CIDB_USER_BOT)

    # A build with a 30-minute timeout should report plenty of remaining time.
    build_id = bot_db.InsertBuild('build_name',
                                  constants.WATERFALL_INTERNAL,
                                  _random(),
                                  'build_config',
                                  'bot_hostname',
                                  timeout_seconds=30 * 60)
    # This will flake if the few cidb calls above take hours. Unlikely.
    self.assertLess(10, bot_db.GetTimeToDeadline(build_id))

    build_id = bot_db.InsertBuild('build_name',
                                  constants.WATERFALL_INTERNAL,
                                  _random(),
                                  'build_config',
                                  'bot_hostname',
                                  timeout_seconds=1)
    # Sleep till the deadline expires.
    time.sleep(3)
    # An expired deadline is reported as 0 remaining time.
    self.assertEqual(0, bot_db.GetTimeToDeadline(build_id))

    # A build inserted with no timeout has no deadline at all (None).
    build_id = bot_db.InsertBuild('build_name',
                                  constants.WATERFALL_INTERNAL,
                                  _random(),
                                  'build_config',
                                  'bot_hostname')
    self.assertEqual(None, bot_db.GetTimeToDeadline(build_id))

    # NOTE(review): duplicate of the assertion directly above -- presumably
    # verifying the query has no side effects; confirm or drop the duplicate.
    self.assertEqual(None, bot_db.GetTimeToDeadline(build_id))

  def testExtendDeadline(self):
    """Test that a deadline in the future can be extended."""

    self._PrepareDatabase()
    bot_db = self.LocalCIDBConnection(self.CIDB_USER_BOT)

    build_id = bot_db.InsertBuild('build_name',
                                  constants.WATERFALL_INTERNAL,
                                  _random(),
                                  'build_config',
                                  'bot_hostname')
    self.assertEqual(None, bot_db.GetTimeToDeadline(build_id))

    # Setting a deadline on a build without one succeeds (returns 1).
    self.assertEqual(1, bot_db.ExtendDeadline(build_id, 1))
    time.sleep(2)
    self.assertEqual(0, bot_db.GetTimeToDeadline(build_id))
    # An already-expired deadline cannot be extended (returns 0).
    self.assertEqual(0, bot_db.ExtendDeadline(build_id, 10 * 60))
    self.assertEqual(0, bot_db.GetTimeToDeadline(build_id))

    build_id = bot_db.InsertBuild('build_name',
                                  constants.WATERFALL_INTERNAL,
                                  _random(),
                                  'build_config',
                                  'bot_hostname',
                                  timeout_seconds=30 * 60)
    self.assertLess(10, bot_db.GetTimeToDeadline(build_id))

    # "Extending" to an earlier deadline is a no-op (returns 0).
    self.assertEqual(0, bot_db.ExtendDeadline(build_id, 10 * 60))
    self.assertLess(20 * 60, bot_db.GetTimeToDeadline(build_id))

    # Extending to a later deadline succeeds (returns 1).
    self.assertEqual(1, bot_db.ExtendDeadline(build_id, 60 * 60))
    self.assertLess(40 * 60, bot_db.GetTimeToDeadline(build_id))
+
+
class DataSeries1Test(CIDBIntegrationTest):
  """Simulate a single set of canary builds."""

  def runTest(self):
    """Simulate a single set of canary builds with database schema v39."""
    metadatas = GetTestDataSeries(SERIES_1_TEST_DATA_PATH)
    self.assertEqual(len(metadatas), 18, 'Did not load expected amount of '
                                         'test data')

    # Migrate db to specified version. As new schema versions are added,
    # migrations to later version can be applied after the test builds are
    # simulated, to test that db contents are correctly migrated.
    self._PrepareFreshDatabase(39)

    bot_db = self.LocalCIDBConnection(self.CIDB_USER_BOT)

    def is_master(m):
      # Canary master builds are identified by their bot-config name.
      return m.GetValue('bot-config') == 'master-release'

    # Pull the master build out of the list; everything else is a slave.
    master_index = metadatas.index(next(m for m in metadatas if is_master(m)))
    master_metadata = metadatas.pop(master_index)
    self.assertEqual(master_metadata.GetValue('bot-config'), 'master-release')

    master_id = self._simulate_canary(bot_db, master_metadata)

    for m in metadatas:
      self._simulate_canary(bot_db, m, master_id)

    # Verify that expected data was inserted
    num_boards = bot_db._GetEngine().execute(
        'select count(*) from boardPerBuildTable'
        ).fetchall()[0][0]
    self.assertEqual(num_boards, 40)

    main_firmware_versions = bot_db._GetEngine().execute(
        'select count(distinct main_firmware_version) from boardPerBuildTable'
        ).fetchall()[0][0]
    self.assertEqual(main_firmware_versions, 29)

    # For all builds, finish_time should equal last_updated.
    mismatching_times = bot_db._GetEngine().execute(
        'select count(*) from buildTable where finish_time != last_updated'
        ).fetchall()[0][0]
    self.assertEqual(mismatching_times, 0)

  def _simulate_canary(self, db, metadata, master_build_id=None):
    """Helper method to simulate an individual canary build.

    Args:
      db: cidb instance to use for simulation
      metadata: CBuildbotMetadata instance of build to simulate.
      master_build_id: Optional id of master build.

    Returns:
      build_id of build that was simulated.
    """
    build_id = _SimulateBuildStart(db, metadata, master_build_id)
    metadata_dict = metadata.GetDict()

    # Insert child configs and boards
    for child_config_dict in metadata_dict['child-configs']:
      db.InsertChildConfigPerBuild(build_id, child_config_dict['name'])

    for board in metadata_dict['board-metadata'].keys():
      db.InsertBoardPerBuild(build_id, board)

    for board, bm in metadata_dict['board-metadata'].items():
      db.UpdateBoardPerBuildMetadata(build_id, board, bm)

    db.UpdateMetadata(build_id, metadata)

    status = metadata_dict['status']['status']
    status = _TranslateStatus(status)

    for child_config_dict in metadata_dict['child-configs']:
      # Note, we are not using test data here, because the test data
      # we have predates the existence of child-config status being
      # stored in metadata.json. Instead, we just pretend all child
      # configs had the same status as the main config.
      db.FinishChildConfig(build_id, child_config_dict['name'],
                           status)

    db.FinishBuild(build_id, status)

    return build_id
+
+
+def _TranslateStatus(status):
+  # TODO(akeshet): The status strings used in BuildStatus are not the same as
+  # those recorded in CBuildbotMetadata. Use a general purpose adapter.
+  if status == 'passed':
+    return 'pass'
+
+  if status == 'failed':
+    return 'fail'
+
+  return status
+
+
def _SimulateBuildStart(db, metadata, master_build_id=None):
  """Returns build_id for the inserted buildTable entry."""
  d = metadata.GetDict()
  # TODO(akeshet): We are pretending that all these builds were on the internal
  # waterfall at the moment, for testing purposes. This is because we don't
  # actually save in the metadata.json any way to know which waterfall the
  # build was on.
  return db.InsertBuild(d['builder-name'],
                        'chromeos',
                        d['build-number'],
                        d['bot-config'],
                        d['bot-hostname'],
                        master_build_id)
+
+
def _SimulateCQBuildFinish(db, metadata, build_id):
  """Simulate the completion of a CQ build, recording its CL actions."""
  metadata_dict = metadata.GetDict()

  # Record every CL action this build reported in its metadata.
  db.InsertCLActions(
      build_id,
      [clactions.CLAction.FromMetadataEntry(entry)
       for entry in metadata_dict['cl_actions']])

  db.UpdateMetadata(build_id, metadata)

  # The build summary reported by a real CQ run is more complicated -- it is
  # computed from slave summaries by a master. For sanity checking, we just
  # insert the current builder's summary.
  summary = metadata_dict['status'].get('reason', None)

  db.FinishBuild(build_id,
                 _TranslateStatus(metadata_dict['status']['status']),
                 summary)
+
+
def main(_argv):
  """Entry point: run all tests defined in this module."""
  # TODO(akeshet): Allow command line args to specify alternate CIDB instance
  # for testing.
  cros_test_lib.main(module=__name__)
diff --git a/lib/cidb_setup_unittest b/lib/cidb_setup_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/cidb_setup_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/cidb_setup_unittest.py b/lib/cidb_setup_unittest.py
new file mode 100644
index 0000000..680dfbd
--- /dev/null
+++ b/lib/cidb_setup_unittest.py
@@ -0,0 +1,120 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for cidb.py Setup methods."""
+
+from __future__ import print_function
+
+from chromite.cbuildbot import constants
+from chromite.lib import cidb
+from chromite.lib import cros_test_lib
+from chromite.lib import factory
+
+
class CIDBConnectionFactoryTest(cros_test_lib.MockTestCase):
  """Test that CIDBConnectionFactory behaves as expected."""

  def setUp(self):
    # Ensure that we do not create any live connections in this unit test.
    self.connection_mock = self.PatchObject(cidb, 'CIDBConnection')
    # pylint: disable=protected-access
    cidb.CIDBConnectionFactory._ClearCIDBSetup()

  def tearDown(self):
    # Reset factory state so tests cannot leak setup into one another.
    # pylint: disable=protected-access
    cidb.CIDBConnectionFactory._ClearCIDBSetup()

  def testGetConnectionBeforeSetup(self):
    """Calling GetConnection before Setup should raise exception."""
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder)

  def testSetupProd(self):
    """Test that SetupProd behaves as expected."""
    cidb.CIDBConnectionFactory.SetupProdCidb()
    cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()

    # Expected constructor call
    self.connection_mock.assert_called_once_with(constants.CIDB_PROD_BOT_CREDS)
    self.assertTrue(cidb.CIDBConnectionFactory.IsCIDBSetup())
    # Once set up for prod, every other Setup flavor must be rejected.
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.SetupProdCidb)
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.SetupDebugCidb)
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.SetupMockCidb)
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.SetupNoCidb)

  def testSetupDebug(self):
    """Test that SetupDebug behaves as expected."""
    cidb.CIDBConnectionFactory.SetupDebugCidb()
    cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()

    # Expected constructor call
    self.connection_mock.assert_called_once_with(constants.CIDB_DEBUG_BOT_CREDS)
    self.assertTrue(cidb.CIDBConnectionFactory.IsCIDBSetup())
    # Once set up for debug, every other Setup flavor must be rejected.
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.SetupProdCidb)
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.SetupDebugCidb)
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.SetupMockCidb)
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.SetupNoCidb)

  def testInvalidateSetup(self):
    """Test that cidb connection can be invalidated."""
    cidb.CIDBConnectionFactory.SetupProdCidb()
    cidb.CIDBConnectionFactory.InvalidateCIDBSetup()
    self.assertRaises(AssertionError,
                      cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder)

  def testSetupMock(self):
    """Test that SetupMock behaves as expected."""
    # Set the CIDB to mock mode, but without supplying a mock
    cidb.CIDBConnectionFactory.SetupMockCidb()

    # Calls to non-mock Setup methods should fail.
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.SetupProdCidb)
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.SetupDebugCidb)

    # Now supply a mock.
    a = object()
    cidb.CIDBConnectionFactory.SetupMockCidb(a)
    self.assertTrue(cidb.CIDBConnectionFactory.IsCIDBSetup())
    self.assertEqual(cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder(),
                     a)

    # Mock object can be changed by future SetupMockCidb call.
    b = object()
    cidb.CIDBConnectionFactory.SetupMockCidb(b)
    self.assertEqual(cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder(),
                     b)

    # Mock object can be cleared by future ClearMock call.
    cidb.CIDBConnectionFactory.ClearMock()

    # Calls to non-mock Setup methods should still fail.
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.SetupProdCidb)
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.SetupDebugCidb)

  def testSetupNo(self):
    """Test that SetupNoCidb behaves as expected."""
    cidb.CIDBConnectionFactory.SetupMockCidb()
    # SetupNoCidb may be called repeatedly without raising.
    cidb.CIDBConnectionFactory.SetupNoCidb()
    cidb.CIDBConnectionFactory.SetupNoCidb()
    self.assertTrue(cidb.CIDBConnectionFactory.IsCIDBSetup())
    self.assertEqual(cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder(),
                     None)
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.SetupProdCidb)
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.SetupDebugCidb)
    self.assertRaises(factory.ObjectFactoryIllegalOperation,
                      cidb.CIDBConnectionFactory.SetupMockCidb)
diff --git a/lib/cidb_unittest b/lib/cidb_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/cidb_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/cidb_unittest.py b/lib/cidb_unittest.py
new file mode 100644
index 0000000..9ff8056
--- /dev/null
+++ b/lib/cidb_unittest.py
@@ -0,0 +1,52 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for cidb."""
+
+from __future__ import print_function
+
+import exceptions
+import sqlalchemy
+
+from chromite.lib import cidb
+from chromite.lib import cros_test_lib
+
+
class RetryableOperationalError(exceptions.EnvironmentError):
  """An operational error with retryable error code."""

  def __init__(self):
    # Error code 1053 is one that cidb._IsRetryableException classifies as
    # retryable (asserted by HelperFunctionsTest in this module).
    super(RetryableOperationalError, self).__init__(1053, 'retryable')
+
+
class FatalOperationalError(exceptions.EnvironmentError):
  """An operational error with fatal error code."""

  def __init__(self):
    # 9999 is not in the set of retryable error codes, so this error is
    # treated as fatal (asserted by HelperFunctionsTest in this module).
    super(FatalOperationalError, self).__init__(9999, 'fatal')
+
+
class UnknownError(Exception):
  """An exception type that is not an OperationalError of any kind."""
+
+
class HelperFunctionsTest(cros_test_lib.TestCase):
  """Test (private) helper functions in the module."""

  def _WrapError(self, error):
    """Wrap |error| the way sqlalchemy wraps driver-level exceptions."""
    return sqlalchemy.exc.OperationalError(
        statement=None, params=None, orig=error)

  # pylint: disable=protected-access
  def testIsRetryableExceptionMatch(self):
    """Only errors carrying a retryable code count, wrapped or bare."""
    retryable = RetryableOperationalError()
    fatal = FatalOperationalError()
    unknown = UnknownError()

    # Bare driver-level errors.
    self.assertTrue(cidb._IsRetryableException(retryable))
    self.assertFalse(cidb._IsRetryableException(fatal))
    self.assertFalse(cidb._IsRetryableException(unknown))

    # The same errors wrapped in a sqlalchemy OperationalError.
    self.assertTrue(
        cidb._IsRetryableException(self._WrapError(retryable)))
    self.assertFalse(
        cidb._IsRetryableException(self._WrapError(fatal)))
    self.assertFalse(
        cidb._IsRetryableException(self._WrapError(unknown)))
diff --git a/lib/clactions.py b/lib/clactions.py
new file mode 100644
index 0000000..47f0dcc
--- /dev/null
+++ b/lib/clactions.py
@@ -0,0 +1,965 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions for interacting with a CL's action history."""
+
+from __future__ import print_function
+
+import collections
+import datetime
+import itertools
+import operator
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+
+
# Site configuration, loaded once at import time; supplies the CL-name
# prefixes used by GerritChangeTuple/GerritPatchTuple.__str__.
site_config = config_lib.GetConfig()


# Bidirectional mapping between pre-cq status strings and CL action strings.
_PRECQ_STATUS_TO_ACTION = {
    constants.CL_STATUS_INFLIGHT: constants.CL_ACTION_PRE_CQ_INFLIGHT,
    constants.CL_STATUS_FULLY_VERIFIED:
        constants.CL_ACTION_PRE_CQ_FULLY_VERIFIED,
    constants.CL_STATUS_PASSED: constants.CL_ACTION_PRE_CQ_PASSED,
    constants.CL_STATUS_FAILED: constants.CL_ACTION_PRE_CQ_FAILED,
    constants.CL_STATUS_LAUNCHING: constants.CL_ACTION_PRE_CQ_LAUNCHING,
    constants.CL_STATUS_WAITING: constants.CL_ACTION_PRE_CQ_WAITING,
    constants.CL_STATUS_READY_TO_SUBMIT:
        constants.CL_ACTION_PRE_CQ_READY_TO_SUBMIT
}

# Inverse of _PRECQ_STATUS_TO_ACTION; only meaningful because the mapping is
# one-to-one, which the assert below enforces.
_PRECQ_ACTION_TO_STATUS = dict(
    (v, k) for k, v in _PRECQ_STATUS_TO_ACTION.items())

# All pre-cq statuses that have a corresponding recordable CL action.
PRE_CQ_CL_STATUSES = set(_PRECQ_STATUS_TO_ACTION.keys())

assert len(_PRECQ_STATUS_TO_ACTION) == len(_PRECQ_ACTION_TO_STATUS), \
    '_PRECQ_STATUS_TO_ACTION values are not unique.'

# Field names of a CL action row; also the fields of CLAction below.
# NOTE(review): order presumably mirrors the cidb clActionTable schema —
# confirm before reordering.
CL_ACTION_COLUMNS = ['id', 'build_id', 'action', 'reason',
                     'build_config', 'change_number', 'patch_number',
                     'change_source', 'timestamp']

# Base tuple type for CLAction, one field per CL_ACTION_COLUMNS entry.
_CLActionTuple = collections.namedtuple('_CLActionTuple', CL_ACTION_COLUMNS)

# Base tuple type for GerritChangeTuple: a gerrit change number plus whether
# it lives on the internal or the external gerrit host.
_GerritChangeTuple = collections.namedtuple('_GerritChangeTuple',
                                            ['gerrit_number', 'internal'])
+
+
class GerritChangeTuple(_GerritChangeTuple):
  """A tuple identifying a Gerrit change (all patches of it)."""

  def __str__(self):
    # Internal and external changes print with different site prefixes.
    if self.internal:
      prefix = site_config.params.INTERNAL_CHANGE_PREFIX
    else:
      prefix = site_config.params.EXTERNAL_CHANGE_PREFIX
    return 'CL:%s%s' % (prefix, self.gerrit_number)
+
+
# Base tuple type for GerritPatchTuple: a change number, a patch number and
# the internal/external flag.
_GerritPatchTuple = collections.namedtuple('_GerritPatchTuple',
                                           ['gerrit_number', 'patch_number',
                                            'internal'])

class GerritPatchTuple(_GerritPatchTuple):
  """A tuple identifying a single patchset of a Gerrit change."""

  def __str__(self):
    # Internal and external changes print with different site prefixes.
    if self.internal:
      prefix = site_config.params.INTERNAL_CHANGE_PREFIX
    else:
      prefix = site_config.params.EXTERNAL_CHANGE_PREFIX
    return 'CL:%s%s#%s' % (prefix, self.gerrit_number, self.patch_number)

  def GetChangeTuple(self):
    """Return the GerritChangeTuple for this patch (drops the patch number)."""
    return GerritChangeTuple(self.gerrit_number, self.internal)
+
+
class CLAction(_CLActionTuple):
  """An action or history log entry for a particular CL."""

  @classmethod
  def FromGerritPatchAndAction(cls, change, action, reason=None,
                               timestamp=None):
    """Creates a CLAction instance from a change and action.

    The id, build_id and build_config fields are left as None, since they
    are only known once the action has been recorded by a build.

    Args:
      change: A GerritPatch instance.
      action: An action string.
      reason: Optional reason string.
      timestamp: Optional datetime.datetime timestamp.

    Returns:
      A new CLAction instance.
    """
    return CLAction(None, None, action, reason, None,
                    int(change.gerrit_number), int(change.patch_number),
                    BoolToChangeSource(change.internal), timestamp)

  @classmethod
  def FromMetadataEntry(cls, entry):
    """Creates a CLAction instance from a metadata.json-style action tuple.

    Args:
      entry: An action tuple as retrieved from metadata.json (previously known
             as a CLActionTuple), of the form
             (change_dict, action, timestamp, reason).

    Returns:
      A new CLAction instance.
    """
    change_dict = entry[0]
    return CLAction(None, None, entry[1], entry[3], None,
                    int(change_dict['gerrit_number']),
                    int(change_dict['patch_number']),
                    BoolToChangeSource(change_dict['internal']),
                    entry[2])

  def AsMetadataEntry(self):
    """Get a tuple representation, suitable for metadata.json."""
    # Inverse of FromMetadataEntry: (change_dict, action, timestamp, reason).
    return (self.patch._asdict(), self.action, self.timestamp, self.reason)

  @property
  def patch(self):
    """The GerritPatchTuple this action affects."""
    return GerritPatchTuple(
        gerrit_number=self.change_number,
        patch_number=self.patch_number,
        internal=self.change_source == constants.CHANGE_SOURCE_INTERNAL
    )

  @property
  def bot_type(self):
    """The type of bot that took this action.

    Returns:
        constants.CQ or constants.PRE_CQ depending on who took the action.
    """
    # Paladin (CQ) build configs end with the paladin suffix; any other
    # config that records CL actions is treated as a pre-cq bot.
    build_config = self.build_config
    if build_config.endswith('-%s' % config_lib.CONFIG_TYPE_PALADIN):
      return constants.CQ
    else:
      return constants.PRE_CQ
+
+
def TranslatePreCQStatusToAction(status):
  """Translate a pre-cq |status| into a cl action.

  Args:
    status: A pre-cq status string (one of PRE_CQ_CL_STATUSES).

  Returns:
    An action string suitable for use in cidb, for the given pre-cq status.

  Raises:
    KeyError if |status| is not a known pre-cq status.
  """
  return _PRECQ_STATUS_TO_ACTION[status]
+
+
def TranslatePreCQActionToStatus(action):
  """Translate a cl |action| into a pre-cq status.

  Inverse of TranslatePreCQStatusToAction.

  Args:
    action: A cl action string.

  Returns:
    A pre-cq status string corresponding to the given |action|.

  Raises:
    KeyError if |action| is not a known pre-cq status-transition-action.
  """
  return _PRECQ_ACTION_TO_STATUS[action]
+
+
def BoolToChangeSource(internal):
  """Translate a change.internal bool into a change_source string.

  Args:
    internal: Boolean, whether the change is on the internal gerrit host.

  Returns:
    'internal' if internal, else 'external'.
  """
  if internal:
    return constants.CHANGE_SOURCE_INTERNAL
  return constants.CHANGE_SOURCE_EXTERNAL
+
+
def GetCLPreCQStatusAndTime(change, action_history):
  """Get the pre-cq status and timestamp for |change| from |action_history|.

  Args:
    change: GerritPatch instance to get the pre-CQ status for.
    action_history: A list of CLAction instances, which may include actions
                    for other changes.

  Returns:
    A (status, timestamp) tuple where |status| is a valid pre-cq status
    string and |timestamp| is a datetime object for when the status was
    set. Or (None, None) if there is no pre-cq status.
  """
  # Keep only status-transition actions and reset actions for this patch.
  relevant = [a for a in ActionsForPatch(change, action_history)
              if (a.action in _PRECQ_ACTION_TO_STATUS or
                  a.action == constants.CL_ACTION_PRE_CQ_RESET)]

  if not relevant:
    return None, None

  # The newest relevant action wins; a reset wipes the status entirely.
  newest = relevant[-1]
  if newest.action == constants.CL_ACTION_PRE_CQ_RESET:
    return None, None

  return TranslatePreCQActionToStatus(newest.action), newest.timestamp
+
+
def GetCLPreCQStatus(change, action_history):
  """Get the pre-cq status for |change| based on |action_history|.

  Args:
    change: GerritPatch instance to get the pre-CQ status for.
    action_history: A list of CLAction instances. This may include
                    actions for changes other than |change|.

  Returns:
    The status, as a string, or None if there is no recorded pre-cq status.
  """
  status, _ = GetCLPreCQStatusAndTime(change, action_history)
  return status
+
+
def IsChangeScreened(change, action_history):
  """Returns whether |change| has been pre-cq screened.

  Only actions after the most recent pre-cq reset (if any) are considered.

  Args:
    change: GerritPatch instance to get the pre-CQ status for.
    action_history: A list of CLAction instances.

  Returns:
    True if the change has been pre-cq screened, false otherwise.
  """
  recent_actions = FilterPreResetActions(
      ActionsForPatch(change, action_history))
  return any(a.action == constants.CL_ACTION_SCREENED_FOR_PRE_CQ
             for a in recent_actions)
+
+
def ActionsForPatch(change, action_history):
  """Filters a CL action list to only those for a given patch.

  Args:
    change: GerritPatch instance to filter for.
    action_history: List of CLAction objects.

  Returns:
    The subset of |action_history| that acts on exactly this patchset of
    this change, preserving order.
  """
  # A patch is identified by (source, change number, patch number).
  wanted_key = (BoolToChangeSource(change.internal),
                int(change.gerrit_number),
                int(change.patch_number))

  return [a for a in action_history
          if (a.change_source, a.change_number, a.patch_number) == wanted_key]
+
+
def GetRequeuedOrSpeculative(change, action_history, is_speculative):
  """For a |change| get either a requeued or speculative action if necessary.

  This method returns an action string for an action that should be recorded
  on |change|, or None if no action needs to be recorded.

  Args:
    change: GerritPatch instance to operate upon.
    action_history: List of CL actions (may include actions on changes other
                    than |change|).
    is_speculative: Boolean indicating if |change| is speculative, i.e. it does
                    not have CQ approval.

  Returns:
    CL_ACTION_REQUEUED, CL_ACTION_SPECULATIVE, or None.
  """
  actions_for_patch = ActionsForPatch(change, action_history)

  if is_speculative:
    # Speculative changes should have 1 CL_ACTION_SPECULATIVE action that is
    # newer than the newest REQUEUED or KICKED_OUT action, and at least 1
    # action if there is no REQUEUED or KICKED_OUT action.
    # Walk backwards so the newest relevant action decides.
    for a in reversed(actions_for_patch):
      if a.action == constants.CL_ACTION_SPECULATIVE:
        return None
      elif (a.action == constants.CL_ACTION_REQUEUED or
            a.action == constants.CL_ACTION_KICKED_OUT):
        return constants.CL_ACTION_SPECULATIVE
    # No SPECULATIVE/REQUEUED/KICKED_OUT action on record yet: record one.
    return constants.CL_ACTION_SPECULATIVE
  else:
    # Non speculative changes should have 1 CL_ACTION_REQUEUED action that is
    # newer than the newest SPECULATIVE or KICKED_OUT action, but no action if
    # there are no SPECULATIVE or REQUEUED actions.
    for a in reversed(actions_for_patch):
      if (a.action == constants.CL_ACTION_KICKED_OUT or
          a.action == constants.CL_ACTION_SPECULATIVE):
        return constants.CL_ACTION_REQUEUED
      if a.action == constants.CL_ACTION_REQUEUED:
        return None

  return None
+
+
def GetCLActionCount(change, configs, action, action_history,
                     latest_patchset_only=True):
  """Return how many times |action| has occurred on |change|.

  Args:
    change: GerritPatch instance to operate upon.
    configs: List or set of config names to consider.
    action: The action string to look for.
    action_history: List of CLAction instances to count through.
    latest_patchset_only: If True, only count actions that occurred to the
      latest patch number. Note, this may be different than the patch
      number specified in |change|. Default: True.

  Returns:
    The count of how many times |action| occurred on |change| by the given
    |config|.
  """
  # Match on change identity only; patch number is handled separately below.
  change_number = int(change.gerrit_number)
  change_source = BoolToChangeSource(change.internal)
  candidates = [a for a in action_history
                if (a.change_source == change_source and
                    a.change_number == change_number)]

  if candidates and latest_patchset_only:
    newest_patch = max(a.patch_number for a in candidates)
    candidates = [a for a in candidates
                  if a.patch_number == newest_patch]

  return sum(1 for a in candidates
             if a.build_config in configs and a.action == action)
+
+
def FilterPreResetActions(action_history):
  """Filters out actions prior to the most recent pre-cq reset action.

  Args:
    action_history: List of CLAction instances.

  Returns:
    List of CLAction instances that occur after the last pre-cq-reset action,
    or the input unchanged if there is no reset action.
  """
  last_reset = None
  for index, action in enumerate(action_history):
    if action.action == constants.CL_ACTION_PRE_CQ_RESET:
      last_reset = index

  if last_reset is None:
    return action_history
  return action_history[last_reset + 1:]
+
+
def GetCLPreCQProgress(change, action_history):
  """Gets a CL's per-config PreCQ statuses.

  Args:
    change: GerritPatch instance to get statuses for.
    action_history: List of CLAction instances.

  Returns:
    A dict of the form {config_name: (status, timestamp, build_id)} specifying
    all the per-config pre-cq statuses, where status is one of
    constants.CL_PRECQ_CONFIG_STATUSES, timestamp is a datetime.datetime of
    when this status was most recently achieved, and build_id is the id of the
    build which most recently updated this per-config status.
  """
  actions_for_patch = ActionsForPatch(change, action_history)
  config_status_dict = {}

  # If there is a reset action recorded, filter out all actions prior to it.
  actions_for_patch = FilterPreResetActions(actions_for_patch)

  # Only configs for which the pre-cq-launcher has requested verification
  # should be included in the per-config status.
  for a in actions_for_patch:
    if a.action == constants.CL_ACTION_VALIDATION_PENDING_PRE_CQ:
      assert a.reason, 'Validation was requested without a specified config.'
      config_status_dict[a.reason] = (constants.CL_PRECQ_CONFIG_STATUS_PENDING,
                                      a.timestamp, a.build_id)

  # Loop through actions_for_patch several times, in order of status priority.
  # Each action maps to a status:
  #   CL_ACTION_TRYBOT_LAUNCHING -> CL_PRECQ_CONFIG_STATUS_LAUNCHED
  #   CL_ACTION_PICKED_UP -> CL_PRECQ_CONFIG_STATUS_INFLIGHT
  #   CL_ACTION_KICKED_OUT -> CL_PRECQ_CONFIG_STATUS_FAILED
  #   CL_ACTION_FORGIVEN -> CL_PRECQ_CONFIG_STATUS_PENDING
  # All have the same priority.
  # Note: launcher actions record the target config in |reason|, whereas
  # builder actions record it in |build_config| — hence the dual lookups.
  for a in actions_for_patch:
    if (a.action == constants.CL_ACTION_TRYBOT_LAUNCHING and
        a.reason in config_status_dict):
      config_status_dict[a.reason] = (constants.CL_PRECQ_CONFIG_STATUS_LAUNCHED,
                                      a.timestamp, a.build_id)
    elif (a.action == constants.CL_ACTION_PICKED_UP and
          a.build_config in config_status_dict):
      config_status_dict[a.build_config] = (
          constants.CL_PRECQ_CONFIG_STATUS_INFLIGHT, a.timestamp, a.build_id)
    elif (a.action == constants.CL_ACTION_KICKED_OUT and
          (a.build_config in config_status_dict or
           a.reason in config_status_dict)):
      config = (a.build_config if a.build_config in config_status_dict else
                a.reason)
      config_status_dict[config] = (constants.CL_PRECQ_CONFIG_STATUS_FAILED,
                                    a.timestamp, a.build_id)
    elif (a.action == constants.CL_ACTION_FORGIVEN and
          (a.build_config in config_status_dict or
           a.reason in config_status_dict)):
      config = (a.build_config if a.build_config in config_status_dict else
                a.reason)
      config_status_dict[config] = (constants.CL_PRECQ_CONFIG_STATUS_PENDING,
                                    a.timestamp, a.build_id)

  # VERIFIED is applied in a second pass so it takes precedence over the
  # equal-priority statuses above.
  for a in actions_for_patch:
    if (a.action == constants.CL_ACTION_VERIFIED and
        a.build_config in config_status_dict):
      config_status_dict[a.build_config] = (
          constants.CL_PRECQ_CONFIG_STATUS_VERIFIED, a.timestamp, a.build_id)

  return config_status_dict
+
+
def GetPreCQProgressMap(changes, action_history):
  """Gets the per-config pre-cq status for all changes.

  Args:
    changes: Set of GerritPatch changes to consider.
    action_history: List of CLAction instances.

  Returns:
    A dict of the form {change: config_status_dict} where config_status_dict
    is as returned by GetCLPreCQProgress. Any change that has not yet been
    screened will be absent from the returned dict.
  """
  # An unscreened change yields an empty status dict, which is dropped here.
  return dict(
      (change, statuses) for change, statuses in
      ((c, GetCLPreCQProgress(c, action_history)) for c in changes)
      if statuses)
+
+
def GetPreCQCategories(progress_map):
  """Gets the set of busy and verified CLs in the pre-cq.

  Args:
    progress_map: See return type of GetPreCQProgressMap.

  Returns:
    A (busy, inflight, verified) tuple where each item is a set of changes.
    A change is verified if all its pending configs have verified it. A change
    is busy if it is not verified, but all pending configs are either launched
    or inflight or verified. A change is inflight if all configs are at least
    at or past the inflight state, and at least one config is still inflight.
  """
  busy_states = (constants.CL_PRECQ_CONFIG_STATUS_LAUNCHED,
                 constants.CL_PRECQ_CONFIG_STATUS_INFLIGHT,
                 constants.CL_PRECQ_CONFIG_STATUS_VERIFIED)
  beyond_inflight_states = (constants.CL_PRECQ_CONFIG_STATUS_INFLIGHT,
                            constants.CL_PRECQ_CONFIG_STATUS_VERIFIED,
                            constants.CL_PRECQ_CONFIG_STATUS_FAILED)
  busy, inflight, verified = set(), set(), set()

  for change, config_status_dict in progress_map.iteritems():
    statuses = [status for status, _, _ in config_status_dict.values()]

    if all(status == constants.CL_PRECQ_CONFIG_STATUS_VERIFIED
           for status in statuses):
      verified.add(change)
    elif all(status in busy_states for status in statuses):
      busy.add(change)

    # Inflight is tracked independently of busy/verified membership.
    if (all(status in beyond_inflight_states for status in statuses) and
        any(status == constants.CL_PRECQ_CONFIG_STATUS_INFLIGHT
            for status in statuses)):
      inflight.add(change)

  return busy, inflight, verified
+
+
def GetPreCQConfigsToTest(changes, progress_map):
  """Gets the set of configs to be tested for any change in |changes|.

  Note: All |changes| must already be screened, i.e. must appear in
  progress_map.

  Args:
    changes: A list or set of changes (GerritPatch).
    progress_map: See return type of GetPreCQProgressMap.

  Returns:
    A set of configs that must be launched in order to make each change in
    |changes| be considered 'busy' by the pre-cq.

  Raises:
    KeyError if any change in |changes| is not yet screened, and hence
    does not appear in progress_map.
  """
  # Failed is considered a to-test state so that if a CL fails a given config
  # and gets rejected, it will be re-tested by that config when it is re-queued.
  to_test_states = (constants.CL_PRECQ_CONFIG_STATUS_PENDING,
                    constants.CL_PRECQ_CONFIG_STATUS_FAILED)
  return set(
      config
      for change in changes
      for config, (status, _, _) in progress_map[change].iteritems()
      if status in to_test_states)
+
+
def GetRelevantChangesForBuilds(changes, action_history, build_ids):
  """Get relevant changes for |build_ids| by examining CL actions.

  A change is relevant to a build if the build picked it up and did not later
  mark it irrelevant.

  Args:
    changes: A list of GerritPatch instances to examine.
    action_history: A list of CLAction instances.
    build_ids: A list of build id to examine.

  Returns:
    A dictionary mapping a build id to a set of changes.
  """
  changes_map = {}
  relevant_actions = [a for a in action_history if a.build_id in build_ids]
  for change in changes:
    actions = ActionsForPatch(change, relevant_actions)
    picked_up = set(a.build_id for a in actions
                    if a.action == constants.CL_ACTION_PICKED_UP)
    marked_irrelevant = set(a.build_id for a in actions
                            if a.action == constants.CL_ACTION_IRRELEVANT_TO_SLAVE)
    for build_id in picked_up - marked_irrelevant:
      changes_map.setdefault(build_id, set()).add(change)

  return changes_map
+
+
+# ##############################################################################
+# Aggregate history over a list of CLActions
+
+def _IntersectIntervals(intervals):
+  """Gets the intersection of a set of intervals.
+
+  Args:
+    intervals: A list of interval groups, where each interval group is itself
+               a list of (start, stop) tuples (ordered by start time and
+               non-overlapping).
+
+  Returns:
+    An interval group, as a list of (start, stop) tuples, corresponding to the
+    intersection (i.e. overlap) of the given |intervals|.
+  """
+  if not intervals:
+    return []
+
+  intersection = []
+  indices = [0] * len(intervals)
+  lengths = [len(i) for i in intervals]
+  while all(i < l for i, l in zip(indices, lengths)):
+    current_intervals = [intervals[i][j] for (i, j) in
+                         zip(itertools.count(), indices)]
+    start = max([s[0] for s in current_intervals])
+    end, end_index = min([(e[1], i) for e, i in
+                          zip(current_intervals, itertools.count())])
+    if start < end:
+      intersection.append((start, end))
+    indices[end_index] += 1
+
+  return intersection
+
+
+def _MeasureTimestampIntervals(intervals):
+  """Gets the length of a set of invervals.
+
+  Args:
+    intervals: A list of (start, stop) timestamp tuples.
+
+  Returns:
+    The total length of the given intervals, in seconds.
+  """
+  lengths = [e - s for s, e in intervals]
+  return sum(lengths, datetime.timedelta(0)).total_seconds()
+
+
def _GetIntervals(change, action_history, start_actions, stop_actions,
                  start_at_beginning=False):
  """Get intervals corresponding to given start and stop actions.

  Args:
    change: GerritPatch instance of a submitted change.
    action_history: list of CL actions.
    start_actions: list of action types to be considered as start actions for
                   intervals.
    stop_actions: list of action types to be considered as stop actions for
                  intervals.
    start_at_beginning: optional boolean, default False. If true, consider the
                        first action to be a start action.

  Returns:
    A list of (start, stop) timestamp tuples, in history order and
    non-overlapping. Empty if |change| has no actions in |action_history|.
  """
  actions_for_patch = ActionsForPatch(change, action_history)
  if not actions_for_patch:
    return []

  intervals = []
  in_interval = start_at_beginning
  if in_interval:
    start_time = actions_for_patch[0].timestamp
  for a in actions_for_patch:
    if in_interval and a.action in stop_actions:
      # Zero-length intervals are dropped.
      if start_time < a.timestamp:
        intervals.append((start_time, a.timestamp))
      in_interval = False
    elif not in_interval and a.action in start_actions:
      start_time = a.timestamp
      in_interval = True

  # Close out an interval still open at the end of the recorded history.
  if in_interval and start_time < actions_for_patch[-1].timestamp:
    intervals.append((start_time, actions_for_patch[-1].timestamp))

  return intervals
+
+
def _GetReadyIntervals(change, action_history):
  """Gets the time intervals in which |change| was fully ready.

  A change is treated as ready from its first recorded action (or a REQUEUED
  action) until it is marked SPECULATIVE or KICKED_OUT.

  Args:
    change: GerritPatch instance of a submitted change.
    action_history: list of CL actions.
  """
  return _GetIntervals(
      change, action_history,
      (constants.CL_ACTION_REQUEUED,),
      (constants.CL_ACTION_SPECULATIVE, constants.CL_ACTION_KICKED_OUT),
      True)
+
+
def GetCLHandlingTime(change, action_history):
  """Returns the handling time of |change|, in seconds.

  This method computes a CL's handling time, not including the time spent
  waiting for a developer to mark or re-mark their change as ready.

  Args:
    change: GerritPatch instance of a submitted change.
    action_history: List of CL actions.
  """
  return _MeasureTimestampIntervals(_GetReadyIntervals(change, action_history))
+
+
def GetPreCQTime(change, action_history):
  """Returns the time spent waiting for the pre-cq to finish."""
  # Time between being screened and being fully verified, counted only
  # while the change was also marked ready.
  screened_intervals = _GetIntervals(
      change, action_history,
      (constants.CL_ACTION_SCREENED_FOR_PRE_CQ,),
      (constants.CL_ACTION_PRE_CQ_FULLY_VERIFIED,))
  ready_intervals = _GetReadyIntervals(change, action_history)
  return _MeasureTimestampIntervals(
      _IntersectIntervals([ready_intervals, screened_intervals]))
+
+
def GetCQWaitTime(change, action_history):
  """Returns the time spent waiting for a CL to be picked up by the CQ."""
  # Only launcher/master actions define the waiting window: from being
  # requeued or forgiven until being picked up.
  relevant_configs = (constants.PRE_CQ_LAUNCHER_CONFIG, constants.CQ_MASTER)
  launcher_or_master_actions = [a for a in action_history
                                if a.build_config in relevant_configs]
  waiting_intervals = _GetIntervals(
      change, launcher_or_master_actions,
      (constants.CL_ACTION_REQUEUED, constants.CL_ACTION_FORGIVEN),
      (constants.CL_ACTION_PICKED_UP,),
      True)
  # Counted only while the change was ready and had already passed the pre-cq.
  ready_intervals = _GetReadyIntervals(change, action_history)
  precq_passed_interval = _GetIntervals(
      change, action_history, (constants.CL_ACTION_PRE_CQ_PASSED,), ())
  return _MeasureTimestampIntervals(
      _IntersectIntervals([ready_intervals, waiting_intervals,
                           precq_passed_interval]))
+
+
def GetCQRunTime(change, action_history):
  """Returns the time spent testing a CL in the CQ."""
  # Only CQ-master actions define the testing window: from being picked up
  # until being forgiven, kicked out, or submitted.
  cq_master_actions = [a for a in action_history
                       if a.build_config == constants.CQ_MASTER]
  testing_intervals = _GetIntervals(
      change, cq_master_actions,
      (constants.CL_ACTION_PICKED_UP,),
      (constants.CL_ACTION_FORGIVEN, constants.CL_ACTION_KICKED_OUT,
       constants.CL_ACTION_SUBMITTED))
  # Counted only while the change was also marked ready.
  ready_intervals = _GetReadyIntervals(change, action_history)
  return _MeasureTimestampIntervals(
      _IntersectIntervals([ready_intervals, testing_intervals]))
+
+
+def _CLsForPatches(patches):
+  """Get GerritChangeTuples corresponding to the give GerritPatchTuples."""
+  return set(p.GetChangeTuple() for p in patches)
+
+
def AffectedCLs(action_history):
  """Get the CLs affected by a set of actions.

  Args:
    action_history: An iterable of CLActions.

  Returns:
    A set of GerritChangeTuple objects for the affected CLs.
  """
  return _CLsForPatches(AffectedPatches(action_history))
+
+
def AffectedPatches(action_history):
  """Get the patches affected by a set of actions.

  Args:
    action_history: An iterable of CLActions.

  Returns:
    A set of GerritPatchTuple objects for the affected patches.
  """
  return {action.patch for action in action_history}
+
+
+class CLActionHistory(object):
+  """Class to derive aggregate information from CLAction histories."""
+
  def __init__(self, action_history):
    """Initialize the object.

    Args:
      action_history: An iterable of CLAction objects to aggregate information
          from.
    """
    # We preprocess this list to speed up various lookups. It shouldn't be
    # messed with in the lifetime of the object.
    self._action_history = tuple(sorted(action_history,
                                        key=operator.attrgetter('timestamp')))

    # Index the given action_history in various useful forms.
    # GerritPatchTuple -> [CLAction] and GerritChangeTuple -> [CLAction],
    # each list in ascending timestamp order (filled in below).
    self._per_patch_actions = {}
    self._per_cl_actions = {}
    # GerritPatchTuple -> [KICKED_OUT CLActions], ascending timestamp order.
    self._per_patch_reject_actions = {}

    # Precompute some oft-used attributes.
    self.submit_actions = [a for a in self._action_history
                           if a.action == constants.CL_ACTION_SUBMITTED]
    self.reject_actions = [a for a in self._action_history
                           if a.action == constants.CL_ACTION_KICKED_OUT]
    self.submit_fail_actions = [a for a in self._action_history if
                                a.action == constants.CL_ACTION_SUBMIT_FAILED]
    self.affected_patches = AffectedPatches(self._action_history)
    self.affected_cls = _CLsForPatches(self.affected_patches)

    for action in self._action_history:
      patch = action.patch
      self._per_patch_actions.setdefault(patch, []).append(action)
      self._per_cl_actions.setdefault(patch.GetChangeTuple(), []).append(action)
    for action in self.reject_actions:
      patch = action.patch
      self._per_patch_reject_actions.setdefault(patch, []).append(action)
+
+  def __iter__(self):
+    """Support iterating over the entire history."""
+    for a in self._action_history:
+      yield a
+
  def __len__(self):
    """Return the length of the entire history, i.e. the number of actions."""
    return len(self._action_history)
+
+  def GetSubmittedPatches(self, exclude_irrelevant_submissions=True):
+    """Get a list of submitted patches from the action history.
+
+    Args:
+      exclude_irrelevant_submissions: Some CLs are submitted independent of our
+          CQ infrastructure. When True, we exclude those CLs, as they shouldn't
+          affect our statistics.
+
+    Returns:
+      set of submitted GerritPatchTuple objects.
+    """
+    relevant_actions = self.submit_actions
+    if exclude_irrelevant_submissions:
+      relevant_actions = [a for a in relevant_actions
+                          if a.reason != constants.STRATEGY_NONMANIFEST]
+    return AffectedPatches(relevant_actions)
+
+  def GetSubmittedCLs(self, exclude_irrelevant_submissions=True):
+    """Get a list of submitted patches from the action history.
+
+    Args:
+      exclude_irrelevant_submissions: Some CLs are submitted independent of our
+          CQ infrastructure. When True, we exclude those CLs, as they shouldn't
+          affect our statistics.
+
+    Returns:
+      set of submitted GerritPatchTuple objects.
+    """
+    return _CLsForPatches(
+        self.GetSubmittedPatches(exclude_irrelevant_submissions))
+
  def SortBySubmitTimes(self, cls_or_patches):
    """Sort the given patches or cls in ascending order of submit time.

    Many functions in this class return sets of cls/patches. This is convenient
    to dedup objects returned from various sources. While presenting this
    information to the user, it is often better to present them in a natural
    'order'.

    Args:
      cls_or_patches: Iterable of GerritPatchTuples or GerritChangeTuple objects
          to sort.

    Returns:
      list sorted in ascending order of submit time. Any patches/cls that were
      not submitted are appended to the end in a deterministic order.
    """
    affected_cls_or_patches = self.affected_cls | self.affected_patches
    unknown_changes = set(cls_or_patches) - affected_cls_or_patches
    assert not unknown_changes, 'Unknown changes: %s' % str(unknown_changes)

    # Each change lands in exactly one of these maps: the last submit time if
    # it was submitted, else the time of its first recorded action.
    per_change_final_submit_time = {}
    per_change_first_action_time = {}
    for change in cls_or_patches:
      actions = self._GetCLOrPatchActions(change)
      submit_actions = [x for x in actions
                        if x.action == constants.CL_ACTION_SUBMITTED]
      first_action = actions[0]

      if submit_actions:
        per_change_final_submit_time[change] = submit_actions[-1].timestamp
      else:
        per_change_first_action_time[change] = first_action.timestamp

    sorted_changes = sorted(per_change_final_submit_time.keys(),
                            key=per_change_final_submit_time.get)
    # We want to sort the inflight changes in some stable order. Let's sort them
    # by order of 'first action ever taken'
    sorted_changes += sorted(per_change_first_action_time.keys(),
                             key=lambda x: per_change_first_action_time[x])
    return sorted_changes
+
+  # ############################################################################
+  # Summarize handling times in different stages based on the action history.
+  def GetPatchHandlingTimes(self):
+    """Get handling times of all submitted patches.
+
+    Returns:
+      {submitted_patch: handling_time} where submitted_patch is a
+      GerritPatchTuple for a submitted patch, and handling_time is the total
+      handling time for that patch.
+    """
+    return {k: GetCLHandlingTime(k, self._per_patch_actions[k])
+            for k in self.GetSubmittedPatches()}
+
+  def GetPreCQHandlingTimes(self):
+    """Get the time spent by all submitted patches in the pre-cq.
+
+    Returns:
+      {submitted_patch: precq_handling_time} where submitted_patch is a
+      GerritPatchTuple for a submitted patch, and precq_handling_time is the
+      handling time for that patch in the pre-cq.
+    """
+    return {k: GetPreCQTime(k, self._per_patch_actions[k])
+            for k in self.GetSubmittedPatches()}
+
+  def GetCQHandlingTimes(self):
+    """Get the time spent by all submitted patches in the cq.
+
+    Returns:
+      {submitted_patch: cq_handling_time} where submitted_patch is a
+      GerritPatchTuple for a submitted patch, and cq_handling_time is the
+      handling time for that patch in the cq.
+    """
+    return {k: GetCQRunTime(k, self._per_patch_actions[k])
+            for k in self.GetSubmittedPatches()}
+
+  def GetCQWaitingTimes(self):
+    """Get the time spent by all submitted patches waiting for the cq.
+
+    Returns:
+      {submitted_patch: cq_waiting_time} where submitted_patch is a
+      GerritPatchTuple for a submitted patch, and cq_waiting_time is the
+      time spent by that patch waiting for the cq.
+    """
+    return {k: GetCQWaitTime(k, self._per_patch_actions[k])
+            for k in self.GetSubmittedPatches()}
+
+  # ############################################################################
+  # Classify CLs as good/bad based on the action history.
+  def GetFalseRejections(self, bot_type=None):
+    """Get the changes that were good, but were rejected at some point.
+
+    We consider a patch to have been rejected falsely if it is later submitted
+    because a build with no difference to the change later considered it good.
+
+    Args:
+      bot_type: (optional) constants.PRE_CQ or constants.CQ to restrict the
+          actions considered.
+
+    Returns:
+      A map from rejected patch to a list of rejection actions of the relevant
+      bot_type in ascending order of timestamps.
+    """
+    rejections = self._GetPatchRejectionsByBuilds(bot_type)
+    submitted_patches = self.GetSubmittedPatches(
+        exclude_irrelevant_submissions=False)
+    candidates = set(rejections) & submitted_patches
+
+    # Filter out candidates that were rejected because they were batched
+    # together with truly bad patches in a pre_cq run.
+    bad_precq_builds = set()
+    precq_true_rejections = self.GetTrueRejections(constants.PRE_CQ)
+    for patch in precq_true_rejections:
+      for action in precq_true_rejections[patch]:
+        bad_precq_builds.add(action.build_id)
+
+    updated_candidates = {}
+    for patch in candidates:
+      updated_actions = [a for a in rejections[patch]
+                         if a.build_id not in bad_precq_builds]
+      if updated_actions:
+        updated_candidates[patch] = updated_actions
+    return updated_candidates
+
+  def GetTrueRejections(self, bot_type=None):
+    """Get the changes that were bad, and were rejected.
+
+    A patch rejection is considered a true rejection if a new patch was uploaded
+    after the rejection. Note that we consider a rejection a true rejection only
+    if a subsequent patch was submitted.
+
+    Returns:
+      A map from rejected patch to a list of rejection actions of the relevant
+      bot_type in ascending order of timestamps.
+    """
+    rejections = self._GetPatchRejectionsByBuilds(bot_type)
+    submitted_patches = self.GetSubmittedPatches(
+        exclude_irrelevant_submissions=False)
+    submitted_cls = set([x.GetChangeTuple() for x in submitted_patches])
+
+    candidates = {}
+    for patch in set(rejections) - submitted_patches:
+      if patch.GetChangeTuple() in submitted_cls:
+        # Some other patch for the same CL was submitted.
+        candidates[patch] = rejections[patch]
+
+    return candidates
+
+  # ############################################################################
+  # Helper functions.
+  def _GetPatchRejectionsByBuilds(self, bot_type=None):
+    """Gets all patches that were rejected due to build failures.
+
+    This filters out rejections that were caused by failure to apply the patch.
+
+    Args:
+      bot_type: Optional bot_type to filter actions by.
+
+    Returns:
+      dict of rejected patches to rejection actions for the given bot_type.
+    """
+    rejected_patches = AffectedPatches(self.reject_actions)
+    candidates = collections.defaultdict(list)
+    for patch in rejected_patches:
+      relevant_builds = set(a.build_id for a in self._per_patch_actions[patch]
+                            if a.action == constants.CL_ACTION_PICKED_UP)
+      relevant_actions_iter = (a for a in self._per_patch_actions[patch]
+                               if a.action == constants.CL_ACTION_KICKED_OUT)
+      if bot_type is not None:
+        relevant_actions_iter = (a for a in relevant_actions_iter
+                                 if a.bot_type == bot_type)
+
+      for action in relevant_actions_iter:
+        if action.build_id in relevant_builds:
+          candidates[patch].append(action)
+    return dict(candidates)
+
+  def _GetCLOrPatchActions(self, cl_or_patch):
+    """Get cl/patch specific actions."""
+    if isinstance(cl_or_patch, GerritChangeTuple):
+      return self._per_cl_actions[cl_or_patch]
+    else:
+      return self._per_patch_actions[cl_or_patch]
diff --git a/lib/clactions_unittest b/lib/clactions_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/clactions_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/clactions_unittest.py b/lib/clactions_unittest.py
new file mode 100644
index 0000000..c8fe3f4
--- /dev/null
+++ b/lib/clactions_unittest.py
@@ -0,0 +1,775 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for clactions methods."""
+
+from __future__ import print_function
+
+import datetime
+import itertools
+import random
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import metadata_lib
+from chromite.cbuildbot import validation_pool
+from chromite.lib import fake_cidb
+from chromite.lib import clactions
+from chromite.lib import cros_test_lib
+
+
+class CLActionTest(cros_test_lib.TestCase):
+  """Placeholder for clactions unit tests."""
+
+  def runTest(self):
+    pass
+
+
+class IntervalsTest(cros_test_lib.TestCase):
+  """Placeholder for clactions unit tests."""
+  # pylint: disable=protected-access
+
+  def testIntervals(self):
+    self.assertEqual([], clactions._IntersectIntervals([]))
+    self.assertEqual([(1, 2)], clactions._IntersectIntervals([[(1, 2)]]))
+
+    test_group_0 = [(1, 10)]
+    test_group_1 = [(2, 5), (7, 10)]
+    test_group_2 = [(2, 8), (9, 12)]
+    self.assertEqual(
+        [(2, 5), (7, 8), (9, 10)],
+        clactions._IntersectIntervals([test_group_0, test_group_1,
+                                       test_group_2])
+    )
+
+    test_group_0 = [(1, 3), (10, 12)]
+    test_group_1 = [(2, 5)]
+    self.assertEqual(
+        [(2, 3)],
+        clactions._IntersectIntervals([test_group_0, test_group_1]))
+
+
+class TestCLActionHistory(cros_test_lib.TestCase):
+  """Tests various methods related to CL action history."""
+
+  def setUp(self):
+    self.fake_db = fake_cidb.FakeCIDBConnection()
+
+  def testGetCLHandlingTime(self):
+    """Test that we correctly compute a CL's handling time."""
+    change = metadata_lib.GerritPatchTuple(1, 1, False)
+    launcher_id = self.fake_db.InsertBuild(
+        'launcher', constants.WATERFALL_INTERNAL, 1,
+        constants.PRE_CQ_LAUNCHER_CONFIG, 'hostname')
+    trybot_id = self.fake_db.InsertBuild(
+        'banana pre cq', constants.WATERFALL_INTERNAL, 1,
+        'banana-pre-cq', 'hostname')
+    master_id = self.fake_db.InsertBuild(
+        'CQ master', constants.WATERFALL_INTERNAL, 1,
+        constants.CQ_MASTER, 'hostname')
+    slave_id = self.fake_db.InsertBuild(
+        'banana paladin', constants.WATERFALL_INTERNAL, 1,
+        'banana-paladin', 'hostname')
+
+    start_time = datetime.datetime.now()
+    c = itertools.count()
+
+    def next_time():
+      return start_time + datetime.timedelta(seconds=c.next())
+
+    def a(build_id, action, reason=None):
+      self._Act(build_id, change, action, reason=reason, timestamp=next_time())
+
+    # Change is screened, picked up, and rejected by the pre-cq,
+    # non-speculatively.
+    a(launcher_id, constants.CL_ACTION_VALIDATION_PENDING_PRE_CQ,
+      reason='banana-pre-cq')
+    a(launcher_id, constants.CL_ACTION_SCREENED_FOR_PRE_CQ)
+    a(launcher_id, constants.CL_ACTION_TRYBOT_LAUNCHING,
+      reason='banana-pre-cq')
+    a(trybot_id, constants.CL_ACTION_PICKED_UP)
+    a(trybot_id, constants.CL_ACTION_KICKED_OUT)
+
+    # Change is re-marked by developer, picked up again by pre-cq, verified, and
+    # marked as passed.
+    a(launcher_id, constants.CL_ACTION_REQUEUED)
+    a(launcher_id, constants.CL_ACTION_TRYBOT_LAUNCHING,
+      reason='banana-pre-cq')
+    a(trybot_id, constants.CL_ACTION_PICKED_UP)
+    a(trybot_id, constants.CL_ACTION_VERIFIED)
+    a(launcher_id, constants.CL_ACTION_PRE_CQ_FULLY_VERIFIED)
+    a(launcher_id, constants.CL_ACTION_PRE_CQ_PASSED)
+
+    # Change is picked up by the CQ and rejected.
+    a(master_id, constants.CL_ACTION_PICKED_UP)
+    a(slave_id, constants.CL_ACTION_PICKED_UP)
+    a(master_id, constants.CL_ACTION_KICKED_OUT)
+
+    # Change is re-marked, picked up by the CQ, and forgiven.
+    a(launcher_id, constants.CL_ACTION_REQUEUED)
+    a(master_id, constants.CL_ACTION_PICKED_UP)
+    a(slave_id, constants.CL_ACTION_PICKED_UP)
+    a(master_id, constants.CL_ACTION_FORGIVEN)
+
+    # Change is picked up by the CQ again, and submitted.
+    a(master_id, constants.CL_ACTION_PICKED_UP)
+    a(slave_id, constants.CL_ACTION_PICKED_UP)
+    a(master_id, constants.CL_ACTION_SUBMITTED)
+
+    action_history = self.fake_db.GetActionsForChanges([change])
+    # Note: There are 2 ticks in the total handling time that are not accounted
+    # for in the sub-times. These are the time between VALIDATION_PENDING and
+    # SCREENED, and the time between FULLY_VERIFIED and PASSED.
+    self.assertEqual(18, clactions.GetCLHandlingTime(change, action_history))
+    self.assertEqual(7, clactions.GetPreCQTime(change, action_history))
+    self.assertEqual(3, clactions.GetCQWaitTime(change, action_history))
+    self.assertEqual(6, clactions.GetCQRunTime(change, action_history))
+
+  def _Act(self, build_id, change, action, reason=None, timestamp=None):
+    self.fake_db.InsertCLActions(
+        build_id,
+        [clactions.CLAction.FromGerritPatchAndAction(change, action, reason)],
+        timestamp=timestamp)
+
+  def _GetCLStatus(self, change):
+    """Helper method to get a CL's pre-CQ status from fake_db."""
+    action_history = self.fake_db.GetActionsForChanges([change])
+    return clactions.GetCLPreCQStatus(change, action_history)
+
+  def testGetRequeuedOrSpeculative(self):
+    """Tests GetRequeuedOrSpeculative function."""
+    change = metadata_lib.GerritPatchTuple(1, 1, False)
+    speculative_change = metadata_lib.GerritPatchTuple(2, 2, False)
+    changes = [change, speculative_change]
+
+    build_id = self.fake_db.InsertBuild('n', 'w', 1, 'c', 'h')
+
+    # A fresh change should not be marked requeued. A fresh speculative
+    # change should be marked as speculative.
+    action_history = self.fake_db.GetActionsForChanges(changes)
+    a = clactions.GetRequeuedOrSpeculative(change, action_history, False)
+    self.assertEqual(a, None)
+    a = clactions.GetRequeuedOrSpeculative(speculative_change, action_history,
+                                           True)
+    self.assertEqual(a, constants.CL_ACTION_SPECULATIVE)
+    self._Act(build_id, speculative_change, a)
+
+    # After picking up either change, neither should need an additional
+    # requeued or speculative action.
+    self._Act(build_id, speculative_change, constants.CL_ACTION_PICKED_UP)
+    self._Act(build_id, change, constants.CL_ACTION_PICKED_UP)
+    action_history = self.fake_db.GetActionsForChanges(changes)
+    a = clactions.GetRequeuedOrSpeculative(change, action_history, False)
+    self.assertEqual(a, None)
+    a = clactions.GetRequeuedOrSpeculative(speculative_change, action_history,
+                                           True)
+    self.assertEqual(a, None)
+
+    # After being rejected, both changes need an action (requeued and
+    # speculative accordingly).
+    self._Act(build_id, speculative_change, constants.CL_ACTION_KICKED_OUT)
+    self._Act(build_id, change, constants.CL_ACTION_KICKED_OUT)
+    action_history = self.fake_db.GetActionsForChanges(changes)
+    a = clactions.GetRequeuedOrSpeculative(change, action_history, False)
+    self.assertEqual(a, constants.CL_ACTION_REQUEUED)
+    self._Act(build_id, change, a)
+    a = clactions.GetRequeuedOrSpeculative(speculative_change, action_history,
+                                           True)
+    self.assertEqual(a, constants.CL_ACTION_SPECULATIVE)
+    self._Act(build_id, speculative_change, a)
+
+    # Once a speculative change becomes un-speculative, it needs a REQUEUED
+    # action.
+    action_history = self.fake_db.GetActionsForChanges(changes)
+    a = clactions.GetRequeuedOrSpeculative(speculative_change, action_history,
+                                           False)
+    self.assertEqual(a, constants.CL_ACTION_REQUEUED)
+    self._Act(build_id, speculative_change, a)
+
+  def testGetCLPreCQStatus(self):
+    change = metadata_lib.GerritPatchTuple(1, 1, False)
+    # Initial pre-CQ status of a change is None.
+    self.assertEqual(self._GetCLStatus(change), None)
+
+    # Builders can update the CL's pre-CQ status.
+    build_id = self.fake_db.InsertBuild(
+        constants.PRE_CQ_LAUNCHER_NAME, constants.WATERFALL_INTERNAL, 1,
+        constants.PRE_CQ_LAUNCHER_CONFIG, 'bot-hostname')
+
+    self._Act(build_id, change, constants.CL_ACTION_PRE_CQ_WAITING)
+    self.assertEqual(self._GetCLStatus(change), constants.CL_STATUS_WAITING)
+
+    self._Act(build_id, change, constants.CL_ACTION_PRE_CQ_INFLIGHT)
+    self.assertEqual(self._GetCLStatus(change), constants.CL_STATUS_INFLIGHT)
+
+    # Recording a cl action that is not a valid pre-cq status should leave
+    # pre-cq status unaffected.
+    self._Act(build_id, change, 'polenta')
+    self.assertEqual(self._GetCLStatus(change), constants.CL_STATUS_INFLIGHT)
+
+    self._Act(build_id, change, constants.CL_ACTION_PRE_CQ_RESET)
+    self.assertEqual(self._GetCLStatus(change), None)
+
+  def testGetCLPreCQProgress(self):
+    change = metadata_lib.GerritPatchTuple(1, 1, False)
+    s = lambda: clactions.GetCLPreCQProgress(
+        change, self.fake_db.GetActionsForChanges([change]))
+
+    self.assertEqual({}, s())
+
+    # Simulate the pre-cq-launcher screening changes for pre-cq configs
+    # to test with.
+    launcher_build_id = self.fake_db.InsertBuild(
+        constants.PRE_CQ_LAUNCHER_NAME, constants.WATERFALL_INTERNAL,
+        1, constants.PRE_CQ_LAUNCHER_CONFIG, 'bot hostname 1')
+
+    self._Act(launcher_build_id, change,
+              constants.CL_ACTION_VALIDATION_PENDING_PRE_CQ,
+              'pineapple-pre-cq')
+    self._Act(launcher_build_id, change,
+              constants.CL_ACTION_VALIDATION_PENDING_PRE_CQ,
+              'banana-pre-cq')
+
+    configs = ['banana-pre-cq', 'pineapple-pre-cq']
+
+    self.assertEqual(configs, sorted(s().keys()))
+    for c in configs:
+      self.assertEqual(constants.CL_PRECQ_CONFIG_STATUS_PENDING,
+                       s()[c][0])
+
+    # Simulate a prior build rejecting change
+    self._Act(launcher_build_id, change,
+              constants.CL_ACTION_KICKED_OUT,
+              'pineapple-pre-cq')
+    self.assertEqual(constants.CL_PRECQ_CONFIG_STATUS_FAILED,
+                     s()['pineapple-pre-cq'][0])
+
+    # Simulate the pre-cq-launcher launching tryjobs for all pending configs.
+    for c in configs:
+      self._Act(launcher_build_id, change,
+                constants.CL_ACTION_TRYBOT_LAUNCHING, c)
+    for c in configs:
+      self.assertEqual(constants.CL_PRECQ_CONFIG_STATUS_LAUNCHED,
+                       s()[c][0])
+
+    # Simulate the tryjobs launching, and picking up the changes.
+    banana_build_id = self.fake_db.InsertBuild(
+        'banana', constants.WATERFALL_TRYBOT, 12, 'banana-pre-cq',
+        'banana hostname')
+    pineapple_build_id = self.fake_db.InsertBuild(
+        'pineapple', constants.WATERFALL_TRYBOT, 87, 'pineapple-pre-cq',
+        'pineapple hostname')
+
+    self._Act(banana_build_id, change, constants.CL_ACTION_PICKED_UP)
+    self._Act(pineapple_build_id, change, constants.CL_ACTION_PICKED_UP)
+    for c in configs:
+      self.assertEqual(constants.CL_PRECQ_CONFIG_STATUS_INFLIGHT,
+                       s()[c][0])
+
+    # Simulate the changes being retried.
+    self._Act(banana_build_id, change, constants.CL_ACTION_FORGIVEN)
+    self._Act(launcher_build_id, change, constants.CL_ACTION_FORGIVEN,
+              'pineapple-pre-cq')
+    for c in configs:
+      self.assertEqual(constants.CL_PRECQ_CONFIG_STATUS_PENDING,
+                       s()[c][0])
+    # Simulate the changes being rejected, either by the configs themselves
+    # or by the pre-cq-launcher.
+    self._Act(banana_build_id, change, constants.CL_ACTION_KICKED_OUT)
+    self._Act(launcher_build_id, change, constants.CL_ACTION_KICKED_OUT,
+              'pineapple-pre-cq')
+    for c in configs:
+      self.assertEqual(constants.CL_PRECQ_CONFIG_STATUS_FAILED,
+                       s()[c][0])
+    # Simulate the tryjobs verifying the changes.
+    self._Act(banana_build_id, change, constants.CL_ACTION_VERIFIED)
+    self._Act(pineapple_build_id, change, constants.CL_ACTION_VERIFIED)
+    for c in configs:
+      self.assertEqual(constants.CL_PRECQ_CONFIG_STATUS_VERIFIED,
+                       s()[c][0])
+
+    # Simulate the pre-cq status being reset.
+    self._Act(launcher_build_id, change, constants.CL_ACTION_PRE_CQ_RESET)
+    self.assertEqual({}, s())
+
+  def testGetCLPreCQCategoriesAndPendingCLs(self):
+    c1 = metadata_lib.GerritPatchTuple(1, 1, False)
+    c2 = metadata_lib.GerritPatchTuple(2, 2, False)
+    c3 = metadata_lib.GerritPatchTuple(3, 3, False)
+    c4 = metadata_lib.GerritPatchTuple(4, 4, False)
+    c5 = metadata_lib.GerritPatchTuple(5, 5, False)
+
+    launcher_build_id = self.fake_db.InsertBuild(
+        constants.PRE_CQ_LAUNCHER_NAME, constants.WATERFALL_INTERNAL,
+        1, constants.PRE_CQ_LAUNCHER_CONFIG, 'bot hostname 1')
+    pineapple_build_id = self.fake_db.InsertBuild(
+        'pineapple', constants.WATERFALL_TRYBOT, 87, 'pineapple-pre-cq',
+        'pineapple hostname')
+    guava_build_id = self.fake_db.InsertBuild(
+        'guava', constants.WATERFALL_TRYBOT, 7, 'guava-pre-cq',
+        'guava hostname')
+
+    # c1 has 3 pending verifications, but only 1 inflight and 1
+    # launching, so it is not busy/inflight.
+    self._Act(launcher_build_id, c1,
+              constants.CL_ACTION_VALIDATION_PENDING_PRE_CQ,
+              'pineapple-pre-cq')
+    self._Act(launcher_build_id, c1,
+              constants.CL_ACTION_VALIDATION_PENDING_PRE_CQ,
+              'banana-pre-cq')
+    self._Act(launcher_build_id, c1,
+              constants.CL_ACTION_VALIDATION_PENDING_PRE_CQ,
+              'guava-pre-cq')
+    self._Act(launcher_build_id, c1,
+              constants.CL_ACTION_TRYBOT_LAUNCHING,
+              'banana-pre-cq')
+    self._Act(pineapple_build_id, c1, constants.CL_ACTION_PICKED_UP)
+
+    # c2 has 3 pending verifications, 1 inflight and 1 launching, and 1 passed,
+    # so it is busy.
+    self._Act(launcher_build_id, c2,
+              constants.CL_ACTION_VALIDATION_PENDING_PRE_CQ,
+              'pineapple-pre-cq')
+    self._Act(launcher_build_id, c2,
+              constants.CL_ACTION_VALIDATION_PENDING_PRE_CQ,
+              'banana-pre-cq')
+    self._Act(launcher_build_id, c2,
+              constants.CL_ACTION_VALIDATION_PENDING_PRE_CQ,
+              'guava-pre-cq')
+    self._Act(launcher_build_id, c2, constants.CL_ACTION_TRYBOT_LAUNCHING,
+              'banana-pre-cq')
+    self._Act(pineapple_build_id, c2, constants.CL_ACTION_PICKED_UP)
+    self._Act(guava_build_id, c2, constants.CL_ACTION_VERIFIED)
+
+    # c3 has 2 pending verifications, both passed, so it is passed.
+    self._Act(launcher_build_id, c3,
+              constants.CL_ACTION_VALIDATION_PENDING_PRE_CQ,
+              'pineapple-pre-cq')
+    self._Act(launcher_build_id, c3,
+              constants.CL_ACTION_VALIDATION_PENDING_PRE_CQ,
+              'guava-pre-cq')
+    self._Act(pineapple_build_id, c3, constants.CL_ACTION_VERIFIED)
+    self._Act(guava_build_id, c3, constants.CL_ACTION_VERIFIED)
+
+    # c4 has 2 pending verifications: one is inflight and the other
+    # passed. It is considered inflight and busy.
+    self._Act(launcher_build_id, c4,
+              constants.CL_ACTION_VALIDATION_PENDING_PRE_CQ,
+              'pineapple-pre-cq')
+    self._Act(launcher_build_id, c4,
+              constants.CL_ACTION_VALIDATION_PENDING_PRE_CQ,
+              'guava-pre-cq')
+    self._Act(pineapple_build_id, c4, constants.CL_ACTION_PICKED_UP)
+    self._Act(guava_build_id, c4, constants.CL_ACTION_VERIFIED)
+
+    # c5 has not even been screened.
+
+    changes = [c1, c2, c3, c4, c5]
+    action_history = self.fake_db.GetActionsForChanges(changes)
+    progress_map = clactions.GetPreCQProgressMap(changes, action_history)
+
+    self.assertEqual(({c2, c4}, {c4}, {c3}),
+                     clactions.GetPreCQCategories(progress_map))
+
+    # Among changes c1, c2, c3, only the guava-pre-cq config is pending. The
+    # other configs are either inflight, launching, or passed everywhere.
+    screened_changes = set(changes).intersection(progress_map)
+    self.assertEqual({'guava-pre-cq'},
+                     clactions.GetPreCQConfigsToTest(screened_changes,
+                                                     progress_map))
+
+
+class TestCLStatusCounter(cros_test_lib.TestCase):
+  """Tests that GetCLActionCount behaves as expected."""
+
+  def setUp(self):
+    self.fake_db = fake_cidb.FakeCIDBConnection()
+
+  def testGetCLActionCount(self):
+    c1p1 = metadata_lib.GerritPatchTuple(1, 1, False)
+    c1p2 = metadata_lib.GerritPatchTuple(1, 2, False)
+    precq_build_id = self.fake_db.InsertBuild(
+        constants.PRE_CQ_LAUNCHER_NAME, constants.WATERFALL_INTERNAL, 1,
+        constants.PRE_CQ_LAUNCHER_CONFIG, 'bot-hostname')
+    melon_build_id = self.fake_db.InsertBuild(
+        'melon builder name', constants.WATERFALL_INTERNAL, 1,
+        'melon-config-name', 'grape-bot-hostname')
+
+    # Count should be zero before any actions are recorded.
+
+    action_history = self.fake_db.GetActionsForChanges([c1p1])
+    self.assertEqual(
+        0,
+        clactions.GetCLActionCount(
+            c1p1, validation_pool.CQ_PIPELINE_CONFIGS,
+            constants.CL_ACTION_KICKED_OUT, action_history))
+
+    # Record 3 failures for c1p1, and some other actions. Only count the
+    # actions from builders in validation_pool.CQ_PIPELINE_CONFIGS.
+    self.fake_db.InsertCLActions(
+        precq_build_id,
+        [clactions.CLAction.FromGerritPatchAndAction(
+            c1p1, constants.CL_ACTION_KICKED_OUT)])
+    self.fake_db.InsertCLActions(
+        precq_build_id,
+        [clactions.CLAction.FromGerritPatchAndAction(
+            c1p1, constants.CL_ACTION_PICKED_UP)])
+    self.fake_db.InsertCLActions(
+        precq_build_id,
+        [clactions.CLAction.FromGerritPatchAndAction(
+            c1p1, constants.CL_ACTION_KICKED_OUT)])
+    self.fake_db.InsertCLActions(
+        melon_build_id,
+        [clactions.CLAction.FromGerritPatchAndAction(
+            c1p1, constants.CL_ACTION_KICKED_OUT)])
+
+    action_history = self.fake_db.GetActionsForChanges([c1p1])
+    self.assertEqual(
+        2,
+        clactions.GetCLActionCount(
+            c1p1, validation_pool.CQ_PIPELINE_CONFIGS,
+            constants.CL_ACTION_KICKED_OUT, action_history))
+
+    # Record a failure for c1p2. Now the latest patchset's failure count
+    # should be 1 (regardless of whether we pass c1p1 or c1p2), whereas the
+    # total failure count should be 3.
+    self.fake_db.InsertCLActions(
+        precq_build_id,
+        [clactions.CLAction.FromGerritPatchAndAction(
+            c1p2, constants.CL_ACTION_KICKED_OUT)])
+
+    action_history = self.fake_db.GetActionsForChanges([c1p1])
+    self.assertEqual(
+        1,
+        clactions.GetCLActionCount(
+            c1p1, validation_pool.CQ_PIPELINE_CONFIGS,
+            constants.CL_ACTION_KICKED_OUT, action_history))
+    self.assertEqual(
+        1,
+        clactions.GetCLActionCount(
+            c1p2, validation_pool.CQ_PIPELINE_CONFIGS,
+            constants.CL_ACTION_KICKED_OUT, action_history))
+    self.assertEqual(
+        3,
+        clactions.GetCLActionCount(
+            c1p2, validation_pool.CQ_PIPELINE_CONFIGS,
+            constants.CL_ACTION_KICKED_OUT, action_history,
+            latest_patchset_only=False))
+
+
+class TestCLActionHistorySmoke(cros_test_lib.TestCase):
+  """A basic test for the simpler aggregating API for CLActionHistory."""
+
+  def setUp(self):
+
+    self.cl1 = clactions.GerritChangeTuple(11111, True)
+    self.cl1_patch1 = clactions.GerritPatchTuple(
+        self.cl1.gerrit_number, 1, self.cl1.internal)
+    self.cl1_patch2 = clactions.GerritPatchTuple(
+        self.cl1.gerrit_number, 2, self.cl1.internal)
+
+    self.cl2 = clactions.GerritChangeTuple(22222, True)
+    self.cl2_patch1 = clactions.GerritPatchTuple(
+        self.cl2.gerrit_number, 1, self.cl2.internal)
+    self.cl2_patch2 = clactions.GerritPatchTuple(
+        self.cl2.gerrit_number, 2, self.cl2.internal)
+
+    self.cl3 = clactions.GerritChangeTuple(33333, True)
+    self.cl3_patch1 = clactions.GerritPatchTuple(
+        self.cl3.gerrit_number, 2, self.cl3.internal)
+
+    # Expected actions in reverse chronological order, most recent first.
+    self.action1 = clactions.CLAction.FromGerritPatchAndAction(
+        self.cl1_patch2, constants.CL_ACTION_SUBMITTED,
+        timestamp=self._NDaysAgo(1))
+    self.action2 = clactions.CLAction.FromGerritPatchAndAction(
+        self.cl1_patch2, constants.CL_ACTION_KICKED_OUT,
+        timestamp=self._NDaysAgo(2))
+    self.action3 = clactions.CLAction.FromGerritPatchAndAction(
+        self.cl2_patch2, constants.CL_ACTION_SUBMITTED,
+        timestamp=self._NDaysAgo(3))
+    self.action4 = clactions.CLAction.FromGerritPatchAndAction(
+        self.cl1_patch1, constants.CL_ACTION_SUBMIT_FAILED,
+        timestamp=self._NDaysAgo(4))
+    self.action5 = clactions.CLAction.FromGerritPatchAndAction(
+        self.cl1_patch1, constants.CL_ACTION_KICKED_OUT,
+        timestamp=self._NDaysAgo(5))
+    self.action6 = clactions.CLAction.FromGerritPatchAndAction(
+        self.cl3_patch1, constants.CL_ACTION_SUBMITTED,
+        reason=constants.STRATEGY_NONMANIFEST,
+        timestamp=self._NDaysAgo(6))
+
+    # CLActionHistory does not require the history to be given in chronological
+    # order, so we provide them in reverse order, and expect them to be sorted
+    # as appropriate.
+    self.cl_action_stats = clactions.CLActionHistory([
+        self.action1, self.action2, self.action3, self.action4, self.action5,
+        self.action6])
+
+  def _NDaysAgo(self, num_days):
+    return datetime.datetime.today() - datetime.timedelta(num_days)
+
+  def testAffected(self):
+    """Tests that the Affected* methods DTRT."""
+    self.assertEqual(set([self.cl1, self.cl2, self.cl3]),
+                     self.cl_action_stats.affected_cls)
+    self.assertEqual(
+        set([self.cl1_patch1, self.cl1_patch2, self.cl2_patch2,
+             self.cl3_patch1]),
+        self.cl_action_stats.affected_patches)
+
+  def testActions(self):
+    """Tests that different types of actions are listed correctly."""
+    self.assertEqual([self.action5, self.action2],
+                     self.cl_action_stats.reject_actions)
+    self.assertEqual([self.action6, self.action3, self.action1],
+                     self.cl_action_stats.submit_actions)
+    self.assertEqual([self.action4],
+                     self.cl_action_stats.submit_fail_actions)
+
+  def testSubmitted(self):
+    """Tests that the list of submitted objects is correct."""
+    self.assertEqual(set([self.cl1, self.cl2]),
+                     self.cl_action_stats.GetSubmittedCLs())
+    self.assertEqual(set([self.cl1, self.cl2, self.cl3]),
+                     self.cl_action_stats.GetSubmittedCLs(False))
+    self.assertEqual(set([self.cl1_patch2, self.cl2_patch2]),
+                     self.cl_action_stats.GetSubmittedPatches())
+    self.assertEqual(set([self.cl1_patch2, self.cl2_patch2, self.cl3_patch1]),
+                     self.cl_action_stats.GetSubmittedPatches(False))
+
+
+class TestCLActionHistoryRejections(cros_test_lib.TestCase):
+  """Involved test of aggregation of rejections."""
+
+  CQ_BUILD_CONFIG = 'lumpy-paladin'
+  PRE_CQ_BUILD_CONFIG = 'pre-cq-group'
+
+  def setUp(self):
+    self._days_forward = 1
+    self._build_id = 1
+    self.action_history = []
+    self.cl_action_stats = None
+
+    self.cl1 = clactions.GerritChangeTuple(11111, True)
+    self.cl1_patch1 = clactions.GerritPatchTuple(
+        self.cl1.gerrit_number, 1, self.cl1.internal)
+    self.cl1_patch2 = clactions.GerritPatchTuple(
+        self.cl1.gerrit_number, 2, self.cl1.internal)
+
+    self.cl2 = clactions.GerritChangeTuple(22222, True)
+    self.cl2_patch1 = clactions.GerritPatchTuple(
+        self.cl2.gerrit_number, 1, self.cl2.internal)
+    self.cl2_patch2 = clactions.GerritPatchTuple(
+        self.cl2.gerrit_number, 2, self.cl2.internal)
+
+  def _AppendToHistory(self, patch, action, **kwargs):
+    kwargs.setdefault('id', -1)
+    kwargs.setdefault('build_id', -1)
+    kwargs.setdefault('reason', '')
+    kwargs.setdefault('build_config', '')
+    kwargs['timestamp'] = (datetime.datetime.today() +
+                           datetime.timedelta(self._days_forward))
+    self._days_forward += 1
+    kwargs['action'] = action
+    kwargs['change_number'] = int(patch.gerrit_number)
+    kwargs['patch_number'] = int(patch.patch_number)
+    kwargs['change_source'] = clactions.BoolToChangeSource(patch.internal)
+
+    action = clactions.CLAction(**kwargs)
+    self.action_history.append(action)
+    return action
+
+  def _PickupAndRejectPatch(self, patch, **kwargs):
+    kwargs.setdefault('build_id', self._build_id)
+    self._build_id += 1
+    pickup_action = self._AppendToHistory(patch, constants.CL_ACTION_PICKED_UP,
+                                          **kwargs)
+    reject_action = self._AppendToHistory(patch, constants.CL_ACTION_KICKED_OUT,
+                                          **kwargs)
+    return pickup_action, reject_action
+
+  def _CreateCLActionHistory(self):
+    """Create the object under test, reordering the history.
+
+    We reorder history in a fixed but arbitrary way, to test that order doesn't
+    matter for the object under test.
+    """
+    random.seed(4)  # Everyone knows this is the randomest number on earth.
+    random.shuffle(self.action_history)
+    self.cl_action_stats = clactions.CLActionHistory(self.action_history)
+
+  def testRejectionsNoRejection(self):
+    """Tests the null case."""
+    self._AppendToHistory(self.cl1_patch1, constants.CL_ACTION_SUBMITTED)
+    self._CreateCLActionHistory()
+    self.assertEqual({}, self.cl_action_stats.GetTrueRejections())
+    self.assertEqual({}, self.cl_action_stats.GetFalseRejections())
+
+  def testTrueRejectionsSkipApplyFailure(self):
+    """Test that apply failures are not considered true rejections."""
+    self._AppendToHistory(self.cl1_patch1, constants.CL_ACTION_KICKED_OUT)
+    self._AppendToHistory(self.cl1_patch2, constants.CL_ACTION_SUBMITTED)
+    self._CreateCLActionHistory()
+    self.assertEqual({}, self.cl_action_stats.GetTrueRejections())
+
+  def testTrueRejectionsIncludeLaterSubmitted(self):
+    """Tests that we include CLs which have a patch that was later submitted."""
+    _, reject_action = self._PickupAndRejectPatch(self.cl1_patch1)
+    self._AppendToHistory(self.cl1_patch2, constants.CL_ACTION_SUBMITTED)
+    self._CreateCLActionHistory()
+    self.assertEqual({self.cl1_patch1: [reject_action]},
+                     self.cl_action_stats.GetTrueRejections())
+
+  def testTrueRejectionsMultipleRejectionsOnPatch(self):
+    """Tests that we include all rejection actions on a patch."""
+    _, reject_action1 = self._PickupAndRejectPatch(self.cl1_patch1)
+    _, reject_action2 = self._PickupAndRejectPatch(self.cl1_patch1)
+    self._AppendToHistory(self.cl1_patch2, constants.CL_ACTION_SUBMITTED)
+    self._CreateCLActionHistory()
+    self.assertEqual({self.cl1_patch1: [reject_action1, reject_action2]},
+                     self.cl_action_stats.GetTrueRejections())
+
+  def testTrueRejectionsByCQ(self):
+    """A complex test filtering for rejections by the cq.
+
+    For a patch that has been rejected by both the pre-cq and cq, only cq's
+    actions should be reported. For a patch that has been rejected by only the
+    pre-cq, the rejection should not be included at all.
+    """
+    _, reject_action1 = self._PickupAndRejectPatch(
+        self.cl1_patch1, build_config=self.PRE_CQ_BUILD_CONFIG)
+    _, reject_action2 = self._PickupAndRejectPatch(
+        self.cl1_patch1, build_config=self.CQ_BUILD_CONFIG)
+    self._AppendToHistory(self.cl1_patch2, constants.CL_ACTION_SUBMITTED)
+    _, reject_action3 = self._PickupAndRejectPatch(
+        self.cl2_patch1, build_config=self.PRE_CQ_BUILD_CONFIG)
+    self._AppendToHistory(self.cl2_patch2, constants.CL_ACTION_SUBMITTED)
+    self._CreateCLActionHistory()
+    self.assertEqual({self.cl1_patch1: [reject_action1, reject_action2],
+                      self.cl2_patch1: [reject_action3]},
+                     self.cl_action_stats.GetTrueRejections())
+    self.assertEqual({self.cl1_patch1: [reject_action2]},
+                     self.cl_action_stats.GetTrueRejections(constants.CQ))
+
+  def testFalseRejectionsSkipApplyFailure(self):
+    """Test that apply failures are not considered false rejections."""
+    self._AppendToHistory(self.cl1_patch1, constants.CL_ACTION_KICKED_OUT)
+    self._AppendToHistory(self.cl1_patch1, constants.CL_ACTION_SUBMITTED)
+    self._CreateCLActionHistory()
+    self.assertEqual({}, self.cl_action_stats.GetTrueRejections())
+
+  def testFalseRejectionMultiplePatchesFalselyRejected(self):
+    """Test the case when we reject multiple patches falsely."""
+    _, reject_action1 = self._PickupAndRejectPatch(self.cl1_patch1)
+    _, reject_action2 = self._PickupAndRejectPatch(self.cl1_patch1)
+    self._AppendToHistory(self.cl1_patch1, constants.CL_ACTION_SUBMITTED)
+    _, reject_action3 = self._PickupAndRejectPatch(self.cl1_patch2)
+    self._AppendToHistory(self.cl1_patch2, constants.CL_ACTION_SUBMITTED)
+    self._CreateCLActionHistory()
+    self.assertEqual({self.cl1_patch1: [reject_action1, reject_action2],
+                      self.cl1_patch2: [reject_action3]},
+                     self.cl_action_stats.GetFalseRejections())
+
+  def testFalseRejectionsByCQ(self):
+    """Test that we list CQ specific rejections correctly."""
+    self._PickupAndRejectPatch(self.cl1_patch1,
+                               build_config=self.PRE_CQ_BUILD_CONFIG)
+    _, reject_action1 = self._PickupAndRejectPatch(
+        self.cl1_patch1, build_config=self.CQ_BUILD_CONFIG)
+    self._AppendToHistory(self.cl1_patch1, action=constants.CL_ACTION_SUBMITTED)
+    self._CreateCLActionHistory()
+    self.assertEqual({self.cl1_patch1: [reject_action1]},
+                     self.cl_action_stats.GetFalseRejections(constants.CQ))
+
+  def testFalseRejectionsSkipsBadPreCQRun(self):
+    """Test that we don't consider rejections on bad pre-cq builds false.
+
+    We batch related CLs together on pre-cq runs. Rejections because a certain
+    pre-cq build failed are considered to not be false because a CL was still
+    to blame.
+    """
+    # Use our own build_ids to tie CLs together.
+    bad_build_id = 21
+    # This false rejection is due to a bad build.
+    self._PickupAndRejectPatch(self.cl1_patch1,
+                               build_config=self.PRE_CQ_BUILD_CONFIG,
+                               build_id=bad_build_id)
+    # This is a true rejection, marking the pre-cq build as a bad build.
+    _, reject_action1 = self._PickupAndRejectPatch(
+        self.cl2_patch1,
+        build_config=self.PRE_CQ_BUILD_CONFIG,
+        build_id=bad_build_id)
+    self._AppendToHistory(self.cl1_patch1,
+                          constants.CL_ACTION_SUBMITTED)
+    self._AppendToHistory(self.cl2_patch2,
+                          constants.CL_ACTION_SUBMITTED)
+    self._CreateCLActionHistory()
+    self.assertEqual({self.cl2_patch1: [reject_action1]},
+                     self.cl_action_stats.GetTrueRejections())
+    self.assertEqual({}, self.cl_action_stats.GetFalseRejections())
+
+  def testFalseRejectionsSkipsBadPreCQAction(self):
+    """Test that we skip only the bad pre-cq actions when skipping bad builds.
+
+    If a patch is rejected by a bad pre-cq run, and then rejected again by
+    other builds, we should only skip the first action.
+    """
+    # Use our own build_ids to tie CLs together.
+    bad_build_id = 21
+    # This false rejection is due to a bad build.
+    self._PickupAndRejectPatch(self.cl1_patch1,
+                               build_config=self.PRE_CQ_BUILD_CONFIG,
+                               build_id=bad_build_id)
+    # This is a true rejection, marking the pre-cq build as a bad build.
+    _, reject_action1 = self._PickupAndRejectPatch(
+        self.cl2_patch1,
+        build_config=self.PRE_CQ_BUILD_CONFIG,
+        build_id=bad_build_id)
+    # This is a valid false rejection.
+    _, reject_action2 = self._PickupAndRejectPatch(
+        self.cl1_patch1, build_config=self.PRE_CQ_BUILD_CONFIG)
+    # This is also a valid false rejection.
+    _, reject_action3 = self._PickupAndRejectPatch(
+        self.cl1_patch1, build_config=self.CQ_BUILD_CONFIG)
+    self._AppendToHistory(self.cl1_patch1,
+                          constants.CL_ACTION_SUBMITTED)
+    self._AppendToHistory(self.cl2_patch2,
+                          constants.CL_ACTION_SUBMITTED)
+    self._CreateCLActionHistory()
+    self.assertEqual({self.cl2_patch1: [reject_action1]},
+                     self.cl_action_stats.GetTrueRejections())
+    self.assertEqual({self.cl1_patch1: [reject_action2, reject_action3]},
+                     self.cl_action_stats.GetFalseRejections())
+    self.assertEqual({self.cl1_patch1: [reject_action2]},
+                     self.cl_action_stats.GetFalseRejections(constants.PRE_CQ))
+    self.assertEqual({self.cl1_patch1: [reject_action3]},
+                     self.cl_action_stats.GetFalseRejections(constants.CQ))
+
+  def testFalseRejectionsMergeConflictByBotType(self):
+    """Test the case when one bot has merge conflict.
+
+    If pre-cq falsely rejects a patch, and CQ has a merge conflict, but later
+    submits the CL, the false rejection should only show up for pre-cq.
+    """
+    _, reject_action1 = self._PickupAndRejectPatch(
+        self.cl1_patch1,
+        build_config=self.PRE_CQ_BUILD_CONFIG)
+    self._AppendToHistory(self.cl1_patch1, constants.CL_ACTION_KICKED_OUT,
+                          build_config=self.CQ_BUILD_CONFIG)
+    self._AppendToHistory(self.cl1_patch1, constants.CL_ACTION_SUBMITTED,
+                          build_config=self.CQ_BUILD_CONFIG)
+    self._CreateCLActionHistory()
+    self.assertEqual({self.cl1_patch1: [reject_action1]},
+                     self.cl_action_stats.GetFalseRejections(constants.PRE_CQ))
+    self.assertEqual({}, self.cl_action_stats.GetFalseRejections(constants.CQ))
+
+  def testRejectionsPatchSubmittedThenUpdated(self):
+    """Test the case when a patch is submitted, then updated."""
+    _, reject_action1 = self._PickupAndRejectPatch(self.cl1_patch1)
+    self._AppendToHistory(self.cl1_patch1, constants.CL_ACTION_SUBMITTED)
+    self._AppendToHistory(self.cl1_patch2, constants.CL_ACTION_PICKED_UP)
+    self._CreateCLActionHistory()
+    self.assertEqual({}, self.cl_action_stats.GetTrueRejections())
+    self.assertEqual({self.cl1_patch1: [reject_action1]},
+                     self.cl_action_stats.GetFalseRejections())
diff --git a/lib/cleanup.py b/lib/cleanup.py
new file mode 100644
index 0000000..5f31161
--- /dev/null
+++ b/lib/cleanup.py
@@ -0,0 +1,124 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Context Manager to ensure cleanup code is run."""
+
+from __future__ import print_function
+
+import contextlib
+import os
+import multiprocessing
+import signal
+import sys
+
+from chromite.lib import cros_build_lib
+from chromite.lib import locking
+
+
+class EnforcedCleanupSection(cros_build_lib.MasterPidContextManager):
+
+  """Context manager used to ensure that a section of cleanup code is run
+
+  This is designed such that a child splits off, ensuring that even if the
+  parent is sigkilled, the section marked *will* be run.  This is implemented
+  via a ProcessLock shared between parent, and a process split off to
+  survive any sigkills/hard crashes in the parent.
+
+  The usage of this is basically in a pseudo-transactional manner:
+
+  >>> with EnforcedCleanupSection() as critical:
+  ...   with other_handler:
+  ...     try:
+  ...       with critical.ForkWatchdog():
+  ...         # Everything past here doesn't run during enforced cleanup
+  ...         # ... normal code ...
+  ...     finally:
+  ...       pass # This is guaranteed to run.
+  ...    # The __exit__ for other_handler is guaranteed to run.
+  ...  # Anything from this point forward will only be run by the invoking
+  ...  # process. If cleanup enforcement had to occur, any code from this
+  ...  # point forward won't be run.
+  >>>
+  """
+  def __init__(self):
+    cros_build_lib.MasterPidContextManager.__init__(self)
+    self._lock = locking.ProcessLock(verbose=False)
+    self._forked = False
+    self._is_child = False
+    self._watchdog_alive = False
+    self._read_pipe, self._write_pipe = multiprocessing.Pipe(duplex=False)
+
+  @contextlib.contextmanager
+  def ForkWatchdog(self):
+    if self._forked:
+      raise RuntimeError("ForkWatchdog was invoked twice for %s" % (self,))
+    self._lock.write_lock()
+
+    pid = os.fork()
+    self._forked = True
+
+    if pid:
+      # Parent; nothing further to do here.
+      self._watchdog_alive = True
+      try:
+        yield
+      finally:
+        self._KillWatchdog()
+      return
+
+    # Get ourselves a new process group; note that we do not reparent
+    # to init.
+    os.setsid()
+
+    # Since we share stdin/stdout/whatever, suppress sigint should we somehow
+    # become the foreground process in the session group.
+    # pylint: disable=W0212
+    signal.signal(signal.SIGINT, signal.SIG_IGN)
+    # Child code.  We lose the lock via lockf/fork semantics.
+    self._is_child = True
+    try:
+      self._lock.write_lock()
+    except BaseException as e:
+      print("EnforcedCleanupSection %s excepted(%r) attempting "
+            "to take the write lock; hard exiting." % (self, e),
+            file=sys.stderr)
+      sys.stderr.flush()
+      # We have no way of knowing the state of the parent if this locking
+      # fails- failure means a code bug.  Specifically, we don't know if
+      # cleanup code was run, thus just flat out bail.
+      os._exit(1)
+
+    # Check if the parent exited cleanly; if so, we don't need to do anything.
+    if self._read_pipe.poll() and self._read_pipe.recv_bytes():
+      for handle in (sys.__stdin__, sys.__stdout__, sys.__stderr__):
+        try:
+          handle.flush()
+        except EnvironmentError:
+          pass
+      os._exit(0)
+
+    # Allow masterpid context managers to run in this case, since we're
+    # explicitly designed for this cleanup.
+    cros_build_lib.MasterPidContextManager.ALTERNATE_MASTER_PID = os.getpid()
+
+    raise RuntimeError("Parent exited uncleanly; forcing cleanup code to run.")
+
+  def _enter(self):
+    self._lock.write_lock()
+    return self
+
+  def _KillWatchdog(self):
+    """Kill the child watchdog cleanly."""
+    if self._watchdog_alive:
+      self._write_pipe.send_bytes('\n')
+      self._lock.unlock()
+      self._lock.close()
+
+  def _exit(self, _exc, _exc_type, _tb):
+    if self._is_child:
+      # All cleanup code that would've run, has ran.
+      # Hard exit to bypass any further code execution.
+      # pylint: disable=W0212
+      os._exit(0)
+    self._KillWatchdog()
diff --git a/lib/commandline.py b/lib/commandline.py
new file mode 100644
index 0000000..6d95514
--- /dev/null
+++ b/lib/commandline.py
@@ -0,0 +1,939 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Purpose of this module is to hold common script/commandline functionality.
+
+This ranges from optparse, to a basic script wrapper setup (much like
+what is used for chromite.bin.*).
+"""
+
+from __future__ import print_function
+
+import argparse
+import collections
+import datetime
+import functools
+import os
+import optparse
+import signal
+import sys
+import urlparse
+
+# TODO(build): sort the cbuildbot.constants/lib.constants issue;
+# lib shouldn't have to import from buildbot like this.
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import path_util
+from chromite.lib import terminal
+from chromite.lib import workspace_lib
+
+
+DEVICE_SCHEME_FILE = 'file'
+DEVICE_SCHEME_SSH = 'ssh'
+DEVICE_SCHEME_USB = 'usb'
+
+
+class ChrootRequiredError(Exception):
+  """Raised when a command must be run in the chroot
+
+  This exception is intended to be caught by code which will restart execution
+  in the chroot. Throwing this exception allows contexts to be exited and
+  general cleanup to happen before we exec an external binary.
+
+  The command to run inside the chroot, and (optionally) special cros_sdk
+  arguments are attached to the exception. Any adjustments to the arguments
+  should be done before raising the exception.
+  """
+  def __init__(self, cmd, chroot_args=None, extra_env=None):
+    """Constructor for ChrootRequiredError.
+
+    Args:
+      cmd: Command line to run inside the chroot as a list of strings.
+      chroot_args: Arguments to pass directly to cros_sdk.
+      extra_env: Environmental variables to set in the chroot.
+    """
+    super(ChrootRequiredError, self).__init__()
+    self.cmd = cmd
+    self.chroot_args = chroot_args
+    self.extra_env = extra_env
+
+
+class ExecRequiredError(Exception):
+  """Raised when a command needs to exec, after cleanup.
+
+  This exception is intended to be caught by code which will exec another
+  command. Throwing this exception allows contexts to be exited and general
+  cleanup to happen before we exec an external binary.
+
+  The command to run is attached to the exception. Any adjustments to the
+  arguments should be done before raising the exception.
+  """
+  def __init__(self, cmd):
+    """Constructor for ExecRequiredError.
+
+    Args:
+      cmd: Command line to run inside the chroot as a list of strings.
+    """
+    super(ExecRequiredError, self).__init__()
+    self.cmd = cmd
+
+
+def AbsolutePath(_option, _opt, value):
+  """Expand paths and make them absolute."""
+  return osutils.ExpandPath(value)
+
+
+def NormalizeGSPath(value):
+  """Normalize GS paths."""
+  url = gs.CanonicalizeURL(value, strict=True)
+  return '%s%s' % (gs.BASE_GS_URL, os.path.normpath(url[len(gs.BASE_GS_URL):]))
+
+
+def NormalizeLocalOrGSPath(value):
+  """Normalize a local or GS path."""
+  ptype = 'gs_path' if gs.PathIsGs(value) else 'path'
+  return VALID_TYPES[ptype](value)
+
+
+def ParseBool(value):
+  """Parse bool argument into a bool value.
+
+  For the existing type=bool functionality, the parser uses the built-in bool(x)
+  function to determine the value.  This function will only return false if x
+  is False or omitted.  Even with this type specified, however, arguments that
+  are generated from a command line initially get parsed as a string, and for
+  any string value passed in to bool(x), it will always return True.
+
+  Args:
+    value: String representing a boolean value.
+
+  Returns:
+    True or False.
+  """
+  return cros_build_lib.BooleanShellValue(value, False)
+
+
+def ParseDate(value):
+  """Parse date argument into a datetime.date object.
+
+  Args:
+    value: String representing a single date in "YYYY-MM-DD" format.
+
+  Returns:
+    A datetime.date object.
+  """
+  try:
+    return datetime.datetime.strptime(value, '%Y-%m-%d').date()
+  except ValueError:
+    # Give a helpful error message about the format expected.  Putting this
+    # message in the exception is useless because argparse ignores the
+    # exception message and just says the value is invalid.
+    logging.error('Date is expected to be in format YYYY-MM-DD.')
+    raise
+
+
+def NormalizeUri(value):
+  """Normalize a local path or URI."""
+  o = urlparse.urlparse(value)
+  if o.scheme == 'file':
+    # Trim off the file:// prefix.
+    return VALID_TYPES['path'](value[7:])
+  elif o.scheme not in ('', 'gs'):
+    o = list(o)
+    o[2] = os.path.normpath(o[2])
+    return urlparse.urlunparse(o)
+  else:
+    return NormalizeLocalOrGSPath(value)
+
+
+# A Device object holds information parsed from the command line input:
+#   scheme: DEVICE_SCHEME_SSH, DEVICE_SCHEME_USB, or DEVICE_SCHEME_FILE.
+#   username: String SSH username or None.
+#   hostname: String SSH hostname or None.
+#   port: Int SSH port or None.
+#   path: String USB/file path or None.
+#   raw: String raw input from the command line.
+# For now this is a superset of all information for USB, SSH, or file devices.
+# If functionality diverges based on type, it may be useful to split this into
+# separate device classes instead.
+Device = cros_build_lib.Collection(
+    'Device', scheme=None, username=None, hostname=None, port=None, path=None,
+    raw=None)
+
+
+class DeviceParser(object):
+  """Parses devices as an argparse argument type.
+
+  In addition to parsing user input, this class will also ensure that only
+  supported device schemes are accepted by the parser. For example,
+  `cros deploy` only makes sense with an SSH device, but `cros flash` can use
+  SSH, USB, or file device schemes.
+
+  If the device input is malformed or the scheme is wrong, an error message will
+  be printed and the program will exit.
+
+  Valid device inputs are:
+    - [ssh://][username@]hostname[:port].
+    - usb://[path].
+    - file://path or /absolute_path.
+    - [ssh://]:vm:.
+
+  The last item above is an alias for ssh'ing into a virtual machine on a
+  localhost.  It gets translated into 'localhost:9222'.
+
+  Usage:
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument(
+      'ssh_device',
+      type=commandline.DeviceParser(commandline.DEVICE_SCHEME_SSH))
+
+    parser.add_argument(
+      'usb_or_file_device',
+      type=commandline.DeviceParser([commandline.DEVICE_SCHEME_USB,
+                                     commandline.DEVICE_SCHEME_FILE]))
+  """
+
+  def __init__(self, schemes):
+    """Initializes the parser.
+
+    See the class comments for usage examples.
+
+    Args:
+      schemes: A scheme or list of schemes to accept.
+    """
+    self.schemes = [schemes] if isinstance(schemes, basestring) else schemes
+    # Provide __name__ for argparse to print on failure, or else it will use
+    # repr() which creates a confusing error message.
+    self.__name__ = type(self).__name__
+
+  def __call__(self, value):
+    """Parses a device input and enforces constraints.
+
+    DeviceParser is an object so that a set of valid schemes can be specified,
+    but argparse expects a parsing function, so we overload __call__() for
+    argparse to use.
+
+    Args:
+      value: String representing a device target. See class comments for
+        valid device input formats.
+
+    Returns:
+      A Device object.
+
+    Raises:
+      ValueError: |value| is not a valid device specifier or doesn't
+        match the supported list of schemes.
+    """
+    try:
+      device = self._ParseDevice(value)
+      self._EnforceConstraints(device, value)
+      return device
+    except ValueError as e:
+      # argparse ignores exception messages, so print the message manually.
+      logging.error(e)
+      raise
+    except Exception as e:
+      logging.error('Internal error while parsing device input: %s', e)
+      raise
+
+  def _EnforceConstraints(self, device, value):
+    """Verifies that user-specified constraints are upheld.
+
+    Checks that the parsed device has a scheme that matches what the user
+    expects. Additional constraints can be added if needed.
+
+    Args:
+      device: Device object.
+      value: String representing a device target.
+
+    Raises:
+      ValueError: |device| has the wrong scheme.
+    """
+    if device.scheme not in self.schemes:
+      raise ValueError('Unsupported scheme "%s" for device "%s"' %
+                       (device.scheme, value))
+
+  def _ParseDevice(self, value):
+    """Parse a device argument.
+
+    Args:
+      value: String representing a device target.
+
+    Returns:
+      A Device object.
+
+    Raises:
+      ValueError: |value| is not a valid device specifier.
+    """
+    # ':vm:' is an alias for ssh'ing into a virtual machine on localhost;
+    # translate it appropriately.
+    if value.strip().lower() == ':vm:':
+      value = 'localhost:9222'
+    elif value.strip().lower() == 'ssh://:vm:':
+      value = 'ssh://localhost:9222'
+    parsed = urlparse.urlparse(value)
+    if not parsed.scheme:
+      # Default to a file scheme for absolute paths, SSH scheme otherwise.
+      if value and value[0] == '/':
+        scheme = DEVICE_SCHEME_FILE
+      else:
+        # urlparse won't provide hostname/username/port unless a scheme is
+        # specified so we need to re-parse.
+        parsed = urlparse.urlparse('%s://%s' % (DEVICE_SCHEME_SSH, value))
+        scheme = DEVICE_SCHEME_SSH
+    else:
+      scheme = parsed.scheme.lower()
+
+    if scheme == DEVICE_SCHEME_SSH:
+      hostname = parsed.hostname
+      port = parsed.port
+      if hostname == 'localhost' and not port:
+        # Use of localhost as the actual machine is uncommon enough relative to
+        # the use of KVM that we require users to specify localhost:22 if they
+        # actually want to connect to the localhost.  Otherwise the expectation
+        # is that they intend to access the VM but forget or didn't know to use
+        # port 9222.
+        raise ValueError('To connect to localhost, use ssh://localhost:22 '
+                         'explicitly, or use ssh://localhost:9222 for the local'
+                         ' VM.')
+      if not hostname:
+        raise ValueError('Hostname is required for device "%s"' % value)
+      return Device(scheme=scheme, username=parsed.username, hostname=hostname,
+                    port=port, raw=value)
+    elif scheme == DEVICE_SCHEME_USB:
+      path = parsed.netloc + parsed.path
+      # Change path '' to None for consistency.
+      return Device(scheme=scheme, path=path if path else None, raw=value)
+    elif scheme == DEVICE_SCHEME_FILE:
+      path = parsed.netloc + parsed.path
+      if not path:
+        raise ValueError('Path is required for "%s"' % value)
+      return Device(scheme=scheme, path=path, raw=value)
+    else:
+      raise ValueError('Unknown device scheme "%s" in "%s"' % (scheme, value))
+
+
+def NormalizeWorkspacePath(path, default_dir=None, extension=None):
+  """Normalize a workspace path.
+
+  Converts |path| into a locator and applies |default_dir| and/or
+  |extension| if specified.
+
+  Args:
+    path: Relative, absolute, or locator path in the CWD workspace.
+    default_dir: If |path| does not contain '/', prepend this
+      directory to the result.
+    extension: If |path| doesn't end in this extension, append this
+      extension to the result.
+
+  Returns:
+    Workspace locator corresponding to the modified |path|.
+
+  Raises:
+    ValueError: |path| isn't in the workspace.
+  """
+  if default_dir and '/' not in path:
+    path = os.path.join(default_dir, path)
+
+  if extension:
+    extension = '.' + extension
+    if os.path.splitext(path)[1] != extension:
+      path += extension
+
+  if workspace_lib.IsLocator(path):
+    return path
+
+  locator = workspace_lib.PathToLocator(path)
+  if not locator:
+    # argparse ignores exception messages; log it as well so the user sees it.
+    error_message = '%s is not in the current workspace.' % path
+    logging.error(error_message)
+    raise ValueError(error_message)
+  return locator
+
+
+def NormalizeBrickPath(path):
+  """Normalize a brick path using some common assumptions.
+
+  Makes the following changes to |path|:
+    1. Put non-paths in //bricks (e.g. foo -> //bricks/foo).
+    2. Convert to a workspace locator.
+
+  Args:
+    path: brick path.
+
+  Returns:
+    Locator to the brick.
+  """
+  return NormalizeWorkspacePath(path, default_dir='//bricks')
+
+
+def NormalizeBspPath(path):
+  """Normalize a BSP path using some common assumptions.
+
+  Makes the following changes to |path|:
+    1. Put non-paths in //bsps (e.g. foo -> //bsps/foo).
+    2. Convert to a workspace locator.
+
+  Args:
+    path: BSP path.
+
+  Returns:
+    Locator to the BSP.
+  """
+  return NormalizeWorkspacePath(path, default_dir='//bsps')
+
+
+def NormalizeBlueprintPath(path):
+  """Normalize a blueprint path using some common assumptions.
+
+  Makes the following changes to |path|:
+    1. Put non-paths in //blueprints (e.g. foo -> //blueprints/foo).
+    2. Add .json if not already present.
+    3. Convert to a workspace locator.
+
+  Args:
+    path: blueprint path.
+
+  Returns:
+    Locator to the blueprint.
+  """
+  return NormalizeWorkspacePath(path, default_dir='//blueprints',
+                                extension='json')
+
+
+VALID_TYPES = {
+    'bool': ParseBool,
+    'date': ParseDate,
+    'path': osutils.ExpandPath,
+    'gs_path': NormalizeGSPath,
+    'local_or_gs_path': NormalizeLocalOrGSPath,
+    'path_or_uri': NormalizeUri,
+    'blueprint_path': NormalizeBlueprintPath,
+    'brick_path': NormalizeBrickPath,
+    'bsp_path': NormalizeBspPath,
+    'workspace_path': NormalizeWorkspacePath,
+}
+
+
+def OptparseWrapCheck(desc, check_f, _option, opt, value):
+  """Optparse adapter for type checking functionality."""
+  try:
+    return check_f(value)
+  except ValueError:
+    raise optparse.OptionValueError(
+        'Invalid %s given: --%s=%s' % (desc, opt, value))
+
+
+class Option(optparse.Option):
+  """Subclass to implement path evaluation & other useful types."""
+
+  _EXTRA_TYPES = ('path', 'gs_path')
+  TYPES = optparse.Option.TYPES + _EXTRA_TYPES
+  TYPE_CHECKER = optparse.Option.TYPE_CHECKER.copy()
+  for t in _EXTRA_TYPES:
+    TYPE_CHECKER[t] = functools.partial(OptparseWrapCheck, t, VALID_TYPES[t])
+
+
+class FilteringOption(Option):
+  """Subclass that supports Option filtering for FilteringOptionParser"""
+
+  def take_action(self, action, dest, opt, value, values, parser):
+    if action in FilteringOption.ACTIONS:
+      Option.take_action(self, action, dest, opt, value, values, parser)
+
+    if value is None:
+      value = []
+    elif not self.nargs or self.nargs <= 1:
+      value = [value]
+
+    parser.AddParsedArg(self, opt, [str(v) for v in value])
+
+
+# TODO: logging.Formatter is not a subclass of object in python
+# 2.6. Make ColoredFormatter explicitly inherit from object so that
+# functions such as super() will not fail. This should be removed
+# after python is upgraded to 2.7 on master2 (crbug.com/409273).
+class ColoredFormatter(logging.Formatter, object):
+  """A logging formatter that can color the messages."""
+
+  _COLOR_MAPPING = {
+      'WARNING': terminal.Color.YELLOW,
+      'ERROR': terminal.Color.RED,
+  }
+
+  def __init__(self, *args, **kwargs):
+    """Initializes the formatter.
+
+    Args:
+      args: See logging.Formatter for specifics.
+      kwargs: See logging.Formatter for specifics.
+      enable_color: Whether to enable colored logging. Defaults
+        to None, where terminal.Color will set to a sane default.
+    """
+    self.color = terminal.Color(enabled=kwargs.pop('enable_color', None))
+    super(ColoredFormatter, self).__init__(*args, **kwargs)
+
+  def format(self, record, **kwargs):
+    """Formats |record| with color."""
+    msg = super(ColoredFormatter, self).format(record, **kwargs)
+    color = self._COLOR_MAPPING.get(record.levelname)
+    return msg if not color else self.color.Color(color, msg)
+
+
+class ChromiteStreamHandler(logging.StreamHandler):
+  """A stream handler for logging."""
+
+
+class BaseParser(object):
+  """Base parser class that includes the logic to add logging controls."""
+
+  DEFAULT_LOG_LEVELS = ('fatal', 'critical', 'error', 'warning', 'notice',
+                        'info', 'debug')
+
+  DEFAULT_LOG_LEVEL = 'info'
+  ALLOW_LOGGING = True
+
+  def __init__(self, **kwargs):
+    """Initialize this parser instance.
+
+    kwargs:
+      logging: Defaults to ALLOW_LOGGING from the class; if given,
+        add --log-level.
+      default_log_level: If logging is enabled, override the default logging
+        level.  Defaults to the class's DEFAULT_LOG_LEVEL value.
+      log_levels: If logging is enabled, this overrides the enumeration of
+        allowed logging levels.  If not given, defaults to the class's
+        DEFAULT_LOG_LEVELS value.
+      manual_debug: If logging is enabled and this is True, suppress addition
+        of a --debug alias.  This option defaults to True unless 'debug' has
+        been exempted from the allowed logging level targets.
+      caching: If given, must be either a callable that discerns the cache
+        location if it wasn't specified (the prototype must be akin to
+        lambda parser, values:calculated_cache_dir_path; it may return None to
+        indicate that it handles setting the value on its own later in the
+        parsing including setting the env), or True; if True, the
+        machinery defaults to invoking the class's FindCacheDir method
+        (which can be overridden).  FindCacheDir $CROS_CACHEDIR, falling
+        back to $REPO/.cache, finally falling back to $TMP.
+        Note that the cache_dir is not created, just discerned where it
+        should live.
+        If False, or caching is not given, then no --cache-dir option will be
+        added.
+    """
+    self.debug_enabled = False
+    self.caching_group = None
+    self.debug_group = None
+    self.default_log_level = None
+    self.log_levels = None
+    self.logging_enabled = kwargs.get('logging', self.ALLOW_LOGGING)
+    self.default_log_level = kwargs.get('default_log_level',
+                                        self.DEFAULT_LOG_LEVEL)
+    self.log_levels = tuple(x.lower() for x in
+                            kwargs.get('log_levels', self.DEFAULT_LOG_LEVELS))
+    self.debug_enabled = (not kwargs.get('manual_debug', False)
+                          and 'debug' in self.log_levels)
+    self.caching = kwargs.get('caching', False)
+
+  @staticmethod
+  def PopUsedArgs(kwarg_dict):
+    """Removes keys used by the base parser from the kwarg namespace."""
+    parser_keys = ['logging', 'default_log_level', 'log_levels', 'manual_debug',
+                   'caching']
+    for key in parser_keys:
+      kwarg_dict.pop(key, None)
+
+  def SetupOptions(self):
+    """Sets up special chromite options for an OptionParser."""
+    if self.logging_enabled:
+      self.debug_group = self.add_option_group('Debug options')
+      self.add_option_to_group(
+          self.debug_group, '--log-level', choices=self.log_levels,
+          default=self.default_log_level,
+          help='Set logging level to report at.')
+      self.add_option_to_group(
+          self.debug_group, '--log_format', action='store',
+          default=constants.LOGGER_FMT,
+          help='Set logging format to use.')
+      if self.debug_enabled:
+        self.add_option_to_group(
+            self.debug_group, '--debug', action='store_const', const='debug',
+            dest='log_level', help='Alias for `--log-level=debug`. '
+            'Useful for debugging bugs/failures.')
+      self.add_option_to_group(
+          self.debug_group, '--nocolor', action='store_false', dest='color',
+          default=None,
+          help='Do not use colorized output (or `export NOCOLOR=true`)')
+
+    if self.caching:
+      self.caching_group = self.add_option_group('Caching Options')
+      self.add_option_to_group(
+          self.caching_group, '--cache-dir', default=None, type='path',
+          help='Override the calculated chromeos cache directory; '
+          "typically defaults to '$REPO/.cache' .")
+
+  def SetupLogging(self, opts):
+    """Sets up logging based on |opts|."""
+    value = opts.log_level.upper()
+    logger = logging.getLogger()
+    logger.setLevel(getattr(logging, value))
+    formatter = ColoredFormatter(fmt=opts.log_format,
+                                 datefmt=constants.LOGGER_DATE_FMT,
+                                 enable_color=opts.color)
+
+    # Only set colored formatter for ChromiteStreamHandler instances,
+    # which could have been added by ScriptWrapperMain() below.
+    chromite_handlers = [x for x in logger.handlers if
+                         isinstance(x, ChromiteStreamHandler)]
+    for handler in chromite_handlers:
+      handler.setFormatter(formatter)
+
+    return value
+
+  def DoPostParseSetup(self, opts, args):
+    """Method called to handle post opts/args setup.
+
+    This can be anything from logging setup to positional arg count validation.
+
+    Args:
+      opts: optparse.Values or argparse.Namespace instance
+      args: position arguments unconsumed from parsing.
+
+    Returns:
+      (opts, args), w/ whatever modification done.
+    """
+    if self.logging_enabled:
+      value = self.SetupLogging(opts)
+      if self.debug_enabled:
+        opts.debug = (value == 'DEBUG')
+
+    if self.caching:
+      path = os.environ.get(constants.SHARED_CACHE_ENVVAR)
+      if path is not None and opts.cache_dir is None:
+        opts.cache_dir = os.path.abspath(path)
+
+      opts.cache_dir_specified = opts.cache_dir is not None
+      if not opts.cache_dir_specified:
+        func = self.FindCacheDir if not callable(self.caching) else self.caching
+        opts.cache_dir = func(self, opts)
+      if opts.cache_dir is not None:
+        self.ConfigureCacheDir(opts.cache_dir)
+
+    return opts, args
+
+  @staticmethod
+  def ConfigureCacheDir(cache_dir):
+    if cache_dir is None:
+      os.environ.pop(constants.SHARED_CACHE_ENVVAR, None)
+      logging.debug('Removed cache_dir setting')
+    else:
+      os.environ[constants.SHARED_CACHE_ENVVAR] = cache_dir
+      logging.debug('Configured cache_dir to %r', cache_dir)
+
+  @classmethod
+  def FindCacheDir(cls, _parser, _opts):
+    """Returns the default cache directory as computed by path_util."""
+    logging.debug('Cache dir lookup.')
+    return path_util.FindCacheDir()
+
+  def add_option_group(self, *args, **kwargs):
+    """Returns a new option group see optparse.OptionParser.add_option_group."""
+    # Abstract hook: concrete parsers map this onto their own group APIs
+    # (ArgumentParser overrides it; FilteringParser inherits optparse's).
+    raise NotImplementedError('Subclass must override this method')
+
+  @staticmethod
+  def add_option_to_group(group, *args, **kwargs):
+    """Adds the given option defined by args and kwargs to group."""
+    # Optparse flavor; ArgumentParser overrides this to call add_argument.
+    group.add_option(*args, **kwargs)
+
+
+class ArgumentNamespace(argparse.Namespace):
+  """Class to mimic argparse.Namespace with value freezing support."""
+  # Python 2-style metaclass hookup; FrozenAttributesClass supplies the
+  # Freeze() machinery that rejects attribute writes once frozen.
+  __metaclass__ = cros_build_lib.FrozenAttributesClass
+  _FROZEN_ERR_MSG = 'Option values are frozen, cannot alter %s.'
+
+
+# Note that because optparse.Values is not a new-style class this class
+# must use the mixin FrozenAttributesMixin rather than the metaclass
+# FrozenAttributesClass.
+class OptionValues(cros_build_lib.FrozenAttributesMixin, optparse.Values):
+  """Class to mimic optparse.Values with value freezing support."""
+  _FROZEN_ERR_MSG = 'Option values are frozen, cannot alter %s.'
+
+  def __init__(self, defaults, *args, **kwargs):
+    # Initialize the mixin first so freezing state exists before any
+    # attribute assignment below.
+    cros_build_lib.FrozenAttributesMixin.__init__(self)
+    optparse.Values.__init__(self, defaults, *args, **kwargs)
+
+    # Used by FilteringParser; replaced with a list of PassedOption records
+    # when parse_args() runs.
+    self.parsed_args = None
+
+
+# Record of one parsed option occurrence: the optparse.Option instance, the
+# option string as typed, and the list of raw value strings.
+PassedOption = collections.namedtuple(
+    'PassedOption', ['opt_inst', 'opt_str', 'value_str'])
+
+
+class FilteringParser(optparse.OptionParser, BaseParser):
+  """Custom option parser for filtering options.
+
+  Aside from adding a couple of types (path for absolute paths,
+  gs_path for google storage urls, and log_level for logging level control),
+  this additionally exposes logging control by default; if undesired,
+  either derive from this class setting ALLOW_LOGGING to False, or
+  pass in logging=False to the constructor.
+  """
+
+  # Option class used when callers do not supply their own; FilteringOption
+  # is expected to report each occurrence via AddParsedArg() below.
+  DEFAULT_OPTION_CLASS = FilteringOption
+
+  def __init__(self, usage=None, **kwargs):
+    # BaseParser consumes its own keywords first; PopUsedArgs then strips
+    # them so optparse.OptionParser does not see unknown kwargs.
+    BaseParser.__init__(self, **kwargs)
+    self.PopUsedArgs(kwargs)
+    kwargs.setdefault('option_class', self.DEFAULT_OPTION_CLASS)
+    optparse.OptionParser.__init__(self, usage=usage, **kwargs)
+    self.SetupOptions()
+
+  def parse_args(self, args=None, values=None):
+    # If no Values object is specified then use our custom OptionValues.
+    if values is None:
+      values = OptionValues(defaults=self.defaults)
+
+    # Record of every option occurrence seen; filled via AddParsedArg().
+    values.parsed_args = []
+
+    opts, remaining = optparse.OptionParser.parse_args(
+        self, args=args, values=values)
+    return self.DoPostParseSetup(opts, remaining)
+
+  def AddParsedArg(self, opt_inst, opt_str, value_str):
+    """Add a parsed argument with attributes.
+
+    Args:
+      opt_inst: An instance of a raw optparse.Option object that represents the
+                option.
+      opt_str: The option string.
+      value_str: A list of string-ified values identified by OptParse.
+    """
+    self.values.parsed_args.append(PassedOption(opt_inst, opt_str, value_str))
+
+  @staticmethod
+  def FilterArgs(parsed_args, filter_fn):
+    """Filter the argument by passing it through a function.
+
+    Args:
+      parsed_args: The list of parsed argument namedtuples to filter.  Tuples
+        are of the form (opt_inst, opt_str, value_str).
+      filter_fn: A function with signature f(PassedOption), and returns True if
+        the argument is to be passed through.  False if not.
+
+    Returns:
+      A tuple containing two lists - one of accepted arguments and one of
+      removed arguments.
+    """
+    removed = []
+    accepted = []
+    for arg in parsed_args:
+      # Keep the option string and its values together in whichever bucket
+      # the predicate assigns.
+      target = accepted if filter_fn(arg) else removed
+      target.append(arg.opt_str)
+      target.extend(arg.value_str)
+
+    return accepted, removed
+
+
+class SharedParser(argparse.ArgumentParser):
+  """A type of parser that may be used as a shared parent for subparsers."""
+
+  def __init__(self, **kwargs):
+    kwargs.setdefault('add_help', False)
+    argparse.ArgumentParser.__init__(self, **kwargs)
+
+
+class ArgumentParser(BaseParser, argparse.ArgumentParser):
+  """Custom argument parser for use by chromite.
+
+  This class additionally exposes logging control by default; if undesired,
+  either derive from this class setting ALLOW_LOGGING to False, or
+  pass in logging=False to the constructor.
+  """
+
+  def __init__(self, usage=None, **kwargs):
+    kwargs.setdefault('formatter_class', argparse.RawDescriptionHelpFormatter)
+    # BaseParser consumes its keywords; PopUsedArgs then strips them so
+    # argparse.ArgumentParser does not see unknown kwargs.
+    BaseParser.__init__(self, **kwargs)
+    self.PopUsedArgs(kwargs)
+    argparse.ArgumentParser.__init__(self, usage=usage, **kwargs)
+    self._SetupTypes()
+    self.SetupOptions()
+
+  def _SetupTypes(self):
+    """Register types with ArgumentParser."""
+    # VALID_TYPES maps type names (e.g. 'path', 'gs_path') to checker
+    # callables; registering lets add_argument take type='name' strings.
+    for t, check_f in VALID_TYPES.iteritems():
+      self.register('type', t, check_f)
+
+  def add_option_group(self, *args, **kwargs):
+    """Return an argument group rather than an option group."""
+    return self.add_argument_group(*args, **kwargs)
+
+  @staticmethod
+  def add_option_to_group(group, *args, **kwargs):
+    """Adds an argument rather than an option to the given group."""
+    return group.add_argument(*args, **kwargs)
+
+  def parse_args(self, args=None, namespace=None):
+    """Translates OptionParser call to equivalent ArgumentParser call."""
+    # If no Namespace object is specified then use our custom ArgumentNamespace.
+    if namespace is None:
+      namespace = ArgumentNamespace()
+
+    # Unlike OptionParser, ArgParser works only with a single namespace and no
+    # args. Re-use BaseParser DoPostParseSetup but only take the namespace.
+    namespace = argparse.ArgumentParser.parse_args(
+        self, args=args, namespace=namespace)
+    return self.DoPostParseSetup(namespace, None)[0]
+
+
+class _ShutDownException(SystemExit):
+  """Exception raised when user hits CTRL+C."""
+
+  def __init__(self, sig_num, message):
+    # Signal number that triggered the shutdown.
+    self.signal = sig_num
+    # Setup a usage message primarily for any code that may intercept it
+    # while this exception is crashing back up the stack to us.
+    SystemExit.__init__(self, message)
+    # SystemExit.__init__ set self.args to (message,); restore both fields
+    # so the exception round-trips through pickle (see the unittest).
+    self.args = (sig_num, message)
+
+
+def _DefaultHandler(signum, _frame):
+  # Don't double process sigterms; just trigger shutdown from the first
+  # exception.
+  signal.signal(signum, signal.SIG_IGN)
+  raise _ShutDownException(
+      signum, 'Received signal %i; shutting down' % (signum,))
+
+
+def _RestartInChroot(cmd, chroot_args, extra_env):
+  """Rerun inside the chroot.
+
+  Args:
+    cmd: Command line to run inside the chroot as a list of strings.
+    chroot_args: Arguments to pass directly to cros_sdk (or None).
+    extra_env: Dictionary of environmental variables to set inside the
+        chroot (or None).
+  """
+  return cros_build_lib.RunCommand(cmd, error_code_ok=True,
+                                   enter_chroot=True, chroot_args=chroot_args,
+                                   extra_env=extra_env,
+                                   cwd=constants.SOURCE_ROOT,
+                                   mute_output=False).returncode
+
+
+def RunInsideChroot(command, chroot_args=None):
+  """Restart the current command inside the chroot.
+
+  This method is only valid for any code that is run via ScriptWrapperMain.
+  It allows proper cleanup of the local context by raising an exception handled
+  in ScriptWrapperMain.
+
+  Args:
+    command: An instance of CliCommand to be restarted inside the chroot.
+    chroot_args: List of command-line arguments to pass to cros_sdk, if invoked.
+  """
+  if cros_build_lib.IsInsideChroot():
+    return
+
+  # Produce the command line to execute inside the chroot.
+  argv = sys.argv[:]
+  argv[0] = path_util.ToChrootPath(argv[0])
+
+  # Set log-level of cros_sdk to be same as log-level of command entering the
+  # chroot.
+  if chroot_args is None:
+    chroot_args = []
+  chroot_args += ['--log-level', command.options.log_level]
+
+  raise ChrootRequiredError(argv, chroot_args)
+
+
+def ReExec():
+  """Restart the current command.
+
+  This method is only valid for any code that is run via ScriptWrapperMain.
+  It allows proper cleanup of the local context by raising an exception handled
+  in ScriptWrapperMain.
+
+  Raises:
+    ExecRequiredError: Always; carries a copy of the current sys.argv.
+  """
+  # The command to exec.
+  raise ExecRequiredError(sys.argv[:])
+
+
+def ScriptWrapperMain(find_target_func, argv=None,
+                      log_level=logging.DEBUG,
+                      log_format=constants.LOGGER_FMT):
+  """Function usable for chromite.script.* style wrapping.
+
+  Note that this function invokes sys.exit on the way out by default.
+
+  Args:
+    find_target_func: a function, which, when given the absolute
+      pathway the script was invoked via (for example,
+      /home/ferringb/cros/trunk/chromite/bin/cros_sdk; note that any
+      trailing .py from the path name will be removed),
+      will return the main function to invoke (that functor will take
+      a single arg- a list of arguments, and shall return either None
+      or an integer, to indicate the exit code).
+    argv: sys.argv, or an equivalent tuple for testing.  If nothing is
+      given, sys.argv is defaulted to.
+    log_level: Default logging level to start at.
+    log_format: Default logging format to use.
+  """
+  if argv is None:
+    argv = sys.argv[:]
+  target = os.path.abspath(argv[0])
+  name = os.path.basename(target)
+  # Strip a trailing .py so scripts and their wrapper symlinks resolve to
+  # the same lookup key.
+  if target.endswith('.py'):
+    target = os.path.splitext(target)[0]
+  target = find_target_func(target)
+  if target is None:
+    print('Internal error detected- no main functor found in module %r.' %
+          (name,), file=sys.stderr)
+    sys.exit(100)
+
+  # Set up basic logging information for all modules that use logging.
+  # Note a script target may setup default logging in its module namespace
+  # which will take precedence over this.
+  logger = logging.getLogger()
+  logger.setLevel(log_level)
+  logger_handler = ChromiteStreamHandler()
+  logger_handler.setFormatter(
+      logging.Formatter(fmt=log_format, datefmt=constants.LOGGER_DATE_FMT))
+  logger.addHandler(logger_handler)
+
+  # Convert SIGTERM into a _ShutDownException so cleanup below still runs.
+  signal.signal(signal.SIGTERM, _DefaultHandler)
+
+  ret = 1
+  try:
+    ret = target(argv[1:])
+  except _ShutDownException as e:
+    sys.stdout.flush()
+    print('%s: Signaled to shutdown: caught %i signal.' % (name, e.signal),
+          file=sys.stderr)
+    sys.stderr.flush()
+  except SystemExit as e:
+    # Right now, let this crash through- longer term, we'll update the scripts
+    # in question to not use sys.exit, and make this into a flagged error.
+    raise
+  except ChrootRequiredError as e:
+    # Raised by RunInsideChroot(); re-run the command via cros_sdk.
+    ret = _RestartInChroot(e.cmd, e.chroot_args, e.extra_env)
+  except ExecRequiredError as e:
+    # Raised by ReExec(); flush logs before replacing the process image.
+    logging.shutdown()
+    # This does not return.
+    os.execv(e.cmd[0], e.cmd)
+  except Exception as e:
+    sys.stdout.flush()
+    print('%s: Unhandled exception:' % (name,), file=sys.stderr)
+    sys.stderr.flush()
+    raise
+  finally:
+    logging.shutdown()
+
+  if ret is None:
+    ret = 0
+  sys.exit(ret)
diff --git a/lib/commandline_unittest b/lib/commandline_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/commandline_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/commandline_unittest.py b/lib/commandline_unittest.py
new file mode 100644
index 0000000..5333676
--- /dev/null
+++ b/lib/commandline_unittest.py
@@ -0,0 +1,566 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the commandline module."""
+
+from __future__ import print_function
+
+import argparse
+import cPickle
+import signal
+import os
+import sys
+
+from chromite.cli import command
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import gs
+from chromite.lib import path_util
+
+
+# pylint: disable=protected-access
+
+
+class TestShutDownException(cros_test_lib.TestCase):
+  """Test that ShutDownException can be pickled."""
+
+  def testShutDownException(self):
+    """Test that ShutDownException can be pickled."""
+    ex = commandline._ShutDownException(signal.SIGTERM, 'Received SIGTERM')
+    ex2 = cPickle.loads(cPickle.dumps(ex))
+    self.assertEqual(ex.signal, ex2.signal)
+    self.assertEqual(ex.message, ex2.message)
+
+
+class GSPathTest(cros_test_lib.OutputTestCase):
+  """Test type=gs_path normalization functionality."""
+
+  GS_REL_PATH = 'bucket/path/to/artifacts'
+
+  @staticmethod
+  def _ParseCommandLine(argv):
+    """Parse |argv| with a parser that has a single gs_path option."""
+    parser = commandline.ArgumentParser()
+    parser.add_argument('-g', '--gs-path', type='gs_path',
+                        help='GS path that contains the chrome to deploy.')
+    return parser.parse_args(argv)
+
+  def _RunGSPathTestCase(self, raw, parsed):
+    """Assert that parsing --gs-path |raw| yields the normalized |parsed|."""
+    options = self._ParseCommandLine(['--gs-path', raw])
+    self.assertEquals(options.gs_path, parsed)
+
+  def testNoGSPathCorrectionNeeded(self):
+    """Test case where GS path correction is not needed."""
+    gs_path = '%s/%s' % (gs.BASE_GS_URL, self.GS_REL_PATH)
+    self._RunGSPathTestCase(gs_path, gs_path)
+
+  def testTrailingSlashRemoval(self):
+    """Test case where GS path ends with /."""
+    gs_path = '%s/%s/' % (gs.BASE_GS_URL, self.GS_REL_PATH)
+    self._RunGSPathTestCase(gs_path, gs_path.rstrip('/'))
+
+  def testDuplicateSlashesRemoved(self):
+    """Test case where GS path contains many / in a row."""
+    self._RunGSPathTestCase(
+        '%s/a/dir/with//////////slashes' % gs.BASE_GS_URL,
+        '%s/a/dir/with/slashes' % gs.BASE_GS_URL)
+
+  def testRelativePathsRemoved(self):
+    """Test case where GS path contain /../ logic."""
+    self._RunGSPathTestCase(
+        '%s/a/dir/up/here/.././../now/down/there' % gs.BASE_GS_URL,
+        '%s/a/dir/now/down/there' % gs.BASE_GS_URL)
+
+  def testCorrectionNeeded(self):
+    """Test case where GS path correction is needed."""
+    # An https URL to the private mirror should normalize to a gs:// URL.
+    self._RunGSPathTestCase(
+        '%s/%s/' % (gs.PRIVATE_BASE_HTTPS_URL, self.GS_REL_PATH),
+        '%s/%s' % (gs.BASE_GS_URL, self.GS_REL_PATH))
+
+  def testInvalidPath(self):
+    """Path cannot be normalized."""
+    with self.OutputCapturer():
+      # argparse exits with code 2 on invalid argument values.
+      self.assertRaises2(
+          SystemExit, self._RunGSPathTestCase, 'http://badhost.com/path', '',
+          check_attrs={'code': 2})
+
+
+class BoolTest(cros_test_lib.TestCase):
+  """Test type='bool' functionality."""
+
+  @staticmethod
+  def _ParseCommandLine(argv):
+    parser = commandline.ArgumentParser()
+    parser.add_argument('-e', '--enable', type='bool',
+                        help='Boolean Argument.')
+    return parser.parse_args(argv)
+
+  def _RunBoolTestCase(self, enable, expected):
+    options = self._ParseCommandLine(['--enable', enable])
+    self.assertEquals(options.enable, expected)
+
+  def testBoolTrue(self):
+    """Test case setting the value to true."""
+    self._RunBoolTestCase('True', True)
+    self._RunBoolTestCase('1', True)
+    self._RunBoolTestCase('true', True)
+    self._RunBoolTestCase('yes', True)
+    self._RunBoolTestCase('TrUe', True)
+
+  def testBoolFalse(self):
+    """Test case setting the value to false."""
+    self._RunBoolTestCase('False', False)
+    self._RunBoolTestCase('0', False)
+    self._RunBoolTestCase('false', False)
+    self._RunBoolTestCase('no', False)
+    self._RunBoolTestCase('FaLse', False)
+
+
+class DeviceParseTest(cros_test_lib.OutputTestCase):
+  """Test device parsing functionality."""
+
+  # Every scheme DeviceParser understands; default for _CheckDeviceParseFails.
+  _ALL_SCHEMES = (commandline.DEVICE_SCHEME_FILE,
+                  commandline.DEVICE_SCHEME_SSH,
+                  commandline.DEVICE_SCHEME_USB)
+
+  def _CheckDeviceParse(self, device_input, scheme, username=None,
+                        hostname=None, port=None, path=None):
+    """Checks that parsing a device input gives the expected result.
+
+    Args:
+      device_input: String input specifying a device.
+      scheme: String expected scheme.
+      username: String expected username or None.
+      hostname: String expected hostname or None.
+      port: Int expected port or None.
+      path: String expected path or None.
+    """
+    parser = commandline.ArgumentParser()
+    parser.add_argument('device', type=commandline.DeviceParser(scheme))
+    device = parser.parse_args([device_input]).device
+    self.assertEqual(device.scheme, scheme)
+    self.assertEqual(device.username, username)
+    self.assertEqual(device.hostname, hostname)
+    self.assertEqual(device.port, port)
+    self.assertEqual(device.path, path)
+
+  def _CheckDeviceParseFails(self, device_input, schemes=_ALL_SCHEMES):
+    """Checks that parsing a device input fails.
+
+    Args:
+      device_input: String input specifying a device.
+      schemes: A scheme or list of allowed schemes, by default allows all.
+    """
+    parser = commandline.ArgumentParser()
+    parser.add_argument('device', type=commandline.DeviceParser(schemes))
+    with self.OutputCapturer():
+      self.assertRaises2(SystemExit, parser.parse_args, [device_input])
+
+  def testNoDevice(self):
+    """Verify that an empty device specification fails."""
+    self._CheckDeviceParseFails('')
+
+  def testSshScheme(self):
+    """Verify that SSH scheme-only device specification fails."""
+    self._CheckDeviceParseFails('ssh://')
+
+  def testSshHostname(self):
+    """Test SSH hostname-only device specification."""
+    self._CheckDeviceParse('192.168.1.200',
+                           scheme=commandline.DEVICE_SCHEME_SSH,
+                           hostname='192.168.1.200')
+
+  def testSshUsernameHostname(self):
+    """Test SSH username and hostname device specification."""
+    self._CheckDeviceParse('me@foo_host',
+                           scheme=commandline.DEVICE_SCHEME_SSH,
+                           username='me',
+                           hostname='foo_host')
+
+  def testSshUsernameHostnamePort(self):
+    """Test SSH username, hostname, and port device specification."""
+    self._CheckDeviceParse('me@foo_host:4500',
+                           scheme=commandline.DEVICE_SCHEME_SSH,
+                           username='me',
+                           hostname='foo_host',
+                           port=4500)
+
+  def testSshSchemeUsernameHostnamePort(self):
+    """Test SSH scheme, username, hostname, and port device specification."""
+    self._CheckDeviceParse('ssh://me@foo_host:4500',
+                           scheme=commandline.DEVICE_SCHEME_SSH,
+                           username='me',
+                           hostname='foo_host',
+                           port=4500)
+
+  def testUsbScheme(self):
+    """Test USB scheme-only device specification."""
+    self._CheckDeviceParse('usb://', scheme=commandline.DEVICE_SCHEME_USB)
+
+  def testUsbSchemePath(self):
+    """Test USB scheme and path device specification."""
+    self._CheckDeviceParse('usb://path/to/my/device',
+                           scheme=commandline.DEVICE_SCHEME_USB,
+                           path='path/to/my/device')
+
+  def testFileScheme(self):
+    """Verify that file scheme-only device specification fails."""
+    self._CheckDeviceParseFails('file://')
+
+  def testFileSchemePath(self):
+    """Test file scheme and path device specification."""
+    self._CheckDeviceParse('file://foo/bar',
+                           scheme=commandline.DEVICE_SCHEME_FILE,
+                           path='foo/bar')
+
+  def testAbsolutePath(self):
+    """Verify that an absolute path defaults to file scheme."""
+    self._CheckDeviceParse('/path/to/my/device',
+                           scheme=commandline.DEVICE_SCHEME_FILE,
+                           path='/path/to/my/device')
+
+  def testUnsupportedScheme(self):
+    """Verify that an unsupported scheme fails."""
+    self._CheckDeviceParseFails('ssh://192.168.1.200',
+                                schemes=commandline.DEVICE_SCHEME_USB)
+    self._CheckDeviceParseFails('usb://path/to/my/device',
+                                schemes=[commandline.DEVICE_SCHEME_SSH,
+                                         commandline.DEVICE_SCHEME_FILE])
+
+  def testUnknownScheme(self):
+    """Verify that an unknown scheme fails."""
+    self._CheckDeviceParseFails('ftp://192.168.1.200')
+
+  def testSchemeCaseInsensitive(self):
+    """Verify that schemes are case-insensitive."""
+    self._CheckDeviceParse('SSH://foo_host',
+                           scheme=commandline.DEVICE_SCHEME_SSH,
+                           hostname='foo_host')
+
+
+class NormalizeWorkspacePathTest(cros_test_lib.WorkspaceTestCase):
+  """Tests for NormalizeWorkspacePath() and associated functions."""
+
+  def setUp(self):
+    self.CreateWorkspace()
+    # By default set the CWD to be the workspace directory.
+    self.cwd_mock = self.PatchObject(os, 'getcwd')
+    self.cwd_mock.return_value = self.workspace_path
+
+  def _VerifyNormalized(self, path, expected, **kwargs):
+    """Verifies tests on NormalizeWorkspacePath().
+
+    Args:
+      path: Input path to test.
+      expected: Expected output.
+      kwargs: Keyword args for NormalizeWorkspacePath().
+    """
+    self.assertEqual(expected,
+                     commandline.NormalizeWorkspacePath(path, **kwargs))
+
+
+  def testLocatorConversion(self):
+    """Tests NormalizeWorkspacePath() conversion to a locator."""
+    # Relative paths.
+    self._VerifyNormalized('a', '//a')
+    self._VerifyNormalized('a/b', '//a/b')
+
+    # Absolute paths.
+    self._VerifyNormalized(os.path.join(self.workspace_path, 'a'), '//a')
+    self._VerifyNormalized(os.path.join(self.workspace_path, 'a', 'b'), '//a/b')
+
+    # Locators should be unchanged.
+    self._VerifyNormalized('//a', '//a')
+    self._VerifyNormalized('//a/b', '//a/b')
+
+    # Paths outside the workspace should fail.
+    for path in ('/', '..'):
+      with self.assertRaises(ValueError):
+        commandline.NormalizeWorkspacePath(path)
+
+  def testDefaultDir(self):
+    """Tests the default_dir parameter."""
+    # A bare name lands in default_dir; any path with a separator does not.
+    self._VerifyNormalized('a', '//default/a', default_dir='//default')
+    self._VerifyNormalized('a/b', '//a/b', default_dir='//default')
+    self._VerifyNormalized('./a', '//a', default_dir='//default')
+
+  def testExtension(self):
+    """Tests the extension parameter."""
+    # The extension is appended unless the path already ends with it.
+    self._VerifyNormalized('a', '//a.txt', extension='txt')
+    self._VerifyNormalized('a.bin', '//a.bin.txt', extension='txt')
+    self._VerifyNormalized('a.txt', '//a.txt', extension='txt')
+
+  def testSpecificPaths(self):
+    """Tests normalizing brick/BSP/blueprint paths."""
+    self.assertEqual('//bricks/a', commandline.NormalizeBrickPath('a'))
+    self.assertEqual('//bsps/a', commandline.NormalizeBspPath('a'))
+    self.assertEqual('//blueprints/a.json',
+                     commandline.NormalizeBlueprintPath('a'))
+
+  def testParser(self):
+    """Tests adding these types to a parser."""
+    parser = commandline.ArgumentParser()
+    parser.add_argument('path', type='workspace_path')
+    parser.add_argument('brick', type='brick_path')
+    parser.add_argument('bsp', type='bsp_path')
+    parser.add_argument('blueprint', type='blueprint_path')
+
+    options = parser.parse_args(['my_path', 'my_brick', 'my_bsp',
+                                 'my_blueprint'])
+    self.assertEqual('//my_path', options.path)
+    self.assertEqual('//bricks/my_brick', options.brick)
+    self.assertEqual('//bsps/my_bsp', options.bsp)
+    self.assertEqual('//blueprints/my_blueprint.json', options.blueprint)
+
+
+class CacheTest(cros_test_lib.MockTempDirTestCase):
+  """Test cache dir default / override functionality."""
+
+  CACHE_DIR = '/fake/cache/dir'
+
+  def setUp(self):
+    # Stub out ConfigureCacheDir so the tests can inspect the directory the
+    # parser computes without touching the process environment.
+    self.PatchObject(commandline.ArgumentParser, 'ConfigureCacheDir')
+    dir_struct = [
+        'repo/.repo/',
+    ]
+    cros_test_lib.CreateOnDiskHierarchy(self.tempdir, dir_struct)
+    self.repo_root = os.path.join(self.tempdir, 'repo')
+    self.cwd_mock = self.PatchObject(os, 'getcwd')
+    self.parser = commandline.ArgumentParser(caching=True)
+
+  def _CheckCall(self, cwd_retval, args_to_parse, expected, assert_func):
+    """Parse |args_to_parse| with cwd |cwd_retval| and verify the cache dir.
+
+    Args:
+      cwd_retval: Value os.getcwd should report while parsing.
+      args_to_parse: Argument list handed to the parser.
+      expected: Expected cache directory (or prefix thereof).
+      assert_func: Assertion used to compare the actual value to |expected|.
+    """
+    # pylint: disable=E1101
+    self.cwd_mock.return_value = cwd_retval
+    self.parser.parse_args(args_to_parse)
+    cache_dir_mock = self.parser.ConfigureCacheDir
+    self.assertEquals(1, cache_dir_mock.call_count)
+    assert_func(cache_dir_mock.call_args[0][0], expected)
+
+  def testRepoRootNoOverride(self):
+    """Test default cache location when in a repo checkout."""
+    self._CheckCall(self.repo_root, [], self.repo_root, self.assertStartsWith)
+
+  def testRepoRootWithOverride(self):
+    """User provided cache location overrides repo checkout default."""
+    self._CheckCall(self.repo_root, ['--cache-dir', self.CACHE_DIR],
+                    self.CACHE_DIR, self.assertEquals)
+
+
+class ParseArgsTest(cros_test_lib.TestCase):
+  """Test parse_args behavior of our custom argument parsing classes."""
+
+  def _CreateOptionParser(self, cls):
+    """Create a class of optparse.OptionParser with prepared config.
+
+    Args:
+      cls: Some subclass of optparse.OptionParser.
+
+    Returns:
+      The created OptionParser object.
+    """
+    usage = 'usage: some usage'
+    parser = cls(usage=usage)
+
+    # Add some options.
+    parser.add_option('-x', '--xxx', action='store_true', default=False,
+                      help='Gimme an X')
+    parser.add_option('-y', '--yyy', action='store_true', default=False,
+                      help='Gimme a Y')
+    parser.add_option('-a', '--aaa', type='string', default='Allan',
+                      help='Gimme an A')
+    parser.add_option('-b', '--bbb', type='string', default='Barry',
+                      help='Gimme a B')
+    parser.add_option('-c', '--ccc', type='string', default='Connor',
+                      help='Gimme a C')
+
+    return parser
+
+  def _CreateArgumentParser(self, cls):
+    """Create a class of argparse.ArgumentParser with prepared config.
+
+    Args:
+      cls: Some subclass of argparse.ArgumentParser.
+
+    Returns:
+      The created ArgumentParser object.
+    """
+    usage = 'usage: some usage'
+    parser = cls(usage=usage)
+
+    # Add some options.
+    parser.add_argument('-x', '--xxx', action='store_true', default=False,
+                        help='Gimme an X')
+    parser.add_argument('-y', '--yyy', action='store_true', default=False,
+                        help='Gimme a Y')
+    parser.add_argument('-a', '--aaa', type=str, default='Allan',
+                        help='Gimme an A')
+    parser.add_argument('-b', '--bbb', type=str, default='Barry',
+                        help='Gimme a B')
+    parser.add_argument('-c', '--ccc', type=str, default='Connor',
+                        help='Gimme a C')
+    parser.add_argument('args', type=str, nargs='*', help='args')
+
+    return parser
+
+  def _TestParser(self, parser):
+    """Test the given parser with a prepared argv."""
+    argv = ['-x', '--bbb', 'Bobby', '-c', 'Connor', 'foobar']
+
+    parsed = parser.parse_args(argv)
+
+    if isinstance(parser, commandline.FilteringParser):
+      # optparse returns options and args separately.
+      options, args = parsed
+      self.assertEquals(['foobar'], args)
+    else:
+      # argparse returns just options.  Options configured above to have the
+      # args stored at option "args".
+      options = parsed
+      self.assertEquals(['foobar'], parsed.args)
+
+    self.assertTrue(options.xxx)
+    self.assertFalse(options.yyy)
+
+    self.assertEquals('Allan', options.aaa)
+    self.assertEquals('Bobby', options.bbb)
+    self.assertEquals('Connor', options.ccc)
+
+    self.assertRaises(AttributeError, getattr, options, 'xyz')
+
+    # Now try altering option values.
+    options.aaa = 'Arick'
+    self.assertEquals('Arick', options.aaa)
+
+    # Now freeze the options and try altering again.
+    options.Freeze()
+    self.assertRaises(commandline.cros_build_lib.AttributeFrozenError,
+                      setattr, options, 'aaa', 'Arnold')
+    self.assertEquals('Arick', options.aaa)
+
+  def testFilterParser(self):
+    """Run the shared parser checks against a FilteringParser."""
+    self._TestParser(self._CreateOptionParser(commandline.FilteringParser))
+
+  def testArgumentParser(self):
+    """Run the shared parser checks against an ArgumentParser."""
+    self._TestParser(self._CreateArgumentParser(commandline.ArgumentParser))
+
+
+class ScriptWrapperMainTest(cros_test_lib.MockTestCase):
+  """Test the behavior of the ScriptWrapperMain function."""
+
+  def setUp(self):
+    # Stub out sys.exit so the wrapper can't terminate the test run.
+    self.PatchObject(sys, 'exit')
+    # Records the target name our findTarget fakes were asked to resolve.
+    self.lastTargetFound = None
+
+  # argv handed to ScriptWrapperMain, and the command/chroot args we expect
+  # to be forwarded into the chroot re-invocation.
+  SYS_ARGV = ['/cmd', '/cmd', 'arg1', 'arg2']
+  CMD_ARGS = ['/cmd', 'arg1', 'arg2']
+  CHROOT_ARGS = ['--workspace', '/work']
+
+  def testRestartInChrootPreserveArgs(self):
+    """Verify args to ScriptWrapperMain are passed through to chroot."""
+    # Setup Mocks/Fakes
+    rc = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
+    rc.SetDefaultCmdResult()
+
+    def findTarget(target):
+      """ScriptWrapperMain needs a function to find a function to run."""
+      def raiseChrootRequiredError(args):
+        raise commandline.ChrootRequiredError(args)
+
+      self.lastTargetFound = target
+      return raiseChrootRequiredError
+
+    # Run Test
+    commandline.ScriptWrapperMain(findTarget, self.SYS_ARGV)
+
+    # Verify Results
+    rc.assertCommandContains(enter_chroot=True)
+    rc.assertCommandContains(self.CMD_ARGS)
+    self.assertEqual('/cmd', self.lastTargetFound)
+
+  def testRestartInChrootWithChrootArgs(self):
+    """Verify args and chroot args from exception are used."""
+    # Setup Mocks/Fakes
+    rc = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
+    rc.SetDefaultCmdResult()
+
+    def findTarget(_):
+      """ScriptWrapperMain needs a function to find a function to run."""
+      def raiseChrootRequiredError(_args):
+        # Ignore the passed-in args; raise with our own cmd and chroot args.
+        raise commandline.ChrootRequiredError(self.CMD_ARGS, self.CHROOT_ARGS)
+
+      return raiseChrootRequiredError
+
+    # Run Test
+    commandline.ScriptWrapperMain(findTarget, ['unrelated'])
+
+    # Verify Results
+    rc.assertCommandContains(enter_chroot=True)
+    rc.assertCommandContains(self.CMD_ARGS)
+    rc.assertCommandContains(chroot_args=self.CHROOT_ARGS)
+
+
+class TestRunInsideChroot(cros_test_lib.MockTestCase):
+  """Test commandline.RunInsideChroot()."""
+
+  def setUp(self):
+    self.orig_argv = sys.argv
+    sys.argv = ['/cmd', 'arg1', 'arg2']
+
+    self.mockFromHostToChrootPath = self.PatchObject(
+        path_util, 'ToChrootPath', return_value='/inside/cmd')
+
+    # Return values for these two should be set by each test.
+    self.mock_inside_chroot = self.PatchObject(cros_build_lib, 'IsInsideChroot')
+
+    # Mocked CliCommand object to pass to RunInsideChroot.
+    self.cmd = command.CliCommand(argparse.Namespace())
+    self.cmd.options.log_level = 'info'
+
+  def teardown(self):
+    sys.argv = self.orig_argv
+
+  def _VerifyRunInsideChroot(self, expected_cmd, expected_chroot_args=None,
+                             log_level_args=None, **kwargs):
+    """Run RunInsideChroot, and verify it raises with expected values.
+
+    Args:
+      expected_cmd: Command that should be executed inside the chroot.
+      expected_chroot_args: Args that should be passed as chroot args.
+      log_level_args: Args that set the log level of cros_sdk.
+      kwargs: Additional args to pass to RunInsideChroot().
+    """
+    with self.assertRaises(commandline.ChrootRequiredError) as cm:
+      commandline.RunInsideChroot(self.cmd, **kwargs)
+
+    if log_level_args is None:
+      log_level_args = ['--log-level', self.cmd.options.log_level]
+
+    if expected_chroot_args is not None:
+      log_level_args.extend(expected_chroot_args)
+      expected_chroot_args = log_level_args
+    else:
+      expected_chroot_args = log_level_args
+
+    self.assertEqual(expected_cmd, cm.exception.cmd)
+    self.assertEqual(expected_chroot_args, cm.exception.chroot_args)
+
+  def testRunInsideChroot(self):
+    """Test we can restart inside the chroot."""
+    self.mock_inside_chroot.return_value = False
+    self._VerifyRunInsideChroot(['/inside/cmd', 'arg1', 'arg2'])
+
+  def testRunInsideChrootLogLevel(self):
+    """Test chroot restart with properly inherited log-level."""
+    self.cmd.options.log_level = 'notice'
+    self.mock_inside_chroot.return_value = False
+    self._VerifyRunInsideChroot(['/inside/cmd', 'arg1', 'arg2'],
+                                log_level_args=['--log-level', 'notice'])
+
+  def testRunInsideChrootAlreadyInside(self):
+    """Test we don't restart inside the chroot if we are already there."""
+    self.mock_inside_chroot.return_value = True
+
+    # Since we are in the chroot, it should return, doing nothing.
+    commandline.RunInsideChroot(self.cmd)
diff --git a/lib/cros_build_lib.py b/lib/cros_build_lib.py
new file mode 100644
index 0000000..01d9652
--- /dev/null
+++ b/lib/cros_build_lib.py
@@ -0,0 +1,2263 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common python commands used by various build scripts."""
+
+from __future__ import print_function
+
+import __main__
+import collections
+import contextlib
+from datetime import datetime
+import email.utils
+import errno
+import functools
+import getpass
+import hashlib
+import inspect
+import operator
+import os
+import pprint
+import re
+import signal
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+import traceback
+import types
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_logging as logging
+from chromite.lib import signals
+
+
+# When True, SudoRunCommand (in strict mode) refuses to run unless the
+# CROS_SUDO_KEEP_ALIVE environment marker is set, and invokes sudo with -n
+# (non-interactive).  See SudoRunCommand below.
+STRICT_SUDO = False
+
+# For use by ShellQuote.  Match all characters that the shell might treat
+# specially.  This means a number of things:
+#  - Reserved characters.
+#  - Characters used in expansions (brace, variable, path, globs, etc...).
+#  - Characters that an interactive shell might use (like !).
+#  - Whitespace so that one arg turns into multiple.
+# See the bash man page as well as the POSIX shell documentation for more info:
+#   http://www.gnu.org/software/bash/manual/bashref.html
+#   http://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html
+_SHELL_QUOTABLE_CHARS = frozenset('[|&;()<> \t!{}[]=*?~$"\'\\#^')
+# The chars that, when used inside of double quotes, need escaping.
+# Order here matters as we need to escape backslashes first.
+_SHELL_ESCAPE_CHARS = r'\"`$'
+
+
+def ShellQuote(s):
+  """Quote |s| in a way that is safe for use in a shell.
+
+  We aim to be safe, but also to produce "nice" output.  That means we don't
+  use quotes when we don't need to, and we prefer to use less quotes (like
+  putting it all in single quotes) than more (using double quotes and escaping
+  a bunch of stuff, or mixing the quotes).
+
+  While python does provide a number of alternatives like:
+   - pipes.quote
+   - shlex.quote
+  They suffer from various problems like:
+   - Not widely available in different python versions.
+   - Do not produce pretty output in many cases.
+   - Are in modules that rarely otherwise get used.
+
+  Note: We don't handle reserved shell words like "for" or "case".  This is
+  because those only matter when they're the first element in a command, and
+  there is no use case for that.  When we want to run commands, we tend to
+  run real programs and not shell ones.
+
+  Args:
+    s: The string to quote.
+
+  Returns:
+    A safely (possibly quoted) string.
+  """
+  s = s.encode('utf-8')
+
+  # See if no quoting is needed so we can return the string as-is.
+  for c in s:
+    if c in _SHELL_QUOTABLE_CHARS:
+      break
+  else:
+    if not s:
+      return "''"
+    else:
+      return s
+
+  # See if we can use single quotes first.  Output is nicer.
+  if "'" not in s:
+    return "'%s'" % s
+
+  # Have to use double quotes.  Escape the few chars that still expand when
+  # used inside of double quotes.
+  for c in _SHELL_ESCAPE_CHARS:
+    if c in s:
+      s = s.replace(c, r'\%s' % c)
+  return '"%s"' % s
+
+
+def ShellUnquote(s):
+  """Do the opposite of ShellQuote.
+
+  This function assumes that the input is a valid escaped string. The behaviour
+  is undefined on malformed strings.
+
+  Args:
+    s: An escaped string.
+
+  Returns:
+    The unescaped version of the string.
+  """
+  if not s:
+    return ''
+
+  # Single-quoted: nothing is escaped inside; just strip the quotes.
+  if s[0] == "'":
+    return s[1:-1]
+
+  # Not quoted at all: return unchanged.
+  if s[0] != '"':
+    return s
+
+  # Double-quoted: strip the quotes, then drop the backslash from each
+  # recognized escape sequence.
+  s = s[1:-1]
+  output = ''
+  i = 0
+  while i < len(s) - 1:
+    # Skip the backslash when it makes sense.
+    if s[i] == '\\' and s[i + 1] in _SHELL_ESCAPE_CHARS:
+      i += 1
+    output += s[i]
+    i += 1
+  # Append the final character unless the loop already consumed it (which
+  # happens when the string ends in an escape sequence).
+  return output + s[i] if i < len(s) else output
+
+
+def CmdToStr(cmd):
+  """Translate a command list into a space-separated string.
+
+  The resulting string should be suitable for logging messages and for
+  pasting into a terminal to run.  Arguments are quoted (via ShellQuote)
+  only when needed, so arguments with spaces stay grouped.
+
+  Examples:
+    ['a', 'b'] ==> 'a b'
+    ['a b', 'c'] ==> "'a b' c"
+    [] ==> ''
+    See unittest for additional (tested) examples.
+
+  Args:
+    cmd: List of command arguments.
+
+  Returns:
+    String representing full command.
+  """
+  # ShellQuote only adds quoting/escaping when an argument requires it.
+  return ' '.join(ShellQuote(arg) for arg in cmd)
+
+
+class CommandResult(object):
+  """An object to store various attributes of a child process."""
+
+  def __init__(self, cmd=None, error=None, output=None, returncode=None):
+    self.cmd = cmd
+    self.error = error
+    self.output = output
+    self.returncode = returncode
+
+  @property
+  def cmdstr(self):
+    """Return self.cmd as a space-separated string, useful for log messages."""
+    return CmdToStr(self.cmd)
+
+
+class RunCommandError(Exception):
+  """Error caught in RunCommand() method."""
+
+  def __init__(self, msg, result, exception=None):
+    self.msg, self.result, self.exception = msg, result, exception
+    if exception is not None and not isinstance(exception, Exception):
+      raise ValueError('exception must be an exception instance; got %r'
+                       % (exception,))
+    Exception.__init__(self, msg)
+    self.args = (msg, result, exception)
+
+  def Stringify(self, error=True, output=True):
+    """Custom method for controlling what is included in stringifying this.
+
+    Each individual argument is the literal name of an attribute
+    on the result object; if False, that value is ignored for adding
+    to this string content.  If true, it'll be incorporated.
+
+    Args:
+      error: See comment about individual arguments above.
+      output: See comment about individual arguments above.
+    """
+    items = ['return code: %s' % (self.result.returncode,)]
+    if error and self.result.error:
+      items.append(self.result.error)
+    if output and self.result.output:
+      items.append(self.result.output)
+    items.append(self.msg)
+    return '\n'.join(items)
+
+  def __str__(self):
+    # __str__ needs to return ascii, thus force a conversion to be safe.
+    return self.Stringify().decode('utf-8', 'replace').encode(
+        'ascii', 'xmlcharrefreplace')
+
+  def __eq__(self, other):
+    return (type(self) == type(other) and
+            self.args == other.args)
+
+  def __ne__(self, other):
+    return not self.__eq__(other)
+
+
+class TerminateRunCommandError(RunCommandError):
+  """We were signaled to shutdown while running a command.
+
+  Client code shouldn't generally know, nor care about this class.  It's
+  used internally to suppress retry attempts when we're signaled to die.
+  Raised from _KillChildProcess when a received SIGINT/SIGTERM could not
+  be relayed to the original signal handler.
+  """
+
+
+def SudoRunCommand(cmd, user='root', **kwargs):
+  """Run a command via sudo.
+
+  Client code must use this rather than coming up with their own RunCommand
+  invocation that jams sudo in- this function is used to enforce certain
+  rules in our code about sudo usage, and as a potential auditing point.
+
+  Args:
+    cmd: The command to run.  See RunCommand for rules of this argument-
+         SudoRunCommand purely prefixes it with sudo.
+    user: The user to run the command as.
+    kwargs: See RunCommand options, it's a direct pass thru to it.
+          Note that this supports a 'strict' keyword that defaults to True.
+          If set to False, it'll suppress strict sudo behavior.
+
+  Returns:
+    See RunCommand documentation.
+
+  Raises:
+    This function may immediately raise RunCommandError if we're operating
+    in a strict sudo context and the API is being misused.
+    Barring that, see RunCommand's documentation- it can raise the same things
+    RunCommand does.
+  """
+  sudo_cmd = ['sudo']
+
+  strict = kwargs.pop('strict', True)
+
+  # Already root and targeting root: sudo would be a no-op, skip it.
+  if user == 'root' and os.geteuid() == 0:
+    return RunCommand(cmd, **kwargs)
+
+  if strict and STRICT_SUDO:
+    # Strict mode must never block on a password prompt: require the
+    # keep-alive daemon marker and pass -n (non-interactive) to sudo.
+    if 'CROS_SUDO_KEEP_ALIVE' not in os.environ:
+      raise RunCommandError(
+          'We were invoked in a strict sudo non - interactive context, but no '
+          'sudo keep alive daemon is running.  This is a bug in the code.',
+          CommandResult(cmd=cmd, returncode=126))
+    sudo_cmd += ['-n']
+
+  if user != 'root':
+    sudo_cmd += ['-u', user]
+
+  # Pass these values down into the sudo environment, since sudo will
+  # just strip them normally.
+  extra_env = kwargs.pop('extra_env', None)
+  extra_env = {} if extra_env is None else extra_env.copy()
+
+  for var in constants.ENV_PASSTHRU:
+    if var not in extra_env and var in os.environ:
+      extra_env[var] = os.environ[var]
+
+  # sudo accepts leading VAR=value assignments on its command line.
+  sudo_cmd.extend('%s=%s' % (k, v) for k, v in extra_env.iteritems())
+
+  # Finally, block people from passing options to sudo.
+  sudo_cmd.append('--')
+
+  if isinstance(cmd, basestring):
+    # We need to handle shell ourselves so the order is correct:
+    #  $ sudo [sudo args] -- bash -c '[shell command]'
+    # If we let RunCommand take care of it, we'd end up with:
+    #  $ bash -c 'sudo [sudo args] -- [shell command]'
+    shell = kwargs.pop('shell', False)
+    if not shell:
+      raise Exception('Cannot run a string command without a shell')
+    sudo_cmd.extend(['/bin/bash', '-c', cmd])
+  else:
+    sudo_cmd.extend(cmd)
+
+  return RunCommand(sudo_cmd, **kwargs)
+
+
+def _KillChildProcess(proc, int_timeout, kill_timeout, cmd, original_handler,
+                      signum, frame):
+  """Used as a signal handler by RunCommand.
+
+  This is internal to Runcommand.  No other code should use this.
+
+  Args:
+    proc: The _Popen instance for the child process.
+    int_timeout: Seconds to let the child exit on its own before SIGTERM.
+    kill_timeout: Seconds to let the child react to SIGTERM before SIGKILL.
+    cmd: The command the child is running; used for error reporting.
+    original_handler: The previously-installed handler to relay the signal to.
+    signum: The delivered signal number, or None for plain cleanup calls.
+    frame: The interrupted stack frame (may be None).
+  """
+  if signum:
+    # If we've been invoked because of a signal, ignore delivery of that signal
+    # from this point forward.  The invoking context of _KillChildProcess
+    # restores signal delivery to what it was prior; we suppress future delivery
+    # till then since this code handles SIGINT/SIGTERM fully including
+    # delivering the signal to the original handler on the way out.
+    signal.signal(signum, signal.SIG_IGN)
+
+  # Do not trust Popen's returncode alone; we can be invoked from contexts where
+  # the Popen instance was created, but no process was generated.
+  if proc.returncode is None and proc.pid is not None:
+    try:
+      # Grace period: let the child wrap up on its own first.
+      while proc.poll() is None and int_timeout >= 0:
+        time.sleep(0.1)
+        time.sleep(0.1)
+        int_timeout -= 0.1
+
+      # Escalate to SIGTERM, then SIGKILL if it still won't exit.
+      proc.terminate()
+      while proc.poll() is None and kill_timeout >= 0:
+        time.sleep(0.1)
+        kill_timeout -= 0.1
+
+      if proc.poll() is None:
+        # Still doesn't want to die.  Too bad, so sad, time to die.
+        proc.kill()
+    except EnvironmentError as e:
+      logging.warning('Ignoring unhandled exception in _KillChildProcess: %s',
+                      e)
+
+    # Ensure our child process has been reaped.
+    proc.wait()
+
+  if not signals.RelaySignal(original_handler, signum, frame):
+    # Mock up our own, matching exit code for signaling.
+    cmd_result = CommandResult(cmd=cmd, returncode=signum << 8)
+    raise TerminateRunCommandError('Received signal %i' % signum, cmd_result)
+
+
+class _Popen(subprocess.Popen):
+  """subprocess.Popen derivative customized for our usage.
+
+  Specifically, we fix terminate/send_signal/kill to work if the child process
+  was a setuid binary; on vanilla kernels, the parent can wax the child
+  regardless, on goobuntu this apparently isn't allowed, thus we fall back
+  to the sudo machinery we have.
+
+  While we're overriding send_signal, we also suppress ESRCH being raised
+  if the process has exited, and suppress signaling all together if the process
+  has knowingly been waitpid'd already.
+  """
+
+  def send_signal(self, signum):
+    # Deliver |signum| to the child, escalating to `sudo kill` on EPERM.
+    if self.returncode is not None:
+      # The original implementation in Popen would allow signaling whatever
+      # process now occupies this pid, even if the Popen object had waitpid'd.
+      # Since we can escalate to sudo kill, we do not want to allow that.
+      # Fixing this addresses that angle, and makes the API less sucky in the
+      # process.
+      return
+
+    try:
+      os.kill(self.pid, signum)
+    except EnvironmentError as e:
+      if e.errno == errno.EPERM:
+        # Kill returns either 0 (signal delivered), or 1 (signal wasn't
+        # delivered).  This isn't particularly informative, but we still
+        # need that info to decide what to do, thus the error_code_ok=True.
+        ret = SudoRunCommand(['kill', '-%i' % signum, str(self.pid)],
+                             print_cmd=False, redirect_stdout=True,
+                             redirect_stderr=True, error_code_ok=True)
+        if ret.returncode == 1:
+          # The kill binary doesn't distinguish between permission denied,
+          # and the pid is missing.  Denied can only occur under weird
+          # grsec/selinux policies.  We ignore that potential and just
+          # assume the pid was already dead and try to reap it.
+          self.poll()
+      elif e.errno == errno.ESRCH:
+        # Since we know the process is dead, reap it now.
+        # Normally Popen would throw this error- we suppress it since frankly
+        # that's a misfeature and we're already overriding this method.
+        self.poll()
+      else:
+        raise
+
+
+# pylint: disable=redefined-builtin
+def RunCommand(cmd, print_cmd=True, error_message=None, redirect_stdout=False,
+               redirect_stderr=False, cwd=None, input=None, enter_chroot=False,
+               shell=False, env=None, extra_env=None, ignore_sigint=False,
+               combine_stdout_stderr=False, log_stdout_to_file=None,
+               chroot_args=None, debug_level=logging.INFO,
+               error_code_ok=False, int_timeout=1, kill_timeout=1,
+               log_output=False, stdout_to_pipe=False, capture_output=False,
+               quiet=False, mute_output=None):
+  """Runs a command.
+
+  Args:
+    cmd: cmd to run.  Should be input to subprocess.Popen. If a string, shell
+      must be true. Otherwise the command must be an array of arguments, and
+      shell must be false.
+    print_cmd: prints the command before running it.
+    error_message: prints out this message when an error occurs.
+    redirect_stdout: returns the stdout.
+    redirect_stderr: holds stderr output until input is communicated.
+    cwd: the working directory to run this cmd.
+    input: The data to pipe into this command through stdin.  If a file object
+      or file descriptor, stdin will be connected directly to that.
+    enter_chroot: this command should be run from within the chroot.  If set,
+      cwd must point to the scripts directory. If we are already inside the
+      chroot, this command will be run as if |enter_chroot| is False.
+    shell: Controls whether we add a shell as a command interpreter.  See cmd
+      since it has to agree as to the type.
+    env: If non-None, this is the environment for the new process.  If
+      enter_chroot is true then this is the environment of the enter_chroot,
+      most of which gets removed from the cmd run.
+    extra_env: If set, this is added to the environment for the new process.
+      In enter_chroot=True case, these are specified on the post-entry
+      side, and so are often more useful.  This dictionary is not used to
+      clear any entries though.
+    ignore_sigint: If True, we'll ignore signal.SIGINT before calling the
+      child.  This is the desired behavior if we know our child will handle
+      Ctrl-C.  If we don't do this, I think we and the child will both get
+      Ctrl-C at the same time, which means we'll forcefully kill the child.
+    combine_stdout_stderr: Combines stdout and stderr streams into stdout.
+    log_stdout_to_file: If set, redirects stdout to file specified by this path.
+      If |combine_stdout_stderr| is set to True, then stderr will also be logged
+      to the specified file.
+    chroot_args: An array of arguments for the chroot environment wrapper.
+    debug_level: The debug level of RunCommand's output.
+    error_code_ok: Does not raise an exception when command returns a non-zero
+      exit code. Instead, returns the CommandResult object containing the exit
+      code. Note: will still raise an exception if the cmd file does not exist.
+    int_timeout: If we're interrupted, how long (in seconds) should we give the
+      invoked process to clean up before we send a SIGTERM.
+    kill_timeout: If we're interrupted, how long (in seconds) should we give the
+      invoked process to shutdown from a SIGTERM before we SIGKILL it.
+    log_output: Log the command and its output automatically.
+    stdout_to_pipe: Redirect stdout to pipe.
+    capture_output: Set |redirect_stdout| and |redirect_stderr| to True.
+    quiet: Set |print_cmd| to False, |stdout_to_pipe| and
+      |combine_stdout_stderr| to True.
+    mute_output: Mute subprocess printing to parent stdout/stderr. Defaults to
+      None, which bases muting on |debug_level|.
+
+  Returns:
+    A CommandResult object.
+
+  Raises:
+    RunCommandError:  Raises exception on error with optional error_message.
+  """
+  # Expand the convenience flags into the individual settings they imply.
+  if capture_output:
+    redirect_stdout, redirect_stderr = True, True
+
+  if quiet:
+    debug_level = logging.DEBUG
+    stdout_to_pipe, combine_stdout_stderr = True, True
+
+  # Set default for variables.
+  stdout = None
+  stderr = None
+  stdin = None
+  cmd_result = CommandResult()
+
+  # By default, mute child output if it would not be logged at this level.
+  if mute_output is None:
+    mute_output = logging.getLogger().getEffectiveLevel() > debug_level
+
+  # Force the timeout to float; in the process, if it's not convertible,
+  # a self-explanatory exception will be thrown.
+  kill_timeout = float(kill_timeout)
+
+  def _get_tempfile():
+    # Return an unbuffered tempfile, falling back to /tmp if $TMP vanished.
+    try:
+      return tempfile.TemporaryFile(bufsize=0)
+    except EnvironmentError as e:
+      if e.errno != errno.ENOENT:
+        raise
+      # This can occur if we were pointed at a specific location for our
+      # TMP, but that location has since been deleted.  Suppress that issue
+      # in this particular case since our usage guarantees deletion,
+      # and since this is primarily triggered during hard cgroups shutdown.
+      return tempfile.TemporaryFile(bufsize=0, dir='/tmp')
+
+  # Modify defaults based on parameters.
+  # Note that tempfiles must be unbuffered else attempts to read
+  # what a separate process did to that file can result in a bad
+  # view of the file.
+  if log_stdout_to_file:
+    stdout = open(log_stdout_to_file, 'w+')
+  elif stdout_to_pipe:
+    stdout = subprocess.PIPE
+  elif redirect_stdout or mute_output or log_output:
+    stdout = _get_tempfile()
+
+  if combine_stdout_stderr:
+    stderr = subprocess.STDOUT
+  elif redirect_stderr or mute_output or log_output:
+    stderr = _get_tempfile()
+
+  # If subprocesses have direct access to stdout or stderr, they can bypass
+  # our buffers, so we need to flush to ensure that output is not interleaved.
+  if stdout is None or stderr is None:
+    sys.stdout.flush()
+    sys.stderr.flush()
+
+  # If input is a string, we'll create a pipe and send it through that.
+  # Otherwise we assume it's a file object that can be read from directly.
+  if isinstance(input, basestring):
+    stdin = subprocess.PIPE
+  elif input is not None:
+    stdin = input
+    input = None
+
+  if isinstance(cmd, basestring):
+    if not shell:
+      raise Exception('Cannot run a string command without a shell')
+    cmd = ['/bin/bash', '-c', cmd]
+    shell = False
+  elif shell:
+    raise Exception('Cannot run an array command with a shell')
+
+  # If enter_chroot is requested (and we aren't already inside), wrap |cmd|
+  # with the cros_sdk wrapper and pass the environment through to it.
+  env = env.copy() if env is not None else os.environ.copy()
+  env.update(extra_env if extra_env else {})
+  if enter_chroot and not IsInsideChroot():
+    wrapper = ['cros_sdk']
+    if cwd:
+      # If the current working directory is set, try to find cros_sdk relative
+      # to cwd. Generally cwd will be the buildroot therefore we want to use
+      # {cwd}/chromite/bin/cros_sdk. For more info PTAL at crbug.com/432620
+      path = os.path.join(cwd, constants.CHROMITE_BIN_SUBDIR, 'cros_sdk')
+      if os.path.exists(path):
+        wrapper = [path]
+
+    if chroot_args:
+      wrapper += chroot_args
+
+    if extra_env:
+      wrapper.extend('%s=%s' % (k, v) for k, v in extra_env.iteritems())
+
+    cmd = wrapper + ['--'] + cmd
+
+  # Forward pass-through variables from our environment when not already set.
+  for var in constants.ENV_PASSTHRU:
+    if var not in env and var in os.environ:
+      env[var] = os.environ[var]
+
+  # Print out the command before running.
+  if print_cmd or log_output:
+    if cwd:
+      logging.log(debug_level, 'RunCommand: %s in %s', CmdToStr(cmd), cwd)
+    else:
+      logging.log(debug_level, 'RunCommand: %s', CmdToStr(cmd))
+
+  cmd_result.cmd = cmd
+
+  proc = None
+  # Verify that the signals modules is actually usable, and won't segfault
+  # upon invocation of getsignal.  See signals.SignalModuleUsable for the
+  # details and upstream python bug.
+  use_signals = signals.SignalModuleUsable()
+  try:
+    proc = _Popen(cmd, cwd=cwd, stdin=stdin, stdout=stdout,
+                  stderr=stderr, shell=False, env=env,
+                  close_fds=True)
+
+    if use_signals:
+      # Install handlers that clean the child up on SIGINT/SIGTERM.
+      if ignore_sigint:
+        old_sigint = signal.signal(signal.SIGINT, signal.SIG_IGN)
+      else:
+        old_sigint = signal.getsignal(signal.SIGINT)
+        signal.signal(signal.SIGINT,
+                      functools.partial(_KillChildProcess, proc, int_timeout,
+                                        kill_timeout, cmd, old_sigint))
+
+      old_sigterm = signal.getsignal(signal.SIGTERM)
+      signal.signal(signal.SIGTERM,
+                    functools.partial(_KillChildProcess, proc, int_timeout,
+                                      kill_timeout, cmd, old_sigterm))
+
+    try:
+      (cmd_result.output, cmd_result.error) = proc.communicate(input)
+    finally:
+      # Restore the original handlers before touching the captured streams.
+      if use_signals:
+        signal.signal(signal.SIGINT, old_sigint)
+        signal.signal(signal.SIGTERM, old_sigterm)
+
+      if stdout and not log_stdout_to_file and not stdout_to_pipe:
+        # stdout went to a tempfile rather than a pipe; read the captured
+        # output back from it.
+        stdout.seek(0)
+        cmd_result.output = stdout.read()
+        stdout.close()
+
+      if stderr and stderr != subprocess.STDOUT:
+        stderr.seek(0)
+        cmd_result.error = stderr.read()
+        stderr.close()
+
+    cmd_result.returncode = proc.returncode
+
+    if log_output:
+      if cmd_result.output:
+        logging.log(debug_level, '(stdout):\n%s', cmd_result.output)
+      if cmd_result.error:
+        logging.log(debug_level, '(stderr):\n%s', cmd_result.error)
+
+    if not error_code_ok and proc.returncode:
+      msg = ('Failed command "%s", cwd=%s, extra env=%r'
+             % (CmdToStr(cmd), cwd, extra_env))
+      if error_message:
+        msg += '\n%s' % error_message
+      raise RunCommandError(msg, cmd_result)
+  except OSError as e:
+    estr = str(e)
+    if e.errno == errno.EACCES:
+      estr += '; does the program need `chmod a+x`?'
+    raise RunCommandError(estr, CommandResult(cmd=cmd), exception=e)
+  finally:
+    if proc is not None:
+      # Ensure the process is dead.
+      _KillChildProcess(proc, int_timeout, kill_timeout, cmd, None, None, None)
+
+  return cmd_result
+# pylint: enable=redefined-builtin
+
+
+# Convenience RunCommand methods.
+#
+# We don't use functools.partial because it binds the methods at import time,
+# which doesn't work well with unit tests, since it bypasses the mock that may
+# be set up for RunCommand.
+
+def DebugRunCommand(*args, **kwargs):
+  """Run a command at the DEBUG log level; see RunCommand for arguments."""
+  # setdefault means an explicitly passed debug_level still wins.
+  kwargs.setdefault('debug_level', logging.DEBUG)
+  return RunCommand(*args, **kwargs)
+
+
+class DieSystemExit(SystemExit):
+  """Custom Exception used so we can intercept this if necessary."""
+
+
+def Die(message, *args, **kwargs):
+  """Emits an error message with a stack trace and halts execution.
+
+  Args:
+    message: The message to be emitted before exiting.
+    args: Positional formatting args forwarded to logging.error.
+    kwargs: Keyword args forwarded to logging.error.
+
+  Raises:
+    DieSystemExit: always, with exit code 1.
+  """
+  logging.error(message, *args, **kwargs)
+  # Raise our SystemExit subclass so callers that need to can intercept it.
+  raise DieSystemExit(1)
+
+
+def GetSysrootToolPath(sysroot, tool_name):
+  """Returns the path to the sysroot specific version of a tool.
+
+  Does not check that the tool actually exists.
+
+  Args:
+    sysroot: build root of the system in question.
+    tool_name: string name of tool desired (e.g. 'equery').
+
+  Returns:
+    string path to tool inside the sysroot.
+  """
+  if sysroot == '/':
+    return os.path.join(sysroot, 'usr', 'bin', tool_name)
+
+  return os.path.join(sysroot, 'build', 'bin', tool_name)
+
+
+def ListFiles(base_dir):
+  """Recursively list files in a directory.
+
+  Args:
+    base_dir: directory to start recursively listing in.
+
+  Returns:
+    A list of files relative to the base_dir path or
+    An empty list of there are no files in the directories.
+  """
+  directories = [base_dir]
+  files_list = []
+  while directories:
+    directory = directories.pop()
+    for name in os.listdir(directory):
+      fullpath = os.path.join(directory, name)
+      if os.path.isfile(fullpath):
+        files_list.append(fullpath)
+      elif os.path.isdir(fullpath):
+        directories.append(fullpath)
+
+  return files_list
+
+
+def IsInsideChroot():
+  """Returns True if we are inside chroot."""
+  # The marker file is presumed to exist only inside the chroot; its
+  # presence is the check used throughout this module.
+  return os.path.exists('/etc/cros_chroot_version')
+
+
+def AssertInsideChroot():
+  """Die (exit with an error) if we are outside the chroot."""
+  if not IsInsideChroot():
+    Die('%s: please run inside the chroot', os.path.basename(sys.argv[0]))
+
+
+def AssertOutsideChroot():
+  """Die (exit with an error) if we are inside the chroot."""
+  if IsInsideChroot():
+    Die('%s: please run outside the chroot', os.path.basename(sys.argv[0]))
+
+
+def GetChromeosVersion(str_obj):
+  """Helper method to parse output for CHROMEOS_VERSION_STRING.
+
+  Args:
+    str_obj: a string, which may contain Chrome OS version info.
+
+  Returns:
+    A string, value of CHROMEOS_VERSION_STRING environment variable set by
+      chromeos_version.sh. Or None if not found.
+  """
+  if str_obj is not None:
+    match = re.search(r'CHROMEOS_VERSION_STRING=([0-9_.]+)', str_obj)
+    if match and match.group(1):
+      logging.info('CHROMEOS_VERSION_STRING = %s' % match.group(1))
+      return match.group(1)
+
+  logging.info('CHROMEOS_VERSION_STRING NOT found')
+  return None
+
+
+def GetHostName(fully_qualified=False):
+  """Return hostname of current machine, with domain if |fully_qualified|."""
+  hostname = socket.gethostname()
+  try:
+    hostname = socket.gethostbyaddr(hostname)[0]
+  except socket.gaierror as e:
+    logging.warning('please check your /etc/hosts file; resolving your hostname'
+                    ' (%s) failed: %s', hostname, e)
+
+  if fully_qualified:
+    return hostname
+  else:
+    return hostname.partition('.')[0]
+
+
+def GetHostDomain():
+  """Return domain of current machine.
+
+  If there is no domain, return 'localdomain'.
+  """
+
+  hostname = GetHostName(fully_qualified=True)
+  domain = hostname.partition('.')[2]
+  return domain if domain else 'localdomain'
+
+
def HostIsCIBuilder(fq_hostname=None, golo_only=False, gce_only=False):
  """Return True iff a host is a continuous-integration builder.

  Args:
    fq_hostname: The fully qualified hostname. By default, we fetch it for you.
    golo_only: Only return True if the host is in the Chrome Golo. Defaults to
      False.
    gce_only: Only return True if the host is in the Chrome GCE block. Defaults
      to False.
  """
  if not fq_hostname:
    fq_hostname = GetHostName(fully_qualified=True)

  in_golo = fq_hostname.endswith('.' + constants.GOLO_DOMAIN)
  in_gce = fq_hostname.endswith('.' + constants.CHROME_DOMAIN)
  if golo_only:
    return in_golo
  if gce_only:
    return in_gce
  return in_golo or in_gce
+
+
def TimedCommand(functor, *args, **kwargs):
  """Run |functor| and log (or report via callback) how long it took.

  If you want to log info about how long it took to run an arbitrary command,
  you would do something like:
    TimedCommand(RunCommand, ['wget', 'http://foo'])

  Args:
    functor: The function to run.
    args: The args to pass to the function.
    kwargs: Optional args to pass to the function.
    timed_log_level: The log level to use (defaults to logging.INFO).
    timed_log_msg: The message to log after the command completes.  It may have
      keywords: "name" (the function name), "args" (the args passed to the
      func), "kwargs" (the kwargs passed to the func), "ret" (the return value
      from the func), and "delta" (the timing delta).
    timed_log_callback: Function to call upon completion (instead of logging).
      Will be passed (log_level, log_msg, result, datetime.timedelta).
  """
  # Pop our control keywords before they reach the wrapped functor.
  msg_template = kwargs.pop(
      'timed_log_msg',
      '%(name)s(*%(args)r, **%(kwargs)r)=%(ret)s took: %(delta)s')
  level = kwargs.pop('timed_log_level', logging.INFO)
  callback = kwargs.pop('timed_log_callback', None)

  begin = datetime.now()
  result = functor(*args, **kwargs)
  elapsed = datetime.now() - begin

  message = msg_template % {
      'name': getattr(functor, '__name__', repr(functor)),
      'args': args,
      'kwargs': kwargs,
      'ret': result,
      'delta': elapsed,
  }
  if callback is None:
    logging.log(level, message)
  else:
    callback(level, message, result, elapsed)
  return result
+
+
# Compression type constants understood by FindCompressor() and friends.
COMP_NONE = 0
COMP_GZIP = 1
COMP_BZIP2 = 2
COMP_XZ = 3


def FindCompressor(compression, chroot=None):
  """Locate a compressor utility program (possibly in a chroot).

  Since we compress/decompress a lot, make it easy to locate a
  suitable utility program in a variety of locations.  We favor
  the one in the chroot over /, and the parallel implementation
  over the single threaded one.

  Args:
    compression: The type of compression desired.
    chroot: Optional path to a chroot to search.

  Returns:
    Path to a compressor.

  Raises:
    ValueError: If compression is unknown.
  """
  if compression == COMP_NONE:
    return 'cat'

  # Map each compression type to (single-threaded, parallel) program names.
  programs = {
      COMP_GZIP: ('gzip', 'pigz'),
      COMP_BZIP2: ('bzip2', 'pbzip2'),
      COMP_XZ: ('xz', 'xz'),
  }
  if compression not in programs:
    raise ValueError('unknown compression')
  std, para = programs[compression]

  roots = [chroot] if chroot else []
  roots.append('/')

  # Prefer the parallel tool over the serial one, and the chroot copy
  # over the host's.
  for prog in (para, std):
    for root in roots:
      for subdir in ('', 'usr'):
        path = os.path.join(root, subdir, 'bin', prog)
        if os.path.exists(path):
          return path

  # Nothing found on disk; fall back to $PATH resolution at run time.
  return std
+
+
def CompressionStrToType(s):
  """Convert a compression string type to a constant.

  Args:
    s: string to check

  Returns:
    A constant, or None if the compression type is unknown.
  """
  if not s:
    # Empty/None means "no compression".
    return COMP_NONE
  return {
      'gz': COMP_GZIP,
      'bz2': COMP_BZIP2,
      'xz': COMP_XZ,
  }.get(s)
+
+
def CompressFile(infile, outfile):
  """Compress a file using compressor specified by |outfile| suffix.

  Args:
    infile: File to compress.
    outfile: Name of output file. Compression used is based on the
             type of suffix of the name specified (e.g.: .bz2).
  """
  suffix = outfile.rsplit('.', 1)[-1]
  comp_type = CompressionStrToType(suffix)
  # The suffix must name a real compression format.
  assert comp_type and comp_type != COMP_NONE
  compressor = FindCompressor(comp_type)
  RunCommand([compressor, '-c', infile], log_stdout_to_file=outfile)
+
+
def UncompressFile(infile, outfile):
  """Uncompress a file using compressor specified by |infile| suffix.

  Args:
    infile: File to uncompress. Compression used is based on the
            type of suffix of the name specified (e.g.: .bz2).
    outfile: Name of output file.
  """
  suffix = infile.rsplit('.', 1)[-1]
  comp_type = CompressionStrToType(suffix)
  # The suffix must name a real compression format.
  assert comp_type and comp_type != COMP_NONE
  compressor = FindCompressor(comp_type)
  RunCommand([compressor, '-dc', infile], log_stdout_to_file=outfile)
+
+
def CreateTarball(target, cwd, sudo=False, compression=COMP_XZ, chroot=None,
                  inputs=None, extra_args=None, **kwargs):
  """Create a tarball.  Executes 'tar' on the commandline.

  Args:
    target: The path of the tar file to generate.
    cwd: The directory to run the tar command.
    sudo: Whether to run with "sudo".
    compression: The type of compression desired.  See the FindCompressor
      function for details.
    chroot: See FindCompressor().
    inputs: A list of files or directories to add to the tarball.  If unset,
      defaults to ".".
    extra_args: A list of extra args to pass to "tar".
    kwargs: Any RunCommand options/overrides to use.

  Returns:
    The cmd_result object returned by the RunCommand invocation.
  """
  inputs = ['.'] if inputs is None else inputs
  extra_args = [] if extra_args is None else extra_args
  kwargs.setdefault('debug_level', logging.DEBUG)

  compressor = FindCompressor(compression, chroot=chroot)
  cmd = (['tar'] +
         list(extra_args) +
         ['--sparse', '-I', compressor, '-cf', target] +
         list(inputs))
  runner = SudoRunCommand if sudo else RunCommand
  return runner(cmd, cwd=cwd, **kwargs)
+
+
def GetInput(prompt):
  """Helper function to grab input from a user.   Makes testing easier.

  Args:
    prompt: The text to display before reading the user's input.

  Returns:
    The line read from stdin, without the trailing newline.
  """
  return raw_input(prompt)
+
+
def GetChoice(title, options, group_size=0):
  """Ask user to choose an option from the list.

  When |group_size| is 0, then all items in |options| will be extracted and
  shown at the same time.  Otherwise, the items will be extracted |group_size|
  at a time, and then shown to the user.  This makes it easier to support
  generators that are slow, extremely large, or people usually want to pick
  from the first few choices.

  Args:
    title: The text to display before listing options.
    options: Iterable which provides options to display.
    group_size: How many options to show before asking the user to choose.

  Returns:
    An integer of the index in |options| the user picked.
  """
  def _AskUser(max_choice, more):
    """Prompt until a valid index (or Enter for more, when |more| is set)."""
    prompt = 'Please choose an option [0-%d]' % max_choice
    if more:
      prompt += ' (Enter for more options)'
    prompt += ': '

    while True:
      reply = GetInput(prompt)
      if more and not reply.strip():
        return None
      try:
        reply = int(reply)
      except ValueError:
        print('Input is not an integer')
        continue
      if reply < 0 or reply > max_choice:
        print('Choice %d out of range (0-%d)' % (reply, max_choice))
        continue
      return reply

  print(title)
  max_choice = 0
  for idx, option in enumerate(options):
    # At each group boundary, pause and let the user pick (or continue).
    if idx and group_size and not idx % group_size:
      selection = _AskUser(idx - 1, True)
      if selection is not None:
        return selection
    print('  [%d]: %s' % (idx, option))
    max_choice = idx

  return _AskUser(max_choice, False)
+
+
def BooleanPrompt(prompt='Do you want to continue?', default=True,
                  true_value='yes', false_value='no', prolog=None):
  """Helper function for processing boolean choice prompts.

  Args:
    prompt: The question to present to the user.
    default: Boolean to return if the user just presses enter.
    true_value: The text to display that represents a True returned.
    false_value: The text to display that represents a False returned.
    prolog: The text to display before prompt.

  Returns:
    True or False.
  """
  true_value = true_value.lower()
  false_value = false_value.lower()
  if true_value == false_value:
    raise ValueError('true_value and false_value must differ: got %r'
                     % true_value)

  # Capitalize whichever choice is the default so it stands out.
  true_text, false_text = true_value, false_value
  if default:
    true_text = true_text[0].upper() + true_text[1:]
  else:
    false_text = false_text[0].upper() + false_text[1:]

  prompt = ('\n%s (%s/%s)? ' % (prompt, true_text, false_text))
  if prolog:
    prompt = ('\n%s\n%s' % (prolog, prompt))

  while True:
    try:
      response = GetInput(prompt).lower()
    except EOFError:
      # If the user hits CTRL+D, or stdin is disabled, use the default.
      print()
      response = None
    except KeyboardInterrupt:
      # If the user hits CTRL+C, just exit the process.
      print()
      Die('CTRL+C detected; exiting')

    if not response:
      return default
    if true_value.startswith(response):
      if not false_value.startswith(response):
        return True
      # The reply is a prefix of both values; ambiguous, so ask again.
    elif false_value.startswith(response):
      return False
+
+
def BooleanShellValue(sval, default, msg=None):
  """See if the string value is a value users typically consider as boolean

  Often times people set shell variables to different values to mean "true"
  or "false".  For example, they can do:
    export FOO=yes
    export BLAH=1
    export MOO=true
  Handle all that user ugliness here.

  If the user picks an invalid value, you can use |msg| to display a non-fatal
  warning rather than raising an exception.

  Args:
    sval: The string value we got from the user.
    default: If we can't figure out if the value is true or false, use this.
    msg: If |sval| is an unknown value, use |msg| to warn the user that we
         could not decode the input.  Otherwise, raise ValueError().

  Returns:
    The interpreted boolean value of |sval|.

  Raises:
    ValueError() if |sval| is an unknown value and |msg| is not set.
  """
  if sval is None:
    return default

  if isinstance(sval, basestring):
    value = sval.lower()
    if value in ('yes', 'y', '1', 'true'):
      return True
    if value in ('no', 'n', '0', 'false'):
      return False

  if msg is None:
    raise ValueError('Could not decode as a boolean value: %r' % sval)
  logging.warning('%s: %r', msg, sval)
  return default
+
+
+# Suppress whacked complaints about abstract class being unused.
class MasterPidContextManager(object):
  """Allow context managers to restrict their exit to within the same PID."""

  # In certain cases we actually want this ran outside
  # of the main pid- specifically in backup processes
  # doing cleanup.
  ALTERNATE_MASTER_PID = None

  def __init__(self):
    # PID of the process that entered this context; only that process
    # (or ALTERNATE_MASTER_PID) is allowed to run the exit hook.
    self._invoking_pid = None

  def __enter__(self):
    self._invoking_pid = os.getpid()
    return self._enter()

  def __exit__(self, exc_type, exc, exc_tb):
    pid = os.getpid()
    if pid == self.ALTERNATE_MASTER_PID:
      self._invoking_pid = pid
    if pid == self._invoking_pid:
      return self._exit(exc_type, exc, exc_tb)

  def _enter(self):
    # Subclasses must implement the real entry behavior.
    raise NotImplementedError(self, '_enter')

  def _exit(self, exc_type, exc, exc_tb):
    # Subclasses must implement the real exit behavior.
    raise NotImplementedError(self, '_exit')
+
+
@contextlib.contextmanager
def NoOpContextManager():
  """A context manager that does nothing on entry or exit."""
  yield
+
+
def AllowDisabling(enabled, functor, *args, **kwargs):
  """Context Manager wrapper that can be used to enable/disable usage.

  This is mainly useful to control whether or not a given Context Manager
  is used.

  For example:

  with AllowDisabling(options.timeout <= 0, Timeout, options.timeout):
    ... do code w/in a timeout context..

  If options.timeout is a positive integer, then the_Timeout context manager is
  created and ran.  If it's zero or negative, then the timeout code is disabled.

  While Timeout *could* handle this itself, it's redundant having each
  implementation do this, thus the generic wrapper.
  """
  if not enabled:
    # Disabled: hand back a do-nothing context manager instead.
    return NoOpContextManager()
  return functor(*args, **kwargs)
+
+
class ContextManagerStack(object):
  """Context manager that is designed to safely allow nesting and stacking.

  Python2.7 directly supports a with syntax generally removing the need for
  this, although this form avoids indentation hell if there is a lot of context
  managers.  It also permits more programmatic control and allowing conditional
  usage.

  For Python2.6, see http://docs.python.org/library/contextlib.html; the short
  version is that there is a race in the available stdlib/language rules under
  2.6 when dealing w/ multiple context managers, thus this safe version was
  added.

  For each context manager added to this instance, it will unwind them,
  invoking them as if it had been constructed as a set of manually nested
  with statements.
  """

  def __init__(self):
    # Context managers that have been __enter__'d, in entry order.
    self._stack = []

  def Add(self, functor, *args, **kwargs):
    """Add a context manager onto the stack.

    Usage of this is essentially the following:
    >>> stack.add(Timeout, 60)

    It must be done in this fashion, else there is a mild race that exists
    between context manager instantiation and initial __enter__.

    Invoking it in the form specified eliminates that race.

    Args:
      functor: A callable to instantiate a context manager.
      args and kwargs: positional and optional args to functor.

    Returns:
      The newly created (and __enter__'d) context manager.
      Note: This is not the same value as the "with" statement -- that returns
      the value from the __enter__ function while this is the manager itself.
    """
    obj = None
    try:
      obj = functor(*args, **kwargs)
      return obj
    finally:
      # Enter and record the manager inside the finally clause so it still
      # happens if the return is interrupted (e.g. by an async exception).
      if obj is not None:
        obj.__enter__()
        self._stack.append(obj)

  def __enter__(self):
    # Nothing to do in this case.  The individual __enter__'s are done
    # when the context managers are added, which will likely be after
    # the __enter__ method of this stack is called.
    return self

  def __exit__(self, exc_type, exc, exc_tb):
    # Exit each context manager in stack in reverse order, tracking the results
    # to know whether or not to suppress the exception raised (or to switch that
    # exception to a new one triggered by an individual handler's __exit__).
    for handler in reversed(self._stack):
      # pylint: disable=bare-except
      try:
        if handler.__exit__(exc_type, exc, exc_tb):
          # This handler swallowed the active exception; clear it so the
          # remaining handlers see a clean exit.
          exc_type = exc = exc_tb = None
      except:
        exc_type, exc, exc_tb = sys.exc_info()

    self._stack = []

    # Return True if any exception was handled.
    if all(x is None for x in (exc_type, exc, exc_tb)):
      return True

    # Raise any exception that is left over from exiting all context managers.
    # Normally a single context manager would return False to allow caller to
    # re-raise the exception itself, but here the exception might have been
    # raised during the exiting of one of the individual context managers.
    # NOTE: Python 2 three-expression raise; preserves the original traceback.
    raise exc_type, exc, exc_tb
+
+
class ApiMismatchError(Exception):
  """Raised by GetTargetChromiteApiVersion on an incompatible re-exec API."""
+
+
class NoChromiteError(Exception):
  """Raised when an expected chromite installation could not be found."""
+
+
def GetTargetChromiteApiVersion(buildroot, validate_version=True):
  """Get the re-exec API version of the target chromite.

  Args:
    buildroot: The directory containing the chromite to check.
    validate_version: If set to true, checks the target chromite for
      compatibility, and raises an ApiMismatchError when there is an
      incompatibility.

  Returns:
    The version number in (major, minor) tuple.

  Raises:
    May raise an ApiMismatchError if validate_version is set.
  """
  try:
    api = RunCommand(
        [constants.PATH_TO_CBUILDBOT, '--reexec-api-version'],
        cwd=buildroot, error_code_ok=True, capture_output=True)
  except RunCommandError:
    # Although error_code_ok=True was used, this exception will still be
    # raised if the executable did not exist.
    full_cbuildbot_path = os.path.join(buildroot, constants.PATH_TO_CBUILDBOT)
    if not os.path.exists(full_cbuildbot_path):
      raise NoChromiteError('No cbuildbot found in buildroot %s, expected to '
                            'find %s. ' % (buildroot, full_cbuildbot_path))
    raise

  # A non-zero exit means the target cbuildbot predates the flag; treat it
  # as the initial API state, version 0.0.
  major = minor = 0
  if api.returncode == 0:
    major, minor = [int(x) for x in api.output.strip().split('.', 1)]

  if validate_version and major != constants.REEXEC_API_MAJOR:
    raise ApiMismatchError(
        'The targeted version of chromite in buildroot %s requires '
        'api version %i, but we are api version %i.  We cannot proceed.'
        % (buildroot, major, constants.REEXEC_API_MAJOR))

  return major, minor
+
+
def GetChrootVersion(chroot=None, buildroot=None):
  """Extract the version of the chroot.

  Args:
    chroot: Full path to the chroot to examine.
    buildroot: If |chroot| is not set, find it relative to |buildroot|.

  Returns:
    The version of the chroot dir, or None if it could not be read.

  Raises:
    ValueError: If neither argument is provided.
  """
  if chroot is None and buildroot is None:
    raise ValueError('need either |chroot| or |buildroot| to search')

  # Imported lazily to avoid a circular dependency at module load time.
  from chromite.lib import osutils
  if chroot is None:
    chroot = os.path.join(buildroot, constants.DEFAULT_CHROOT_DIR)
  version_file = os.path.join(chroot, 'etc', 'cros_chroot_version')
  try:
    return osutils.ReadFile(version_file).strip()
  except IOError:
    logging.warning('could not read %s', version_file)
    return None
+
+
def iflatten_instance(iterable, terminate_on_kls=(basestring,)):
  """Derivative of snakeoil.lists.iflatten_instance; flatten an object.

  Given an object, flatten it into a single depth iterable-
  stopping descent on objects that either aren't iterable, or match
  isinstance(obj, terminate_on_kls).

  Example:
  >>> print list(iflatten_instance([1, 2, "as", ["4", 5]]))
  [1, 2, "as", "4", 5]
  """
  def descend_into(item):
    # Never descend into instances of the terminal classes.
    if isinstance(item, terminate_on_kls):
      return False
    try:
      iter(item)
    except TypeError:
      return False
    # Note strings can be infinitely descended through- thus this
    # recursion limiter.
    return not isinstance(item, basestring) or len(item) > 1

  if not descend_into(iterable):
    yield iterable
    return
  for item in iterable:
    if not descend_into(item):
      yield item
    else:
      for subitem in iflatten_instance(item, terminate_on_kls):
        yield subitem
+
+
+# TODO: Remove this once we move to snakeoil.
def load_module(name):
  """load a module

  Args:
    name: python dotted namespace path of the module to import

  Returns:
    imported module

  Raises:
    FailedImport if importing fails
  """
  module = __import__(name)
  # __import__('foo.bar') hands back 'foo'; walk down to the leaf module.
  for component in name.split('.')[1:]:
    module = getattr(module, component)
  return module
+
+
def PredicateSplit(func, iterable):
  """Splits an iterable into two groups based on a predicate return value.

  Args:
    func: A functor that takes an item as its argument and returns a boolean
      value indicating which group the item belongs.
    iterable: The collection to split.

  Returns:
    A tuple containing two lists, the first containing items that func()
    returned True for, and the second containing items that func() returned
    False for.
  """
  trues = []
  falses = []
  for item in iterable:
    if func(item):
      trues.append(item)
    else:
      falses.append(item)
  return trues, falses
+
+
@contextlib.contextmanager
def Open(obj, mode='r'):
  """Convenience ctx that accepts a file path or an already open file object."""
  if not isinstance(obj, basestring):
    # Already a file-like object; hand it through untouched (and unclosed).
    yield obj
  else:
    with open(obj, mode=mode) as f:
      yield f
+
+
def LoadKeyValueFile(obj, ignore_missing=False, multiline=False):
  """Turn a key=value file into a dict

  Note: If you're designing a new data store, please use json rather than
  this format.  This func is designed to work with legacy/external files
  where json isn't an option.

  Args:
    obj: The file to read.  Can be a path or an open file object.
    ignore_missing: If the file does not exist, return an empty dict.
    multiline: Allow a value enclosed by quotes to span multiple lines.

  Returns:
    a dict of all the key=value pairs found in the file.

  Raises:
    ValueError: If a non-blank line lacks an '=' separator.
  """
  d = {}

  try:
    with Open(obj) as f:
      # |key| tracks the last key seen; |in_quotes| holds the open quote
      # character while a multiline value is being accumulated.
      key = None
      in_quotes = None
      for raw_line in f:
        # Strip trailing '#' comments (note: even inside quoted values).
        line = raw_line.split('#')[0]
        if not line.strip():
          continue

        # Continue processing a multiline value.
        if multiline and in_quotes and key:
          if line.rstrip()[-1] == in_quotes:
            # Wrap up the multiline value if the line ends with a quote.
            d[key] += line.rstrip()[:-1]
            in_quotes = None
          else:
            d[key] += line
          continue

        chunks = line.split('=', 1)
        if len(chunks) != 2:
          raise ValueError('Malformed key=value file %r; line %r'
                           % (obj, raw_line))
        key = chunks[0].strip()
        val = chunks[1].strip()
        if len(val) >= 2 and val[0] in "\"'" and val[0] == val[-1]:
          # Strip matching quotes on the same line.
          val = val[1:-1]
        elif val and multiline and val[0] in "\"'":
          # Unmatched quote here indicates a multiline value. Do not
          # strip the '\n' at the end of the line.
          in_quotes = val[0]
          val = chunks[1].lstrip()[1:]
        d[key] = val
  except EnvironmentError as e:
    # Missing files are tolerated only when |ignore_missing| is set.
    if not (ignore_missing and e.errno == errno.ENOENT):
      raise

  return d
+
+
def MemoizedSingleCall(functor):
  """Decorator for simple functor targets, caching the results

  The functor must accept no arguments beyond either a class or self (depending
  on if this is used in a classmethod/instancemethod context).  Results of the
  wrapped method will be written to the class/instance namespace in a specially
  named cached value.  All future invocations will just reuse that value.

  Note that this cache is per-process, so sibling and parent processes won't
  notice updates to the cache.
  """
  # TODO(build): Should we rebase to snakeoil.klass.cached* functionality?
  # pylint: disable=protected-access
  @functools.wraps(functor)
  def wrapper(obj):
    attr = wrapper._cache_key
    cached = getattr(obj, attr, None)
    if cached is None:
      cached = functor(obj)
      setattr(obj, attr, cached)
    return cached

  # Use name mangling to store the cached value in a (hopefully) unique place.
  wrapper._cache_key = '_%s_cached' % (functor.__name__.lstrip('_'),)
  return wrapper
+
+
def Memoize(f):
  """Decorator for memoizing a function.

  Caches all calls to the function using a ._memo_cache dict mapping (args,
  kwargs) to the results of the first function call with those args and kwargs.

  If any of args or kwargs are not hashable, trying to store them in a dict will
  cause a ValueError.

  Note that this cache is per-process, so sibling and parent processes won't
  notice updates to the cache.
  """
  # pylint: disable=protected-access
  f._memo_cache = {}

  @functools.wraps(f)
  def wrapper(*args, **kwargs):
    # The key is hashable as long as the contents of args and kwargs are.
    # TODO(phobbs) we could add an option to use the id(...) of an object if
    # it's not hashable.  Then "MemoizedSingleCall" would be obsolete.
    key = (tuple(args), tuple(sorted(kwargs.items())))
    try:
      return f._memo_cache[key]
    except KeyError:
      result = f._memo_cache[key] = f(*args, **kwargs)
      return result

  return wrapper
+
+
def SafeRun(functors, combine_exceptions=False):
  """Executes a list of functors, continuing on exceptions.

  Args:
    functors: An iterable of functors to call.
    combine_exceptions: If set, and multiple exceptions are encountered,
      SafeRun will raise a RuntimeError containing a list of all the exceptions.
      If only one exception is encountered, then the default behavior of
      re-raising the original exception with unmodified stack trace will be
      kept.

  Raises:
    The first exception encountered, with corresponding backtrace, unless
    |combine_exceptions| is specified and there is more than one exception
    encountered, in which case a RuntimeError containing a list of all the
    exceptions that were encountered is raised.
  """
  errors = []

  for f in functors:
    try:
      f()
    except Exception as e:
      # Append the exception object and the traceback.
      errors.append((e, sys.exc_info()[2]))

  if errors:
    if len(errors) == 1 or not combine_exceptions:
      # To preserve the traceback.
      inst, tb = errors[0]
      # NOTE: Python 2 three-expression raise; re-raises with the original
      # traceback attached.
      raise inst, None, tb
    else:
      raise RuntimeError([e[0] for e in errors])
+
+
def ParseDurationToSeconds(duration):
  """Parses a string duration of the form HH:MM:SS into seconds.

  Args:
    duration: A string such as '12:43:12' (representing in this case
              12 hours, 43 minutes, 12 seconds).

  Returns:
    An integer number of seconds.
  """
  hours, minutes, seconds = (int(part) for part in duration.split(':'))
  return 3600 * hours + 60 * minutes + seconds
+
+
def UserDateTimeFormat(timeval=None):
  """Format a date meant to be viewed by a user

  The focus here is to have a format that is easily readable by humans,
  but still easy (and unambiguous) for a machine to parse.  Hence, we
  use the RFC 2822 date format (with timezone name appended).

  Args:
    timeval: Either a datetime object or a floating point time value as accepted
             by gmtime()/localtime().  If None, the current time is used.

  Returns:
    A string format such as 'Wed, 20 Feb 2013 15:25:15 -0500 (EST)'
  """
  if isinstance(timeval, datetime):
    # Convert a datetime to the float form the formatting helpers expect.
    timeval = time.mktime(timeval.timetuple())
  rfc2822 = email.utils.formatdate(timeval=timeval, localtime=True)
  tz_name = time.strftime('%Z', time.localtime(timeval))
  return '%s (%s)' % (rfc2822, tz_name)
+
+
+def GetCommonPathPrefix(paths):
+  """Get the longest common directory of |paths|.
+
+  Args:
+    paths: A list of absolute directory or file paths.
+
+  Returns:
+    Absolute path to the longest directory common to |paths|, with no
+    trailing '/'.
+  """
+  return os.path.dirname(os.path.commonprefix(paths))
+
+
def ParseUserDateTimeFormat(time_string):
  """Parse a time string into a floating point time value.

  This function is essentially the inverse of UserDateTimeFormat.

  Args:
    time_string: A string datetime representation in RFC 2822 format, such as
                 'Wed, 20 Feb 2013 15:25:15 -0500 (EST)'.

  Returns:
    Floating point Unix timestamp (seconds since epoch).
  """
  parsed = email.utils.parsedate_tz(time_string)
  return email.utils.mktime_tz(parsed)
+
+
def GetDefaultBoard():
  """Gets the default board.

  Returns:
    The default board (as a string), or None if either the default board
    file was missing or malformed.
  """
  board_file = os.path.join(constants.SOURCE_ROOT, 'src', 'scripts',
                            '.default_board')
  try:
    with open(board_file) as f:
      default_board = f.read().strip()
  except IOError:
    return None

  # Check for user typos like whitespace
  if not re.match('[a-zA-Z0-9-_]*$', default_board):
    logging.warning('Noticed invalid default board: |%s|. Ignoring this '
                    'default.', default_board)
    return None

  return default_board
+
+
def GetBoard(device_board, override_board=None, force=False):
  """Gets the board name to use.

  Ask user to confirm when |override_board| and |device_board| are
  both None.

  Args:
    device_board: The board detected on the device.
    override_board: Overrides the board.
    force: Force using the default board if |device_board| is None.

  Returns:
    Returns the first non-None board in the following order:
    |override_board|, |device_board|, and GetDefaultBoard().

  Raises:
    DieSystemExit: If user enters no.
  """
  if override_board:
    return override_board

  board = device_board or GetDefaultBoard()
  if device_board:
    return board

  # Falling back to the default board: confirm with the user unless forced.
  msg = 'Cannot detect board name; using default board %s.' % board
  if not force and not BooleanPrompt(default=False, prolog=msg):
    Die('Exiting...')

  logging.warning(msg)
  return board
+
+
class AttributeFrozenError(Exception):
  """Raised when an attribute of a frozen instance is modified."""
+
+
class FrozenAttributesClass(type):
  """Metaclass for any class to support freezing attribute values.

  This metaclass can be used by any class to add the ability to
  freeze attribute values with the Freeze method.

  Use by adding this line in a class:
    __metaclass__ = FrozenAttributesClass
  """
  # Message raised on mutation of a frozen instance; classes may override it
  # by defining their own _FROZEN_ERR_MSG.
  _FROZEN_ERR_MSG = 'Attribute values are frozen, cannot alter %s.'

  def __new__(mcs, clsname, bases, scope):
    # Create Freeze method that freezes current attributes.
    if 'Freeze' in scope:
      raise TypeError('Class %s has its own Freeze method, cannot use with'
                      ' the FrozenAttributesClass metaclass.' % clsname)

    # Make sure cls will have _FROZEN_ERR_MSG set.
    scope.setdefault('_FROZEN_ERR_MSG', mcs._FROZEN_ERR_MSG)

    # Create the class.
    # pylint: disable=bad-super-call
    cls = super(FrozenAttributesClass, mcs).__new__(mcs, clsname, bases, scope)

    # Replace cls.__setattr__ with the one that honors freezing.
    orig_setattr = cls.__setattr__

    def SetAttr(obj, name, value):
      """If the object is frozen then abort."""
      # pylint: disable=protected-access
      if getattr(obj, '_frozen', False):
        raise AttributeFrozenError(obj._FROZEN_ERR_MSG % name)
      # Delegate to the previous __setattr__.  The MethodType check picks
      # between calling a Python-level method directly and going through
      # super() for slot wrappers (presumably a Python 2 bound/unbound
      # method distinction -- verify before changing).
      if isinstance(orig_setattr, types.MethodType):
        orig_setattr(obj, name, value)
      else:
        super(cls, obj).__setattr__(name, value)
    cls.__setattr__ = SetAttr

    # Add new cls.Freeze method.
    def Freeze(obj):
      # pylint: disable=protected-access
      obj._frozen = True
    cls.Freeze = Freeze

    return cls
+
+
class FrozenAttributesMixin(object):
  """Alternate mechanism for freezing attributes in a class.

  If an existing class is not a new-style class then it will be unable to
  use the FrozenAttributesClass metaclass directly.  Simply inherit from this
  mixin instead to get the same freezing behavior.
  """
  __metaclass__ = FrozenAttributesClass
+
+
def GetIPv4Address(dev=None, global_ip=True):
  """Returns any global/host IP address or the IP address of the given device.

  socket.gethostname() is insufficient for machines where the host files are
  not set up "correctly."  Since some of our builders may have this issue,
  this method gives you a generic way to get the address so you are reachable
  either via a VM or remote machine on the same network.

  Args:
    dev: Get the IP address of the device (e.g. 'eth0').
    global_ip: If set True, returns a globally valid IP address. Otherwise,
      returns a local IP address (default: True).
  """
  cmd = ['ip', 'addr', 'show', 'scope', 'global' if global_ip else 'host']
  if dev is not None:
    cmd += ['dev', dev]

  result = RunCommand(cmd, print_cmd=False, capture_output=True)
  match = re.search(r'\binet (\d+\.\d+\.\d+\.\d+).*', result.output)
  if match:
    return match.group(1)
  logging.warning('Failed to find ip address in %r', result.output)
  return None
+
+
+def GetSysroot(board=None):
+  """Returns the sysroot for |board| or '/' if |board| is None."""
+  return '/' if board is None else os.path.join('/build', board)
+
+
+def Collection(classname, **kwargs):
+  """Create a new class with mutable named members.
+
+  This is like collections.namedtuple, but mutable.  Also similar to the
+  python 3.3 types.SimpleNamespace.
+
+  Example:
+    # Declare default values for this new class.
+    Foo = cros_build_lib.Collection('Foo', a=0, b=10)
+    # Create a new class but set b to 4.
+    foo = Foo(b=4)
+    # Print out a (will be the default 0) and b (will be 4).
+    print('a = %i, b = %i' % (foo.a, foo.b))
+  """
+
+  def sn_init(self, **kwargs):
+    """The new class's __init__ function."""
+    # First verify the kwargs don't have excess settings.
+    # __slots__[0] is '__defaults__', so the member names start at index 1.
+    valid_keys = set(self.__slots__[1:])
+    these_keys = set(kwargs.keys())
+    invalid_keys = these_keys - valid_keys
+    if invalid_keys:
+      raise TypeError('invalid keyword arguments for this object: %r' %
+                      invalid_keys)
+
+    # Now initialize this object.
+    for k in valid_keys:
+      setattr(self, k, kwargs.get(k, self.__defaults__[k]))
+
+  def sn_repr(self):
+    """The new class's __repr__ function."""
+    return '%s(%s)' % (classname, ', '.join(
+        '%s=%r' % (k, getattr(self, k)) for k in self.__slots__[1:]))
+
+  # Give the new class a unique name and then generate the code for it.
+  classname = 'Collection_%s' % classname
+  expr = '\n'.join((
+      'class %(classname)s(object):',
+      '  __slots__ = ["__defaults__", "%(slots)s"]',
+      '  __defaults__ = {}',
+  )) % {
+      'classname': classname,
+      'slots': '", "'.join(sorted(str(k) for k in kwargs)),
+  }
+
+  # Create the class in a local namespace as exec requires.
+  # NOTE: `exec expr in namespace` is Python 2-only statement syntax.
+  namespace = {}
+  exec expr in namespace
+  new_class = namespace[classname]
+
+  # Bind the helpers.  The real per-class defaults replace the placeholder
+  # '__defaults__' assigned in the generated class body above.
+  new_class.__defaults__ = kwargs.copy()
+  new_class.__init__ = sn_init
+  new_class.__repr__ = sn_repr
+
+  return new_class
+
+
+# One row of a partition table.  The parsers below fill |number| with an int,
+# |start|/|end|/|size| with floats in the requested unit, and
+# |file_system|/|name|/|flags| with (possibly empty) strings.
+PartitionInfo = collections.namedtuple(
+    'PartitionInfo',
+    ['number', 'start', 'end', 'size', 'file_system', 'name', 'flags']
+)
+
+
+def _ParseParted(lines, unit='MB'):
+  """Returns partition information from `parted print` output."""
+  ret = []
+  # Sample output (partition #, start, end, size, file system, name, flags):
+  #   /foo/chromiumos_qemu_image.bin:3360MB:file:512:512:gpt:;
+  #   11:0.03MB:8.42MB:8.39MB::RWFW:;
+  #   6:8.42MB:8.42MB:0.00MB::KERN-C:;
+  #   7:8.42MB:8.42MB:0.00MB::ROOT-C:;
+  #   9:8.42MB:8.42MB:0.00MB::reserved:;
+  #   10:8.42MB:8.42MB:0.00MB::reserved:;
+  #   2:10.5MB:27.3MB:16.8MB::KERN-A:;
+  #   4:27.3MB:44.0MB:16.8MB::KERN-B:;
+  #   8:44.0MB:60.8MB:16.8MB:ext4:OEM:;
+  #   12:128MB:145MB:16.8MB:fat16:EFI-SYSTEM:boot;
+  #   5:145MB:2292MB:2147MB::ROOT-B:;
+  #   3:2292MB:4440MB:2147MB:ext2:ROOT-A:;
+  #   1:4440MB:7661MB:3221MB:ext4:STATE:;
+  pattern = re.compile(r'(([^:]*:){6}[^:]*);')
+  for line in lines:
+    match = pattern.match(line)
+    if match:
+      d = dict(zip(PartitionInfo._fields, match.group(1).split(':')))
+      # Disregard any non-numeric partition number (e.g. the file path).
+      if d['number'].isdigit():
+        d['number'] = int(d['number'])
+        for key in ['start', 'end', 'size']:
+          d[key] = float(d[key][:-len(unit)])
+        ret.append(PartitionInfo(**d))
+  return ret
+
+
+def _ParseCgpt(lines, unit='MB'):
+  """Returns partition information from `cgpt show` output."""
+  #   start        size    part  contents
+  # 1921024     2097152       1  Label: "STATE"
+  #                              Type: Linux data
+  #                              UUID: EEBD83BE-397E-BD44-878B-0DDDD5A5C510
+  #   20480       32768       2  Label: "KERN-A"
+  #                              Type: ChromeOS kernel
+  #                              UUID: 7007C2F3-08E5-AB40-A4BC-FF5B01F5460D
+  #                              Attr: priority=15 tries=15 successful=1
+  start_pattern = re.compile(r'''\s+(\d+)\s+(\d+)\s+(\d+)\s+Label: "(.+)"''')
+  ret = []
+  line_no = 0
+  while line_no < len(lines):
+    line = lines[line_no]
+    line_no += 1
+    m = start_pattern.match(line)
+    if not m:
+      continue
+
+    start, size, number, label = m.groups()
+    number = int(number)
+    start = int(start) * 512
+    size = int(size) * 512
+    end = start + size
+    # Parted uses 1000, not 1024.
+    divisors = {
+        'B': 1.0,
+        'KB': 1000.0,
+        'MB': 1000000.0,
+        'GB': 1000000000.0,
+    }
+    divisor = divisors[unit]
+    start = start / divisor
+    end = end / divisor
+    size = size / divisor
+
+    ret.append(PartitionInfo(number=number, start=start, end=end, size=size,
+                             name=label, file_system='', flags=''))
+
+  return ret
+
+
+def GetImageDiskPartitionInfo(image_path, unit='MB', key_selector='name'):
+  """Returns the disk partition table of an image.
+
+  Args:
+    image_path: Path to the image file.
+    unit: The unit to display (e.g., 'B', 'KB', 'MB', 'GB').
+      See `parted` documentation for more info.
+    key_selector: The value of the partition that will be used as the key for
+      that partition in this function's returned dictionary.
+
+  Returns:
+    A dictionary of ParitionInfo items keyed by |key_selector|.
+  """
+
+  if IsInsideChroot():
+    # Inside chroot, use `cgpt`.
+    cmd = ['cgpt', 'show', image_path]
+    func = _ParseCgpt
+  else:
+    # Outside chroot, use `parted`.
+    cmd = ['parted', '-m', image_path, 'unit', unit, 'print']
+    func = _ParseParted
+
+  lines = RunCommand(
+      cmd,
+      extra_env={'PATH': '/sbin:%s' % os.environ['PATH'], 'LC_ALL': 'C'},
+      capture_output=True).output.splitlines()
+  infos = func(lines, unit)
+  selector = operator.attrgetter(key_selector)
+  return dict((selector(x), x) for x in infos)
+
+
+def GetRandomString(length=20):
+  """Returns a random string of |length|."""
+  md5 = hashlib.md5(os.urandom(length))
+  md5.update(UserDateTimeFormat())
+  return md5.hexdigest()
+
+
+def MachineDetails():
+  """Returns a string to help identify the source of a job.
+
+  This is not meant for machines to parse; instead, we want content that is easy
+  for humans to read when trying to figure out where "something" is coming from.
+  For example, when a service has grabbed a lock in Google Storage, and we want
+  to see what process actually triggered that (in case it is a test gone rogue),
+  the content in here should help triage.
+
+  Note: none of the details included may be secret so they can be freely pasted
+  into bug reports/chats/logs/etc...
+
+  Note: this content should not be large
+
+  Returns:
+    A string with content that helps identify this system/process/etc...
+  """
+  return '\n'.join((
+      'PROG=%s' % __main__.__file__,
+      'USER=%s' % getpass.getuser(),
+      'HOSTNAME=%s' % GetHostName(fully_qualified=True),
+      'PID=%s' % os.getpid(),
+      'TIMESTAMP=%s' % UserDateTimeFormat(),
+      'RANDOM_JUNK=%s' % GetRandomString(),
+  )) + '\n'
+
+
+def FormatDetailedTraceback(exc_info=None):
+  """Generate a traceback including details like local variables.
+
+  Args:
+    exc_info: The exception tuple to format; defaults to sys.exc_info().
+      See the help on that function for details on the type.
+
+  Returns:
+    A string of the formatted |exc_info| details.
+  """
+  if exc_info is None:
+    exc_info = sys.exc_info()
+
+  # Accumulate the output as a list of strings, joined once at the end.
+  ret = []
+  try:
+    # pylint: disable=unpacking-non-sequence
+    exc_type, exc_value, exc_tb = exc_info
+
+    if exc_type:
+      ret += [
+          'Traceback (most recent call last):\n',
+          'Note: Call args reflect *current* state, not *entry* state\n',
+      ]
+
+    # Walk the traceback one frame at a time so the standard frame listing
+    # can be interleaved with each frame's call args and locals.
+    while exc_tb:
+      frame = exc_tb.tb_frame
+
+      ret += traceback.format_tb(exc_tb, 1)
+      args = inspect.getargvalues(frame)
+      # extract_tb yields (filename, lineno, funcname, text) tuples; only
+      # the function name is needed here.
+      _, _, fname, _ = traceback.extract_tb(exc_tb, 1)[0]
+      ret += [
+          '    Call: %s%s\n' % (fname, inspect.formatargvalues(*args)),
+          '    Locals:\n',
+      ]
+      if frame.f_locals:
+        # Compute column widths so names and types line up per frame.
+        keys = sorted(frame.f_locals.keys(), key=str.lower)
+        keylen = max(len(x) for x in keys)
+        typelen = max(len(str(type(x))) for x in frame.f_locals.values())
+        for key in keys:
+          val = frame.f_locals[key]
+          ret += ['      %-*s: %-*s %s\n' %
+                  (keylen, key, typelen, type(val), pprint.saferepr(val))]
+      exc_tb = exc_tb.tb_next
+
+    if exc_type:
+      ret += traceback.format_exception_only(exc_type, exc_value)
+  finally:
+    # Help python with its circular references.
+    del exc_tb
+
+  return ''.join(ret)
+
+
+def PrintDetailedTraceback(exc_info=None, file=None):
+  """Print a traceback including details like local variables.
+
+  Args:
+    exc_info: The exception tuple to format; defaults to sys.exc_info().
+      See the help on that function for details on the type.
+    file: The file object to write the details to; defaults to sys.stderr.
+  """
+  # We use |file| to match the existing traceback API.
+  # pylint: disable=redefined-builtin
+  if exc_info is None:
+    exc_info = sys.exc_info()
+  if file is None:
+    file = sys.stderr
+
+  # Try to print out extended details on the current exception.
+  # If that fails, still fallback to the normal exception path.
+  curr_exc_info = exc_info
+  try:
+    output = FormatDetailedTraceback()
+    if output:
+      print(output, file=file)
+  except Exception:
+    print('Could not decode extended exception details:', file=file)
+    traceback.print_exc(file=file)
+    print(file=file)
+    traceback.print_exception(*curr_exc_info, file=sys.stdout)
+  finally:
+    # Help python with its circular references.
+    del exc_info
+    del curr_exc_info
+
+
+class _FdCapturer(object):
+  """Helper class to capture output at the file descriptor level.
+
+  This is meant to be used with sys.stdout or sys.stderr. By capturing
+  file descriptors, this will also intercept subprocess output, which
+  reassigning sys.stdout or sys.stderr will not do.
+
+  Output will only be captured, it will no longer be printed while
+  the capturer is active.
+  """
+
+  def __init__(self, source, output=None):
+    """Construct the _FdCapturer object.
+
+    Does not start capturing until Start() is called.
+
+    Args:
+      source: A file object to capture. Typically sys.stdout or
+        sys.stderr, but will work with anything that implements flush()
+        and fileno().
+      output: A file name where the captured output is to be stored. If None,
+        then the output will be stored to a temporary file.
+    """
+    self._source = source
+    # Text already read back out of the capture file.
+    self._captured = ''
+    # Duplicate of the source's original fd; used to restore it in Stop().
+    self._saved_fd = None
+    # Reference to the NamedTemporaryFile (if any) so it stays open/alive.
+    self._tempfile = None
+    # File object the redirected fd writes into, and a reader to fetch the
+    # contents back out of it.
+    self._capturefile = None
+    self._capturefile_reader = None
+    self._capturefile_name = output
+
+  def _SafeCreateTempfile(self, tempfile_obj):
+    """Ensure that the tempfile is created safely.
+
+    (1) Stash away a reference to the tempfile.
+    (2) Unlink the file from the filesystem.
+
+    (2) ensures that if we crash, the file gets deleted. (1) ensures that while
+    we are running, we hold a reference to the file so the system does not close
+    the file.
+
+    Args:
+      tempfile_obj: A tempfile object.
+    """
+    self._tempfile = tempfile_obj
+    os.unlink(tempfile_obj.name)
+
+  def Start(self):
+    """Begin capturing output."""
+    if self._capturefile_name is None:
+      tempfile_obj = tempfile.NamedTemporaryFile(delete=False)
+      self._capturefile = tempfile_obj.file
+      self._capturefile_name = tempfile_obj.name
+      self._capturefile_reader = open(self._capturefile_name)
+      self._SafeCreateTempfile(tempfile_obj)
+    else:
+      # Open file passed in for writing. Set buffering=1 for line level
+      # buffering.
+      self._capturefile = open(self._capturefile_name, 'w', buffering=1)
+      self._capturefile_reader = open(self._capturefile_name)
+    # Save the original fd so we can revert in Stop().
+    self._saved_fd = os.dup(self._source.fileno())
+    # Point the source's fd at the capture file; anything written to the
+    # source (including by subprocesses) now lands in the file instead.
+    os.dup2(self._capturefile.fileno(), self._source.fileno())
+
+  def Stop(self):
+    """Stop capturing output."""
+    # Drain any remaining output into self._captured before tearing down.
+    self.GetCaptured()
+    if self._saved_fd is not None:
+      os.dup2(self._saved_fd, self._source.fileno())
+      os.close(self._saved_fd)
+      self._saved_fd = None
+    # If capturefile and capturefile_reader exist, close them as they were
+    # opened in self.Start().
+    if self._capturefile_reader is not None:
+      self._capturefile_reader.close()
+      self._capturefile_reader = None
+    if self._capturefile is not None:
+      self._capturefile.close()
+      self._capturefile = None
+
+  def GetCaptured(self):
+    """Return all output captured up to this point.
+
+    Can be used while capturing or after Stop() has been called.
+    """
+    # Flush the source so buffered writes reach the capture file first.
+    self._source.flush()
+    if self._capturefile_reader is not None:
+      self._captured += self._capturefile_reader.read()
+    return self._captured
+
+  def ClearCaptured(self):
+    """Erase all captured output."""
+    # Consume anything still pending so it is not returned later.
+    self.GetCaptured()
+    self._captured = ''
+
+
+class OutputCapturer(object):
+  """Class for capturing stdout/stderr output.
+
+  Class is designed as a 'ContextManager'.  Example usage:
+
+  with cros_build_lib.OutputCapturer() as output:
+    # Capturing of stdout/stderr automatically starts now.
+    # Do stuff that sends output to stdout/stderr.
+    # Capturing automatically stops at end of 'with' block.
+
+  # stdout/stderr can be retrieved from the OutputCapturer object:
+  stdout = output.GetStdoutLines() # Or other access methods
+
+  # Some Assert methods are only valid if capturing was used in test.
+  self.AssertOutputContainsError() # Or other related methods
+
+  # OutputCapturer can also be used to capture output to specified files.
+  with self.OutputCapturer(stdout_path='/tmp/stdout.txt') as output:
+    # Do stuff.
+    # stdout will be captured to /tmp/stdout.txt.
+  """
+
+  OPER_MSG_SPLIT_RE = re.compile(r'^\033\[1;.*?\033\[0m$|^[^\n]*$',
+                                 re.DOTALL | re.MULTILINE)
+
+  __slots__ = ['_stdout_capturer', '_stderr_capturer', '_quiet_fail']
+
+  def __init__(self, stdout_path=None, stderr_path=None, quiet_fail=False):
+    """Initalize OutputCapturer with capture files.
+
+    If OutputCapturer is initialized with filenames to capture stdout and stderr
+    to, then those files are used. Otherwise, temporary files are created.
+
+    Args:
+      stdout_path: File to capture stdout to. If None, a temporary file is used.
+      stderr_path: File to capture stderr to. If None, a temporary file is used.
+      quiet_fail: If True fail quietly without printing the captured stdout and
+        stderr.
+    """
+    self._stdout_capturer = _FdCapturer(sys.stdout, output=stdout_path)
+    self._stderr_capturer = _FdCapturer(sys.stderr, output=stderr_path)
+    self._quiet_fail = quiet_fail
+
+  def __enter__(self):
+    # This method is called with entering 'with' block.
+    self.StartCapturing()
+    return self
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    # This method is called when exiting 'with' block.
+    self.StopCapturing()
+
+    if exc_type and not self._quiet_fail:
+      print('Exception during output capturing: %r' % (exc_val,))
+      stdout = self.GetStdout()
+      if stdout:
+        print('Captured stdout was:\n%s' % stdout)
+      else:
+        print('No captured stdout')
+      stderr = self.GetStderr()
+      if stderr:
+        print('Captured stderr was:\n%s' % stderr)
+      else:
+        print('No captured stderr')
+
+  def StartCapturing(self):
+    """Begin capturing stdout and stderr."""
+    self._stdout_capturer.Start()
+    self._stderr_capturer.Start()
+
+  def StopCapturing(self):
+    """Stop capturing stdout and stderr."""
+    self._stdout_capturer.Stop()
+    self._stderr_capturer.Stop()
+
+  def ClearCaptured(self):
+    """Clear any captured stdout/stderr content."""
+    self._stdout_capturer.ClearCaptured()
+    self._stderr_capturer.ClearCaptured()
+
+  def GetStdout(self):
+    """Return captured stdout so far."""
+    return self._stdout_capturer.GetCaptured()
+
+  def GetStderr(self):
+    """Return captured stderr so far."""
+    return self._stderr_capturer.GetCaptured()
+
+  def _GetOutputLines(self, output, include_empties):
+    """Split |output| into lines, optionally |include_empties|.
+
+    Return array of lines.
+    """
+
+    lines = self.OPER_MSG_SPLIT_RE.findall(output)
+    if not include_empties:
+      lines = [ln for ln in lines if ln]
+
+    return lines
+
+  def GetStdoutLines(self, include_empties=True):
+    """Return captured stdout so far as array of lines.
+
+    If |include_empties| is false filter out all empty lines.
+    """
+    return self._GetOutputLines(self.GetStdout(), include_empties)
+
+  def GetStderrLines(self, include_empties=True):
+    """Return captured stderr so far as array of lines.
+
+    If |include_empties| is false filter out all empty lines.
+    """
+    return self._GetOutputLines(self.GetStderr(), include_empties)
diff --git a/lib/cros_build_lib_unittest b/lib/cros_build_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/cros_build_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/cros_build_lib_unittest.py b/lib/cros_build_lib_unittest.py
new file mode 100644
index 0000000..87167ea
--- /dev/null
+++ b/lib/cros_build_lib_unittest.py
@@ -0,0 +1,1814 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the cros_build_lib module."""
+
+from __future__ import print_function
+
+import contextlib
+import datetime
+import difflib
+import errno
+import functools
+import itertools
+import mock
+import os
+import signal
+import socket
+import StringIO
+import sys
+import time
+import __builtin__
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import repository
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import partial_mock
+from chromite.lib import retry_util
+from chromite.lib import signals as cros_signals
+
+
+# pylint: disable=protected-access,too-many-public-methods
+
+
+class RunCommandErrorStrTest(cros_test_lib.TestCase):
+  """Test that RunCommandError __str__ works as expected."""
+
+  def testNonUTF8Characters(self):
+    """Test that non-UTF8 characters do not kill __str__"""
+    result = cros_build_lib.RunCommand(['ls', '/does/not/exist'],
+                                       error_code_ok=True)
+    rce = cros_build_lib.RunCommandError('\x81', result)
+    str(rce)
+
+
+class CmdToStrTest(cros_test_lib.TestCase):
+  """Test the CmdToStr function."""
+
+  def setUp(self):
+    # Used to produce readable diffs in assertion failure messages.
+    self.differ = difflib.Differ()
+
+  def _assertEqual(self, func, test_input, test_output, result):
+    """Like assertEqual but with built in diff support."""
+    diff = '\n'.join(list(self.differ.compare([test_output], [result])))
+    msg = ('Expected %s to translate %r to %r, but got %r\n%s' %
+           (func, test_input, test_output, result, diff))
+    self.assertEqual(test_output, result, msg)
+
+  def _testData(self, functor, tests, check_type=True):
+    """Process a dict of test data.
+
+    Args:
+      functor: Function under test; called with each test input.
+      tests: Dict mapping expected output -> input.
+      check_type: If True, also assert the str-ness check below.
+    """
+    for test_output, test_input in tests.iteritems():
+      result = functor(test_input)
+      self._assertEqual(functor.__name__, test_input, test_output, result)
+
+      if check_type:
+        # Also make sure the result is a string, otherwise the %r output will
+        # include a "u" prefix and that is not good for logging.
+        # NOTE(review): this asserts on |test_output| (the expected value),
+        # not |result| as the comment suggests -- possibly intentional since
+        # some inputs are unicode; confirm whether |result| should be checked.
+        self.assertEqual(type(test_output), str)
+
+  def testShellQuote(self):
+    """Basic ShellQuote tests."""
+    # Dict of expected output strings to input lists.
+    tests_quote = {
+        "''": '',
+        'a': unicode('a'),
+        "'a b c'": unicode('a b c'),
+        "'a\tb'": 'a\tb',
+        "'/a$file'": '/a$file',
+        "'/a#file'": '/a#file',
+        """'b"c'""": 'b"c',
+        "'a@()b'": 'a@()b',
+        'j%k': 'j%k',
+        r'''"s'a\$va\\rs"''': r"s'a$va\rs",
+        r'''"\\'\\\""''': r'''\'\"''',
+        r'''"'\\\$"''': r"""'\$""",
+    }
+
+    # Expected input output specific to ShellUnquote. This string cannot be
+    # produced by ShellQuote but is still a valid bash escaped string.
+    tests_unquote = {
+        r'''\$''': r'''"\\$"''',
+    }
+
+    def aux(s):
+      # Round-trip helper: quote then unquote should be the identity.
+      return cros_build_lib.ShellUnquote(cros_build_lib.ShellQuote(s))
+
+    self._testData(cros_build_lib.ShellQuote, tests_quote)
+    self._testData(cros_build_lib.ShellUnquote, tests_unquote)
+
+    # Test that the operations are reversible.
+    self._testData(aux, {k: k for k in tests_quote.values()}, False)
+    self._testData(aux, {k: k for k in tests_quote.keys()}, False)
+
+  def testCmdToStr(self):
+    """Verify CmdToStr renders argv lists as shell-quoted strings."""
+    # Dict of expected output strings to input lists.
+    tests = {
+        r"a b": ['a', 'b'],
+        r"'a b' c": ['a b', 'c'],
+        r'''a "b'c"''': ['a', "b'c"],
+        r'''a "/'\$b" 'a b c' "xy'z"''':
+            [unicode('a'), "/'$b", 'a b c', "xy'z"],
+        '': [],
+    }
+    self._testData(cros_build_lib.CmdToStr, tests)
+
+
+class RunCommandMock(partial_mock.PartialCmdMock):
+  """Provides a context where all RunCommand invocations low-level mocked."""
+
+  TARGET = 'chromite.lib.cros_build_lib'
+  ATTRS = ('RunCommand',)
+  DEFAULT_ATTR = 'RunCommand'
+
+  def RunCommand(self, cmd, *args, **kwargs):
+    """Mocked RunCommand: run the real code path over a canned _Popen.
+
+    Looks up the result registered for |cmd|, then re-invokes the original
+    RunCommand with _Popen mocked out so the canned returncode/output/error
+    are produced while RunCommand's own logic still executes.
+    """
+    result = self._results['RunCommand'].LookupResult(
+        (cmd,), hook_args=(cmd,) + args, hook_kwargs=kwargs)
+
+    popen_mock = PopenMock()
+    popen_mock.AddCmdResult(partial_mock.Ignore(), result.returncode,
+                            result.output, result.error)
+    with popen_mock:
+      return self.backup['RunCommand'](cmd, *args, **kwargs)
+
+
+class RunCommandTestCase(cros_test_lib.MockTestCase):
+  """MockTestCase that mocks out RunCommand by default."""
+
+  def setUp(self):
+    # Every RunCommand call is intercepted and returns the default canned
+    # result unless a test registers something more specific.
+    self.rc = self.StartPatcher(RunCommandMock())
+    self.rc.SetDefaultCmdResult()
+    # Re-export the mock's assertion helpers for convenient use in subclasses.
+    self.assertCommandCalled = self.rc.assertCommandCalled
+    self.assertCommandContains = self.rc.assertCommandContains
+
+
+class RunCommandTempDirTestCase(RunCommandTestCase,
+                                cros_test_lib.TempDirTestCase):
+  """Convenience class mixing TempDirTestCase and RunCommandTestCase."""
+
+
+class PopenMock(partial_mock.PartialCmdMock):
+  """Provides a context where all _Popen instances are low-level mocked."""
+
+  TARGET = 'chromite.lib.cros_build_lib._Popen'
+  ATTRS = ('__init__',)
+  DEFAULT_ATTR = '__init__'
+
+  def __init__(self):
+    # A tempdir is needed to hold the generated stand-in scripts below.
+    partial_mock.PartialCmdMock.__init__(self, create_tempdir=True)
+
+  def _target__init__(self, inst, cmd, *args, **kwargs):
+    """Replacement _Popen.__init__ that substitutes a canned-result script."""
+    result = self._results['__init__'].LookupResult(
+        (cmd,), hook_args=(inst, cmd,) + args, hook_kwargs=kwargs)
+
+    # Write a small shell script that emits the canned stdout/stderr and
+    # exits with the canned status, then really execute that instead of |cmd|.
+    script = os.path.join(self.tempdir, 'mock_cmd.sh')
+    stdout = os.path.join(self.tempdir, 'output')
+    stderr = os.path.join(self.tempdir, 'error')
+    osutils.WriteFile(stdout, result.output)
+    osutils.WriteFile(stderr, result.error)
+    osutils.WriteFile(
+        script,
+        ['#!/bin/bash\n', 'cat %s\n' % stdout, 'cat %s >&2\n' % stderr,
+         'exit %s' % result.returncode])
+    os.chmod(script, 0o700)
+    kwargs['cwd'] = self.tempdir
+    # The original argv is kept (after '--') so assertions can inspect it.
+    self.backup['__init__'](inst, [script, '--'] + cmd, *args, **kwargs)
+
+
+class TestRunCommandNoMock(cros_test_lib.TestCase):
+  """Class that tests RunCommand by not mocking subprocess.Popen"""
+
+  def testErrorCodeNotRaisesError(self):
+    """Don't raise exception when command returns non-zero exit code."""
+    result = cros_build_lib.RunCommand(['ls', '/does/not/exist'],
+                                       error_code_ok=True)
+    self.assertTrue(result.returncode != 0)
+
+  def testMissingCommandRaisesError(self):
+    """Raise error when command is not found."""
+    self.assertRaises(cros_build_lib.RunCommandError, cros_build_lib.RunCommand,
+                      ['/does/not/exist'], error_code_ok=False)
+    self.assertRaises(cros_build_lib.RunCommandError, cros_build_lib.RunCommand,
+                      ['/does/not/exist'], error_code_ok=True)
+
+  def testInputString(self):
+    """Verify input argument when it is a string."""
+    for data in ('', 'foo', 'bar\nhigh'):
+      result = cros_build_lib.RunCommand(['cat'], input=data)
+      self.assertEqual(result.output, data)
+
+  def testInputFileObject(self):
+    """Verify input argument when it is a file object."""
+    result = cros_build_lib.RunCommand(['cat'], input=open('/dev/null'))
+    self.assertEqual(result.output, '')
+
+    result = cros_build_lib.RunCommand(['cat'], input=open(__file__))
+    self.assertEqual(result.output, osutils.ReadFile(__file__))
+
+  def testInputFileDescriptor(self):
+    """Verify input argument when it is a file descriptor."""
+    with open('/dev/null') as f:
+      result = cros_build_lib.RunCommand(['cat'], input=f.fileno())
+      self.assertEqual(result.output, '')
+
+    with open(__file__) as f:
+      result = cros_build_lib.RunCommand(['cat'], input=f.fileno())
+      self.assertEqual(result.output, osutils.ReadFile(__file__))
+
+
+def _ForceLoggingLevel(functor):
+  def inner(*args, **kwargs):
+    logger = logging.getLogger()
+    current = logger.getEffectiveLevel()
+    try:
+      logger.setLevel(logging.INFO)
+      return functor(*args, **kwargs)
+    finally:
+      logger.setLevel(current)
+  return inner
+
+
+class TestRunCommand(cros_test_lib.MockTestCase):
+  """Tests of RunCommand functionality."""
+
+  def setUp(self):
+    """Install mocks for Popen and the signal module."""
+    # Get the original value for SIGINT so our signal() mock can return the
+    # correct thing.
+    self._old_sigint = signal.getsignal(signal.SIGINT)
+
+    # Mock the return value of Popen().
+    self.error = 'test error'
+    self.output = 'test output'
+    self.proc_mock = mock.MagicMock(
+        returncode=0,
+        communicate=lambda x: (self.output, self.error))
+    self.popen_mock = self.PatchObject(cros_build_lib, '_Popen',
+                                       return_value=self.proc_mock)
+
+    self.signal_mock = self.PatchObject(signal, 'signal')
+    self.getsignal_mock = self.PatchObject(signal, 'getsignal')
+    # Force RunCommand down the code path that manipulates signal handlers.
+    self.PatchObject(cros_signals, 'SignalModuleUsable', return_value=True)
+
+  @contextlib.contextmanager
+  def _MockChecker(self, cmd, **kwargs):
+    """Verify the mocks we set up.
+
+    Args:
+      cmd: The argv list we expect _Popen to receive.
+      **kwargs: 'ignore_sigint' plus any kwargs we expect to be passed
+        down to _Popen.
+    """
+    ignore_sigint = kwargs.pop('ignore_sigint', False)
+
+    # Make some arbitrary functors we can pretend are signal handlers.
+    # Note that these are intentionally defined on the fly via lambda-
+    # this is to ensure that they're unique to each run.
+    sigint_suppress = lambda signum, frame: None
+    sigint_suppress.__name__ = 'sig_ign_sigint'
+    normal_sigint = lambda signum, frame: None
+    normal_sigint.__name__ = 'sigint'
+    normal_sigterm = lambda signum, frame: None
+    normal_sigterm.__name__ = 'sigterm'
+
+    # Set up complicated mock for signal.signal().
+    def _SignalChecker(sig, _action):
+      """Return the right signal values so we can check the calls."""
+      if sig == signal.SIGINT:
+        return sigint_suppress if ignore_sigint else normal_sigint
+      elif sig == signal.SIGTERM:
+        return normal_sigterm
+      else:
+        raise ValueError('unknown sig %i' % sig)
+    self.signal_mock.side_effect = _SignalChecker
+
+    # Set up complicated mock for signal.getsignal().
+    def _GetsignalChecker(sig):
+      """Return the right signal values so we can check the calls."""
+      if sig == signal.SIGINT:
+        # With ignore_sigint, RunCommand should not need to query SIGINT.
+        self.assertFalse(ignore_sigint)
+        return normal_sigint
+      elif sig == signal.SIGTERM:
+        return normal_sigterm
+      else:
+        raise ValueError('unknown sig %i' % sig)
+    self.getsignal_mock.side_effect = _GetsignalChecker
+
+    # Let the body of code run, then check the signal behavior afterwards.
+    # We don't get visibility into signal ordering vs command execution,
+    # but it's kind of hard to mess up that, so we won't bother.
+    yield
+
+    class RejectSigIgn(object):
+      """Make sure the signal action is not SIG_IGN."""
+      def __eq__(self, other):
+        return other != signal.SIG_IGN
+
+    # Verify the signals checked/setup are correct.
+    if ignore_sigint:
+      self.signal_mock.assert_has_calls([
+          mock.call(signal.SIGINT, signal.SIG_IGN),
+          mock.call(signal.SIGTERM, RejectSigIgn()),
+          mock.call(signal.SIGINT, sigint_suppress),
+          mock.call(signal.SIGTERM, normal_sigterm),
+      ])
+      self.assertEqual(self.getsignal_mock.call_count, 1)
+    else:
+      self.signal_mock.assert_has_calls([
+          mock.call(signal.SIGINT, RejectSigIgn()),
+          mock.call(signal.SIGTERM, RejectSigIgn()),
+          mock.call(signal.SIGINT, normal_sigint),
+          mock.call(signal.SIGTERM, normal_sigterm),
+      ])
+      self.assertEqual(self.getsignal_mock.call_count, 2)
+
+    # Verify various args are passed down to the real command.
+    pargs = self.popen_mock.call_args[0][0]
+    self.assertEqual(cmd, pargs)
+
+    # Verify various kwargs are passed down to the real command.
+    pkwargs = self.popen_mock.call_args[1]
+    # Fill in the defaults RunCommand is expected to supply.
+    for key in ('cwd', 'stdin', 'stdout', 'stderr'):
+      kwargs.setdefault(key, None)
+    kwargs.setdefault('shell', False)
+    kwargs.setdefault('env', mock.ANY)
+    kwargs['close_fds'] = True
+    self.longMessage = True
+    for key in kwargs.keys():
+      self.assertEqual(kwargs[key], pkwargs[key],
+                       msg='kwargs[%s] mismatch' % key)
+
+  def _AssertCrEqual(self, expected, actual):
+    """Helper method to compare two CommandResult objects.
+
+    This is needed since assertEqual does not know how to compare two
+    CommandResult objects.
+
+    Args:
+      expected: a CommandResult object, expected result.
+      actual: a CommandResult object, actual result.
+    """
+    self.assertEqual(expected.cmd, actual.cmd)
+    self.assertEqual(expected.error, actual.error)
+    self.assertEqual(expected.output, actual.output)
+    self.assertEqual(expected.returncode, actual.returncode)
+
+  @_ForceLoggingLevel
+  def _TestCmd(self, cmd, real_cmd, sp_kv=None, rc_kv=None, sudo=False):
+    """Factor out common setup logic for testing RunCommand().
+
+    Args:
+      cmd: a string or an array of strings that will be passed to RunCommand.
+      real_cmd: the real command we expect RunCommand to call (might be
+          modified to have enter_chroot).
+      sp_kv: key-value pairs passed to subprocess.Popen().
+      rc_kv: key-value pairs passed to RunCommand().
+      sudo: use SudoRunCommand() rather than RunCommand().
+    """
+    if sp_kv is None:
+      sp_kv = {}
+    if rc_kv is None:
+      rc_kv = {}
+
+    expected_result = cros_build_lib.CommandResult()
+    expected_result.cmd = real_cmd
+    expected_result.error = self.error
+    expected_result.output = self.output
+    expected_result.returncode = self.proc_mock.returncode
+
+    arg_dict = dict()
+    for attr in ('close_fds', 'cwd', 'env', 'stdin', 'stdout', 'stderr',
+                 'shell'):
+      if attr in sp_kv:
+        arg_dict[attr] = sp_kv[attr]
+      else:
+        if attr == 'close_fds':
+          arg_dict[attr] = True
+        elif attr == 'shell':
+          arg_dict[attr] = False
+        else:
+          arg_dict[attr] = None
+
+    if sudo:
+      runcmd = cros_build_lib.SudoRunCommand
+    else:
+      runcmd = cros_build_lib.RunCommand
+    with self._MockChecker(real_cmd, ignore_sigint=rc_kv.get('ignore_sigint'),
+                           **sp_kv):
+      actual_result = runcmd(cmd, **rc_kv)
+
+    self._AssertCrEqual(expected_result, actual_result)
+
+  def testReturnCodeZeroWithArrayCmd(self, ignore_sigint=False):
+    """--enter_chroot=False and --cmd is an array of strings.
+
+    Parameterized so this can also be used by some other tests w/ alternate
+    params to RunCommand().
+
+    Args:
+      ignore_sigint: If True, we'll tell RunCommand to ignore sigint.
+    """
+    self.proc_mock.returncode = 0
+    cmd_list = ['foo', 'bar', 'roger']
+    self._TestCmd(cmd_list, cmd_list,
+                  rc_kv=dict(ignore_sigint=ignore_sigint))
+
+  def testSignalRestoreNormalCase(self):
+    """Test RunCommand() properly sets/restores sigint.  Normal case."""
+    self.testReturnCodeZeroWithArrayCmd(ignore_sigint=True)
+
+  def testReturnCodeZeroWithArrayCmdEnterChroot(self):
+    """--enter_chroot=True and --cmd is an array of strings."""
+    self.proc_mock.returncode = 0
+    cmd_list = ['foo', 'bar', 'roger']
+    real_cmd = cmd_list
+    if not cros_build_lib.IsInsideChroot():
+      real_cmd = ['cros_sdk', '--'] + cmd_list
+    self._TestCmd(cmd_list, real_cmd, rc_kv=dict(enter_chroot=True))
+
+  @_ForceLoggingLevel
+  def testCommandFailureRaisesError(self, ignore_sigint=False):
+    """Verify error raised by communicate() is caught.
+
+    Parameterized so this can also be used by some other tests w/ alternate
+    params to RunCommand().
+
+    Args:
+      ignore_sigint: If True, we'll tell RunCommand to ignore sigint.
+    """
+    cmd = 'test cmd'
+    self.proc_mock.returncode = 1
+    with self._MockChecker(['/bin/bash', '-c', cmd],
+                           ignore_sigint=ignore_sigint):
+      self.assertRaises(cros_build_lib.RunCommandError,
+                        cros_build_lib.RunCommand, cmd, shell=True,
+                        ignore_sigint=ignore_sigint, error_code_ok=False)
+
+  @_ForceLoggingLevel
+  def testSubprocessCommunicateExceptionRaisesError(self, ignore_sigint=False):
+    """Verify error raised by communicate() is caught.
+
+    Parameterized so this can also be used by some other tests w/ alternate
+    params to RunCommand().
+
+    Args:
+      ignore_sigint: If True, we'll tell RunCommand to ignore sigint.
+    """
+    cmd = ['test', 'cmd']
+    self.proc_mock.communicate = mock.MagicMock(side_effect=ValueError)
+    with self._MockChecker(cmd, ignore_sigint=ignore_sigint):
+      self.assertRaises(ValueError, cros_build_lib.RunCommand, cmd,
+                        ignore_sigint=ignore_sigint)
+
+  def testSignalRestoreExceptionCase(self):
+    """Test RunCommand() properly sets/restores sigint.  Exception case."""
+    self.testSubprocessCommunicateExceptionRaisesError(ignore_sigint=True)
+
+  def testEnvWorks(self):
+    """Test RunCommand(..., env=xyz) works."""
+    # We'll put this bogus environment together, just to make sure
+    # subprocess.Popen gets passed it.
+    env = {'Tom': 'Jerry', 'Itchy': 'Scratchy'}
+
+    # This is a simple case, copied from testReturnCodeZeroWithArrayCmd()
+    self.proc_mock.returncode = 0
+    cmd_list = ['foo', 'bar', 'roger']
+
+    # Run.  We expect the env= to be passed through from sp (subprocess.Popen)
+    # to rc (RunCommand).
+    self._TestCmd(cmd_list, cmd_list,
+                  sp_kv=dict(env=env),
+                  rc_kv=dict(env=env))
+
+  def testExtraEnvOnlyWorks(self):
+    """Test RunCommand(..., extra_env=xyz) works."""
+    # We'll put this bogus environment together, just to make sure
+    # subprocess.Popen gets passed it.
+    extra_env = {'Pinky' : 'Brain'}
+    ## This is a little bit circular, since the same logic is used to compute
+    ## the value inside, but at least it checks that this happens.
+    total_env = os.environ.copy()
+    total_env.update(extra_env)
+
+    # This is a simple case, copied from testReturnCodeZeroWithArrayCmd()
+    self.proc_mock.returncode = 0
+    cmd_list = ['foo', 'bar', 'roger']
+
+    # Run.  We expect the env= to be passed through from sp (subprocess.Popen)
+    # to rc (RunCommand).
+    self._TestCmd(cmd_list, cmd_list,
+                  sp_kv=dict(env=total_env),
+                  rc_kv=dict(extra_env=extra_env))
+
+  def testExtraEnvTooWorks(self):
+    """Test RunCommand(..., env=xy, extra_env=z) works."""
+    # We'll put this bogus environment together, just to make sure
+    # subprocess.Popen gets passed it.
+    env = {'Tom': 'Jerry', 'Itchy': 'Scratchy'}
+    extra_env = {'Pinky': 'Brain'}
+    total_env = {'Tom': 'Jerry', 'Itchy': 'Scratchy', 'Pinky': 'Brain'}
+
+    # This is a simple case, copied from testReturnCodeZeroWithArrayCmd()
+    self.proc_mock.returncode = 0
+    cmd_list = ['foo', 'bar', 'roger']
+
+    # Run.  We expect the env= to be passed through from sp (subprocess.Popen)
+    # to rc (RunCommand).
+    self._TestCmd(cmd_list, cmd_list,
+                  sp_kv=dict(env=total_env),
+                  rc_kv=dict(env=env, extra_env=extra_env))
+
+  @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False)
+  def testChrootExtraEnvWorks(self, _inchroot_mock):
+    """Test RunCommand(..., enter_chroot=True, env=xy, extra_env=z) works."""
+    # We'll put this bogus environment together, just to make sure
+    # subprocess.Popen gets passed it.
+    env = {'Tom': 'Jerry', 'Itchy': 'Scratchy'}
+    extra_env = {'Pinky': 'Brain'}
+    total_env = {'Tom': 'Jerry', 'Itchy': 'Scratchy', 'Pinky': 'Brain'}
+
+    # This is a simple case, copied from testReturnCodeZeroWithArrayCmd()
+    self.proc_mock.returncode = 0
+    cmd_list = ['foo', 'bar', 'roger']
+
+    # Run.  We expect the env= to be passed through from sp (subprocess.Popen)
+    # to rc (RunCommand).
+    self._TestCmd(cmd_list, ['cros_sdk', 'Pinky=Brain', '--'] + cmd_list,
+                  sp_kv=dict(env=total_env),
+                  rc_kv=dict(env=env, extra_env=extra_env, enter_chroot=True))
+
+  def testExceptionEquality(self):
+    """Verify equality methods for RunCommandError"""
+
+    c1 = cros_build_lib.CommandResult(cmd=['ls', 'arg'], returncode=1)
+    c2 = cros_build_lib.CommandResult(cmd=['ls', 'arg1'], returncode=1)
+    c3 = cros_build_lib.CommandResult(cmd=['ls', 'arg'], returncode=2)
+    e1 = cros_build_lib.RunCommandError('Message 1', c1)
+    e2 = cros_build_lib.RunCommandError('Message 1', c1)
+    e_diff_msg = cros_build_lib.RunCommandError('Message 2', c1)
+    e_diff_cmd = cros_build_lib.RunCommandError('Message 1', c2)
+    e_diff_code = cros_build_lib.RunCommandError('Message 1', c3)
+
+    self.assertEqual(e1, e2)
+    self.assertNotEqual(e1, e_diff_msg)
+    self.assertNotEqual(e1, e_diff_cmd)
+    self.assertNotEqual(e1, e_diff_code)
+
+  def testSudoRunCommand(self):
+    """Test SudoRunCommand(...) works."""
+    cmd_list = ['foo', 'bar', 'roger']
+    sudo_list = ['sudo', '--'] + cmd_list
+    self.proc_mock.returncode = 0
+    self._TestCmd(cmd_list, sudo_list, sudo=True)
+
+  def testSudoRunCommandShell(self):
+    """Test SudoRunCommand(..., shell=True) works."""
+    cmd = 'foo bar roger'
+    sudo_list = ['sudo', '--', '/bin/bash', '-c', cmd]
+    self.proc_mock.returncode = 0
+    self._TestCmd(cmd, sudo_list, sudo=True,
+                  rc_kv=dict(shell=True))
+
+  def testSudoRunCommandEnv(self):
+    """Test SudoRunCommand(..., extra_env=z) works."""
+    cmd_list = ['foo', 'bar', 'roger']
+    sudo_list = ['sudo', 'shucky=ducky', '--'] + cmd_list
+    extra_env = {'shucky' : 'ducky'}
+    self.proc_mock.returncode = 0
+    self._TestCmd(cmd_list, sudo_list, sudo=True,
+                  rc_kv=dict(extra_env=extra_env))
+
+  def testSudoRunCommandUser(self):
+    """Test SudoRunCommand(..., user='...') works."""
+    cmd_list = ['foo', 'bar', 'roger']
+    sudo_list = ['sudo', '-u', 'MMMMMonster', '--'] + cmd_list
+    self.proc_mock.returncode = 0
+    self._TestCmd(cmd_list, sudo_list, sudo=True,
+                  rc_kv=dict(user='MMMMMonster'))
+
+  def testSudoRunCommandUserShell(self):
+    """Test SudoRunCommand(..., user='...', shell=True) works."""
+    cmd = 'foo bar roger'
+    sudo_list = ['sudo', '-u', 'MMMMMonster', '--', '/bin/bash', '-c', cmd]
+    self.proc_mock.returncode = 0
+    self._TestCmd(cmd, sudo_list, sudo=True,
+                  rc_kv=dict(user='MMMMMonster', shell=True))
+
+
+class TestRunCommandOutput(cros_test_lib.TempDirTestCase,
+                           cros_test_lib.OutputTestCase):
+  """Tests of RunCommand output options."""
+
+  @_ForceLoggingLevel
+  def testLogStdoutToFile(self):
+    """Verify log_stdout_to_file routes stdout to the file, not the result."""
+    log = os.path.join(self.tempdir, 'output')
+    ret = cros_build_lib.RunCommand(
+        ['echo', 'monkeys'], log_stdout_to_file=log)
+    self.assertEqual(osutils.ReadFile(log), 'monkeys\n')
+    # stdout went to the log file, so the result carries neither stream.
+    self.assertIs(ret.output, None)
+    self.assertIs(ret.error, None)
+
+    os.unlink(log)
+    # stderr is captured separately; the stdout log file stays empty.
+    ret = cros_build_lib.RunCommand(
+        ['sh', '-c', 'echo monkeys3 >&2'],
+        log_stdout_to_file=log, redirect_stderr=True)
+    self.assertEqual(ret.error, 'monkeys3\n')
+    self.assertExists(log)
+    self.assertEqual(os.path.getsize(log), 0)
+
+    os.unlink(log)
+    # With combine_stdout_stderr, both streams land in the log file.
+    ret = cros_build_lib.RunCommand(
+        ['sh', '-c', 'echo monkeys4; echo monkeys5 >&2'],
+        log_stdout_to_file=log, combine_stdout_stderr=True)
+    self.assertIs(ret.output, None)
+    self.assertIs(ret.error, None)
+    self.assertEqual(osutils.ReadFile(log), 'monkeys4\nmonkeys5\n')
+
+  def _CaptureRunCommand(self, command, mute_output):
+    """Capture a RunCommand() output with the specified |mute_output|.
+
+    Args:
+      command: command to send to RunCommand().
+      mute_output: RunCommand() |mute_output| parameter.
+
+    Returns:
+      A (stdout, stderr) pair of captured output.
+    """
+    with self.OutputCapturer() as output:
+      cros_build_lib.RunCommand(command,
+                                debug_level=logging.DEBUG,
+                                mute_output=mute_output)
+    return (output.GetStdout(), output.GetStderr())
+
+  @_ForceLoggingLevel
+  def testSubprocessMuteOutput(self):
+    """Test RunCommand |mute_output| parameter."""
+    command = ['sh', '-c', 'echo foo; echo bar >&2']
+    # Always mute: we shouldn't get any output.
+    self.assertEqual(self._CaptureRunCommand(command, mute_output=True),
+                     ('', ''))
+    # Mute based on |debug_level|: we shouldn't get any output.
+    self.assertEqual(self._CaptureRunCommand(command, mute_output=None),
+                     ('', ''))
+    # Never mute: we should get 'foo\n' and 'bar\n'.
+    self.assertEqual(self._CaptureRunCommand(command, mute_output=False),
+                     ('foo\n', 'bar\n'))
+
+  def testRunCommandAtNoticeLevel(self):
+    """Ensure that RunCommand prints output when mute_output is False."""
+    # Needed by cros_sdk and brillo/cros chroot.
+    with self.OutputCapturer():
+      cros_build_lib.RunCommand(['echo', 'foo'], mute_output=False,
+                                error_code_ok=True, print_cmd=False,
+                                debug_level=logging.NOTICE)
+    self.AssertOutputContainsLine('foo')
+
+
+class TestRetries(cros_test_lib.MockTempDirTestCase):
+  """Tests of GenericRetry and relatives."""
+
+  def testGenericRetry(self):
+    """Test basic semantics of retry and success recording."""
+    # Yields 0, 1, 2, 3, 4 and then raises StopIteration (Python 2 idiom).
+    source = iter(xrange(5)).next
+
+    # Fails (ValueError) for values below 4; succeeds once the source
+    # reaches 4.
+    def f():
+      val = source()
+      if val < 4:
+        raise ValueError()
+      return val
+
+    # Collects the attempt number each time the retried call succeeds.
+    s = []
+    def sf(attempt):
+      s.append(attempt)
+
+    handler = lambda ex: isinstance(ex, ValueError)
+
+    # 3 retries (4 attempts) only consumes values 0-3, all of which fail;
+    # no success is recorded.
+    self.assertRaises(ValueError, retry_util.GenericRetry, handler, 3, f,
+                      success_functor=sf)
+    self.assertEqual(s, [])
+
+    # The source now yields 4, so this succeeds and the functor fires.
+    self.assertEqual(4, retry_util.GenericRetry(handler, 1, f,
+                                                success_functor=sf))
+    self.assertEqual(s, [1])
+
+    # Source exhausted: StopIteration is not a ValueError, so it propagates
+    # and no new success is recorded.
+    self.assertRaises(StopIteration, retry_util.GenericRetry, handler, 3, f,
+                      success_functor=sf)
+    self.assertEqual(s, [1])
+
+  def testRaisedException(self):
+    """Test which exception gets raised by repeated failure."""
+
+    def getTestFunction():
+      """Get function that fails once with ValueError, Then AssertionError."""
+      source = itertools.count()
+      def f():
+        if source.next() == 0:
+          raise ValueError()
+        else:
+          raise AssertionError()
+      return f
+
+    handler = lambda ex: True
+
+    # By default, the *first* exception seen is re-raised after all retries.
+    with self.assertRaises(ValueError):
+      retry_util.GenericRetry(handler, 3, getTestFunction())
+
+    # With the flag off, the *last* exception is raised instead.
+    with self.assertRaises(AssertionError):
+      retry_util.GenericRetry(handler, 3, getTestFunction(),
+                              raise_first_exception_on_failure=False)
+
+  def testSuccessFunctorException(self):
+    """Exceptions in |success_functor| should not be retried."""
+    def sf(_):
+      assert False
+
+    with self.assertRaises(AssertionError):
+      retry_util.GenericRetry(lambda: True, 1, lambda: None, success_functor=sf)
+
+  def testRetryExceptionBadArgs(self):
+    """Verify we reject non-classes or tuples of classes"""
+    self.assertRaises(TypeError, retry_util.RetryException, '', 3, map)
+    self.assertRaises(TypeError, retry_util.RetryException, 123, 3, map)
+    self.assertRaises(TypeError, retry_util.RetryException, None, 3, map)
+    self.assertRaises(TypeError, retry_util.RetryException, [None], 3, map)
+
+  def testRetryException(self):
+    """Verify we retry only when certain exceptions get thrown"""
+    # Two lockstep iterators: source2 drives the value, source double-checks
+    # that f() runs exactly once per expected attempt.
+    source, source2 = iter(xrange(6)).next, iter(xrange(6)).next
+    # Values 0-1 raise OSError, 2-4 raise ValueError, 5 succeeds.
+    def f():
+      val = source2()
+      self.assertEqual(val, source())
+      if val < 2:
+        raise OSError()
+      if val < 5:
+        raise ValueError()
+      return val
+    self.assertRaises(OSError, retry_util.RetryException,
+                      (OSError, ValueError), 2, f)
+    self.assertRaises(ValueError, retry_util.RetryException,
+                      (OSError, ValueError), 1, f)
+    self.assertEqual(5, retry_util.RetryException(ValueError, 1, f))
+    # Iterators exhausted; StopIteration isn't in the retry set.
+    self.assertRaises(StopIteration, retry_util.RetryException,
+                      ValueError, 3, f)
+
+  def testRetryWithBackoff(self):
+    """Verify sleep durations grow by |backoff_factor| on each retry."""
+    sleep_history = []
+    def mock_sleep(x):
+      sleep_history.append(x)
+    self.PatchObject(time, 'sleep', new=mock_sleep)
+    def always_fails():
+      raise ValueError()
+    handler = lambda x: True
+    with self.assertRaises(ValueError):
+      retry_util.GenericRetry(handler, 5, always_fails, sleep=1,
+                              backoff_factor=2)
+
+    # sleep=1 with backoff_factor=2 doubles on every retry.
+    self.assertEqual(sleep_history, [1, 2, 4, 8, 16])
+
+  def testBasicRetry(self):
+    """End-to-end RunCommandWithRetries test driven by a tiny script."""
+    # pylint: disable=E1101
+    path = os.path.join(self.tempdir, 'script')
+    paths = {
+        'stop': os.path.join(self.tempdir, 'stop'),
+        'store': os.path.join(self.tempdir, 'store'),
+    }
+    # The script increments the counter in |store| on every run and exits 0
+    # only once the counter equals the value in |stop|.  It is executed by
+    # python2 below, hence the print statement.
+    osutils.WriteFile(
+        path,
+        "import sys\n"
+        "val = int(open(%(store)r).read())\n"
+        "stop_val = int(open(%(stop)r).read())\n"
+        "open(%(store)r, 'w').write(str(val + 1))\n"
+        "print val\n"
+        "sys.exit(0 if val == stop_val else 1)\n" % paths)
+
+    os.chmod(path, 0o755)
+
+    # Reset the sleep mock and (re)seed the counter files.
+    def _setup_counters(start, stop):
+      sleep_mock.reset_mock()
+      osutils.WriteFile(paths['store'], str(start))
+      osutils.WriteFile(paths['stop'], str(stop))
+
+    # Linear backoff: retry N is expected to have slept |sleep| * N.
+    def _check_counters(sleep, sleep_cnt):
+      calls = [mock.call(sleep * (x + 1)) for x in range(sleep_cnt)]
+      sleep_mock.assert_has_calls(calls)
+
+    sleep_mock = self.PatchObject(time, 'sleep')
+
+    _setup_counters(0, 0)
+    command = ['python2', path]
+    kwargs = {'redirect_stdout': True, 'print_cmd': False}
+    self.assertEqual(cros_build_lib.RunCommand(command, **kwargs).output, '0\n')
+    _check_counters(0, 0)
+
+    func = retry_util.RunCommandWithRetries
+
+    # Counter already at the stop value: the very first run succeeds.
+    _setup_counters(2, 2)
+    self.assertEqual(func(0, command, sleep=0, **kwargs).output, '2\n')
+    _check_counters(0, 0)
+
+    _setup_counters(0, 2)
+    self.assertEqual(func(2, command, sleep=1, **kwargs).output, '2\n')
+    _check_counters(1, 2)
+
+    _setup_counters(0, 1)
+    self.assertEqual(func(1, command, sleep=2, **kwargs).output, '1\n')
+    _check_counters(2, 1)
+
+    # Needs 4 runs to succeed but 2 retries allow only 3 runs.
+    _setup_counters(0, 3)
+    self.assertRaises(cros_build_lib.RunCommandError,
+                      func, 2, command, sleep=3, **kwargs)
+    _check_counters(3, 2)
+
+
+class TestTimedCommand(cros_test_lib.MockTestCase):
+  """Tests for TimedCommand()"""
+
+  # TODO: Would be nice to insert a hook into the logging system so we verify
+  # the message actually gets passed down.  The logging module swallows the
+  # exceptions it throws internally when not all args get converted.
+
+  def setUp(self):
+    self.cmd = mock.MagicMock(return_value=1234)
+    self.cmd.__name__ = 'name'
+
+  def testBasic(self):
+    """Make sure simple stuff works."""
+    cros_build_lib.TimedCommand(self.cmd)
+    self.cmd.assert_called_once_with()
+
+  def testArgs(self):
+    """Verify passing of optional args to the destination function."""
+    cros_build_lib.TimedCommand(self.cmd, 'arg', 1, kw=True, alist=[])
+    self.cmd.assert_called_once_with('arg', 1, kw=True, alist=[])
+
+  def testReturn(self):
+    """Verify return values get passed back."""
+    ret = cros_build_lib.TimedCommand(self.cmd)
+    self.assertEqual(ret, 1234)
+
+  def testCallback(self):
+    """Verify log callback does the right thing."""
+    def cb(lvl, msg, ret, delta):
+      self.assertEqual(lvl, 10)
+      self.assertEqual(msg, 'msg!')
+      self.assertEqual(ret, 1234)
+      self.assertTrue(isinstance(delta, datetime.timedelta))
+    cros_build_lib.TimedCommand(self.cmd, timed_log_level=10,
+                                timed_log_msg='msg!', timed_log_callback=cb)
+
+  def testLog(self):
+    """Verify the logger module gets called."""
+    m = self.PatchObject(logging, 'log')
+    cros_build_lib.TimedCommand(self.cmd, timed_log_level=logging.WARNING,
+                                timed_log_msg='msg!')
+    self.assertEqual(m.call_count, 1)
+
+  def testLogStraight(self):
+    """Verify logging messages does the right thing."""
+    cros_build_lib.TimedCommand(self.cmd, timed_log_level=logging.WARNING,
+                                timed_log_msg='msg!')
+
+
+class TestListFiles(cros_test_lib.TempDirTestCase):
+  """Tests of ListFiles funciton."""
+
+  def _CreateNestedDir(self, dir_structure):
+    for entry in dir_structure:
+      full_path = os.path.join(os.path.join(self.tempdir, entry))
+      # ensure dirs are created
+      try:
+        os.makedirs(os.path.dirname(full_path))
+        if full_path.endswith('/'):
+          # we only want to create directories
+          return
+      except OSError as err:
+        if err.errno == errno.EEXIST:
+          # we don't care if the dir already exists
+          pass
+        else:
+          raise
+      # create dummy files
+      tmp = open(full_path, 'w')
+      tmp.close()
+
+  def testTraverse(self):
+    """Test that we are traversing the directory properly."""
+    dir_structure = ['one/two/test.txt', 'one/blah.py',
+                     'three/extra.conf']
+    self._CreateNestedDir(dir_structure)
+
+    files = cros_build_lib.ListFiles(self.tempdir)
+    for f in files:
+      f = f.replace(self.tempdir, '').lstrip('/')
+      if f not in dir_structure:
+        self.fail('%s was not found in %s' % (f, dir_structure))
+
+  def testEmptyFilePath(self):
+    """Test that we return nothing when directories are empty."""
+    dir_structure = ['one/', 'two/', 'one/a/']
+    self._CreateNestedDir(dir_structure)
+    files = cros_build_lib.ListFiles(self.tempdir)
+    self.assertEqual(files, [])
+
+  def testNoSuchDir(self):
+    try:
+      cros_build_lib.ListFiles(os.path.join(self.tempdir, 'missing'))
+    except OSError as err:
+      self.assertEqual(err.errno, errno.ENOENT)
+
+
+class HelperMethodSimpleTests(cros_test_lib.OutputTestCase):
+  """Tests for various helper methods without using mocks."""
+
+  def _TestChromeosVersion(self, test_str, expected=None):
+    actual = cros_build_lib.GetChromeosVersion(test_str)
+    self.assertEqual(expected, actual)
+
+  def testGetChromeosVersionWithValidVersionReturnsValue(self):
+    expected = '0.8.71.2010_09_10_1530'
+    test_str = ' CHROMEOS_VERSION_STRING=0.8.71.2010_09_10_1530 '
+    self._TestChromeosVersion(test_str, expected)
+
+  def testGetChromeosVersionWithMultipleVersionReturnsFirstMatch(self):
+    expected = '0.8.71.2010_09_10_1530'
+    test_str = (' CHROMEOS_VERSION_STRING=0.8.71.2010_09_10_1530 '
+                ' CHROMEOS_VERSION_STRING=10_1530 ')
+    self._TestChromeosVersion(test_str, expected)
+
+  def testGetChromeosVersionWithInvalidVersionReturnsDefault(self):
+    test_str = ' CHROMEOS_VERSION_STRING=invalid_version_string '
+    self._TestChromeosVersion(test_str)
+
+  def testGetChromeosVersionWithEmptyInputReturnsDefault(self):
+    self._TestChromeosVersion('')
+
+  def testGetChromeosVersionWithNoneInputReturnsDefault(self):
+    self._TestChromeosVersion(None)
+
+  def testUserDateTime(self):
+    """Test with a raw time value."""
+    expected = 'Mon, 16 Jun 1980 05:03:20 -0700 (PDT)'
+    with cros_test_lib.SetTimeZone('US/Pacific'):
+      timeval = 330005000
+      self.assertEqual(cros_build_lib.UserDateTimeFormat(timeval=timeval),
+                       expected)
+
+  def testUserDateTimeDateTime(self):
+    """Test with a datetime object."""
+    expected = 'Mon, 16 Jun 1980 00:00:00 -0700 (PDT)'
+    with cros_test_lib.SetTimeZone('US/Pacific'):
+      timeval = datetime.datetime(1980, 6, 16)
+      self.assertEqual(cros_build_lib.UserDateTimeFormat(timeval=timeval),
+                       expected)
+
+  def testUserDateTimeDateTimeInWinter(self):
+    """Test that we correctly switch from PDT to PST."""
+    expected = 'Wed, 16 Jan 1980 00:00:00 -0800 (PST)'
+    with cros_test_lib.SetTimeZone('US/Pacific'):
+      timeval = datetime.datetime(1980, 1, 16)
+      self.assertEqual(cros_build_lib.UserDateTimeFormat(timeval=timeval),
+                       expected)
+
+  def testUserDateTimeDateTimeInEST(self):
+    """Test that we correctly switch from PDT to EST."""
+    expected = 'Wed, 16 Jan 1980 00:00:00 -0500 (EST)'
+    with cros_test_lib.SetTimeZone('US/Eastern'):
+      timeval = datetime.datetime(1980, 1, 16)
+      self.assertEqual(cros_build_lib.UserDateTimeFormat(timeval=timeval),
+                       expected)
+
+  def testUserDateTimeCurrentTime(self):
+    """Test that we can get the current time."""
+    cros_build_lib.UserDateTimeFormat()
+
+  def testParseUserDateTimeFormat(self):
+    stringtime = cros_build_lib.UserDateTimeFormat(100000.0)
+    self.assertEqual(cros_build_lib.ParseUserDateTimeFormat(stringtime),
+                     100000.0)
+
+  def testParseDurationToSeconds(self):
+    self.assertEqual(cros_build_lib.ParseDurationToSeconds('1:01:01'),
+                     3600 + 60 + 1)
+
+  def testMachineDetails(self):
+    """Verify we don't crash."""
+    contents = cros_build_lib.MachineDetails()
+    self.assertNotEqual(contents, '')
+    self.assertEqual(contents[-1], '\n')
+
+  def testGetCommonPathPrefix(self):
+    """Test helper function correctness."""
+    self.assertEqual('/a', cros_build_lib.GetCommonPathPrefix(['/a/b']))
+    self.assertEqual('/a', cros_build_lib.GetCommonPathPrefix(['/a/']))
+    self.assertEqual('/', cros_build_lib.GetCommonPathPrefix(['/a']))
+    self.assertEqual(
+        '/a', cros_build_lib.GetCommonPathPrefix(['/a/b', '/a/c']))
+    self.assertEqual(
+        '/a/b', cros_build_lib.GetCommonPathPrefix(['/a/b/c', '/a/b/d']))
+    self.assertEqual('/', cros_build_lib.GetCommonPathPrefix(['/a/b', '/c/d']))
+    self.assertEqual(
+        '/', cros_build_lib.GetCommonPathPrefix(['/a/b', '/aa/b']))
+
+  def testFormatDetailedTraceback(self):
+    """Verify various aspects of the traceback"""
+    # When there is no active exception, should output nothing.
+    data = cros_build_lib.FormatDetailedTraceback()
+    self.assertEqual(data, '')
+
+    # Generate a local exception and test it.
+    try:
+      varint = 12345
+      varstr = 'vaaars'
+      raise Exception('fooood')
+    except Exception:
+      lines = cros_build_lib.FormatDetailedTraceback().splitlines()
+      # Check basic start/finish lines.
+      self.assertIn('Traceback ', lines[0])
+      self.assertIn('Exception: fooood', lines[-1])
+
+      # Verify some local vars get correctly decoded.
+      for line in lines:
+        if 'varint' in line:
+          self.assertIn('int', line)
+          self.assertIn(str(varint), line)
+          break
+      else:
+        raise AssertionError('could not find local "varint" in output:\n\n%s' %
+                             ''.join(lines))
+
+      for line in lines:
+        if 'varstr' in line:
+          self.assertIn('str', line)
+          self.assertIn(varstr, line)
+          break
+      else:
+        raise AssertionError('could not find local "varstr" in output:\n\n%s' %
+                             ''.join(lines))
+
+  def _testPrintDetailedTraceback(self, check_stdout):
+    """Helper method for testing PrintDetailedTraceback."""
+    try:
+      varint = 12345
+      varstr = 'vaaars'
+      raise Exception('fooood')
+    except Exception:
+      with self.OutputCapturer() as output:
+        if check_stdout is None:
+          stream = None
+        elif check_stdout:
+          stream = sys.stdout
+        else:
+          stream = sys.stderr
+        cros_build_lib.PrintDetailedTraceback(file=stream)
+
+        # The non-selected stream shouldn't have anything.
+        data = output.GetStderr() if check_stdout else output.GetStdout()
+        self.assertEqual(data, '')
+
+        kwargs = {
+            'check_stdout': check_stdout,
+            'check_stderr': not check_stdout,
+        }
+        self.AssertOutputContainsLine(r'Traceback ', **kwargs)
+        self.AssertOutputContainsLine(r'Exception: fooood', **kwargs)
+        self.AssertOutputContainsLine(r'varint.*int.*%s' % varint, **kwargs)
+        self.AssertOutputContainsLine(r'varstr.*str.*%s' % varstr, **kwargs)
+
+  def testPrintDetailedTracebackStderrDefault(self):
+    """Verify default (stderr) handling"""
+    self._testPrintDetailedTraceback(None)
+
+  def testPrintDetailedTracebackStderr(self):
+    """Verify stderr handling"""
+    self._testPrintDetailedTraceback(False)
+
+  def testPrintDetailedTracebackStdout(self):
+    """Verify stdout handling"""
+    self._testPrintDetailedTraceback(True)
+
+
+class TestInput(cros_test_lib.MockOutputTestCase):
+  """Tests of input gathering functionality."""
+
+  def testGetInput(self):
+    """Verify GetInput() basic behavior."""
+    response = 'Some response'
+    # GetInput is backed by raw_input; patch it via __builtin__ (Python 2).
+    self.PatchObject(__builtin__, 'raw_input', return_value=response)
+    self.assertEquals(response, cros_build_lib.GetInput('prompt'))
+
+  def testBooleanPrompt(self):
+    """Verify BooleanPrompt() full behavior."""
+    m = self.PatchObject(cros_build_lib, 'GetInput')
+
+    # An empty answer falls back to |default| (True when unset).
+    m.return_value = ''
+    self.assertTrue(cros_build_lib.BooleanPrompt())
+    self.assertFalse(cros_build_lib.BooleanPrompt(default=False))
+
+    # Any prefix of 'yes' counts as an affirmative answer.
+    m.return_value = 'yes'
+    self.assertTrue(cros_build_lib.BooleanPrompt())
+    m.return_value = 'ye'
+    self.assertTrue(cros_build_lib.BooleanPrompt())
+    m.return_value = 'y'
+    self.assertTrue(cros_build_lib.BooleanPrompt())
+
+    # Prefixes of 'no' count as a negative answer.
+    m.return_value = 'no'
+    self.assertFalse(cros_build_lib.BooleanPrompt())
+    m.return_value = 'n'
+    self.assertFalse(cros_build_lib.BooleanPrompt())
+
+  def testBooleanShellValue(self):
+    """Verify BooleanShellValue() inputs work as expected"""
+    # None defers to the provided default.
+    for v in (None,):
+      self.assertTrue(cros_build_lib.BooleanShellValue(v, True))
+      self.assertFalse(cros_build_lib.BooleanShellValue(v, False))
+
+    # Unparseable values raise ValueError, unless |msg| is supplied, in
+    # which case the default wins.
+    for v in (1234, '', 'akldjsf', '"'):
+      self.assertRaises(ValueError, cros_build_lib.BooleanShellValue, v, True)
+      self.assertTrue(cros_build_lib.BooleanShellValue(v, True, msg=''))
+      self.assertFalse(cros_build_lib.BooleanShellValue(v, False, msg=''))
+
+    # Truthy spellings parse as True regardless of the default.
+    for v in ('yes', 'YES', 'YeS', 'y', 'Y', '1', 'true', 'True', 'TRUE',):
+      self.assertTrue(cros_build_lib.BooleanShellValue(v, True))
+      self.assertTrue(cros_build_lib.BooleanShellValue(v, False))
+
+    # Falsy spellings parse as False regardless of the default.
+    for v in ('no', 'NO', 'nO', 'n', 'N', '0', 'false', 'False', 'FALSE',):
+      self.assertFalse(cros_build_lib.BooleanShellValue(v, True))
+      self.assertFalse(cros_build_lib.BooleanShellValue(v, False))
+
+  def testGetChoiceLists(self):
+    """Verify GetChoice behavior w/lists."""
+    m = self.PatchObject(cros_build_lib, 'GetInput')
+
+    m.return_value = '1'
+    ret = cros_build_lib.GetChoice('title', ['a', 'b', 'c'])
+    self.assertEqual(ret, 1)
+
+  def testGetChoiceGenerator(self):
+    """Verify GetChoice behavior w/generators."""
+    m = self.PatchObject(cros_build_lib, 'GetInput')
+
+    m.return_value = '2'
+    ret = cros_build_lib.GetChoice('title', xrange(3))
+    self.assertEqual(ret, 2)
+
+  def testGetChoiceWindow(self):
+    """Verify GetChoice behavior w/group_size set."""
+    m = self.PatchObject(cros_build_lib, 'GetInput')
+
+    # Infinite generator that counts how many items GetChoice pulls.
+    cnt = [0]
+    def _Gen():
+      while True:
+        cnt[0] += 1
+        yield 'a'
+
+    # First input ('\n') pages to the next group of 2; '2' then selects.
+    m.side_effect = ['\n', '2']
+    ret = cros_build_lib.GetChoice('title', _Gen(), group_size=2)
+    self.assertEqual(ret, 2)
+
+    # Verify we showed the correct number of times.
+    # NOTE(review): 5 pulls for two groups of 2 suggests one item of
+    # read-ahead inside GetChoice -- confirm against its implementation.
+    self.assertEqual(cnt[0], 5)
+
+
+class TestContextManagerStack(cros_test_lib.TestCase):
+  """Test the ContextManagerStack class."""
+
+  def test(self):
+    # Records the order in which the managers' __exit__ methods ran.
+    invoked = []
+    counter = iter(itertools.count()).next
+    def _mk_kls(has_exception=None, exception_kls=None, suppress=False):
+      """Build a context-manager class.
+
+      has_exception: exception type this manager expects to see on exit.
+      exception_kls: exception type to raise from __exit__ itself.
+      suppress: if True, swallow whatever exception was in flight.
+      """
+      class foon(object):
+        """Simple context manager which runs checks on __exit__."""
+        # Unique per-class id used to record exit ordering.
+        marker = counter()
+        def __enter__(self):
+          return self
+
+        # pylint: disable=no-self-argument,bad-context-manager
+        def __exit__(obj_self, exc_type, exc, traceback):
+          invoked.append(obj_self.marker)
+          if has_exception is not None:
+            # A full (type, value, traceback) triple must be in flight,
+            # and it must be the expected exception type.
+            self.assertTrue(all(x is not None
+                                for x in (exc_type, exc, traceback)))
+            self.assertTrue(exc_type == has_exception)
+          if exception_kls:
+            raise exception_kls()
+          if suppress:
+            return True
+      return foon
+
+    with cros_build_lib.ContextManagerStack() as stack:
+      # Note... these tests are in reverse, since the exception
+      # winds its way up the stack.
+      stack.Add(_mk_kls())
+      stack.Add(_mk_kls(ValueError, suppress=True))
+      stack.Add(_mk_kls(IndexError, exception_kls=ValueError))
+      stack.Add(_mk_kls(IndexError))
+      stack.Add(_mk_kls(exception_kls=IndexError))
+      stack.Add(_mk_kls())
+    # Managers must exit in LIFO order: markers 5 down to 0.
+    self.assertEqual(invoked, list(reversed(range(6))))
+
+
+class TestManifestCheckout(cros_test_lib.TempDirTestCase):
+  """Tests for ManifestCheckout functionality."""
+
+  def setUp(self):
+    self.manifest_dir = os.path.join(self.tempdir, '.repo', 'manifests')
+
+    # Initialize a repo instance here, reusing the repo tool from the
+    # current checkout via a file:// URL.
+    local_repo = os.path.join(constants.SOURCE_ROOT, '.repo/repo/.git')
+
+    # Create a copy of our existing manifests.git, but rewrite it so it
+    # looks like a remote manifests.git.  This is to avoid hitting the
+    # network, and speeds things up in general.
+    local_manifests = 'file://%s/.repo/manifests.git' % constants.SOURCE_ROOT
+    temp_manifests = os.path.join(self.tempdir, 'manifests.git')
+    git.RunGit(self.tempdir, ['clone', '-n', '--bare', local_manifests])
+    # Mirror the remote-tracking branches as local heads, then delete the
+    # 'default' branch copied from the local checkout.
+    git.RunGit(temp_manifests,
+               ['fetch', '-f', '-u', local_manifests,
+                'refs/remotes/origin/*:refs/heads/*'])
+    git.RunGit(temp_manifests, ['branch', '-D', 'default'])
+    repo = repository.RepoRepository(
+        temp_manifests, self.tempdir,
+        repo_url='file://%s' % local_repo, repo_branch='default')
+    repo.Initialize()
+
+    # The manifest file the repo checkout actually uses (symlink resolved).
+    self.active_manifest = os.path.realpath(
+        os.path.join(self.tempdir, '.repo', 'manifest.xml'))
+
+  def testManifestInheritance(self):
+    osutils.WriteFile(self.active_manifest, """
+        <manifest>
+          <include name="include-target.xml" />
+          <include name="empty.xml" />
+          <project name="monkeys" path="baz" remote="foon" revision="master" />
+        </manifest>""")
+    # First, verify it properly explodes if the include can't be found.
+    self.assertRaises(EnvironmentError,
+                      git.ManifestCheckout, self.tempdir)
+
+    # Next, verify it can read an empty manifest; this is to ensure
+    # that we can point Manifest at the empty manifest without exploding,
+    # same for ManifestCheckout; this sort of thing is primarily useful
+    # to ensure no step of an include assumes everything is yet assembled.
+    empty_path = os.path.join(self.manifest_dir, 'empty.xml')
+    osutils.WriteFile(empty_path, '<manifest/>')
+    git.Manifest(empty_path)
+    git.ManifestCheckout(self.tempdir, manifest_path=empty_path)
+
+    # Next, verify include works.
+    osutils.WriteFile(
+        os.path.join(self.manifest_dir, 'include-target.xml'),
+        """
+        <manifest>
+          <remote name="foon" fetch="http://localhost" />
+        </manifest>""")
+    manifest = git.ManifestCheckout(self.tempdir)
+    self.assertEqual(list(manifest.checkouts_by_name), ['monkeys'])
+    self.assertEqual(list(manifest.remotes), ['foon'])
+
+  # pylint: disable=E1101
+  def testGetManifestsBranch(self):
+    func = git.ManifestCheckout._GetManifestsBranch
+    manifest = self.manifest_dir
+    repo_root = self.tempdir
+
+    # pylint: disable=W0613
+    def reconfig(merge='master', origin='origin'):
+      if merge is not None:
+        merge = 'refs/heads/%s' % merge
+      for key in ('merge', 'origin'):
+        val = locals()[key]
+        key = 'branch.default.%s' % key
+        if val is None:
+          git.RunGit(manifest, ['config', '--unset', key], error_code_ok=True)
+        else:
+          git.RunGit(manifest, ['config', key, val])
+
+    # First, verify our assumptions about a fresh repo init are correct.
+    self.assertEqual('default', git.GetCurrentBranch(manifest))
+    self.assertEqual('master', func(repo_root))
+
+    # Ensure we can handle a missing origin; this can occur jumping between
+    # branches, and can be worked around.
+    reconfig(origin=None)
+    self.assertEqual('default', git.GetCurrentBranch(manifest))
+    self.assertEqual('master', func(repo_root))
+
+    def assertExcept(message, **kwargs):
+      reconfig(**kwargs)
+      self.assertRaises2(OSError, func, repo_root, ex_msg=message,
+                         check_attrs={'errno': errno.ENOENT})
+
+    # No merge target means the configuration isn't usable, period.
+    assertExcept("git tracking configuration for that branch is broken",
+                 merge=None)
+
+    # Ensure we detect if we're on the wrong branch, even if it has
+    # tracking setup.
+    git.RunGit(manifest, ['checkout', '-t', 'origin/master', '-b', 'test'])
+    assertExcept("It should be checked out to 'default'")
+
+    # Ensure we handle detached HEAD w/ an appropriate exception.
+    git.RunGit(manifest, ['checkout', '--detach', 'test'])
+    assertExcept("It should be checked out to 'default'")
+
+    # Finally, ensure that if the default branch is non-existant, we still throw
+    # a usable exception.
+    git.RunGit(manifest, ['branch', '-d', 'default'])
+    assertExcept("It should be checked out to 'default'")
+
+  def testGitMatchBranchName(self):
+    git_repo = os.path.join(self.tempdir, '.repo', 'manifests')
+
+    branches = git.MatchBranchName(git_repo, 'default', namespace='')
+    self.assertEqual(branches, ['refs/heads/default'])
+
+    branches = git.MatchBranchName(git_repo, 'default', namespace='refs/heads/')
+    self.assertEqual(branches, ['default'])
+
+    branches = git.MatchBranchName(git_repo, 'origin/f.*link',
+                                   namespace='refs/remotes/')
+    self.assertTrue('firmware-link-' in branches[0])
+
+    branches = git.MatchBranchName(git_repo, 'r23')
+    self.assertEqual(branches, ['refs/remotes/origin/release-R23-2913.B'])
+
+
+class Test_iflatten_instance(cros_test_lib.TestCase):
+  """Test iflatten_instance function."""
+
+  def test_it(self):
+    f = lambda *a: list(cros_build_lib.iflatten_instance(*a))
+    self.assertEqual([1, 2], f([1, 2]))
+    self.assertEqual([1, '2a'], f([1, '2a']))
+    self.assertEqual([1, 2, 'b'], f([1, [2, 'b']]))
+    self.assertEqual([1, 2, 'f', 'd', 'a', 's'], f([1, 2, ('fdas',)], int))
+    self.assertEqual([''], f(''))
+
+
+class TestKeyValueFiles(cros_test_lib.TempDirTestCase):
+  """Tests handling of key/value files."""
+
+  def setUp(self):
+    # Fixture exercising comments, whitespace around '=', single/double
+    # quoting, embedded quotes, and multi-line quoted values.  The literal
+    # \t and \x20 escapes keep the significant whitespace visible.
+    self.contents = """# A comment !@
+A = 1
+AA= 2
+AAA =3
+AAAA\t=\t4
+AAAAA\t   \t=\t   5
+AAAAAA = 6     \t\t# Another comment
+\t
+\t# Aerith lives!
+C = 'D'
+CC= 'D'
+CCC ='D'
+\x20
+ \t# monsters go boom #
+E \t= "Fxxxxx" # Blargl
+EE= "Faaa\taaaa"\x20
+EEE ="Fk  \t  kkkk"\t
+Q = "'q"
+\tQQ ="q'"\x20
+ QQQ='"q"'\t
+R = "r
+"
+RR = "rr
+rrr"
+RRR = 'rrr
+ RRRR
+ rrr
+'
+SSS=" ss
+'ssss'
+ss"
+T="
+ttt"
+"""
+    # The parse we expect from the fixture above, regardless of whether it
+    # is fed in as a path, a file object, or a StringIO.
+    self.expected = {
+        'A': '1',
+        'AA': '2',
+        'AAA': '3',
+        'AAAA': '4',
+        'AAAAA': '5',
+        'AAAAAA': '6',
+        'C': 'D',
+        'CC': 'D',
+        'CCC': 'D',
+        'E': 'Fxxxxx',
+        'EE': 'Faaa\taaaa',
+        'EEE': 'Fk  \t  kkkk',
+        'Q': "'q",
+        'QQ': "q'",
+        'QQQ': '"q"',
+        'R': 'r\n',
+        'RR': 'rr\nrrr',
+        'RRR': 'rrr\n RRRR\n rrr\n',
+        'SSS': ' ss\n\'ssss\'\nss',
+        'T': '\nttt'
+    }
+
+    self.conf_file = os.path.join(self.tempdir, 'file.conf')
+    osutils.WriteFile(self.conf_file, self.contents)
+
+  def _RunAndCompare(self, test_input, multiline):
+    """Parse |test_input| via LoadKeyValueFile and compare to expected."""
+    result = cros_build_lib.LoadKeyValueFile(test_input, multiline=multiline)
+    self.assertEqual(self.expected, result)
+
+  def testLoadFilePath(self):
+    """Verify reading a simple file works"""
+    self._RunAndCompare(self.conf_file, True)
+
+  def testLoadStringIO(self):
+    """Verify passing in StringIO object works."""
+    self._RunAndCompare(StringIO.StringIO(self.contents), True)
+
+  def testLoadFileObject(self):
+    """Verify passing in open file object works."""
+    with open(self.conf_file) as f:
+      self._RunAndCompare(f, True)
+
+  def testNoMultlineValues(self):
+    """Verify exception is thrown when multiline is disabled."""
+    self.assertRaises(ValueError, self._RunAndCompare, self.conf_file, False)
+
+
+class SafeRunTest(cros_test_lib.TestCase):
+  """Tests SafeRun functionality."""
+
+  def _raise_exception(self, e):
+    """Helper that simply raises the given exception."""
+    raise e
+
+  def testRunsSafely(self):
+    """Verify that we are robust to exceptions."""
+    def append_val(value):
+      call_list.append(value)
+
+    call_list = []
+    f_list = [functools.partial(append_val, 1),
+              functools.partial(self._raise_exception,
+                                Exception('testRunsSafely exception.')),
+              functools.partial(append_val, 2)]
+    # SafeRun must run every functor despite the middle one raising, then
+    # re-raise the exception it encountered.
+    self.assertRaises(Exception, cros_build_lib.SafeRun, f_list)
+    self.assertEquals(call_list, [1, 2])
+
+  def testRaisesFirstException(self):
+    """Verify we raise the first exception when multiple are encountered."""
+    class E1(Exception):
+      """Simple exception class."""
+      pass
+
+    class E2(Exception):
+      """Simple exception class."""
+      pass
+
+    f_list = [functools.partial(self._raise_exception, e) for e in [E1, E2]]
+    self.assertRaises(E1, cros_build_lib.SafeRun, f_list)
+
+  def testCombinedRaise(self):
+    """Raises a RuntimeError with exceptions combined."""
+    f_list = [functools.partial(self._raise_exception, Exception())] * 3
+    self.assertRaises(RuntimeError, cros_build_lib.SafeRun, f_list,
+                      combine_exceptions=True)
+
+
+class FrozenAttributesTest(cros_test_lib.TestCase):
+  """Tests FrozenAttributesMixin functionality."""
+
+  class DummyClass(object):
+    """Any class that does not override __setattr__."""
+
+  class SetattrClass(object):
+    """Class that does override __setattr__."""
+    SETATTR_OFFSET = 10
+    def __setattr__(self, attr, value):
+      """Adjust value here to later confirm that this code ran."""
+      object.__setattr__(self, attr, self.SETATTR_OFFSET + value)
+
+  def _TestBasics(self, cls):
+    """Set attrs, freeze, then confirm all further sets are rejected."""
+    # pylint: disable=W0201
+    def _Expected(val):
+      # SetattrClass-derived classes shift stored values by SETATTR_OFFSET;
+      # plain classes store them unchanged.
+      return getattr(cls, 'SETATTR_OFFSET', 0) + val
+
+    obj = cls()
+    obj.a = 1
+    obj.b = 2
+    self.assertEquals(_Expected(1), obj.a)
+    self.assertEquals(_Expected(2), obj.b)
+
+    obj.Freeze()
+    # Updating an existing attribute must fail and leave the old value.
+    self.assertRaises(cros_build_lib.AttributeFrozenError, setattr, obj, 'a', 3)
+    self.assertEquals(_Expected(1), obj.a)
+
+    # Adding a brand new attribute must fail too.
+    self.assertRaises(cros_build_lib.AttributeFrozenError, setattr, obj, 'c', 3)
+    self.assertFalse(hasattr(obj, 'c'))
+
+  def testFrozenByMetaclass(self):
+    """Test attribute freezing with FrozenAttributesClass."""
+    class DummyByMeta(self.DummyClass):
+      """Class that freezes DummyClass using metaclass construct."""
+      __metaclass__ = cros_build_lib.FrozenAttributesClass
+
+    self._TestBasics(DummyByMeta)
+
+    class SetattrByMeta(self.SetattrClass):
+      """Class that freezes SetattrClass using metaclass construct."""
+      __metaclass__ = cros_build_lib.FrozenAttributesClass
+
+    self._TestBasics(SetattrByMeta)
+
+  def testFrozenByMixinFirst(self):
+    """Test attribute freezing with FrozenAttributesMixin first in hierarchy."""
+    class Dummy(cros_build_lib.FrozenAttributesMixin, self.DummyClass):
+      """Class that freezes DummyClass using mixin construct."""
+
+    self._TestBasics(Dummy)
+
+    class Setattr(cros_build_lib.FrozenAttributesMixin, self.SetattrClass):
+      """Class that freezes SetattrClass using mixin construct."""
+
+    self._TestBasics(Setattr)
+
+  def testFrozenByMixinLast(self):
+    """Test attribute freezing with FrozenAttributesMixin last in hierarchy."""
+    class Dummy(self.DummyClass, cros_build_lib.FrozenAttributesMixin):
+      """Class that freezes DummyClass using mixin construct."""
+
+    self._TestBasics(Dummy)
+
+    class Setattr(self.SetattrClass, cros_build_lib.FrozenAttributesMixin):
+      """Class that freezes SetattrClass using mixin construct."""
+
+    self._TestBasics(Setattr)
+
+
+class TestGetIPv4Address(RunCommandTestCase):
+  """Tests the GetIPv4Address function."""
+
+  # Canned `ip addr show` output: eth0 is down (no inet line), eth1 is up
+  # with both an IPv4 and an IPv6 address.
+  IP_GLOBAL_OUTPUT = """
+1: lo: <LOOPBACK,UP,LOWER_UP> mtu 16436 qdisc noqueue state UNKNOWN
+    link/loopback 00:00:00:00:00:00 brd 00:00:00:00:00:00
+2: eth0: <NO-CARRIER,BROADCAST,MULTICAST,UP> mtu 1500 qdisc pfifo_fast state \
+DOWN qlen 1000
+    link/ether cc:cc:cc:cc:cc:cc brd ff:ff:ff:ff:ff:ff
+3: eth1: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc pfifo_fast state UP \
+qlen 1000
+    link/ether dd:dd:dd:dd:dd:dd brd ff:ff:ff:ff:ff:ff
+    inet 111.11.11.111/22 brd 111.11.11.255 scope global eth1
+    inet6 cdef:0:cdef:cdef:cdef:cdef:cdef:cdef/64 scope global dynamic
+       valid_lft 2592000sec preferred_lft 604800sec
+"""
+
+  def testGetIPv4AddressParseResult(self):
+    """Verifies we can parse the output and get correct IP address."""
+    self.rc.AddCmdResult(partial_mock.In('ip'), output=self.IP_GLOBAL_OUTPUT)
+    self.assertEqual(cros_build_lib.GetIPv4Address(), '111.11.11.111')
+
+  def testGetIPv4Address(self):
+    """Tests that correct shell command is called."""
+    cros_build_lib.GetIPv4Address(global_ip=False, dev='eth0')
+    self.rc.assertCommandContains(
+        ['ip', 'addr', 'show', 'scope', 'host', 'dev', 'eth0'])
+
+    cros_build_lib.GetIPv4Address(global_ip=True)
+    self.rc.assertCommandContains(['ip', 'addr', 'show', 'scope', 'global'])
+
+
+class TestGetHostname(cros_test_lib.MockTestCase):
+  """Tests GetHostName & GetHostDomain functionality."""
+
+  def setUp(self):
+    # Stub the socket layer: the bare hostname is 'm!!n' and reverse DNS
+    # resolves it to the fully qualified 'm!!n.google.com'.
+    self.gethostname_mock = self.PatchObject(
+        socket, 'gethostname', return_value='m!!n')
+    self.gethostbyaddr_mock = self.PatchObject(
+        socket, 'gethostbyaddr', return_value=(
+            'm!!n.google.com', ('cow', 'bar',), ('127.0.0.1.a',)))
+
+  def testGetHostNameNonQualified(self):
+    """Verify non-qualified behavior"""
+    self.assertEqual(cros_build_lib.GetHostName(), 'm!!n')
+
+  def testGetHostNameFullyQualified(self):
+    """Verify fully qualified behavior"""
+    self.assertEqual(cros_build_lib.GetHostName(fully_qualified=True),
+                     'm!!n.google.com')
+
+  def testGetHostNameBadDns(self):
+    """Do not fail when the user's dns is bad"""
+    # Falls back to the unqualified name when reverse lookup fails.
+    self.gethostbyaddr_mock.side_effect = socket.gaierror('should be caught')
+    self.assertEqual(cros_build_lib.GetHostName(), 'm!!n')
+
+  def testGetHostDomain(self):
+    """Verify basic behavior"""
+    self.assertEqual(cros_build_lib.GetHostDomain(), 'google.com')
+
+
+class TestGetChrootVersion(cros_test_lib.MockTestCase):
+  """Tests GetChrootVersion functionality."""
+
+  def testSimpleBuildroot(self):
+    """Verify buildroot arg works"""
+    # Note: the trailing newline in the version file is stripped.
+    read_mock = self.PatchObject(osutils, 'ReadFile', return_value='12\n')
+    ret = cros_build_lib.GetChrootVersion(buildroot='/build/root')
+    self.assertEqual(ret, '12')
+    read_mock.assert_called_with('/build/root/chroot/etc/cros_chroot_version')
+
+  def testSimpleChroot(self):
+    """Verify chroot arg works"""
+    read_mock = self.PatchObject(osutils, 'ReadFile', return_value='70')
+    ret = cros_build_lib.GetChrootVersion(chroot='/ch/root')
+    self.assertEqual(ret, '70')
+    read_mock.assert_called_with('/ch/root/etc/cros_chroot_version')
+
+  def testNoChroot(self):
+    """Verify we don't blow up when there is no chroot yet"""
+    # A missing version file yields None rather than an exception.
+    ret = cros_build_lib.GetChrootVersion(chroot='/.$om3/place/nowhere')
+    self.assertEqual(ret, None)
+
+
+class CollectionTest(cros_test_lib.TestCase):
+  """Tests for Collection helper."""
+
+  def testDefaults(self):
+    """Verify default values kick in."""
+    O = cros_build_lib.Collection('O', a=0, b='string', c={})
+    o = O()
+    self.assertEqual(o.a, 0)
+    self.assertEqual(o.b, 'string')
+    self.assertEqual(o.c, {})
+
+  def testOverrideDefaults(self):
+    """Verify we can set custom values at instantiation time."""
+    O = cros_build_lib.Collection('O', a=0, b='string', c={})
+    o = O(a=1000)
+    self.assertEqual(o.a, 1000)
+    self.assertEqual(o.b, 'string')
+    self.assertEqual(o.c, {})
+
+  def testSetNoNewMembers(self):
+    """Verify we cannot add new members after the fact."""
+    O = cros_build_lib.Collection('O', a=0, b='string', c={})
+    o = O()
+
+    # Need the func since self.assertRaises evaluates the args in this scope.
+    def _setit(collection):
+      collection.does_not_exit = 10
+    self.assertRaises(AttributeError, _setit, o)
+    self.assertRaises(AttributeError, setattr, o, 'new_guy', 10)
+
+  def testGetNoNewMembers(self):
+    """Verify we cannot get new members after the fact."""
+    O = cros_build_lib.Collection('O', a=0, b='string', c={})
+    o = O()
+
+    # Need the func since self.assertRaises evaluates the args in this scope.
+    def _getit(collection):
+      return collection.does_not_exit
+    self.assertRaises(AttributeError, _getit, o)
+    self.assertRaises(AttributeError, getattr, o, 'foooo')
+
+  def testNewValue(self):
+    """Verify we change members correctly."""
+    O = cros_build_lib.Collection('O', a=0, b='string', c={})
+    o = O()
+    o.a = 'a string'
+    o.c = 123
+    self.assertEqual(o.a, 'a string')
+    self.assertEqual(o.b, 'string')
+    self.assertEqual(o.c, 123)
+
+  def testString(self):
+    """Make sure the string representation is readable by da hue mans."""
+    O = cros_build_lib.Collection('O', a=0, b='string', c={})
+    o = O()
+    self.assertEqual("Collection_O(a=0, b='string', c={})", str(o))
+
+
+class GetImageDiskPartitionInfoTests(RunCommandTestCase):
+  """Tests the GetImageDiskPartitionInfo function."""
+
+  # Canned `parted -m ... print` machine-readable output (used outside the
+  # chroot).  Sizes are in MB.
+  SAMPLE_PARTED = """/foo/chromiumos_qemu_image.bin:3360MB:file:512:512:gpt:;
+11:0.03MB:8.42MB:8.39MB::RWFW:;
+6:8.42MB:8.42MB:0.00MB::KERN-C:;
+7:8.42MB:8.42MB:0.00MB::ROOT-C:;
+9:8.42MB:8.42MB:0.00MB::reserved:;
+10:8.42MB:8.42MB:0.00MB::reserved:;
+2:10.5MB:27.3MB:16.8MB::KERN-A:;
+4:27.3MB:44.0MB:16.8MB::KERN-B:;
+8:44.0MB:60.8MB:16.8MB:ext4:OEM:;
+12:128MB:145MB:16.8MB:fat16:EFI-SYSTEM:boot;
+5:145MB:2292MB:2147MB::ROOT-B:;
+3:2292MB:4440MB:2147MB:ext2:ROOT-A:;
+1:4440MB:7661MB:3221MB:ext4:STATE:;
+"""
+
+  # Canned `cgpt show` output (used inside the chroot).  Start/size columns
+  # are in 512-byte sectors.
+  SAMPLE_CGPT = """
+       start        size    part  contents
+           0           1          PMBR (Boot GUID: 88FB7EB8-2B3F-B943-B933-\
+EEC571FFB6E1)
+           1           1          Pri GPT header
+           2          32          Pri GPT table
+     1921024     2097152       1  Label: "STATE"
+                                  Type: Linux data
+                                  UUID: EEBD83BE-397E-BD44-878B-0DDDD5A5C510
+       20480       32768       2  Label: "KERN-A"
+                                  Type: ChromeOS kernel
+                                  UUID: 7007C2F3-08E5-AB40-A4BC-FF5B01F5460D
+                                  Attr: priority=15 tries=15 successful=1
+     1101824      819200       3  Label: "ROOT-A"
+                                  Type: ChromeOS rootfs
+                                  UUID: F4C5C3AD-027F-894B-80CD-3DEC57932948
+       53248       32768       4  Label: "KERN-B"
+                                  Type: ChromeOS kernel
+                                  UUID: C85FB478-404C-8741-ADB8-11312A35880D
+                                  Attr: priority=0 tries=0 successful=0
+      282624      819200       5  Label: "ROOT-B"
+                                  Type: ChromeOS rootfs
+                                  UUID: A99F4231-1EC3-C542-AC0C-DF3729F5DB07
+       16448           1       6  Label: "KERN-C"
+                                  Type: ChromeOS kernel
+                                  UUID: 81F0E336-FAC9-174D-A08C-864FE627B637
+                                  Attr: priority=0 tries=0 successful=0
+       16449           1       7  Label: "ROOT-C"
+                                  Type: ChromeOS rootfs
+                                  UUID: 9E127FCA-30C1-044E-A5F2-DF74E6932692
+       86016       32768       8  Label: "OEM"
+                                  Type: Linux data
+                                  UUID: 72986347-A37C-684F-9A19-4DBAF41C55A9
+       16450           1       9  Label: "reserved"
+                                  Type: ChromeOS reserved
+                                  UUID: BA85A0A7-1850-964D-8EF8-6707AC106C3A
+       16451           1      10  Label: "reserved"
+                                  Type: ChromeOS reserved
+                                  UUID: 16C9EC9B-50FA-DD46-98DC-F781360817B4
+          64       16384      11  Label: "RWFW"
+                                  Type: ChromeOS firmware
+                                  UUID: BE8AECB9-4F78-7C44-8F23-5A9273B7EC8F
+      249856       32768      12  Label: "EFI-SYSTEM"
+                                  Type: EFI System Partition
+                                  UUID: 88FB7EB8-2B3F-B943-B933-EEC571FFB6E1
+     4050847          32          Sec GPT table
+     4050879           1          Sec GPT header
+"""
+
+  def testCgpt(self):
+    """Tests that we can list all partitions with `cgpt` correctly."""
+    self.PatchObject(cros_build_lib, 'IsInsideChroot', return_value=True)
+    self.rc.AddCmdResult(partial_mock.Ignore(), output=self.SAMPLE_CGPT)
+    partitions = cros_build_lib.GetImageDiskPartitionInfo('...', unit='B')
+    # Expected values are sector counts from SAMPLE_CGPT times 512 bytes.
+    self.assertEqual(partitions['STATE'].start, 983564288)
+    self.assertEqual(partitions['STATE'].size, 1073741824)
+    self.assertEqual(partitions['STATE'].number, 1)
+    self.assertEqual(partitions['STATE'].name, 'STATE')
+    self.assertEqual(partitions['EFI-SYSTEM'].start, 249856 * 512)
+    self.assertEqual(partitions['EFI-SYSTEM'].size, 32768 * 512)
+    self.assertEqual(partitions['EFI-SYSTEM'].number, 12)
+    self.assertEqual(partitions['EFI-SYSTEM'].name, 'EFI-SYSTEM')
+    # Because "reserved" is duplicated, we only have 11 key-value pairs.
+    self.assertEqual(11, len(partitions))
+
+  def testNormalPath(self):
+    """Tests parsing `parted` output when running outside the chroot."""
+    self.PatchObject(cros_build_lib, 'IsInsideChroot', return_value=False)
+    self.rc.AddCmdResult(partial_mock.Ignore(), output=self.SAMPLE_PARTED)
+    partitions = cros_build_lib.GetImageDiskPartitionInfo('_ignored')
+    # Because "reserved" is duplicated, we only have 11 key-value pairs.
+    self.assertEqual(11, len(partitions))
+    self.assertEqual(1, partitions['STATE'].number)
+    self.assertEqual(2147, partitions['ROOT-A'].size)
+
+  def testKeyedByNumber(self):
+    """Tests keying the result by partition number instead of label."""
+    self.PatchObject(cros_build_lib, 'IsInsideChroot', return_value=False)
+    self.rc.AddCmdResult(partial_mock.Ignore(), output=self.SAMPLE_PARTED)
+    partitions = cros_build_lib.GetImageDiskPartitionInfo(
+        '_ignored', key_selector='number'
+    )
+    # Numbers are unique, so both "reserved" partitions are kept: 12 total.
+    self.assertEqual(12, len(partitions))
+    self.assertEqual('STATE', partitions[1].name)
+    self.assertEqual(2147, partitions[3].size)
+    self.assertEqual('reserved', partitions[9].name)
+    self.assertEqual('reserved', partitions[10].name)
+
+  def testChangeUnitOutsideChroot(self):
+    """Tests that the requested unit is passed through to `parted`."""
+
+    def changeUnit(unit):
+      cros_build_lib.GetImageDiskPartitionInfo('_ignored', unit)
+      self.assertCommandContains(
+          ['-m', '_ignored', 'unit', unit, 'print'],
+      )
+
+    self.PatchObject(cros_build_lib, 'IsInsideChroot', return_value=False)
+    self.rc.AddCmdResult(partial_mock.Ignore(), output=self.SAMPLE_PARTED)
+    # We must use 2-char units here because the mocked output is in 'MB'.
+    changeUnit('MB')
+    changeUnit('KB')
+
+  def testChangeUnitInsideChroot(self):
+    """Tests unit conversion of `cgpt` sector values inside the chroot."""
+    self.PatchObject(cros_build_lib, 'IsInsideChroot', return_value=True)
+    self.rc.AddCmdResult(partial_mock.Ignore(), output=self.SAMPLE_CGPT)
+    partitions = cros_build_lib.GetImageDiskPartitionInfo('_ignored', 'B')
+    self.assertEqual(partitions['STATE'].start, 983564288)
+    self.assertEqual(partitions['STATE'].size, 1073741824)
+    partitions = cros_build_lib.GetImageDiskPartitionInfo('_ignored', 'KB')
+    self.assertEqual(partitions['STATE'].start, 983564288 / 1000.0)
+    self.assertEqual(partitions['STATE'].size, 1073741824 / 1000.0)
+    partitions = cros_build_lib.GetImageDiskPartitionInfo('_ignored', 'MB')
+    self.assertEqual(partitions['STATE'].start, 983564288 / 10.0**6)
+    self.assertEqual(partitions['STATE'].size, 1073741824 / 10.0**6)
+
+    # Unsupported units raise KeyError.
+    self.assertRaises(KeyError, cros_build_lib.GetImageDiskPartitionInfo,
+                      '_ignored', 'PB')
diff --git a/lib/cros_import.py b/lib/cros_import.py
new file mode 100644
index 0000000..cd32550
--- /dev/null
+++ b/lib/cros_import.py
@@ -0,0 +1,49 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions for importing python modules."""
+
+from __future__ import print_function
+
+
+def ImportModule(target):
+  """Import |target| and return a reference to it.
+
+  This uses the same import mechanism as the "import" statement.  That means
+  references get added to the global scope and the results are cached.  This
+  is different from the imp module which always loads and does not update the
+  existing variable/module/etc... scope.
+
+  Examples:
+    # This statement:
+    module = ImportModule('chromite.lib.cros_build_lib')
+    # Is equivalent to:
+    module = ImportModule(['chromite', 'lib', 'cros_build_lib'])
+    # Is equivalent to:
+    import chromite.lib.cros_build_lib
+    module = chromite.lib.cros_build_lib
+
+  Args:
+    target: A name like you'd use with the "import" statement (ignoring the
+      "from" part). May also be an iterable of the path components.
+
+  Returns:
+    A reference to the module.
+  """
+  # Normalize |target| into a dotted string and |parts| into a list.
+  if isinstance(target, basestring):
+    parts = target.split('.')
+  else:
+    parts = list(target)
+    target = '.'.join(parts)
+
+  # This caches things like normal, so no need to worry about overhead of
+  # reloading modules multiple times.
+  module = __import__(target)
+
+  # __import__ gets us the root of the namespace import; walk our way up.
+  for attr in parts[1:]:
+    module = getattr(module, attr)
+
+  return module
diff --git a/lib/cros_import_unittest b/lib/cros_import_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/cros_import_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/cros_import_unittest.py b/lib/cros_import_unittest.py
new file mode 100644
index 0000000..a444866
--- /dev/null
+++ b/lib/cros_import_unittest.py
@@ -0,0 +1,40 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the cros_import module."""
+
+from __future__ import print_function
+
+from chromite.lib import cros_import
+from chromite.lib import cros_test_lib
+
+
+class ImportTest(cros_test_lib.TempDirTestCase):
+  """Tests for the ImportModule function."""
+
+  def testMissingModule(self):
+    """Check behavior on unknown modules."""
+    self.assertRaises(ImportError, cros_import.ImportModule, 'asdf.aja.ew.q.a')
+
+  def _testImportModule(self, target):
+    """Verify we can import |target| successfully."""
+    # The target always resolves to cros_import itself, so checking for its
+    # ImportModule attribute proves we got the right (leaf) module back.
+    module = cros_import.ImportModule(target)
+    self.assertTrue(hasattr(module, 'ImportModule'))
+
+  def testImportString(self):
+    """Verify we can import using a string."""
+    self._testImportModule('chromite.lib.cros_import')
+
+  def testImportTupleList(self):
+    """Verify we can import using a tuple & list."""
+    parts = ('chromite', 'lib', 'cros_import')
+    self._testImportModule(parts)
+    self._testImportModule(list(parts))
+
+  def testImportGenerator(self):
+    """Verify we can import using a generator."""
+    def target():
+      for p in ('chromite', 'lib', 'cros_import'):
+        yield p
+    self._testImportModule(target())
diff --git a/lib/cros_logging.py b/lib/cros_logging.py
new file mode 100644
index 0000000..9c66d85
--- /dev/null
+++ b/lib/cros_logging.py
@@ -0,0 +1,101 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Logging module to be used by all scripts.
+
+cros_logging is a wrapper around logging with additional support for NOTICE
+level. This is to be used instead of the default logging module. The new
+logging level can only be used from here.
+"""
+
+from __future__ import print_function
+
+import sys
+# pylint: disable=unused-wildcard-import, wildcard-import
+from logging import *
+# pylint: enable=unused-wildcard-import, wildcard-import
+
+# Have to import shutdown explicitly from logging because it is not included
+# in logging's __all__.
+# pylint: disable=unused-import
+from logging import shutdown
+# pylint: enable=unused-import
+
+
+# Notice Level.
+NOTICE = 25
+addLevelName(NOTICE, 'NOTICE')
+
+
+# Notice implementation.
+def notice(message, *args, **kwargs):
+  """Log 'msg % args' with severity 'NOTICE' (between INFO and WARNING)."""
+  log(NOTICE, message, *args, **kwargs)
+
+
+# Only buildbot aware entry-points need to spew buildbot specific logs. Require
+# user action for the special log lines.
+_buildbot_markers_enabled = False
+def EnableBuildbotMarkers():
+  """Enable emitting @@@...@@@ marker lines for buildbot log parsing."""
+  # pylint: disable=global-statement
+  global _buildbot_markers_enabled
+  _buildbot_markers_enabled = True
+
+
+def _PrintForBuildbot(handle, buildbot_tag, *args):
+  """Log a line for buildbot.
+
+  This function dumps a line to log recognizable by buildbot if
+  EnableBuildbotMarkers has been called. Otherwise, it dumps the same line in a
+  human friendly way that buildbot ignores.
+
+  Args:
+    handle: The pipe to dump the log to. If None, log to sys.stderr.
+    buildbot_tag: A tag specifying the type of buildbot log.
+    *args: The rest of the str arguments to be dumped to the log.
+  """
+  if _buildbot_markers_enabled:
+    # Buildbot format: @@@TAG@arg1@arg2@@@
+    args_separator = '@'
+    args_prefix = '@'
+    end_marker = '@@@'
+  else:
+    # Human-readable format: TAG: arg1; arg2
+    args_separator = '; '
+    args_prefix = ': '
+    end_marker = ''
+
+  # Cast each argument, because we end up getting all sorts of objects from
+  # callers.
+  suffix = args_separator.join([str(x) for x in args])
+  if suffix:
+    suffix = args_prefix + suffix
+  # Surround with newlines so the marker always sits alone on its own line.
+  line = '\n' + end_marker + buildbot_tag + suffix + end_marker + '\n'
+
+  if handle is None:
+    handle = sys.stderr
+  handle.write(line)
+
+
+def PrintBuildbotLink(text, url, handle=None):
+  """Prints out a link to buildbot.
+
+  Args:
+    text: The link text to display.
+    url: The link target.
+    handle: Pipe to write to; defaults to sys.stderr.
+  """
+  _PrintForBuildbot(handle, 'STEP_LINK', text, url)
+
+
+def PrintBuildbotStepText(text, handle=None):
+  """Prints out stage text to buildbot."""
+  _PrintForBuildbot(handle, 'STEP_TEXT', text)
+
+
+def PrintBuildbotStepWarnings(handle=None):
+  """Marks a stage as having warnings."""
+  _PrintForBuildbot(handle, 'STEP_WARNINGS')
+
+
+def PrintBuildbotStepFailure(handle=None):
+  """Marks a stage as having failures."""
+  _PrintForBuildbot(handle, 'STEP_FAILURE')
+
+
+def PrintBuildbotStepName(name, handle=None):
+  """Marks a step name for buildbot to display."""
+  _PrintForBuildbot(handle, 'BUILD_STEP', name)
diff --git a/lib/cros_logging_unittest b/lib/cros_logging_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/cros_logging_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/cros_logging_unittest.py b/lib/cros_logging_unittest.py
new file mode 100644
index 0000000..4a6ad95
--- /dev/null
+++ b/lib/cros_logging_unittest.py
@@ -0,0 +1,78 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for cros_logging."""
+
+from __future__ import print_function
+
+import sys
+
+from chromite.lib import cros_logging as logging
+from chromite.lib import cros_test_lib
+
+
class CrosloggingTest(cros_test_lib.OutputTestCase):
  """Test logging works as expected."""

  def setUp(self):
    # Route root-logger records to stdout so the OutputCapturer sees them.
    self.logger = logging.getLogger()
    sh = logging.StreamHandler(sys.stdout)
    self.logger.addHandler(sh)

  def AssertLogContainsMsg(self, msg, functor, *args, **kwargs):
    """Asserts that calling functor logs a line that contains msg.

    Args:
      msg: The message to look for.
      functor: A function taking no arguments to test.
      *args, **kwargs: passthrough arguments to AssertOutputContainsLine.
    """
    with self.OutputCapturer():
      functor()
    self.AssertOutputContainsLine(msg, *args, **kwargs)

  def testNotice(self):
    """Test logging.notice works and is between INFO and WARNING."""
    msg = 'notice message'
    self.logger.setLevel(logging.INFO)
    self.AssertLogContainsMsg(msg, lambda: logging.notice(msg))
    self.logger.setLevel(logging.WARNING)
    self.AssertLogContainsMsg(msg, lambda: logging.notice(msg), invert=True)

  def testPrintBuildbotFunctionsNoMarker(self):
    """PrintBuildbot* without markers should not be recognized by buildbot."""
    self.AssertLogContainsMsg('@@@STEP_LINK@',
                              lambda: logging.PrintBuildbotLink('name', 'url'),
                              check_stderr=True, invert=True)
    # Was '@@@@STEP_TEXT@' (four @s), which can never occur in the output and
    # made this inverted assertion vacuously true.
    self.AssertLogContainsMsg('@@@STEP_TEXT@',
                              lambda: logging.PrintBuildbotStepText('text'),
                              check_stderr=True, invert=True)
    self.AssertLogContainsMsg('@@@STEP_WARNINGS@@@',
                              logging.PrintBuildbotStepWarnings,
                              check_stderr=True, invert=True)
    self.AssertLogContainsMsg('@@@STEP_FAILURE@@@',
                              logging.PrintBuildbotStepFailure,
                              check_stderr=True, invert=True)
    self.AssertLogContainsMsg('@@@BUILD_STEP',
                              lambda: logging.PrintBuildbotStepName('name'),
                              check_stderr=True, invert=True)

  def testPrintBuildbotFunctionsWithMarker(self):
    """PrintBuildbot* with markers should be recognized by buildbot."""
    logging.EnableBuildbotMarkers()
    self.AssertLogContainsMsg('@@@STEP_LINK@name@url@@@',
                              lambda: logging.PrintBuildbotLink('name', 'url'),
                              check_stderr=True)
    self.AssertLogContainsMsg('@@@STEP_TEXT@text@@@',
                              lambda: logging.PrintBuildbotStepText('text'),
                              check_stderr=True)
    self.AssertLogContainsMsg('@@@STEP_WARNINGS@@@',
                              logging.PrintBuildbotStepWarnings,
                              check_stderr=True)
    self.AssertLogContainsMsg('@@@STEP_FAILURE@@@',
                              logging.PrintBuildbotStepFailure,
                              check_stderr=True)
    self.AssertLogContainsMsg('@@@BUILD_STEP@name@@@',
                              lambda: logging.PrintBuildbotStepName('name'),
                              check_stderr=True)
diff --git a/lib/cros_test_lib.py b/lib/cros_test_lib.py
new file mode 100644
index 0000000..e0a3d1a
--- /dev/null
+++ b/lib/cros_test_lib.py
@@ -0,0 +1,1953 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Cros unit test library, with utility functions."""
+
+from __future__ import print_function
+
+import collections
+import contextlib
+import cookielib
+import cStringIO
+import datetime
+import exceptions
+import functools
+import hashlib
+import json
+import mock
+import mox
+import netrc
+import os
+import re
+import socket
+import stat
+import sys
+import time
+import unittest
+import urllib
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.lib import blueprint_lib
+from chromite.lib import bootstrap_lib
+from chromite.lib import brick_lib
+from chromite.lib import cidb
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import gob_util
+from chromite.lib import graphite
+from chromite.lib import operation
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import remote_access
+from chromite.lib import retry_util
+from chromite.lib import terminal
+from chromite.lib import timeout_util
+from chromite.lib import workspace_lib
+
+
# Chromite site configuration, fetched once when this module is imported.
site_config = config_lib.GetConfig()


# Unit tests should never connect to the live prod or debug instances
# of the cidb. This call ensures that they will not accidentally
# do so through the normal cidb SetUp / GetConnectionForBuilder factory.
cidb.CIDBConnectionFactory.SetupMockCidb()

# Likewise for statsd and elastic search.
graphite.ESMetadataFactory.SetupReadOnly()
graphite.StatsFactory.SetupMock()


# Node of an in-memory directory tree: |name| is the directory name and
# |contents| is a list of file names and/or nested Directory tuples.
# See CreateOnDiskHierarchy() for the full structure format.
Directory = collections.namedtuple('Directory', ['name', 'contents'])
+
+
class GlobalTestConfig(object):
  """Process-wide settings shared by every test in this run."""

  # Network-touching tests are opt-in; they stay disabled unless the runner
  # turns this on (via --network).
  RUN_NETWORK_TESTS = False
  # How many tests were skipped because networking was not enabled.
  NETWORK_TESTS_SKIPPED = 0
+
+
def NetworkTest(reason='Skipping network test (re-run w/--network)'):
  """Decorator for unit tests. Skip the test if --network is not specified."""
  def Decorator(test_item):
    @functools.wraps(test_item)
    def Wrapper(*args, **kwargs):
      # The flag is consulted at call time rather than decoration time:
      # __main__ (which sets it) has not run yet when tests are defined.
      if not GlobalTestConfig.RUN_NETWORK_TESTS:
        GlobalTestConfig.NETWORK_TESTS_SKIPPED += 1
        raise unittest.SkipTest(reason)
      test_item(*args, **kwargs)

    # Decorating a whole TestCase class wraps its setUp instead, which
    # effectively skips every test in the class when the flag is off.
    if isinstance(test_item, type) and issubclass(test_item, TestCase):
      test_item.setUp = Decorator(test_item.setUp)
      return test_item
    return Wrapper

  return Decorator
+
+
def _FlattenStructure(base_path, dir_struct):
  """Converts a directory structure to a list of paths.

  Directories are returned with a trailing os.sep so callers can tell them
  apart from plain files; nested Directory entries are expanded recursively.
  """
  result = []
  for entry in dir_struct:
    if isinstance(entry, Directory):
      subdir = os.path.join(base_path, entry.name).rstrip(os.sep)
      result.append(subdir + os.sep)
      result.extend(_FlattenStructure(subdir, entry.contents))
    else:
      # Anything that is not a Directory must be a plain file name.
      assert isinstance(entry, basestring)
      result.append(os.path.join(base_path, entry))
  return result
+
+
def CreateOnDiskHierarchy(base_path, dir_struct):
  """Creates on-disk representation of an in-memory directory structure.

  Args:
    base_path: The absolute root of the directory structure.
    dir_struct: A recursively defined data structure that represents a
      directory tree.  The basic form is a list.  Elements can be file names or
      cros_test_lib.Directory objects.  The 'contents' attribute of Directory
      types is a directory structure representing the contents of the directory.
      Examples:
        - ['file1', 'file2']
        - ['file1', Directory('directory', ['deepfile1', 'deepfile2']), 'file2']
  """
  for path in _FlattenStructure(base_path, dir_struct):
    path = os.path.join(base_path, path)
    # A trailing separator marks a directory; everything else is a file.
    if path.endswith(os.sep):
      osutils.SafeMakedirs(path)
    else:
      osutils.Touch(path, makedirs=True)
+
+
+def _VerifyDirectoryIterables(existing, expected):
+  """Compare two iterables representing contents of a directory.
+
+  Paths in |existing| and |expected| will be compared for exact match.
+
+  Args:
+    existing: An iterable containing paths that exist.
+    expected: An iterable of paths that are expected.
+
+  Raises:
+    AssertionError when there is any divergence between |existing| and
+    |expected|.
+  """
+  def FormatPaths(paths):
+    return '\n'.join(sorted(paths))
+
+  existing = set(existing)
+  expected = set(expected)
+
+  unexpected = existing - expected
+  if unexpected:
+    raise AssertionError('Found unexpected paths:\n%s'
+                         % FormatPaths(unexpected))
+  missing = expected - existing
+  if missing:
+    raise AssertionError('These files were expected but not found:\n%s'
+                         % FormatPaths(missing))
+
+
def VerifyOnDiskHierarchy(base_path, dir_struct):
  """Verify that an on-disk directory tree exactly matches a given structure.

  Args:
    base_path: See CreateOnDiskHierarchy()
    dir_struct: See CreateOnDiskHierarchy()

  Raises:
    AssertionError when there is any divergence between the on-disk
    structure and the structure specified by 'dir_struct'.
  """
  _VerifyDirectoryIterables(osutils.DirectoryIterator(base_path),
                            _FlattenStructure(base_path, dir_struct))
+
+
def VerifyTarball(tarball, dir_struct):
  """Compare the contents of a tarball against a directory structure.

  Args:
    tarball: Path to the tarball.
    dir_struct: See CreateOnDiskHierarchy()

  Raises:
    AssertionError when there is any divergence between the tarball and the
    structure specified by 'dir_struct'.
  """
  listing = cros_build_lib.RunCommand(
      ['tar', '-tf', tarball], capture_output=True).output
  normalized = set()
  for entry in listing.splitlines():
    norm = os.path.normpath(entry)
    # normpath strips the trailing slash that marks directories; restore it.
    if entry.endswith('/'):
      norm += '/'
    if norm in normalized:
      raise AssertionError('Duplicate entry %r found in %r!' % (norm, tarball))
    normalized.add(norm)

  _VerifyDirectoryIterables(normalized, _FlattenStructure('', dir_struct))
+
+
class StackedSetup(type):
  """Metaclass to simplify unit testing and make it more robust.

  A metaclass alters the way that classes are initialized, enabling us to
  modify the class dictionary prior to the class being created. We use this
  feature here to modify the way that unit tests work a bit.

  This class does three things:
    1) When a test case is set up or torn down, we now run all setUp and
       tearDown methods in the inheritance tree.
    2) If a setUp or tearDown method fails, we still run tearDown methods
       for any test classes that were partially or completely set up.
    3) All test cases time out after TEST_CASE_TIMEOUT seconds.

  To use this class, set the following in your class:
    __metaclass__ = StackedSetup

  Since cros_test_lib.TestCase uses this metaclass, all derivatives of TestCase
  also inherit the above behavior (unless they override the __metaclass__
  attribute manually.)
  """

  # Default per-test timeout (seconds); a class may override it by declaring
  # its own TEST_CASE_TIMEOUT, or disable timeouts by setting it to None.
  TEST_CASE_TIMEOUT = 10 * 60

  def __new__(mcs, mcs_name, bases, scope):
    """Generate the new class with pointers to original funcs & our helpers"""
    # Stash the class's own setUp/tearDown under __raw_*__ names and install
    # our stacking helpers in their place; the helpers walk the MRO to run
    # every stashed method in the inheritance chain.
    if 'setUp' in scope:
      scope['__raw_setUp__'] = scope.pop('setUp')
    scope['setUp'] = mcs._stacked_setUp

    if 'tearDown' in scope:
      scope['__raw_tearDown__'] = scope.pop('tearDown')
    scope['tearDown'] = mcs._stacked_tearDown

    # Modify all test* methods to time out after TEST_CASE_TIMEOUT seconds.
    timeout = scope.get('TEST_CASE_TIMEOUT', StackedSetup.TEST_CASE_TIMEOUT)
    if timeout is not None:
      for name, func in scope.iteritems():
        if name.startswith('test') and hasattr(func, '__call__'):
          wrapper = timeout_util.TimeoutDecorator(timeout)
          scope[name] = wrapper(func)

    return type.__new__(mcs, mcs_name, bases, scope)

  @staticmethod
  def _walk_mro_stacking(obj, attr, reverse=False):
    """Walk the stacked classes (python method resolution order)

    Args:
      obj: Instance whose class hierarchy is walked.
      attr: Attribute name to collect from each class in the MRO
        (e.g. '__raw_setUp__').
      reverse: If True, walk derived-to-base; otherwise base-to-derived.

    Yields:
      Each distinct underlying function found, deduplicated so a method
      inherited (not overridden) by several classes runs only once.
    """
    iterator = iter if reverse else reversed
    methods = (getattr(x, attr, None) for x in iterator(obj.__class__.__mro__))
    seen = set()
    for x in filter(None, methods):
      # Unwrap Python 2 unbound methods to the underlying function so the
      # same function reached via different classes compares equal.
      x = getattr(x, 'im_func', x)
      if x not in seen:
        seen.add(x)
        yield x

  @staticmethod
  def _stacked_setUp(obj):
    """Run all the setUp funcs; if any fail, run all the tearDown funcs"""
    # __test_was_run__ lets tearDowns know whether setUp fully completed,
    # so last-minute assertions there can decide whether to run.
    obj.__test_was_run__ = False
    try:
      for target in StackedSetup._walk_mro_stacking(obj, '__raw_setUp__'):
        target(obj)
    except:
      # TestCase doesn't trigger tearDowns if setUp failed; thus
      # manually force it ourselves to ensure cleanup occurs.
      StackedSetup._stacked_tearDown(obj)
      raise

    # Now mark the object as fully setUp; this is done so that
    # any last minute assertions in tearDown can know if they should
    # run or not.
    obj.__test_was_run__ = True

  @staticmethod
  def _stacked_tearDown(obj):
    """Run all the tearDown funcs; if any fail, we move on to the next one"""
    exc_info = None
    for target in StackedSetup._walk_mro_stacking(obj, '__raw_tearDown__',
                                                  True):
      # pylint: disable=bare-except
      try:
        target(obj)
      except:
        # Preserve the exception, throw it after running
        # all tearDowns; we throw just the first also.  We suppress
        # pylint's warning here since it can't understand that we're
        # actually raising the exception, just in a nonstandard way.
        if exc_info is None:
          exc_info = sys.exc_info()

    if exc_info:
      # Chuck the saved exception, w/ the same TB from
      # when it occurred.
      raise exc_info[0], exc_info[1], exc_info[2]
+
+
class TruthTable(object):
  """Class to represent a boolean truth table, useful in unit tests.

  If you find yourself testing the behavior of some function that should
  basically follow the behavior of a particular truth table, then this class
  can allow you to fully test that function without being overly verbose
  in the unit test code.

  The following usage is supported on a constructed TruthTable:
  1) Iterate over input lines of the truth table, expressed as tuples of
  bools.
  2) Access a particular input line by index, expressed as a tuple of bools.
  3) Access the expected output for a set of inputs.

  For example, say function "Foo" in module "mod" should consists of the
  following code:

  def Foo(A, B, C):
    return A and B and not C

  In the unittest for Foo, do this:

  def testFoo(self):
    # (True, True, False) is the only input line for which Foo is True.
    truth_table = cros_test_lib.TruthTable(inputs=[(True, True, False)])
    for inputs in truth_table:
      a, b, c = inputs
      result = mod.Foo(a, b, c)
      self.assertEquals(result, truth_table.GetOutput(inputs))
  """

  class TruthTableInputIterator(object):
    """Class to support iteration over inputs of a TruthTable."""

    def __init__(self, truth_table):
      self.truth_table = truth_table
      self.next_line = 0

    def __iter__(self):
      return self

    def __next__(self):
      # Python 3 iteration protocol; delegate to the Python 2 spelling.
      return self.next()

    def next(self):
      if self.next_line < self.truth_table.num_lines:
        self.next_line += 1
        return self.truth_table.GetInputs(self.next_line - 1)
      else:
        raise StopIteration()

  def __init__(self, inputs, input_result=True):
    """Construct a TruthTable from given inputs.

    Args:
      inputs: Iterable of input lines, each expressed as a tuple of bools.
        Each tuple must have the same length.
      input_result: The output intended for each specified input.  For
        truth tables that mostly output True it is more concise to specify
        the false inputs and then set input_result to False.

    Raises:
      ValueError: If |inputs| is empty, or its lines have differing lengths.
    """
    # At least one input required.
    if not inputs:
      raise ValueError('Inputs required to construct TruthTable.')

    # Save each input tuple in a set.  Also confirm that the length
    # of each input tuple is the same.
    self.dimension = len(inputs[0])
    self.num_lines = pow(2, self.dimension)
    self.expected_inputs = set()
    self.expected_inputs_result = input_result

    for input_vals in inputs:
      if len(input_vals) != self.dimension:
        raise ValueError('All TruthTable inputs must have same dimension.')

      self.expected_inputs.add(input_vals)

    # Start generator index at 0.
    self.next_line = 0

  def __len__(self):
    return self.num_lines

  def __iter__(self):
    return self.TruthTableInputIterator(self)

  def GetInputs(self, inputs_index):
    """Get the input line at the given input index.

    Args:
      inputs_index: Following must hold: 0 <= inputs_index < self.num_lines.

    Returns:
      Tuple of bools representing one line of inputs.

    Raises:
      ValueError: If |inputs_index| is out of range.
    """
    if inputs_index >= 0 and inputs_index < self.num_lines:
      line_values = []

      # Iterate through each column in truth table.  Any order will
      # produce a valid truth table, but going backward through
      # columns will produce the traditional truth table ordering.
      # For 2-dimensional example: F,F then F,T then T,F then T,T.
      for col in range(self.dimension - 1, -1, -1):
        # Floor division ('//') extracts the bit for this column.  The
        # previous '/' relied on Python 2 integer division and produced
        # wrong (float-based) results under Python 3.
        line_values.append(bool(inputs_index // pow(2, col) % 2))

      return tuple(line_values)

    raise ValueError('This truth table has no line at index %r.' % inputs_index)

  def GetOutput(self, inputs):
    """Get the boolean output for the given inputs.

    Args:
      inputs: Tuple of bools, length must be equal to self.dimension.

    Returns:
      bool value representing truth table output for given inputs.

    Raises:
      TypeError: If |inputs| is not a tuple.
      ValueError: If |inputs| has the wrong length.
    """
    if not isinstance(inputs, tuple):
      raise TypeError('Truth table inputs must be specified as a tuple.')

    if not len(inputs) == self.dimension:
      raise ValueError('Truth table inputs must match table dimension.')

    return self.expected_inputs_result == (inputs in self.expected_inputs)
+
+
class EasyAttr(dict):
  """Convenient class for simulating objects with attributes in tests.

  An EasyAttr object can be created with any attributes initialized very
  easily.  Examples:

  1) An object with .id=45 and .name="Joe":
  testobj = EasyAttr(id=45, name="Joe")
  2) An object with .title.text="Big" and .owner.text="Joe":
  testobj = EasyAttr(title=EasyAttr(text="Big"), owner=EasyAttr(text="Joe"))
  """

  # No per-instance __dict__; all state lives in the underlying dict.
  __slots__ = ()

  def __getattr__(self, attr):
    if attr in self:
      return self[attr]
    raise AttributeError(attr)

  def __setattr__(self, attr, value):
    self[attr] = value

  def __delattr__(self, attr):
    if attr not in self:
      raise AttributeError(attr)
    del self[attr]

  def __dir__(self):
    # Attributes are exactly the dict keys.
    return self.keys()
+
+
class LogFilter(logging.Filter):
  """A simple log filter that intercepts log messages and stores them."""

  def __init__(self):
    logging.Filter.__init__(self)
    # Accumulates every rendered message, one per line.
    self.messages = cStringIO.StringIO()

  def filter(self, record):
    # Record the rendered message for later inspection by tests, then
    # return False so the record never reaches any handler.
    self.messages.write(record.getMessage() + '\n')
    return False
+
+
class LoggingCapturer(object):
  """Captures all messages emitted by the logging module.

  Usable as a context manager; capturing starts on __enter__ and stops on
  __exit__.
  """

  def __init__(self, logger_name='', log_level=logging.DEBUG):
    # The filter both records each message and suppresses its display.
    self._log_filter = LogFilter()
    self._old_level = None
    self._log_level = log_level
    self.logger_name = logger_name

  def __enter__(self):
    self.StartCapturing()
    return self

  def __exit__(self, exc_type, exc_val, exc_tb):
    self.StopCapturing()

  def StartCapturing(self):
    """Begin capturing logging messages."""
    logger = logging.getLogger(self.logger_name)
    # Remember the old level so StopCapturing can restore it.
    self._old_level = logger.getEffectiveLevel()
    logger.setLevel(self._log_level)
    logger.addFilter(self._log_filter)

  def StopCapturing(self):
    """Stop capturing logging messages."""
    logger = logging.getLogger(self.logger_name)
    logger.setLevel(self._old_level)
    logger.removeFilter(self._log_filter)

  @property
  def messages(self):
    """Everything captured so far, newline-separated."""
    return self._log_filter.messages.getvalue()

  def LogsMatch(self, regex):
    """Checks whether the logs match a given regex."""
    return re.search(regex, self.messages, re.MULTILINE) is not None

  def LogsContain(self, msg):
    """Checks whether the logs contain a given string."""
    return self.LogsMatch(re.escape(msg))
+
+
class TestCase(unittest.TestCase):
  """Basic chromite test case.

  Provides sane setUp/tearDown logic so that tearDown is correctly cleaned up
  (via the StackedSetup metaclass, which stacks setUp/tearDown across the
  whole inheritance tree and applies per-test timeouts).

  Takes care of saving/restoring process-wide settings like the environment so
  that sub-tests don't have to worry about gettings this right.

  Also includes additional assert helpers beyond python stdlib.
  """

  __metaclass__ = StackedSetup

  # List of vars chromite is globally sensitive to and that should
  # be suppressed for tests.
  ENVIRON_VARIABLE_SUPPRESSIONS = ('CROS_CACHEDIR',)

  def __init__(self, *args, **kwargs):
    unittest.TestCase.__init__(self, *args, **kwargs)
    # This is set to keep pylint from complaining.
    self.__test_was_run__ = False

  def setUp(self):
    # Snapshot process-wide state (environment, cwd, umask) so tearDown can
    # restore it regardless of what the test does.
    self.__saved_env__ = os.environ.copy()
    self.__saved_cwd__ = os.getcwd()
    self.__saved_umask__ = os.umask(0o22)
    for x in self.ENVIRON_VARIABLE_SUPPRESSIONS:
      os.environ.pop(x, None)

  def tearDown(self):
    osutils.SetEnvironment(self.__saved_env__)
    os.chdir(self.__saved_cwd__)
    os.umask(self.__saved_umask__)

  def id(self):
    """Return a name that can be passed in via the command line."""
    return '%s.%s' % (self.__class__.__name__, self._testMethodName)

  def __str__(self):
    """Return a pretty name that can be passed in via the command line."""
    return '[%s] %s' % (self.__module__, self.id())

  def assertRaises2(self, exception, functor, *args, **kwargs):
    """Like assertRaises, just with checking of the exception.

    Args:
      exception: The expected exception type to intecept.
      functor: The function to invoke.
      args: Positional args to pass to the function.
      kwargs: Optional args to pass to the function.  Note we pull
        exact_kls, msg, and check_attrs from these kwargs.
      exact_kls: If given, the exception raise must be *exactly* that class
        type; derivatives are a failure.
      check_attrs: If given, a mapping of attribute -> value to assert on
        the resultant exception.  Thus if you wanted to catch a ENOENT, you
        would do:
          assertRaises2(EnvironmentError, func, args,
                        check_attrs={'errno': errno.ENOENT})
      ex_msg: A substring that should be in the stringified exception.
      msg: The error message to be displayed if the exception isn't raised.
        If not given, a suitable one is defaulted to.
      returns: The exception object.
    """
    exact_kls = kwargs.pop('exact_kls', None)
    check_attrs = kwargs.pop('check_attrs', {})
    ex_msg = kwargs.pop('ex_msg', None)
    msg = kwargs.pop('msg', None)
    if msg is None:
      msg = ("%s(*%r, **%r) didn't throw an exception"
             % (functor.__name__, args, kwargs))
    try:
      functor(*args, **kwargs)
      # NOTE: if |exception| is AssertionError itself, this raise is caught
      # below and validated like any other exception.
      raise AssertionError(msg)
    except exception as e:
      if ex_msg:
        self.assertIn(ex_msg, str(e))
      if exact_kls:
        self.assertEqual(e.__class__, exception)
      bad = []
      # .items() rather than .iteritems(): identical in Python 2, and keeps
      # this helper usable under Python 3.
      for attr, required in check_attrs.items():
        self.assertTrue(hasattr(e, attr),
                        msg='%s lacks attr %s' % (e, attr))
        value = getattr(e, attr)
        if value != required:
          bad.append('%s attr is %s, needed to be %s'
                     % (attr, value, required))
      if bad:
        raise AssertionError('\n'.join(bad))
      return e

  def assertExists(self, path):
    """Make sure |path| exists"""
    if not os.path.exists(path):
      # Walk up the tree reporting which ancestor exists, to make the
      # failure message actionable.
      msg = ['path is missing: %s' % path]
      while path != '/':
        path = os.path.dirname(path)
        if not path:
          # If we're given something like "foo", abort once we get to "".
          break
        result = os.path.exists(path)
        msg.append('\tos.path.exists(%s): %s' % (path, result))
        if result:
          msg.append('\tcontents: %r' % os.listdir(path))
          break
      raise self.failureException('\n'.join(msg))

  def assertNotExists(self, path):
    """Make sure |path| does not exist"""
    if os.path.exists(path):
      raise self.failureException('path exists when it should not: %s' % path)

  def assertStartsWith(self, s, prefix):
    """Asserts that |s| starts with |prefix|.

    This function should be preferred over assertTrue(s.startswith(prefix)) for
    it produces better error failure message than the other.
    """
    if not s.startswith(prefix):
      # Message grammar fixed (was "does not starts with").
      raise self.failureException('%s does not start with %s' % (s, prefix))

  def assertEndsWith(self, s, suffix):
    """Asserts that |s| ends with |suffix|.

    This function should be preferred over assertTrue(s.endswith(suffix)) for
    it produces better error failure message than the other.
    """
    if not s.endswith(suffix):
      # Message fixed: it previously said "does not starts with", which was
      # both ungrammatical and wrong for an ends-with check.
      raise self.failureException('%s does not end with %s' % (s, suffix))

  def GetSequenceDiff(self, seq1, seq2):
    """Get a string describing the difference between two sequences.

    Args:
      seq1: First sequence to compare.
      seq2: Second sequence to compare.

    Returns:
      A string that describes how the two sequences differ.
    """
    try:
      self.assertSequenceEqual(seq1, seq2)
    except AssertionError as ex:
      # NOTE: .message is Python 2 only; fine here since this file targets
      # Python 2 (cStringIO/cookielib imports above).
      return ex.message
    else:
      return 'no differences'
+
+
class LoggingTestCase(TestCase):
  """Base class for logging capturer test cases."""

  def AssertLogsMatch(self, log_capturer, regex, inverted=False):
    """Verifies a regex matches the logs."""
    if inverted:
      self.assertFalse(log_capturer.LogsMatch(regex),
                       msg='%r found in %r' % (regex, log_capturer.messages))
    else:
      self.assertTrue(log_capturer.LogsMatch(regex),
                      msg='%r not found in %r' % (regex,
                                                  log_capturer.messages))

  def AssertLogsContain(self, log_capturer, msg, inverted=False):
    """Verifies a message is contained in the logs."""
    return self.AssertLogsMatch(log_capturer, re.escape(msg), inverted=inverted)
+
+
+class OutputTestCase(TestCase):
+  """Base class for cros unit tests with utility methods."""
+
+  # These work with error output from operation module.
+  ERROR_MSG_RE = re.compile(r'^\033\[1;%dm(.+?)(?:\033\[0m)+$' %
+                            (30 + terminal.Color.RED,), re.DOTALL)
+  WARNING_MSG_RE = re.compile(r'^\033\[1;%dm(.+?)(?:\033\[0m)+$' %
+                              (30 + terminal.Color.YELLOW,), re.DOTALL)
+
+  def __init__(self, *args, **kwargs):
+    """Base class __init__ takes a second argument."""
+    TestCase.__init__(self, *args, **kwargs)
+    self._output_capturer = None
+
+  def OutputCapturer(self, *args, **kwargs):
+    """Create and return OutputCapturer object."""
+    self._output_capturer = cros_build_lib.OutputCapturer(*args, **kwargs)
+    return self._output_capturer
+
+  def _GetOutputCapt(self):
+    """Internal access to existing OutputCapturer.
+
+    Raises RuntimeError if output capturing was never on.
+    """
+    if self._output_capturer:
+      return self._output_capturer
+
+    raise RuntimeError('Output capturing was never turned on for this test.')
+
+  def _GenCheckMsgFunc(self, prefix_re, line_re):
+    """Return boolean func to check a line given |prefix_re| and |line_re|."""
+    def _method(line):
+      if prefix_re:
+        # Prefix regexp will strip off prefix (and suffix) from line.
+        match = prefix_re.search(line)
+
+        if match:
+          line = match.group(1)
+        else:
+          return False
+
+      return line_re.search(line) if line_re else True
+
+    if isinstance(prefix_re, str):
+      prefix_re = re.compile(prefix_re)
+    if isinstance(line_re, str):
+      line_re = re.compile(line_re)
+
+    # Provide a description of what this function looks for in a line.  Error
+    # messages can make use of this.
+    _method.description = None
+    if prefix_re and line_re:
+      _method.description = ('line matching prefix regexp %r then regexp %r' %
+                             (prefix_re.pattern, line_re.pattern))
+    elif prefix_re:
+      _method.description = 'line matching prefix regexp %r' % prefix_re.pattern
+    elif line_re:
+      _method.description = 'line matching regexp %r' % line_re.pattern
+    else:
+      raise RuntimeError('Nonsensical usage of _GenCheckMsgFunc: '
+                         'no prefix_re or line_re')
+
+    return _method
+
+  def _ContainsMsgLine(self, lines, msg_check_func):
+    return any(msg_check_func(ln) for ln in lines)
+
+  def _GenOutputDescription(self, check_stdout, check_stderr):
+    # Some extra logic to make an error message useful.
+    if check_stdout and check_stderr:
+      return 'stdout or stderr'
+    elif check_stdout:
+      return 'stdout'
+    elif check_stderr:
+      return 'stderr'
+
+  def _AssertOutputContainsMsg(self, check_msg_func, invert,
+                               check_stdout, check_stderr):
+    assert check_stdout or check_stderr
+
+    lines = []
+    if check_stdout:
+      lines.extend(self._GetOutputCapt().GetStdoutLines())
+    if check_stderr:
+      lines.extend(self._GetOutputCapt().GetStderrLines())
+
+    result = self._ContainsMsgLine(lines, check_msg_func)
+
+    # Some extra logic to make an error message useful.
+    output_desc = self._GenOutputDescription(check_stdout, check_stderr)
+
+    if invert:
+      msg = ('expected %s to not contain %s,\nbut found it in:\n%s' %
+             (output_desc, check_msg_func.description, lines))
+      self.assertFalse(result, msg=msg)
+    else:
+      msg = ('expected %s to contain %s,\nbut did not find it in:\n%s' %
+             (output_desc, check_msg_func.description, lines))
+      self.assertTrue(result, msg=msg)
+
+  def AssertOutputContainsError(self, regexp=None, invert=False,
+                                check_stdout=True, check_stderr=False):
+    """Assert requested output contains at least one error line.
+
+    If |regexp| is non-null, then the error line must also match it.
+    If |invert| is true, then assert the line is NOT found.
+
+    Raises RuntimeError if output capturing was never on for this test.
+    """
+    check_msg_func = self._GenCheckMsgFunc(self.ERROR_MSG_RE, regexp)
+    return self._AssertOutputContainsMsg(check_msg_func, invert,
+                                         check_stdout, check_stderr)
+
+  def AssertOutputContainsWarning(self, regexp=None, invert=False,
+                                  check_stdout=True, check_stderr=False):
+    """Assert requested output contains at least one warning line.
+
+    If |regexp| is non-null, then the warning line must also match it.
+    If |invert| is true, then assert the line is NOT found.
+
+    Raises RuntimeError if output capturing was never on for this test.
+    """
+    check_msg_func = self._GenCheckMsgFunc(self.WARNING_MSG_RE, regexp)
+    return self._AssertOutputContainsMsg(check_msg_func, invert,
+                                         check_stdout, check_stderr)
+
+  def AssertOutputContainsLine(self, regexp, invert=False,
+                               check_stdout=True, check_stderr=False):
+    """Assert requested output contains line matching |regexp|.
+
+    If |invert| is true, then assert the line is NOT found.
+
+    Raises RuntimeError if output capturing was never on for this test.
+    """
+    # Passing None means no message-class prefix (error/warning) is required;
+    # only |regexp| itself must match.
+    check_msg_func = self._GenCheckMsgFunc(None, regexp)
+    return self._AssertOutputContainsMsg(check_msg_func, invert,
+                                         check_stdout, check_stderr)
+
+  def _AssertOutputEndsInMsg(self, check_msg_func,
+                             check_stdout, check_stderr):
+    """Pass if requested output(s) end with a line matching |check_msg_func|.
+
+    Only the final non-empty line of each requested stream is examined.
+    At least one of |check_stdout|/|check_stderr| must be set.
+    """
+    assert check_stdout or check_stderr
+
+    # Collect only the last non-empty line from each requested stream.
+    lines = []
+    if check_stdout:
+      stdout_lines = self._GetOutputCapt().GetStdoutLines(include_empties=False)
+      if stdout_lines:
+        lines.append(stdout_lines[-1])
+    if check_stderr:
+      stderr_lines = self._GetOutputCapt().GetStderrLines(include_empties=False)
+      if stderr_lines:
+        lines.append(stderr_lines[-1])
+
+    result = self._ContainsMsgLine(lines, check_msg_func)
+
+    # Some extra logic to make an error message useful.
+    output_desc = self._GenOutputDescription(check_stdout, check_stderr)
+
+    msg = ('expected %s to end with %s,\nbut did not find it in:\n%s' %
+           (output_desc, check_msg_func.description, lines))
+    self.assertTrue(result, msg=msg)
+
+  def AssertOutputEndsInError(self, regexp=None,
+                              check_stdout=True, check_stderr=False):
+    """Assert requested output ends in error line.
+
+    An error line is one matching self.ERROR_MSG_RE.
+    If |regexp| is non-null, then the error line must also match it.
+
+    Raises RuntimeError if output capturing was never on for this test.
+    """
+    check_msg_func = self._GenCheckMsgFunc(self.ERROR_MSG_RE, regexp)
+    return self._AssertOutputEndsInMsg(check_msg_func,
+                                       check_stdout, check_stderr)
+
+  def AssertOutputEndsInWarning(self, regexp=None,
+                                check_stdout=True, check_stderr=False):
+    """Assert requested output ends in warning line.
+
+    A warning line is one matching self.WARNING_MSG_RE.
+    If |regexp| is non-null, then the warning line must also match it.
+
+    Raises RuntimeError if output capturing was never on for this test.
+    """
+    check_msg_func = self._GenCheckMsgFunc(self.WARNING_MSG_RE, regexp)
+    return self._AssertOutputEndsInMsg(check_msg_func,
+                                       check_stdout, check_stderr)
+
+  def AssertOutputEndsInLine(self, regexp,
+                             check_stdout=True, check_stderr=False):
+    """Assert requested output ends in line matching |regexp|.
+
+    Raises RuntimeError if output capturing was never on for this test.
+    """
+    # No message-class regexp; only |regexp| must match the final line.
+    check_msg_func = self._GenCheckMsgFunc(None, regexp)
+    return self._AssertOutputEndsInMsg(check_msg_func,
+                                       check_stdout, check_stderr)
+
+  def FuncCatchSystemExit(self, func, *args, **kwargs):
+    """Run |func| with |args| and |kwargs| and catch exceptions.SystemExit.
+
+    Return tuple (return value or None, SystemExit number code or None).
+    Exactly one element of the tuple is non-None (unless func returns None).
+    """
+    try:
+      returnval = func(*args, **kwargs)
+
+      return returnval, None
+    except exceptions.SystemExit as ex:
+      # NOTE(review): SystemExit's first arg may also be None or a string,
+      # not only an int -- callers formatting it with %d assume int.
+      exit_code = ex.args[0]
+      return None, exit_code
+
+  def AssertFuncSystemExitZero(self, func, *args, **kwargs):
+    """Run |func| with |args| and |kwargs| catching exceptions.SystemExit.
+
+    If the func does not raise a SystemExit with exit code 0 then assert.
+    """
+    # Index [1] is the caught exit code (None if no SystemExit was raised).
+    exit_code = self.FuncCatchSystemExit(func, *args, **kwargs)[1]
+    self.assertIsNot(exit_code, None,
+                     msg='Expected system exit code 0, but caught none')
+    self.assertEqual(exit_code, 0,
+                     msg=('Expected system exit code 0, but caught %d' %
+                          exit_code))
+
+  def AssertFuncSystemExitNonZero(self, func, *args, **kwargs):
+    """Run |func| with |args| and |kwargs| catching exceptions.SystemExit.
+
+    If the func does not raise a non-zero SystemExit code then assert.
+    """
+    # Index [1] is the caught exit code (None if no SystemExit was raised).
+    exit_code = self.FuncCatchSystemExit(func, *args, **kwargs)[1]
+    self.assertIsNot(exit_code, None,
+                     msg='Expected non-zero system exit code, but caught none')
+    self.assertNotEqual(exit_code, 0,
+                        msg=('Expected non-zero system exit code, but caught %d'
+                             % exit_code))
+
+  def AssertRaisesAndReturn(self, error, func, *args, **kwargs):
+    """Like assertRaises, but return the exception instance raised.
+
+    Useful for making further assertions about the exception's contents.
+    """
+    try:
+      func(*args, **kwargs)
+      self.fail(msg='Expected %s but got none' % error)
+    except error as ex:
+      return ex
+
+
+class TempDirTestCase(TestCase):
+  """Mixin used to give each test a tempdir that is cleansed upon finish"""
+
+  # Whether to delete tempdir used by this test. cf: SkipCleanup.
+  DELETE = True
+  # Holds the top-level osutils.TempDir when SkipCleanup is in effect.
+  _NO_DELETE_TEMPDIR_OBJ = None
+
+  def __init__(self, *args, **kwargs):
+    TestCase.__init__(self, *args, **kwargs)
+    # Path of this test's temp dir; populated in setUp, cleared in tearDown.
+    self.tempdir = None
+    self._tempdir_obj = None
+
+  @classmethod
+  def SkipCleanup(cls):
+    """Leave behind tempdirs created by instances of this class.
+
+    Calling this function ensures that all future instances will leak their
+    temporary directories. Additionally, all future temporary directories will
+    be created inside one top level temporary directory, so that you can easily
+    blow them away when you're done.
+    Currently, this function is pretty stupid. You should call it *before*
+    creating any instances.
+
+    Returns:
+      Path to a temporary directory that contains all future temporary
+      directories created by instances of this class.
+    """
+    cls.DELETE = False
+    cls._NO_DELETE_TEMPDIR_OBJ = osutils.TempDir(
+        prefix='chromite.test_no_cleanup',
+        set_global=True,
+        delete=cls.DELETE)
+    logging.info('%s requested to SkipCleanup. Will leak %s',
+                 cls.__name__, cls._NO_DELETE_TEMPDIR_OBJ.tempdir)
+    return cls._NO_DELETE_TEMPDIR_OBJ.tempdir
+
+  def setUp(self):
+    # set_global makes this the global temp dir for code that consults it.
+    self._tempdir_obj = osutils.TempDir(prefix='chromite.test', set_global=True,
+                                        delete=self.DELETE)
+    self.tempdir = self._tempdir_obj.tempdir
+
+  def tearDown(self):
+    if self._tempdir_obj is not None:
+      self._tempdir_obj.Cleanup()
+      self._tempdir_obj = None
+      self.tempdir = None
+
+  def assertFileContents(self, file_path, content):
+    """Assert that the file exists and contains exactly |content|."""
+    self.assertExists(file_path)
+    read_content = osutils.ReadFile(file_path)
+    self.assertEqual(read_content, content)
+
+
+class LocalSqlServerTestCase(TempDirTestCase):
+  """A TestCase that launches a local mysqld server in the background.
+
+  - This test must run inside the chroot.
+  - This class provides attributes:
+    - mysqld_host: The IP of the local mysqld server.
+    - mysqld_port: The port of the local mysqld server.
+  """
+
+  # Neither of these are in the PATH for a non-sudo user.
+  MYSQL_INSTALL_DB = '/usr/share/mysql/scripts/mysql_install_db'
+  MYSQLD = '/usr/sbin/mysqld'
+  MYSQLD_SHUTDOWN_TIMEOUT_S = 30
+
+  def __init__(self, *args, **kwargs):
+    TempDirTestCase.__init__(self, *args, **kwargs)
+    self.mysqld_host = None
+    self.mysqld_port = None
+    self._mysqld_dir = None
+    self._mysqld_runner = None
+    # Set when shutdown did not happen cleanly and the background runner
+    # must be torn down with a synthetic error (see tearDown).
+    self._mysqld_needs_cleanup = False
+    # This class has assumptions about the mariadb installation that are only
+    # guaranteed to hold inside the chroot.
+    cros_build_lib.AssertInsideChroot()
+
+  def setUp(self):
+    """Launch mysqld in a clean temp directory."""
+
+    self._mysqld_dir = os.path.join(self.tempdir, 'mysqld_dir')
+    osutils.SafeMakedirs(self._mysqld_dir)
+    mysqld_tmp_dir = os.path.join(self._mysqld_dir, 'tmp')
+    osutils.SafeMakedirs(mysqld_tmp_dir)
+
+    # MYSQL_INSTALL_DB is stupid. It can't parse '--flag value'.
+    # Must give it options in '--flag=value' form.
+    cmd = [
+        self.MYSQL_INSTALL_DB,
+        '--no-defaults',
+        '--basedir=/usr',
+        '--ldata=%s' % self._mysqld_dir,
+    ]
+    cros_build_lib.RunCommand(cmd, quiet=True)
+
+    self.mysqld_host = '127.0.0.1'
+    self.mysqld_port = remote_access.GetUnusedPort()
+    cmd = [
+        self.MYSQLD,
+        '--no-defaults',
+        '--datadir', self._mysqld_dir,
+        '--socket', os.path.join(self._mysqld_dir, 'mysqld.socket'),
+        '--port', str(self.mysqld_port),
+        '--pid-file', os.path.join(self._mysqld_dir, 'mysqld.pid'),
+        '--tmpdir', mysqld_tmp_dir,
+    ]
+    # Run mysqld in the background; the runner context is exited in tearDown.
+    self._mysqld_runner = parallel.BackgroundTaskRunner(
+        cros_build_lib.RunCommand,
+        processes=1,
+        halt_on_error=True)
+    queue = self._mysqld_runner.__enter__()
+    queue.put((cmd,))
+
+    # Ensure that the Sql server is up before continuing.
+    cmd = [
+        'mysqladmin',
+        '-S', os.path.join(self._mysqld_dir, 'mysqld.socket'),
+        'ping',
+    ]
+    try:
+      retry_util.RunCommandWithRetries(cmd=cmd, quiet=True, max_retry=5,
+                                       sleep=1, backoff_factor=1.5)
+    except Exception as e:
+      self._mysqld_needs_cleanup = True
+      logging.warning('Mysql server failed to show up! (%s)', e)
+      raise
+
+  def tearDown(self):
+    """Cleanup mysqld and our mysqld data directory."""
+    # Attempt a graceful shutdown via mysqladmin if the socket exists.
+    mysqld_socket = os.path.join(self._mysqld_dir, 'mysqld.socket')
+    if os.path.exists(mysqld_socket):
+      try:
+        cmd = [
+            'mysqladmin',
+            '-S', os.path.join(self._mysqld_dir, 'mysqld.socket'),
+            '-u', 'root',
+            'shutdown',
+        ]
+        cros_build_lib.RunCommand(cmd, quiet=True)
+      except cros_build_lib.RunCommandError as e:
+        self._mysqld_needs_cleanup = True
+        logging.warning('Could not stop test mysqld daemon (%s)', e)
+
+    # Explicitly stop the mysqld process before removing the working directory.
+    if self._mysqld_runner is not None:
+      if self._mysqld_needs_cleanup:
+        # NOTE(review): 'Artification' below looks like a typo for
+        # 'Artificial'; it is a runtime string, so left unchanged here.
+        self._mysqld_runner.__exit__(
+            cros_build_lib.RunCommandError,
+            'Artification exception to cleanup mysqld',
+            None)
+      else:
+        self._mysqld_runner.__exit__(None, None, None)
+
+
+class MockTestCase(TestCase):
+  """Python-mock based test case; compatible with StackedSetup"""
+
+  def setUp(self):
+    # Patchers started via StartPatcher/PatchObject; stopped in tearDown.
+    self._patchers = []
+
+  def tearDown(self):
+    # We can't just run stopall() by itself, and need to stop our patchers
+    # manually since stopall() doesn't handle repatching.
+    # Stop in reverse order so re-patched attributes unwind correctly.
+    cros_build_lib.SafeRun([p.stop for p in reversed(self._patchers)] +
+                           [mock.patch.stopall])
+
+  def StartPatcher(self, patcher):
+    """Call start() on the patcher, and stop() in tearDown.
+
+    Returns:
+      The mock object created by starting the patcher.
+    """
+    m = patcher.start()
+    self._patchers.append(patcher)
+    return m
+
+  def PatchObject(self, *args, **kwargs):
+    """Create and start a mock.patch.object().
+
+    stop() will be called automatically during tearDown.
+    """
+    return self.StartPatcher(mock.patch.object(*args, **kwargs))
+
+
+# MockTestCase must be before TempDirTestCase in this inheritance order,
+# because MockTestCase.StartPatcher() calls may be for PartialMocks, which
+# create their own temporary directory.  The teardown for those directories
+# occurs during MockTestCase.tearDown(), which needs to be run before
+# TempDirTestCase.tearDown().
+class MockTempDirTestCase(MockTestCase, TempDirTestCase):
+  """Convenience class mixing TempDir and Mock.
+
+  See the ordering note above the class definition before reordering bases.
+  """
+
+
+class GerritTestCase(MockTempDirTestCase):
+  """Test class for tests that interact with a Gerrit server.
+
+  Configured by default to use a specially-configured test Gerrit server at
+  t3st-chr0m3(-review).googlesource.com. The test server configuration may be
+  altered by setting the following environment variables from the parent
+  process:
+    CROS_TEST_GIT_HOST: host name for git operations; defaults to
+                        t3st-chr0me.googlesource.com.
+    CROS_TEST_GERRIT_HOST: host name for Gerrit operations; defaults to
+                           t3st-chr0me-review.googlesource.com.
+    CROS_TEST_COOKIES_PATH: path to a cookies.txt file to use for git/Gerrit
+                            requests; defaults to none.
+    CROS_TEST_COOKIE_NAMES: comma-separated list of cookie names from
+                            CROS_TEST_COOKIES_PATH to set on requests; defaults
+                            to none. The current implementation only sends
+                            cookies matching the exact host name and the empty
+                            path ("/").
+  """
+
+  # pylint: disable=protected-access
+
+  TEST_USERNAME = 'test-username'
+  TEST_EMAIL = 'test-username@test.org'
+
+  # Bundle of per-test-run connection/config info; built in setUp.
+  GerritInstance = collections.namedtuple('GerritInstance', [
+      'cookie_names',
+      'cookies_path',
+      'gerrit_host',
+      'gerrit_url',
+      'git_host',
+      'git_url',
+      'netrc_file',
+      'project_prefix',
+  ])
+
+  def _create_gerrit_instance(self, tmp_dir):
+    """Build a GerritInstance from the environment, rooted at |tmp_dir|."""
+    default_host = 't3st-chr0m3'
+    git_host = os.environ.get('CROS_TEST_GIT_HOST',
+                              '%s.googlesource.com' % default_host)
+    gerrit_host = os.environ.get('CROS_TEST_GERRIT_HOST',
+                                 '%s-review.googlesource.com' % default_host)
+    # Prefix makes concurrently-created test projects unique per host/pid.
+    ip = socket.gethostbyname(socket.gethostname())
+    project_prefix = 'test-%s-%s/' % (
+        datetime.datetime.now().strftime('%Y%m%d%H%M%S'),
+        hashlib.sha1('%s_%s' % (ip, os.getpid())).hexdigest()[:8])
+    cookies_path = os.environ.get('CROS_TEST_COOKIES_PATH')
+    cookie_names_str = os.environ.get('CROS_TEST_COOKIE_NAMES', '')
+    cookie_names = [c for c in cookie_names_str.split(',') if c]
+
+    return self.GerritInstance(
+        cookie_names=cookie_names,
+        cookies_path=cookies_path,
+        gerrit_host=gerrit_host,
+        gerrit_url='https://%s/' % gerrit_host,
+        git_host=git_host,
+        git_url='https://%s/' % git_host,
+        # TODO(dborowitz): Ensure this is populated when using role account.
+        netrc_file=os.path.join(tmp_dir, '.netrc'),
+        project_prefix=project_prefix,)
+
+  def _populate_netrc(self, src):
+    """Sets up a test .netrc file using the given source as a base."""
+    # Heuristic: prefer passwords for @google.com accounts, since test host
+    # permissions tend to refer to those accounts.
+    preferred_account_domains = ['.google.com']
+    needed = [self.gerrit_instance.git_host, self.gerrit_instance.gerrit_host]
+    candidates = collections.defaultdict(list)
+    src_netrc = netrc.netrc(src)
+    for host, v in src_netrc.hosts.iteritems():
+      # Match any netrc entry whose domain suffix covers a needed host.
+      dot = host.find('.')
+      if dot < 0:
+        continue
+      for n in needed:
+        if n.endswith(host[dot:]):
+          login, _, password = v
+          # Rank 0 (preferred) for logins in preferred domains, else 1.
+          i = 1
+          for pd in preferred_account_domains:
+            if login.endswith(pd):
+              i = 0
+              break
+          candidates[n].append((i, login, password))
+
+    with open(self.gerrit_instance.netrc_file, 'w') as out:
+      for n in needed:
+        cs = candidates[n]
+        self.assertGreater(len(cs), 0,
+                           msg='missing password in ~/.netrc for %s' % n)
+        # Sorting puts rank-0 (preferred) candidates first.
+        cs.sort()
+        _, login, password = cs[0]
+        out.write('machine %s login %s password %s\n' % (n, login, password))
+
+  def setUp(self):
+    """Sets up the gerrit instances in a class-specific temp dir."""
+    # NOTE(review): HOME is overridden here but not restored in tearDown;
+    # presumably the base TestCase environment reset handles it -- confirm.
+    old_home = os.environ['HOME']
+    os.environ['HOME'] = self.tempdir
+
+    # Create gerrit instance.
+    gi = self.gerrit_instance = self._create_gerrit_instance(self.tempdir)
+
+    netrc_path = os.path.join(old_home, '.netrc')
+    if os.path.exists(netrc_path):
+      self._populate_netrc(netrc_path)
+      # Set netrc file for http authentication.
+      self.PatchObject(gob_util, 'NETRC', netrc.netrc(gi.netrc_file))
+
+    if gi.cookies_path:
+      cros_build_lib.RunCommand(
+          ['git', 'config', '--global', 'http.cookiefile', gi.cookies_path],
+          quiet=True)
+
+    # Set cookie file for http authentication
+    if gi.cookies_path:
+      jar = cookielib.MozillaCookieJar(gi.cookies_path)
+      jar.load(ignore_expires=True)
+
+      def GetCookies(host, _path):
+        # Only exact-host, root-path cookies named in cookie_names are sent.
+        ret = dict(
+            (c.name, urllib.unquote(c.value)) for c in jar
+            if c.domain == host and c.path == '/' and c.name in gi.cookie_names)
+        return ret
+
+      self.PatchObject(gob_util, 'GetCookies', GetCookies)
+
+    # Make all chromite code point to the test server.
+    self.saved_params = {}
+    self.patched_params = {
+        'EXTERNAL_GOB_HOST': gi.git_host,
+        'EXTERNAL_GERRIT_HOST': gi.gerrit_host,
+        'EXTERNAL_GOB_URL': gi.git_url,
+        'EXTERNAL_GERRIT_URL': gi.gerrit_url,
+        'INTERNAL_GOB_HOST': gi.git_host,
+        'INTERNAL_GERRIT_HOST': gi.gerrit_host,
+        'INTERNAL_GOB_URL': gi.git_url,
+        'INTERNAL_GERRIT_URL': gi.gerrit_url,
+        'AOSP_GOB_HOST': gi.git_host,
+        'AOSP_GERRIT_HOST': gi.gerrit_host,
+        'AOSP_GOB_URL': gi.git_url,
+        'AOSP_GERRIT_URL': gi.gerrit_url,
+
+        'MANIFEST_URL': '%s/%s' % (
+            gi.git_url, site_config.params.MANIFEST_PROJECT
+        ),
+        'MANIFEST_INT_URL': '%s/%s' % (
+            gi.git_url, site_config.params.MANIFEST_INT_PROJECT
+        ),
+        'GIT_REMOTES': {
+            site_config.params.EXTERNAL_REMOTE: gi.gerrit_url,
+            site_config.params.INTERNAL_REMOTE: gi.gerrit_url,
+            site_config.params.CHROMIUM_REMOTE: gi.gerrit_url,
+            site_config.params.CHROME_REMOTE: gi.gerrit_url
+        }
+    }
+
+    # Remember original values so tearDown can restore them.
+    for k in self.patched_params.iterkeys():
+      self.saved_params[k] = site_config.params.get(k)
+
+    site_config._site_params.update(self.patched_params)
+
+  def tearDown(self):
+    # Restore the 'patched' site parameters.
+    site_config._site_params.update(self.saved_params)
+
+  def createProject(self, suffix, description='Test project', owners=None,
+                    submit_type='CHERRY_PICK'):
+    """Create a project on the test gerrit server.
+
+    Returns:
+      The full (prefixed) project name created on the server.
+    """
+    name = self.gerrit_instance.project_prefix + suffix
+    body = {
+        'description': description,
+        'submit_type': submit_type,
+    }
+    if owners is not None:
+      body['owners'] = owners
+    path = 'projects/%s' % urllib.quote(name, '')
+    conn = gob_util.CreateHttpConn(
+        self.gerrit_instance.gerrit_host, path, reqtype='PUT', body=body)
+    response = conn.getresponse()
+    self.assertEquals(201, response.status,
+                      'Expected 201, got %s' % response.status)
+    # Gerrit prepends ")]}'" to JSON responses (XSSI protection); strip it.
+    s = cStringIO.StringIO(response.read())
+    self.assertEquals(")]}'", s.readline().rstrip())
+    jmsg = json.load(s)
+    self.assertEquals(name, jmsg['name'])
+    return name
+
+  def _CloneProject(self, name, path):
+    """Clone a project from the test gerrit server."""
+    root = os.path.dirname(path)
+    osutils.SafeMakedirs(root)
+    url = '%s://%s/%s' % (
+        gob_util.GIT_PROTOCOL, self.gerrit_instance.git_host, name)
+    git.RunGit(root, ['clone', url, path])
+    # Install commit-msg hook.
+    hook_path = os.path.join(path, '.git', 'hooks', 'commit-msg')
+    hook_cmd = ['curl', '-n', '-o', hook_path]
+    if self.gerrit_instance.cookies_path:
+      hook_cmd.extend(['-b', self.gerrit_instance.cookies_path])
+    hook_cmd.append('https://%s/a/tools/hooks/commit-msg'
+                    % self.gerrit_instance.gerrit_host)
+    cros_build_lib.RunCommand(hook_cmd, quiet=True)
+    os.chmod(hook_path, stat.S_IRWXU)
+    # Set git identity to test account
+    cros_build_lib.RunCommand(
+        ['git', 'config', 'user.email', self.TEST_EMAIL], cwd=path, quiet=True)
+    return path
+
+  def cloneProject(self, name, path=None):
+    """Clone a project from the test gerrit server.
+
+    If |path| is omitted it is derived from |name| (minus any '.git' suffix)
+    and placed under self.tempdir.
+    """
+    if path is None:
+      path = os.path.basename(name)
+      if path.endswith('.git'):
+        path = path[:-4]
+    path = os.path.join(self.tempdir, path)
+    return self._CloneProject(name, path)
+
+  @classmethod
+  def _CreateCommit(cls, clone_path, filename=None, msg=None, text=None,
+                    amend=False):
+    """Create a commit in the given git checkout.
+
+    Args:
+      clone_path: The directory on disk of the git clone.
+      filename: The name of the file to write. Optional.
+      msg: The commit message. Optional.
+      text: The text to append to the file. Optional.
+      amend: Whether to amend an existing patch. If set, we will amend the
+        HEAD commit in the checkout and upload that patch.
+
+    Returns:
+      (sha1, changeid) of the new commit.
+    """
+    if not filename:
+      filename = 'test-file.txt'
+    if not msg:
+      msg = 'Test Message'
+    if not text:
+      text = 'Another day, another dollar.'
+    fpath = os.path.join(clone_path, filename)
+    # Append so repeated commits to the same file always produce a diff.
+    osutils.WriteFile(fpath, '%s\n' % text, mode='a')
+    cros_build_lib.RunCommand(['git', 'add', filename], cwd=clone_path,
+                              quiet=True)
+    cmd = ['git', 'commit']
+    cmd += ['--amend', '-C', 'HEAD'] if amend else ['-m', msg]
+    cros_build_lib.RunCommand(cmd, cwd=clone_path, quiet=True)
+    return cls._GetCommit(clone_path)
+
+  def createCommit(self, clone_path, filename=None, msg=None, text=None,
+                   amend=False):
+    """Create a commit in the given git checkout.
+
+    Args:
+      clone_path: The directory on disk of the git clone.
+      filename: The name of the file to write. Optional.
+      msg: The commit message. Optional.
+      text: The text to append to the file. Optional.
+      amend: Whether to amend an existing patch. If set, we will amend the
+        HEAD commit in the checkout and upload that patch.
+
+    Returns:
+      (sha1, changeid) of the new commit.
+    """
+    clone_path = os.path.join(self.tempdir, clone_path)
+    return self._CreateCommit(clone_path, filename, msg, text, amend)
+
+  @staticmethod
+  def _GetCommit(clone_path, ref='HEAD'):
+    """Parse sha1 and Change-Id for |ref| out of 'git log' output."""
+    log_proc = cros_build_lib.RunCommand(
+        ['git', 'log', '-n', '1', ref], cwd=clone_path,
+        print_cmd=False, capture_output=True)
+    sha1 = None
+    change_id = None
+    for line in log_proc.output.splitlines():
+      match = re.match(r'^commit ([0-9a-fA-F]{40})$', line)
+      if match:
+        sha1 = match.group(1)
+        continue
+      match = re.match(r'^\s+Change-Id:\s*(\S+)$', line)
+      if match:
+        change_id = match.group(1)
+        continue
+    return (sha1, change_id)
+
+  def getCommit(self, clone_path, ref='HEAD'):
+    """Get the sha1 and change-id for the head commit in a git checkout."""
+    clone_path = os.path.join(self.tempdir, clone_path)
+    (sha1, change_id) = self._GetCommit(clone_path, ref)
+    self.assertTrue(sha1)
+    self.assertTrue(change_id)
+    return (sha1, change_id)
+
+  @staticmethod
+  def _UploadChange(clone_path, branch='master', remote='origin'):
+    # refs/for/<branch> is Gerrit's magic ref for creating a review.
+    cros_build_lib.RunCommand(
+        ['git', 'push', remote, 'HEAD:refs/for/%s' % branch], cwd=clone_path,
+        quiet=True)
+
+  def uploadChange(self, clone_path, branch='master', remote='origin'):
+    """Create a gerrit CL from the HEAD of a git checkout."""
+    clone_path = os.path.join(self.tempdir, clone_path)
+    self._UploadChange(clone_path, branch, remote)
+
+  @staticmethod
+  def _PushBranch(clone_path, branch='master'):
+    # Direct push to refs/heads/<branch> bypasses Gerrit code review.
+    cros_build_lib.RunCommand(
+        ['git', 'push', 'origin', 'HEAD:refs/heads/%s' % branch],
+        cwd=clone_path, quiet=True)
+
+  def pushBranch(self, clone_path, branch='master'):
+    """Push a branch directly to gerrit, bypassing code review."""
+    clone_path = os.path.join(self.tempdir, clone_path)
+    self._PushBranch(clone_path, branch)
+
+  def createAccount(self, name='Test User', email='test-user@test.org',
+                    password=None, groups=None):
+    """Create a new user account on gerrit."""
+    username = urllib.quote(email.partition('@')[0])
+    path = 'accounts/%s' % username
+    body = {
+        'name': name,
+        'email': email,
+    }
+
+    if password:
+      body['http_password'] = password
+    if groups:
+      # Accept a single group name as well as a list of names.
+      if isinstance(groups, basestring):
+        groups = [groups]
+      body['groups'] = groups
+    conn = gob_util.CreateHttpConn(
+        self.gerrit_instance.gerrit_host, path, reqtype='PUT', body=body)
+    response = conn.getresponse()
+    self.assertEquals(201, response.status)
+    # Gerrit prepends ")]}'" to JSON responses (XSSI protection); strip it.
+    s = cStringIO.StringIO(response.read())
+    self.assertEquals(")]}'", s.readline().rstrip())
+    jmsg = json.load(s)
+    self.assertEquals(email, jmsg['email'])
+
+
+class _RunCommandMock(mox.MockObject):
+  """Custom mock class used to suppress arguments we don't care about"""
+
+  # Keyword args defaulted to IgnoreArg() unless the test specifies them.
+  DEFAULT_IGNORED_ARGS = ('print_cmd',)
+
+  def __call__(self, *args, **kwargs):
+    for arg in self.DEFAULT_IGNORED_ARGS:
+      kwargs.setdefault(arg, mox.IgnoreArg())
+    return mox.MockObject.__call__(self, *args, **kwargs)
+
+
+class _LessAnnoyingMox(mox.Mox):
+  """Mox derivative that slips in our suppressions to mox.
+
+  This is used by default via MoxTestCase; namely, this suppresses
+  certain arguments awareness that we don't care about via switching
+  in (dependent on the namespace requested) overriding MockObject
+  classing.
+
+  Via this, it makes maintenance much simpler- simplest example, if code
+  doesn't explicitly assert that print_cmd must be true/false... then
+  we don't care about what argument is set (it has no effect beyond output).
+  Mox normally *would* care, making it a pita to maintain.  This selectively
+  suppresses that awareness, making it maintainable.
+  """
+
+  # Map every RunCommand-style attribute of cros_build_lib to the custom
+  # mock class that ignores uninteresting arguments.
+  mock_classes = {}.fromkeys(
+      ['chromite.lib.cros_build_lib.%s' % x
+       for x in dir(cros_build_lib) if 'RunCommand' in x],
+      _RunCommandMock)
+
+  @staticmethod
+  def _GetNamespace(obj):
+    """Return the fully-qualified dotted name for |obj|."""
+    return '%s.%s' % (obj.__module__, obj.__name__)
+
+  def CreateMock(self, obj, attrs=None):
+    if attrs is None:
+      attrs = {}
+    # Use our custom mock class if the target is registered, else default.
+    kls = self.mock_classes.get(
+        self._GetNamespace(obj), mox.MockObject)
+    # Copy attrs; I don't trust mox to not be stupid here.
+    new_mock = kls(obj, attrs=attrs)
+    self._mock_objects.append(new_mock)
+    return new_mock
+
+
+class MoxTestCase(TestCase):
+  """Mox based test case; compatible with StackedSetup
+
+  Note: mox is deprecated; please use MockTestCase instead.
+  """
+
+  # Set to True in a subclass to skip the automatic VerifyAll in tearDown.
+  mox_suppress_verify_all = False
+
+  def setUp(self):
+    self.mox = _LessAnnoyingMox()
+    self.stubs = mox.stubout.StubOutForTesting()
+
+  def tearDown(self):
+    try:
+      # NOTE(review): __test_was_run__ is presumably set by the StackedSetup
+      # base-class machinery (not visible here) -- confirm before relying on it.
+      if self.__test_was_run__ and not self.mox_suppress_verify_all:
+        # This means the test code was actually ran.
+        # force a verifyall
+        self.mox.VerifyAll()
+    finally:
+      # hasattr guards: setUp may not have completed if it raised early.
+      if hasattr(self, 'mox'):
+        self.mox.UnsetStubs()
+      if hasattr(self, 'stubs'):
+        self.stubs.UnsetAll()
+        self.stubs.SmartUnsetAll()
+
+
+class MoxTempDirTestCase(MoxTestCase, TempDirTestCase):
+  """Convenience class mixing TempDir and Mox
+
+  Note: mox is deprecated; please use MockTempDirTestCase instead.
+  """
+
+
+class MoxOutputTestCase(OutputTestCase, MoxTestCase):
+  """Convenience class mixing OutputTestCase and MoxTestCase
+
+  Note: mox is deprecated; please use MockOutputTestCase instead.
+  """
+
+
+class MoxTempDirTestOutputCase(OutputTestCase, MoxTempDirTestCase):
+  """Convenience class mixing OutputTestCase and MoxTempDirTestCase
+
+  Note: mox is deprecated; please use MockOutputTestCase instead.
+  """
+
+
+class MockOutputTestCase(MockTestCase, OutputTestCase):
+  """Convenience class mixing Output and Mock.
+
+  Combines mock patcher management with captured-output assertions.
+  """
+
+
+class ProgressBarTestCase(MockOutputTestCase):
+  """Test class to test the progress bar."""
+
+  # pylint: disable=protected-access
+
+  def setUp(self):
+    # Fake a 100x20 terminal and pretend stdout is a tty so the progress
+    # bar code takes its interactive path.
+    self._terminal_size = self.PatchObject(
+        operation.ProgressBarOperation, '_GetTerminalSize',
+        return_value=operation._TerminalSize(100, 20))
+    self.PatchObject(os, 'isatty', return_value=True)
+
+  def SetMockTerminalSize(self, width, height):
+    """Set mock terminal's size."""
+    self._terminal_size.return_value = operation._TerminalSize(width, height)
+
+  def AssertProgressBarAllEvents(self, num_events):
+    """Check that the progress bar is correct for all events."""
+    for i in xrange(num_events + 1):
+      # Python 2 integer division yields whole-number percentages.
+      self.AssertOutputContainsLine('%d%%' % (i * 100 / num_events))
+
+
+class MockLoggingTestCase(MockTestCase, LoggingTestCase):
+  """Convenience class mixing Logging and Mock.
+
+  Combines mock patcher management with captured-log assertions.
+  """
+
+
+class WorkspaceTestCase(MockTempDirTestCase):
+  """Test case that adds utilities for using workspaces."""
+
+  def setUp(self):
+    """Define variables populated below, mostly to make lint happy."""
+    self.bootstrap_path = None
+    self.mock_bootstrap_path = None
+
+    self.workspace_path = None
+    self.workspace_config = None
+    self.mock_workspace_path = None
+
+  def CreateBootstrap(self, sdk_version=None):
+    """Create a fake bootstrap directory in self.tempdir.
+
+    self.bootstrap_path points to new workspace path.
+    self.mock_bootstrap_path points to mock of FindBootstrapPath
+
+    Args:
+      sdk_version: Create a fake SDK version that's present in bootstrap.
+    """
+    # Create a bootstrap, inside our tempdir.
+    self.bootstrap_path = os.path.join(self.tempdir, 'bootstrap')
+    osutils.SafeMakedirs(os.path.join(self.bootstrap_path, '.git'))
+
+    # If a version is provided, fake it's existence in the bootstrap.
+    if sdk_version is not None:
+      sdk_path = bootstrap_lib.ComputeSdkPath(self.bootstrap_path, sdk_version)
+      osutils.SafeMakedirs(os.path.join(sdk_path, '.repo'))
+      osutils.SafeMakedirs(os.path.join(sdk_path, 'chromite', '.git'))
+
+    # Fake out bootstrap lookups to find this path.
+    self.mock_bootstrap_path = self.PatchObject(
+        bootstrap_lib, 'FindBootstrapPath', return_value=self.bootstrap_path)
+
+  def CreateWorkspace(self, sdk_version=None):
+    """Create a fake workspace directory in self.tempdir.
+
+    self.workspace_path points to new workspace path.
+    self.workspace_config points to workspace config file.
+    self.mock_workspace_path points to mock of WorkspacePath
+
+    Args:
+      sdk_version: Mark SDK version as active in workspace. Does NOT mean
+         it's present in bootstrap.
+    """
+    # Create a workspace, inside our tempdir.
+    self.workspace_path = os.path.join(self.tempdir, 'workspace')
+    self.workspace_config = os.path.join(
+        self.workspace_path,
+        workspace_lib.WORKSPACE_CONFIG)
+    osutils.Touch(self.workspace_config, makedirs=True)
+
+    # Define an SDK version for it, if needed.
+    if sdk_version is not None:
+      workspace_lib.SetActiveSdkVersion(self.workspace_path, sdk_version)
+
+    # Fake out workspace lookups to find this path.
+    self.mock_workspace_path = self.PatchObject(
+        workspace_lib, 'WorkspacePath', return_value=self.workspace_path)
+
+  def CreateBrick(self, name='thebrickfoo', main_package='category/bar',
+                  dependencies=None):
+    """Creates a new brick.
+
+    Args:
+      name: Brick name/path relative to the workspace root.
+      main_package: Main package to assign.
+      dependencies: List of bricks to depend on.
+
+    Returns:
+      The created Brick object.
+    """
+    brick_path = os.path.join(self.workspace_path, name)
+    config = {'name': name, 'main_package': main_package}
+    if dependencies:
+      config['dependencies'] = dependencies
+
+    return brick_lib.Brick(brick_path, initial_config=config)
+
+  def CreateBlueprint(self, name='theblueprintfoo.json', bsp=None, bricks=None,
+                      buildTargetId=None):
+    """Creates a new blueprint.
+
+    Requires that CreateWorkspace() has been called first, so that
+    self.workspace_path is defined.
+
+    Args:
+      name: Blueprint name/path relative to the workspace root.
+      bsp: Path to BSP or None.
+      bricks: List of paths to bricks or None.
+      buildTargetId: The BuildTargetID to populate the APP_ID with or None.
+
+    Returns:
+      The created Blueprint object.
+    """
+    blueprint_path = os.path.join(self.workspace_path, name)
+
+    # Only populate the fields that were explicitly requested; omitted fields
+    # are left out of the config entirely rather than set to None.
+    config = {}
+    if bricks:
+      config[blueprint_lib.BRICKS_FIELD] = bricks
+    if bsp:
+      config[blueprint_lib.BSP_FIELD] = bsp
+    if buildTargetId:
+      config[blueprint_lib.APP_ID_FIELD] = buildTargetId
+
+    return blueprint_lib.Blueprint(blueprint_path, initial_config=config)
+
+  def AssertBlueprintExists(self, name, bsp=None, bricks=None):
+    """Verifies a blueprint exists with the specified contents.
+
+    Args:
+      name: Blueprint name/path relative to the workspace root.
+      bsp: Expected blueprint BSP or None.
+      bricks: Expected blueprint bricks or None.
+    """
+    blueprint_path = os.path.join(self.workspace_path, name)
+    # NOTE(review): loading the blueprint presumably raises if it does not
+    # exist, which is what makes this an existence assertion -- confirm
+    # against blueprint_lib.Blueprint behavior.
+    blueprint = blueprint_lib.Blueprint(blueprint_path)
+
+    # Only the fields the caller asked about are compared.
+    if bsp is not None:
+      self.assertEqual(bsp, blueprint.GetBSP())
+    if bricks is not None:
+      self.assertListEqual(bricks, blueprint.GetBricks())
+
+
+@contextlib.contextmanager
+def SetTimeZone(tz):
+  """Temporarily set the timezone to the specified value.
+
+  This is needed because cros_test_lib.TestCase doesn't call time.tzset()
+  after resetting the environment.
+
+  Args:
+    tz: Timezone name suitable for the TZ environment variable (e.g. 'UTC').
+  """
+  old_environ = os.environ.copy()
+  try:
+    os.environ['TZ'] = tz
+    time.tzset()
+    yield
+  finally:
+    # Restore the full environment (not just TZ), then re-apply it to the C
+    # library's notion of the local timezone via tzset().
+    osutils.SetEnvironment(old_environ)
+    time.tzset()
+
+
+class ListTestSuite(unittest.BaseTestSuite):
+  """Stub test suite to list all possible tests.
+
+  Instead of executing tests, run() walks the suite and prints each test id
+  alongside its short description.
+  """
+
+  # We hack in |top| for local recursive usage.
+  # pylint: disable=arguments-differ
+  def run(self, result, _debug=False, top=True):
+    """List all the tests this suite would have run."""
+    # Recursively build a list of all the tests and the descriptions.
+    # We do this so we can align the output when printing.
+    tests = []
+    # Walk all the tests that this suite itself holds.
+    for test in self:
+      if isinstance(test, type(self)):
+        # Nested suite: recurse with top=False so it returns the raw list
+        # instead of printing.
+        tests += test(result, top=False)
+      else:
+        desc = test.shortDescription()
+        if desc is None:
+          desc = ''
+        tests.append((test.id(), desc))
+
+    if top:
+      if tests:
+        # Now that we have all the tests, print them in lined up columns.
+        maxlen = max(len(x[0]) for x in tests)
+        for test, desc in tests:
+          print('%-*s  %s' % (maxlen, test, desc))
+      return result
+    else:
+      return tests
+
+
+class ListTestLoader(unittest.TestLoader):
+  """Stub test loader to list all possible tests.
+
+  Loads tests into ListTestSuite so running them prints rather than executes.
+  """
+
+  suiteClass = ListTestSuite
+
+
+class ListTestRunner(object):
+  """Stub test runner to list all possible tests"""
+
+  def run(self, test):
+    # Invoking the (ListTestSuite-based) test prints the listing; we just
+    # hand it a fresh result object to satisfy the runner interface.
+    result = unittest.TestResult()
+    test(result)
+    return result
+
+
+class TraceTestRunner(unittest.TextTestRunner):
+  """Test runner that traces the test code as it runs
+
+  We insert tracing at the test runner level rather than test suite or test
+  case because both of those can execute code we've written (e.g. setUpClass
+  and setUp), and we want to trace that code too.
+  """
+
+  # Keyword arguments for trace.Trace(); populated by TestProgram.parseArgs
+  # based on the --trace related command line options.
+  TRACE_KWARGS = {}
+
+  def run(self, test):
+    # Import lazily so the trace module is only loaded when actually tracing.
+    import trace
+    tracer = trace.Trace(**self.TRACE_KWARGS)
+    return tracer.runfunc(unittest.TextTestRunner.run, self, test)
+
+
+class ProfileTestRunner(unittest.TextTestRunner):
+  """Test runner that profiles the test code as it runs
+
+  We insert profiling at the test runner level rather than test suite or test
+  case because both of those can execute code we've written (e.g. setUpClass
+  and setUp), and we want to profile that code too.  It might be unexpectedly
+  heavy by invoking expensive setup logic.
+  """
+
+  # Keyword arguments for cProfile.Profile() and the sort keys for the stats
+  # report; populated by TestProgram.parseArgs from command line options.
+  PROFILE_KWARGS = {}
+  SORT_STATS_KEYS = ()
+
+  def run(self, test):
+    # Import lazily so profiling modules are only loaded when profiling.
+    import cProfile
+    profiler = cProfile.Profile(**self.PROFILE_KWARGS)
+    ret = profiler.runcall(unittest.TextTestRunner.run, self, test)
+
+    # Dump the profile report to stderr so it doesn't mix with test output.
+    import pstats
+    stats = pstats.Stats(profiler, stream=sys.stderr)
+    stats.strip_dirs().sort_stats(*self.SORT_STATS_KEYS).print_stats()
+    return ret
+
+
+class TestProgram(unittest.TestProgram):
+  """Helper wrapper around unittest.TestProgram
+
+  Any passed in kwargs are passed directly down to unittest.main; via this, you
+  can inject custom argv for example (to limit what tests run).
+  """
+
+  def __init__(self, **kwargs):
+    # 'level' is a chromite extension: the default logging level for the run.
+    self.default_log_level = kwargs.pop('level', 'critical')
+    # Set by parseArgs when --no-wipe is used; holds the preserved tempdir.
+    self._leaked_tempdir = None
+
+    try:
+      super(TestProgram, self).__init__(**kwargs)
+    finally:
+      # Remind the user about skipped network tests even when the run fails.
+      if GlobalTestConfig.NETWORK_TESTS_SKIPPED:
+        print('Note: %i network test(s) skipped; use --network to run them.' %
+              GlobalTestConfig.NETWORK_TESTS_SKIPPED)
+
+  def parseArgs(self, argv):
+    """Parse the command line for the test"""
+    description = """Examples:
+  %(prog)s                            - run default set of tests
+  %(prog)s MyTestSuite                - run suite MyTestSuite
+  %(prog)s MyTestCase.testSomething   - run MyTestCase.testSomething
+  %(prog)s MyTestCase                 - run all MyTestCase.test* methods
+"""
+    parser = commandline.ArgumentParser(
+        description=description, default_log_level=self.default_log_level)
+
+    # These are options the standard unittest.TestProgram supports.
+    parser.add_argument('-v', '--verbose', default=False, action='store_true',
+                        help='Verbose output')
+    parser.add_argument('-q', '--quiet', default=False, action='store_true',
+                        help='Minimal output')
+    parser.add_argument('-f', '--failfast', default=False, action='store_true',
+                        help='Stop on first failure')
+    parser.add_argument('tests', nargs='*',
+                        help='specific test classes or methods to run')
+
+    # These are custom options we added.
+    parser.add_argument('-l', '--list', default=False, action='store_true',
+                        help='List all the available tests')
+    parser.add_argument('--network', default=False, action='store_true',
+                        help='Run tests that depend on good network '
+                             'connectivity')
+    parser.add_argument('--no-wipe', default=True, action='store_false',
+                        dest='wipe',
+                        help='Do not wipe the temporary working directory '
+                             '(default is to always wipe)')
+
+    # Note: The tracer module includes coverage options ...
+    group = parser.add_argument_group('Tracing options')
+    group.add_argument('--trace', default=False, action='store_true',
+                       help='Trace test execution')
+    group.add_argument('--ignore-module', default='',
+                       help='Ignore the specified modules (comma delimited)')
+    group.add_argument('--ignore-dir', default='',
+                       help='Ignore modules/packages in the specified dirs '
+                            '(comma delimited)')
+    group.add_argument('--no-ignore-system', default=True, action='store_false',
+                       dest='ignore_system',
+                       help='Do not ignore sys paths automatically')
+
+    group = parser.add_argument_group('Profiling options')
+    group.add_argument('--profile', default=False, action='store_true',
+                       help='Profile test execution')
+    group.add_argument('--profile-sort-keys', default='time',
+                       help='Keys to sort stats by (comma delimited)')
+    group.add_argument('--no-profile-builtins', default=True,
+                       action='store_false', dest='profile_builtins',
+                       help='Do not profile builtin functions')
+
+    opts = parser.parse_args(argv[1:])
+    opts.Freeze()
+
+    # Process the common options first.  These mirror what the stock
+    # unittest.TestProgram option parsing would have set.
+    if opts.verbose:
+      self.verbosity = 2
+
+    if opts.quiet:
+      self.verbosity = 0
+
+    if opts.failfast:
+      self.failfast = True
+
+    # Then handle the chromite extensions.
+    if opts.network:
+      GlobalTestConfig.RUN_NETWORK_TESTS = True
+
+    # We allow --list because it's nice to be able to throw --list onto an
+    # existing command line to quickly get the output.  It's clear to users
+    # that it does nothing else.
+    if sum((opts.trace, opts.profile)) > 1:
+      parser.error('--trace/--profile are exclusive')
+
+    if opts.list:
+      self.testRunner = ListTestRunner
+      self.testLoader = ListTestLoader()
+    elif opts.trace:
+      self.testRunner = TraceTestRunner
+
+      # Create the automatic ignore list based on sys.path.  We need to filter
+      # out chromite paths though as we might have automatic local paths in it.
+      auto_ignore = set()
+      if opts.ignore_system:
+        auto_ignore.add(os.path.join(constants.CHROMITE_DIR, 'third_party'))
+        for path in sys.path:
+          path = os.path.realpath(path)
+          if path.startswith(constants.CHROMITE_DIR):
+            continue
+          auto_ignore.add(path)
+
+      TraceTestRunner.TRACE_KWARGS = {
+          # Disable counting as it only applies to coverage collection.
+          'count': False,
+          # Enable tracing support since that's what we want w/--trace.
+          'trace': True,
+          # Enable relative timestamps before each traced line.
+          'timing': True,
+          'ignoremods': opts.ignore_module.split(','),
+          'ignoredirs': set(opts.ignore_dir.split(',')) | auto_ignore,
+      }
+    elif opts.profile:
+      self.testRunner = ProfileTestRunner
+
+      ProfileTestRunner.PROFILE_KWARGS = {
+          'subcalls': True,
+          'builtins': opts.profile_builtins,
+      }
+
+      ProfileTestRunner.SORT_STATS_KEYS = opts.profile_sort_keys.split(',')
+
+    # Figure out which tests the user/unittest wants to run.
+    if len(opts.tests) == 0 and self.defaultTest is None:
+      self.testNames = None
+    elif len(opts.tests) > 0:
+      self.testNames = opts.tests
+    else:
+      self.testNames = (self.defaultTest,)
+
+    if not opts.wipe:
+      # Instruct the TempDirTestCase to skip cleanup before actually creating
+      # any tempdirs.
+      self._leaked_tempdir = TempDirTestCase.SkipCleanup()
+
+    self.createTests()
+
+  def runTests(self):
+    try:
+      super(TestProgram, self).runTests()
+    finally:
+      # Tell the user where the preserved working directory lives (--no-wipe).
+      if self._leaked_tempdir is not None:
+        logging.info('Working directory %s left behind. Please cleanup later.',
+                     self._leaked_tempdir)
+
+
+class main(TestProgram):
+  """Chromite's version of unittest.main.  Invoke this, not unittest.main.
+
+  Lower-case class name so call sites read like unittest.main(); instantiating
+  it parses arguments and runs the tests (standard TestProgram behavior).
+  """
diff --git a/lib/cros_test_lib_unittest b/lib/cros_test_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/cros_test_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/cros_test_lib_unittest.py b/lib/cros_test_lib_unittest.py
new file mode 100644
index 0000000..68aad6c
--- /dev/null
+++ b/lib/cros_test_lib_unittest.py
@@ -0,0 +1,360 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittest for cros_test_lib (tests for tests? Who'd a thunk it)."""
+
+from __future__ import print_function
+
+import mock
+import os
+import sys
+import time
+import unittest
+
+from chromite.lib import bootstrap_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import osutils
+from chromite.lib import partial_mock
+from chromite.lib import timeout_util
+from chromite.lib import workspace_lib
+
+
+# pylint: disable=W0212,W0233
+
+# Convenience alias
+Dir = cros_test_lib.Directory
+
+
+class CrosTestCaseTest(cros_test_lib.TestCase):
+  """Test the cros_test_lib.TestCase."""
+
+  def testAssertStartsWith(self):
+    """Check assertStartsWith() passes on a real prefix, fails otherwise."""
+    s = "abcdef"
+    prefix = "abc"
+    self.assertStartsWith(s, prefix)
+    # 'def' is a suffix, not a prefix, so the assertion must fail.
+    prefix = "def"
+    self.assertRaises(AssertionError, self.assertStartsWith, s, prefix)
+
+  def testAssertEndsWith(self):
+    """Check assertEndsWith() passes on a real suffix, fails otherwise."""
+    s = "abcdef"
+    # 'abc' is a prefix, not a suffix, so the assertion must fail.
+    suffix = "abc"
+    self.assertRaises(AssertionError, self.assertEndsWith, s, suffix)
+    suffix = "def"
+    self.assertEndsWith(s, suffix)
+
+
+class TruthTableTest(cros_test_lib.TestCase):
+  """Test TruthTable functionality."""
+
+  def _TestTableSanity(self, tt, lines):
+    """Run the given truth table through basic sanity checks.
+
+    Args:
+      tt: A TruthTable object.
+      lines: The expected input lines, in order (list of tuples).
+    """
+    # Check that more than one iterable can be used at once.
+    iter1 = iter(tt)
+    iter2 = iter(tt)
+    self.assertEquals(lines[0], iter1.next())
+    self.assertEquals(lines[0], iter2.next())
+    self.assertEquals(lines[1], iter2.next())
+
+    # Check that iteration again works again.
+    for ix, line in enumerate(tt):
+      self.assertEquals(lines[ix], line)
+
+    # Check direct access of input lines.
+    for i in xrange(len(tt)):
+      self.assertEquals(lines[i], tt.GetInputs(i))
+
+    # Check assertions on bad input to GetInputs.
+    self.assertRaises(ValueError, tt.GetInputs, -1)
+    self.assertRaises(ValueError, tt.GetInputs, len(tt))
+
+  def testTwoDimensions(self):
+    """Test TruthTable behavior for two boolean inputs."""
+    tt = cros_test_lib.TruthTable(inputs=[(True, True), (True, False)])
+    # Two boolean inputs -> 2^2 possible input lines.
+    self.assertEquals(len(tt), pow(2, 2))
+
+    # Check truth table output.
+    self.assertFalse(tt.GetOutput((False, False)))
+    self.assertFalse(tt.GetOutput((False, True)))
+    self.assertTrue(tt.GetOutput((True, False)))
+    self.assertTrue(tt.GetOutput((True, True)))
+
+    # Check assertions on bad input to GetOutput.
+    self.assertRaises(TypeError, tt.GetOutput, True)
+    self.assertRaises(ValueError, tt.GetOutput, (True, True, True))
+
+    # Check iteration over input lines.
+    lines = list(tt)
+    self.assertEquals((False, False), lines[0])
+    self.assertEquals((False, True), lines[1])
+    self.assertEquals((True, False), lines[2])
+    self.assertEquals((True, True), lines[3])
+
+    self._TestTableSanity(tt, lines)
+
+  def testFourDimensions(self):
+    """Test TruthTable behavior for four boolean inputs."""
+    false1 = (True, True, True, False)
+    false2 = (True, False, True, False)
+    true1 = (False, True, False, True)
+    true2 = (True, True, False, False)
+    # Here |inputs| lists the lines mapping to False (input_result=False);
+    # every other combination maps to True.
+    tt = cros_test_lib.TruthTable(inputs=(false1, false2), input_result=False)
+    self.assertEquals(len(tt), pow(2, 4))
+
+    # Check truth table output.
+    self.assertFalse(tt.GetOutput(false1))
+    self.assertFalse(tt.GetOutput(false2))
+    self.assertTrue(tt.GetOutput(true1))
+    self.assertTrue(tt.GetOutput(true2))
+
+    # Check assertions on bad input to GetOutput.
+    self.assertRaises(TypeError, tt.GetOutput, True)
+    self.assertRaises(ValueError, tt.GetOutput, (True, True, True))
+
+    # Check iteration over input lines.
+    lines = list(tt)
+    self.assertEquals((False, False, False, False), lines[0])
+    self.assertEquals((False, False, False, True), lines[1])
+    self.assertEquals((False, True, True, True), lines[7])
+    self.assertEquals((True, True, True, True), lines[15])
+
+    self._TestTableSanity(tt, lines)
+
+
+class VerifyTarballTest(cros_test_lib.MockTempDirTestCase):
+  """Test tarball verification functionality."""
+
+  # Dummy tarball name; the 'tar -tf' call is mocked so no file is needed.
+  TARBALL = 'fake_tarball'
+
+  def setUp(self):
+    self.rc_mock = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
+
+  def _MockTarList(self, files):
+    """Mock out tarball content list call.
+
+    Args:
+      files: A list of contents to return.
+    """
+    self.rc_mock.AddCmdResult(
+        partial_mock.ListRegex('tar -tf'), output='\n'.join(files))
+
+  def testNormPath(self):
+    """Test path normalization."""
+    # The './' and './foo/./a' forms should normalize to match dir_struct.
+    tar_contents = ['./', './foo/', './foo/./a', './foo/./b']
+    dir_struct = [Dir('.', []), Dir('foo', ['a', 'b'])]
+    self._MockTarList(tar_contents)
+    cros_test_lib.VerifyTarball(self.TARBALL, dir_struct)
+
+  def testDuplicate(self):
+    """Test duplicate detection."""
+    # 'a' appears twice in the tarball listing; verification must fail.
+    tar_contents = ['a', 'b', 'a']
+    dir_struct = ['a', 'b']
+    self._MockTarList(tar_contents)
+    self.assertRaises(AssertionError, cros_test_lib.VerifyTarball, self.TARBALL,
+                      dir_struct)
+
+
+class MockTestCaseTest(cros_test_lib.TestCase):
+  """Tests MockTestCase functionality."""
+
+  class MyMockTestCase(cros_test_lib.MockTestCase):
+    """Helper class for testing MockTestCase."""
+    def testIt(self):
+      pass
+
+  class Mockable(object):
+    """Helper test class intended for having values mocked out."""
+    TO_BE_MOCKED = 0
+    TO_BE_MOCKED2 = 10
+    TO_BE_MOCKED3 = 20
+
+  def GetPatcher(self, attr, val):
+    # Patch a Mockable class attribute by its fully qualified name.
+    return mock.patch('%s.MockTestCaseTest.Mockable.%s' % (__name__, attr),
+                      new=val)
+
+  def testPatchRemovalError(self):
+    """Verify that patch removal during tearDown is robust to Exceptions."""
+    tc = self.MyMockTestCase('testIt')
+    patcher = self.GetPatcher('TO_BE_MOCKED', -100)
+    patcher2 = self.GetPatcher('TO_BE_MOCKED2', -200)
+    patcher3 = self.GetPatcher('TO_BE_MOCKED3', -300)
+    # patcher3 is started outside the test case, so only stopall() catches it.
+    patcher3.start()
+    tc.setUp()
+    tc.StartPatcher(patcher)
+    tc.StartPatcher(patcher2)
+    # Stopping 'patcher' here makes tearDown's own stop of it raise.
+    patcher.stop()
+    self.assertEquals(self.Mockable.TO_BE_MOCKED2, -200)
+    self.assertEquals(self.Mockable.TO_BE_MOCKED3, -300)
+    self.assertRaises(RuntimeError, tc.tearDown)
+    # Make sure that even though exception is raised for stopping 'patcher', we
+    # continue to stop 'patcher2', and run patcher.stopall().
+    self.assertEquals(self.Mockable.TO_BE_MOCKED2, 10)
+    self.assertEquals(self.Mockable.TO_BE_MOCKED3, 20)
+
+
+class TestCaseTest(unittest.TestCase):
+  """Tests TestCase functionality."""
+
+  def testTimeout(self):
+    """Test that test cases are interrupted when they are hanging."""
+
+    class TimeoutTestCase(cros_test_lib.TestCase):
+      """Test case that raises a TimeoutError because it takes too long."""
+
+      # Time limit (seconds) enforced by cros_test_lib.TestCase.
+      TEST_CASE_TIMEOUT = 1
+
+      def testSleeping(self):
+        """Sleep for 2 minutes. This should raise a TimeoutError."""
+        time.sleep(2 * 60)
+        raise AssertionError('Test case should have timed out.')
+
+    # Run the test case, verifying it raises a TimeoutError.
+    test = TimeoutTestCase(methodName='testSleeping')
+    self.assertRaises(timeout_util.TimeoutError, test.testSleeping)
+
+
+class OutputTestCaseTest(cros_test_lib.OutputTestCase,
+                         cros_test_lib.TempDirTestCase):
+  """Tests OutputTestCase functionality."""
+
+  def testStdoutAndStderr(self):
+    """Check capturing stdout and stderr."""
+    with self.OutputCapturer():
+      print('foo')
+      print('bar', file=sys.stderr)
+    self.AssertOutputContainsLine('foo')
+    self.AssertOutputContainsLine('bar', check_stdout=False, check_stderr=True)
+
+  def testStdoutReadDuringCapture(self):
+    """Check reading stdout mid-capture."""
+    with self.OutputCapturer():
+      print('foo')
+      self.AssertOutputContainsLine('foo')
+      print('bar')
+      self.AssertOutputContainsLine('bar')
+    # Captured output remains available after the capture ends.
+    self.AssertOutputContainsLine('foo')
+    self.AssertOutputContainsLine('bar')
+
+  def testClearCaptured(self):
+    """Check writing data, clearing it, then writing more data."""
+    with self.OutputCapturer() as cap:
+      print('foo')
+      self.AssertOutputContainsLine('foo')
+      cap.ClearCaptured()
+      # After clearing, 'foo' must no longer be found (invert=True).
+      self.AssertOutputContainsLine('foo', invert=True)
+      print('bar')
+    self.AssertOutputContainsLine('bar')
+
+  def testRunCommandCapture(self):
+    """Check capturing RunCommand() subprocess output."""
+    with self.OutputCapturer():
+      # mute_output=False lets the subprocess output reach our capturer.
+      cros_build_lib.RunCommand(['sh', '-c', 'echo foo; echo bar >&2'],
+                                mute_output=False)
+    self.AssertOutputContainsLine('foo')
+    self.AssertOutputContainsLine('bar', check_stdout=False, check_stderr=True)
+
+  def testCapturingStdoutAndStderrToFile(self):
+    """Check that OutputCapturer captures to a named file."""
+    stdout_path = os.path.join(self.tempdir, 'stdout')
+    stderr_path = os.path.join(self.tempdir, 'stderr')
+    with self.OutputCapturer(stdout_path=stdout_path, stderr_path=stderr_path):
+      print('foo')
+      print('bar', file=sys.stderr)
+
+    # Check that output can be read by OutputCapturer.
+    self.AssertOutputContainsLine('foo')
+    self.AssertOutputContainsLine('bar', check_stdout=False, check_stderr=True)
+    # Verify that output is actually written to the correct files.
+    self.assertEqual('foo\n', osutils.ReadFile(stdout_path))
+    self.assertEqual('bar\n', osutils.ReadFile(stderr_path))
+
+
+class WorkspaceTestCaseTest(cros_test_lib.WorkspaceTestCase):
+  """Verification for WorkspaceTestCase."""
+
+  def testCreateWorkspace(self):
+    """Tests CreateWorkspace()."""
+    self.CreateWorkspace()
+    self.assertExists(self.workspace_path)
+    # The WorkspacePath mock should resolve to the created workspace.
+    self.assertEqual(self.workspace_path, workspace_lib.WorkspacePath())
+
+  def testCreateWorkspaceSdk(self):
+    """Tests CreateWorkspace() with an SDK version."""
+    self.CreateWorkspace(sdk_version='1.2.3')
+    self.assertEqual('1.2.3',
+                     workspace_lib.GetActiveSdkVersion(self.workspace_path))
+
+  def testCreateBootstrap(self):
+    """Tests CreateBootstrap()."""
+    self.CreateBootstrap()
+    self.assertExists(self.bootstrap_path)
+    self.assertEqual(self.bootstrap_path, bootstrap_lib.FindBootstrapPath())
+
+  def testCreateBootstrapSdk(self):
+    """Tests CreateBootstrap() with an SDK version."""
+    self.CreateBootstrap(sdk_version='1.2.3')
+    self.assertExists(
+        bootstrap_lib.ComputeSdkPath(self.bootstrap_path, '1.2.3'))
+
+  def testCreateBrick(self):
+    """Tests CreateBrick()."""
+    self.CreateWorkspace()
+
+    # Create 'bar' first so 'foo' can declare a dependency on it.
+    self.CreateBrick(name='bar')
+    brick = self.CreateBrick(name='foo', main_package='category/bar',
+                             dependencies=['//bar'])
+    self.assertEqual(os.path.join(self.workspace_path, 'foo'), brick.brick_dir)
+    self.assertEqual('foo', brick.FriendlyName())
+    self.assertEqual(['category/bar'], brick.MainPackages())
+    self.assertEqual(['//bar'], [b.brick_locator for b in brick.Dependencies()])
+
+  def testCreateBlueprint(self):
+    """Tests CreateBlueprint()."""
+    brick_path = '//foo_brick'
+    bsp_path = '//foo_bsp'
+    blueprint_path = 'foo.json'
+
+    self.CreateWorkspace()
+    self.CreateBrick(brick_path)
+    self.CreateBrick(bsp_path)
+
+    blueprint = self.CreateBlueprint(name=blueprint_path, bsp=bsp_path,
+                                     bricks=[brick_path])
+    self.assertExists(os.path.join(self.workspace_path, blueprint_path))
+    self.assertEqual(bsp_path, blueprint.GetBSP())
+    self.assertEqual([brick_path], blueprint.GetBricks())
+
+  def testAssertBlueprintExists(self):
+    """Tests AssertBlueprintExists()."""
+    brick_path = '//foo_brick'
+    bsp_path = '//foo_bsp'
+    blueprint_path = 'foo.json'
+
+    self.CreateWorkspace()
+    self.CreateBrick(brick_path)
+    self.CreateBrick(bsp_path)
+    self.CreateBlueprint(name=blueprint_path, bsp=bsp_path, bricks=[brick_path])
+
+    # Test success conditions.
+    self.AssertBlueprintExists(blueprint_path)
+    self.AssertBlueprintExists(blueprint_path, bsp=bsp_path)
+    self.AssertBlueprintExists(blueprint_path, bricks=[brick_path])
+    self.AssertBlueprintExists(blueprint_path, bsp=bsp_path,
+                               bricks=[brick_path])
+
+    # Test failure conditions.
+    def TestFailure(blueprint_path, bsp=None, bricks=None):
+      with self.assertRaises(Exception):
+        self.AssertBlueprintExists(blueprint_path, bsp=bsp, bricks=bricks)
+
+    TestFailure('//no/blueprint')
+    TestFailure(blueprint_path, bsp='//no/bsp')
+    TestFailure(blueprint_path, bricks=['//no/brick'])
+    TestFailure(blueprint_path, bricks=[brick_path, '//no/brick'])
diff --git a/lib/datafiles/arch.aarch64.elf b/lib/datafiles/arch.aarch64.elf
new file mode 100644
index 0000000..45330fe
--- /dev/null
+++ b/lib/datafiles/arch.aarch64.elf
Binary files differ
diff --git a/lib/datafiles/arch.alpha.elf b/lib/datafiles/arch.alpha.elf
new file mode 100644
index 0000000..8e88de0
--- /dev/null
+++ b/lib/datafiles/arch.alpha.elf
Binary files differ
diff --git a/lib/datafiles/arch.arm.eabi.elf b/lib/datafiles/arch.arm.eabi.elf
new file mode 100644
index 0000000..be15999
--- /dev/null
+++ b/lib/datafiles/arch.arm.eabi.elf
Binary files differ
diff --git a/lib/datafiles/arch.hppa.elf b/lib/datafiles/arch.hppa.elf
new file mode 100644
index 0000000..31debbb
--- /dev/null
+++ b/lib/datafiles/arch.hppa.elf
Binary files differ
diff --git a/lib/datafiles/arch.i386.elf b/lib/datafiles/arch.i386.elf
new file mode 100644
index 0000000..78fb825
--- /dev/null
+++ b/lib/datafiles/arch.i386.elf
Binary files differ
diff --git a/lib/datafiles/arch.ia64.elf b/lib/datafiles/arch.ia64.elf
new file mode 100644
index 0000000..78590b7
--- /dev/null
+++ b/lib/datafiles/arch.ia64.elf
Binary files differ
diff --git a/lib/datafiles/arch.m68k.elf b/lib/datafiles/arch.m68k.elf
new file mode 100644
index 0000000..73c8ea2
--- /dev/null
+++ b/lib/datafiles/arch.m68k.elf
Binary files differ
diff --git a/lib/datafiles/arch.mips.o32.elf b/lib/datafiles/arch.mips.o32.elf
new file mode 100644
index 0000000..786836b
--- /dev/null
+++ b/lib/datafiles/arch.mips.o32.elf
Binary files differ
diff --git a/lib/datafiles/arch.mips64.elf b/lib/datafiles/arch.mips64.elf
new file mode 100644
index 0000000..74566f5
--- /dev/null
+++ b/lib/datafiles/arch.mips64.elf
Binary files differ
diff --git a/lib/datafiles/arch.mipsel.o32.elf b/lib/datafiles/arch.mipsel.o32.elf
new file mode 100644
index 0000000..c5fa520
--- /dev/null
+++ b/lib/datafiles/arch.mipsel.o32.elf
Binary files differ
diff --git a/lib/datafiles/arch.mipsn32.elf b/lib/datafiles/arch.mipsn32.elf
new file mode 100644
index 0000000..1fdb0d4
--- /dev/null
+++ b/lib/datafiles/arch.mipsn32.elf
Binary files differ
diff --git a/lib/datafiles/arch.ppc.elf b/lib/datafiles/arch.ppc.elf
new file mode 100644
index 0000000..0070c56
--- /dev/null
+++ b/lib/datafiles/arch.ppc.elf
Binary files differ
diff --git a/lib/datafiles/arch.ppc64.elf b/lib/datafiles/arch.ppc64.elf
new file mode 100644
index 0000000..8ed1ff4
--- /dev/null
+++ b/lib/datafiles/arch.ppc64.elf
Binary files differ
diff --git a/lib/datafiles/arch.s390.elf b/lib/datafiles/arch.s390.elf
new file mode 100644
index 0000000..2d68cef
--- /dev/null
+++ b/lib/datafiles/arch.s390.elf
Binary files differ
diff --git a/lib/datafiles/arch.s390x.elf b/lib/datafiles/arch.s390x.elf
new file mode 100644
index 0000000..8af27ef
--- /dev/null
+++ b/lib/datafiles/arch.s390x.elf
Binary files differ
diff --git a/lib/datafiles/arch.sh4.elf b/lib/datafiles/arch.sh4.elf
new file mode 100644
index 0000000..119c54d
--- /dev/null
+++ b/lib/datafiles/arch.sh4.elf
Binary files differ
diff --git a/lib/datafiles/arch.sparc.elf b/lib/datafiles/arch.sparc.elf
new file mode 100644
index 0000000..e9bcf64
--- /dev/null
+++ b/lib/datafiles/arch.sparc.elf
Binary files differ
diff --git a/lib/datafiles/arch.sparc64.elf b/lib/datafiles/arch.sparc64.elf
new file mode 100644
index 0000000..ddd970f
--- /dev/null
+++ b/lib/datafiles/arch.sparc64.elf
Binary files differ
diff --git a/lib/datafiles/arch.x32.elf b/lib/datafiles/arch.x32.elf
new file mode 100644
index 0000000..d9a19fe
--- /dev/null
+++ b/lib/datafiles/arch.x32.elf
Binary files differ
diff --git a/lib/datafiles/arch.x86_64.elf b/lib/datafiles/arch.x86_64.elf
new file mode 100644
index 0000000..d7c137c
--- /dev/null
+++ b/lib/datafiles/arch.x86_64.elf
Binary files differ
diff --git a/lib/dev_server_wrapper.py b/lib/dev_server_wrapper.py
new file mode 100644
index 0000000..775a664
--- /dev/null
+++ b/lib/dev_server_wrapper.py
@@ -0,0 +1,764 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing methods and classes to interact with a devserver instance.
+"""
+
+from __future__ import print_function
+
+import multiprocessing
+import os
+import socket
+import shutil
+import sys
+import tempfile
+import httplib
+import urllib2
+import urlparse
+
+from chromite.cbuildbot import constants
+from chromite.cli import command
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import path_util
+from chromite.lib import remote_access
+from chromite.lib import timeout_util
+
+
+DEFAULT_PORT = 8080
+
+DEVSERVER_PKG_DIR = os.path.join(constants.SOURCE_ROOT, 'src/platform/dev')
+DEFAULT_STATIC_DIR = path_util.FromChrootPath(
+    os.path.join(constants.SOURCE_ROOT, 'src', 'platform', 'dev', 'static'))
+
+XBUDDY_REMOTE = 'remote'
+XBUDDY_LOCAL = 'local'
+
+ROOTFS_FILENAME = 'update.gz'
+STATEFUL_FILENAME = 'stateful.tgz'
+
+
+class ImagePathError(Exception):
+  """Raised when the provided path can't be resolved to an image.
+
+  Raised by GetImagePathWithXbuddy() when xbuddy fails to translate or fetch
+  the requested path.
+  """
+
+
+def ConvertTranslatedPath(original_path, translated_path):
+  """Converts a translated xbuddy path to an xbuddy path.
+
+  Devserver/xbuddy does not accept requests with translated xbuddy
+  path (build-id/version/image-name). This function converts such a
+  translated path to an xbuddy path that is suitable to be used in
+  devserver requests.
+
+  Args:
+    original_path: the xbuddy path before translation.
+      (e.g., remote/peppy/latest-canary).
+    translated_path: the translated xbuddy path
+      (e.g., peppy-release/R36-5760.0.0).
+
+  Returns:
+    A xbuddy path uniquely identifies a build and can be used in devserver
+      requests: {local|remote}/build-id/version/image_type
+  """
+  chunks = translated_path.split(os.path.sep)
+  # Map the concrete image file name (last component) to its image type.
+  chunks[-1] = constants.IMAGE_NAME_TO_TYPE[chunks[-1]]
+
+  # Prepend remote/local based on where the original (pre-translation) path
+  # pointed, since the translated form has lost that information.
+  if GetXbuddyPath(original_path).startswith(XBUDDY_REMOTE):
+    chunks = [XBUDDY_REMOTE] + chunks
+  else:
+    chunks = [XBUDDY_LOCAL] + chunks
+
+  return os.path.sep.join(chunks)
+
+
+def GetXbuddyPath(path):
+  """A helper function to parse an xbuddy path.
+
+  Args:
+    path: Either a path without no scheme or an xbuddy://path/for/xbuddy
+
+  Returns:
+    path/for/xbuddy if |path| is xbuddy://path/for/xbuddy; otherwise,
+    returns |path|.
+
+  Raises:
+    ValueError if |path| uses any scheme other than xbuddy://.
+  """
+  parsed = urlparse.urlparse(path)
+
+  # pylint: disable=E1101
+  if parsed.scheme == 'xbuddy':
+    return '%s%s' % (parsed.netloc, parsed.path)
+  elif parsed.scheme == '':
+    logging.debug('Assuming %s is an xbuddy path.', path)
+    return path
+  else:
+    raise ValueError('Do not support scheme %s.', parsed.scheme)
+
+
+def GetImagePathWithXbuddy(path, board, version=None,
+                           static_dir=DEFAULT_STATIC_DIR, lookup_only=False):
+  """Gets image path and resolved XBuddy path using xbuddy.
+
+  Ask xbuddy to translate |path|, and if necessary, download and stage the
+  image, then return a translated path to the image. Also returns the resolved
+  XBuddy path, which may be useful for subsequent calls in case the argument is
+  an alias.
+
+  NOTE: This function mutates os.environ['PATH'] and sys.path as a side
+  effect and does not restore them.
+
+  Args:
+    path: The xbuddy path.
+    board: The default board to use if board is not specified in |path|.
+    version: The default version to use if one is not specified in |path|.
+    static_dir: Static directory to stage the image in.
+    lookup_only: Caller only wants to translate the path not download the
+      artifact.
+
+  Returns:
+    A tuple consisting of a translated path to the image
+    (build-id/version/image_name) as well as the fully resolved XBuddy path (in
+    the case where |path| is an XBuddy alias).
+
+  Raises:
+    ImagePathError: If xbuddy cannot locate the image.
+  """
+  # Since xbuddy often wants to use gsutil from $PATH, make sure our local copy
+  # shows up first.
+  upath = os.environ['PATH'].split(os.pathsep)
+  upath.insert(0, os.path.dirname(gs.GSContext.GetDefaultGSUtilBin()))
+  os.environ['PATH'] = os.pathsep.join(upath)
+
+  # Import xbuddy for translating, downloading and staging the image.
+  if not os.path.exists(DEVSERVER_PKG_DIR):
+    raise Exception('Cannot find xbuddy module. Devserver package directory '
+                    'does not exist: %s' % DEVSERVER_PKG_DIR)
+  sys.path.append(DEVSERVER_PKG_DIR)
+  # pylint: disable=import-error
+  import xbuddy
+  import cherrypy
+
+  # If we are using the progress bar, quiet the logging output of cherrypy.
+  if command.UseProgressBar():
+    if (hasattr(cherrypy.log, 'access_log') and
+        hasattr(cherrypy.log, 'error_log')):
+      cherrypy.log.access_log.setLevel(logging.NOTICE)
+      cherrypy.log.error_log.setLevel(logging.NOTICE)
+    else:
+      # Older cherrypy versions lack the per-log handles; fall back to the
+      # global config knob.
+      cherrypy.config.update({'server.log_to_screen': False})
+
+  xb = xbuddy.XBuddy(static_dir=static_dir, board=board, version=version,
+                     log_screen=False)
+  path_list = GetXbuddyPath(path).rsplit(os.path.sep)
+  try:
+    # Translate() only resolves the path; Get() also downloads/stages.
+    if lookup_only:
+      build_id, file_name = xb.Translate(path_list)
+    else:
+      build_id, file_name = xb.Get(path_list)
+
+    # Resolve any alias to its canonical xbuddy path for the caller.
+    resolved_path, _ = xb.LookupAlias(os.path.sep.join(path_list))
+    return os.path.join(build_id, file_name), resolved_path
+  except xbuddy.XBuddyException as e:
+    logging.error('Locating image "%s" failed. The path might not be valid or '
+                  'the image might not exist.', path)
+    raise ImagePathError('Cannot locate image %s: %s' % (path, e))
+
+
def GenerateXbuddyRequest(path, req_type):
  """Generates an xbuddy request used to retrieve payloads.

  Builds the devserver query string for |path| according to |req_type|.
  For request type 'image' ('update'), the devserver will respond with a
  URL pointing to the folder where the image (update payloads) is stored.

  Args:
    path: An xbuddy path (with or without xbuddy://).
    req_type: xbuddy request type ('update', 'image', or 'translate').

  Returns:
    A xbuddy request.

  Raises:
    ValueError: If |req_type| is not a supported request type.
  """
  # One query template per request type; %s is filled with the parsed path.
  templates = {
      'update': 'xbuddy/%s?for_update=true&return_dir=true',
      'image': 'xbuddy/%s?return_dir=true',
      'translate': 'xbuddy_translate/%s',
  }
  # Validate the request type before parsing |path|, so an unknown type is
  # reported even when |path| itself would not parse.
  if req_type not in templates:
    raise ValueError('Does not support xbuddy request type %s' % req_type)

  return templates[req_type] % GetXbuddyPath(path)
+
+
def TranslatedPathToLocalPath(translated_path, static_dir):
  """Convert the translated path to a local path to the image file.

  Args:
    translated_path: the translated xbuddy path
      (e.g., peppy-release/R36-5760.0.0/chromiumos_image).
    static_dir: The static directory used by the devserver.

  Returns:
    A local path to the image file: the expanded path under |static_dir| if
    it exists here, otherwise that path mapped out of the chroot.
  """
  candidate = osutils.ExpandPath(os.path.join(static_dir, translated_path))

  # If the expanded path is not visible from this process (e.g. we are
  # outside the chroot), translate it to the out-of-chroot equivalent.
  if not os.path.exists(candidate):
    candidate = path_util.FromChrootPath(candidate)
  return candidate
+
+
def GetUpdatePayloadsFromLocalPath(path, payload_dir,
                                   src_image_to_delta=None,
                                   static_dir=DEFAULT_STATIC_DIR):
  """Generates update payloads from a local image path.

  This function wraps around ConvertLocalPathToXbuddy and GetUpdatePayloads,
  managing the creation and destruction of the necessary temporary directories
  required by this process.

  Args:
    path: Path to an image.
    payload_dir: The directory to store the payloads. On failure, the devserver
                 log will be copied to |payload_dir|.
    src_image_to_delta: Image used as the base to generate the delta payloads.
    static_dir: Devserver static dir to use.
  """

  # The stack guarantees both temp dirs are torn down on exit, even when
  # payload generation raises.
  with cros_build_lib.ContextManagerStack() as stack:
    # Chroot-visible copy of the image; sudo_rm because the devserver may
    # leave root-owned files behind.
    image_tempdir = stack.Add(
        osutils.TempDir,
        base_dir=path_util.FromChrootPath('/tmp'),
        prefix='dev_server_wrapper_local_image', sudo_rm=True)
    # Symlink anchor inside the devserver's static dir.
    static_tempdir = stack.Add(osutils.TempDir,
                               base_dir=static_dir,
                               prefix='local_image', sudo_rm=True)
    xbuddy_path = ConvertLocalPathToXbuddyPath(path, image_tempdir,
                                               static_tempdir, static_dir)
    GetUpdatePayloads(xbuddy_path, payload_dir,
                      src_image_to_delta=src_image_to_delta,
                      static_dir=static_dir)
+
+
def ConvertLocalPathToXbuddyPath(path, image_tempdir, static_tempdir,
                                 static_dir=DEFAULT_STATIC_DIR):
  """Converts |path| to an xbuddy path.

  This function copies the image into a temporary directory in chroot
  and creates a symlink in static_dir for devserver/xbuddy to
  access.

  Note that the temporary directories need to be cleaned up by the caller
  once they are no longer needed.

  Args:
    path: Path to an image.
    image_tempdir: osutils.TempDir instance to copy the image into. The
                   directory must be located within the chroot.
    static_tempdir: osutils.TempDir instance to be symlinked to by the static
                    directory.
    static_dir: Static directory to create the symlink in.

  Returns:
    The xbuddy path for |path|
  """
  tempdir_path = image_tempdir.tempdir
  logging.info('Copying image to temporary directory %s', tempdir_path)
  # Devserver only knows the image names listed in IMAGE_TYPE_TO_NAME.
  # Rename the image to chromiumos_test_image.bin when copying.
  TEMP_IMAGE_TYPE = 'test'
  shutil.copy(path,
              os.path.join(tempdir_path,
                           constants.IMAGE_TYPE_TO_NAME[TEMP_IMAGE_TYPE]))
  # The devserver runs inside the chroot, so hand it a chroot-relative path.
  chroot_path = path_util.ToChrootPath(tempdir_path)
  # Create and link static_dir/local_imagexxxx/link to the image
  # folder, so that xbuddy/devserver can understand the path.
  relative_dir = os.path.join(os.path.basename(static_tempdir.tempdir), 'link')
  symlink_path = os.path.join(static_dir, relative_dir)
  logging.info('Creating a symlink %s -> %s', symlink_path, chroot_path)
  os.symlink(chroot_path, symlink_path)
  return os.path.join(relative_dir, TEMP_IMAGE_TYPE)
+
+
def GetUpdatePayloads(path, payload_dir, board=None,
                      src_image_to_delta=None, timeout=60 * 15,
                      static_dir=DEFAULT_STATIC_DIR):
  """Launch devserver to get the update payloads.

  Args:
    path: The xbuddy path.
    payload_dir: The directory to store the payloads. On failure, the devserver
                 log will be copied to |payload_dir|.
    board: The default board to use when |path| is None.
    src_image_to_delta: Image used as the base to generate the delta payloads.
    timeout: Timeout for launching devserver (seconds).
    static_dir: Devserver static dir to use.

  Raises:
    DevServerException: If the devserver fails to start, respond, or serve
      the payload files.
  """
  ds = DevServerWrapper(static_dir=static_dir, src_image=src_image_to_delta,
                        board=board)
  req = GenerateXbuddyRequest(path, 'update')
  logging.info('Starting local devserver to generate/serve payloads...')
  try:
    ds.Start()
    # Opening this URL triggers payload generation; with return_dir=true the
    # response body names the directory the payloads are served from.
    url = ds.OpenURL(ds.GetURL(sub_dir=req), timeout=timeout)
    ds.DownloadFile(os.path.join(url, ROOTFS_FILENAME), payload_dir)
    ds.DownloadFile(os.path.join(url, STATEFUL_FILENAME), payload_dir)
  except DevServerException:
    # Surface the devserver log at warning level to aid debugging, then
    # re-raise the original failure.
    logging.warning(ds.TailLog() or 'No devserver log is available.')
    raise
  else:
    logging.debug(ds.TailLog() or 'No devserver log is available.')
  finally:
    ds.Stop()
    # Preserve the devserver log next to the payloads for later inspection.
    if os.path.exists(ds.log_file):
      shutil.copyfile(ds.log_file,
                      os.path.join(payload_dir, 'local_devserver.log'))
    else:
      logging.warning('Could not find %s', ds.log_file)
+
+
def GenerateUpdateId(target, src, key, for_vm):
  """Returns a simple representation id of |target| and |src| paths.

  The id has the form [src->]target[+key][+patched_kernel].

  Args:
    target: Target image of the update payloads.
    src: Base image to of the delta update payloads.
    key: Private key used to sign the payloads.
    for_vm: Whether the update payloads are to be used in a VM .

  Returns:
    The update id string.
  """
  update_id = '->'.join([src, target]) if src else target

  # Append the '+'-joined qualifiers in order: the signing key first, then
  # the patched-kernel marker for non-VM payloads.
  qualifiers = []
  if key:
    qualifiers.append(key)
  if not for_vm:
    qualifiers.append('patched_kernel')

  return '+'.join([update_id] + qualifiers)
+
+
class DevServerException(Exception):
  """Base exception class of devserver errors.

  All devserver wrapper errors derive from this, so callers can catch it
  to handle any devserver failure.
  """
+
+
class DevServerStartupError(DevServerException):
  """Thrown when the devserver fails to start up (e.g. the process dies or
  its port file never appears)."""
+
+
class DevServerStopError(DevServerException):
  """Thrown when the devserver fails to stop (even after SIGKILL)."""
+
+
class DevServerResponseError(DevServerException):
  """Thrown when the devserver responds with an error (HTTP error status)."""
+
+
class DevServerConnectionError(DevServerException):
  """Thrown when unable to connect to devserver (URL or socket failure)."""
+
+
class DevServerWrapper(multiprocessing.Process):
  """A Simple wrapper around a dev server instance.

  The devserver runs in a child process (this class extends
  multiprocessing.Process; run() execs the devserver command) and is
  controlled through Start()/Stop(). The classmethods are stateless HTTP
  helpers for talking to an already-running devserver.
  """

  # Wait up to 15 minutes for the dev server to start. It can take a
  # while to start when generating payloads in parallel.
  DEV_SERVER_TIMEOUT = 900
  # Seconds to wait after SIGTERM before escalating to SIGKILL in Stop().
  KILL_TIMEOUT = 10

  def __init__(self, static_dir=None, port=None, log_dir=None, src_image=None,
               board=None):
    """Initialize a DevServerWrapper instance.

    Args:
      static_dir: The static directory to be used by the devserver.
      port: The port to used by the devserver.
      log_dir: Directory to store the log files.
      src_image: The path to the image to be used as the base to
        generate delta payloads.
      board: Override board to pass to the devserver for xbuddy pathing.
    """
    super(DevServerWrapper, self).__init__()
    self.devserver_bin = 'start_devserver'
    # Set port if it is given. Otherwise, devserver will start at any
    # available port.
    self.port = None if not port else port
    self.src_image = src_image
    self.board = board
    self.tempdir = None
    self.log_dir = log_dir
    if not self.log_dir:
      # No log dir given: use a temp dir under the chroot's /tmp so the
      # in-chroot devserver can write to it. sudo_rm because the devserver
      # may leave root-owned files behind.
      self.tempdir = osutils.TempDir(
          base_dir=path_util.FromChrootPath('/tmp'),
          prefix='devserver_wrapper',
          sudo_rm=True)
      self.log_dir = self.tempdir.tempdir
    self.static_dir = static_dir
    self.log_file = os.path.join(self.log_dir, 'dev_server.log')
    # The devserver reports its chosen port here when we did not pin one.
    self.port_file = os.path.join(self.log_dir, 'dev_server.port')
    # _GetPIDFilePath is overridden by subclasses (e.g. remote wrappers).
    self._pid_file = self._GetPIDFilePath()
    self._pid = None

  @classmethod
  def DownloadFile(cls, url, dest):
    """Download the file from the URL to a local path."""
    # If |dest| is a directory, keep the URL's basename as the filename.
    if os.path.isdir(dest):
      dest = os.path.join(dest, os.path.basename(url))

    logging.info('Downloading %s to %s', url, dest)
    osutils.WriteFile(dest, DevServerWrapper.OpenURL(url), mode='wb')

  def GetURL(self, sub_dir=None):
    """Returns the URL of this devserver instance."""
    return self.GetDevServerURL(port=self.port, sub_dir=sub_dir)

  @classmethod
  def GetDevServerURL(cls, ip=None, port=None, sub_dir=None):
    """Returns the dev server url.

    Args:
      ip: IP address of the devserver. If not set, use the IP
        address of this machine.
      port: Port number of devserver.
      sub_dir: The subdirectory of the devserver url.

    Returns:
      The devserver URL string, e.g. http://<ip>:<port>[/<sub_dir>].
    """
    ip = cros_build_lib.GetIPv4Address() if not ip else ip
    # If port number is not given, assume 8080 for backward
    # compatibility.
    port = DEFAULT_PORT if not port else port
    url = 'http://%(ip)s:%(port)s' % {'ip': ip, 'port': str(port)}
    if sub_dir:
      url += '/' + sub_dir

    return url

  @classmethod
  def OpenURL(cls, url, ignore_url_error=False, timeout=60):
    """Returns the HTTP response of a URL.

    Returns:
      The response body as a string, or None when a connection-level error
      was suppressed via |ignore_url_error|.

    Raises:
      DevServerResponseError: If the devserver returned an HTTP error.
      DevServerConnectionError: If the devserver could not be reached and
        |ignore_url_error| is False.
    """
    logging.debug('Retrieving %s', url)
    try:
      res = urllib2.urlopen(url, timeout=timeout)
    except (urllib2.HTTPError, httplib.HTTPException) as e:
      logging.error('Devserver responded with an error!')
      raise DevServerResponseError(e)
    except (urllib2.URLError, socket.timeout) as e:
      if not ignore_url_error:
        logging.error('Cannot connect to devserver!')
        raise DevServerConnectionError(e)
      # With ignore_url_error, fall through and return None implicitly.
    else:
      return res.read()

  @classmethod
  def WipeStaticDirectory(cls, static_dir):
    """Cleans up |static_dir|.

    Args:
      static_dir: path to the static directory of the devserver instance.
    """
    # Wipe the payload cache.
    cls.WipePayloadCache(static_dir=static_dir)
    logging.info('Cleaning up directory %s', static_dir)
    osutils.RmDir(static_dir, ignore_missing=True, sudo=True)

  @classmethod
  def WipePayloadCache(cls, devserver_bin='start_devserver', static_dir=None):
    """Cleans up devserver cache of payloads.

    Args:
      devserver_bin: path to the devserver binary.
      static_dir: path to use as the static directory of the devserver instance.
    """
    logging.info('Cleaning up previously generated payloads.')
    # --exit makes the devserver clear its cache and quit immediately.
    cmd = [devserver_bin, '--clear_cache', '--exit']
    if static_dir:
      cmd.append('--static_dir=%s' % path_util.ToChrootPath(static_dir))

    # The devserver runs inside the chroot; suppress its output here.
    cros_build_lib.SudoRunCommand(
        cmd, enter_chroot=True, print_cmd=False, combine_stdout_stderr=True,
        redirect_stdout=True, redirect_stderr=True, cwd=constants.SOURCE_ROOT)

  def _ReadPortNumber(self):
    """Read port number from file."""
    if not self.is_alive():
      raise DevServerStartupError('Devserver terminated unexpectedly!')

    try:
      # Poll every 5s until the devserver writes its port file.
      timeout_util.WaitForReturnTrue(os.path.exists,
                                     func_args=[self.port_file],
                                     timeout=self.DEV_SERVER_TIMEOUT,
                                     period=5)
    except timeout_util.TimeoutError:
      self.terminate()
      raise DevServerStartupError('Devserver portfile does not exist!')

    self.port = int(osutils.ReadFile(self.port_file).strip())

  def IsReady(self):
    """Check if devserver is up and running."""
    if not self.is_alive():
      raise DevServerStartupError('Devserver terminated unexpectedly!')

    url = os.path.join('http://%s:%d' % (remote_access.LOCALHOST_IP, self.port),
                       'check_health')
    # Connection errors are expected while the server is still coming up;
    # ignore them and report not-ready instead of raising.
    if self.OpenURL(url, ignore_url_error=True, timeout=2):
      return True

    return False

  def _GetPIDFilePath(self):
    """Returns pid file path."""
    # delete=False: we only want a unique reserved filename; the devserver
    # itself writes the pid into it.
    return tempfile.NamedTemporaryFile(prefix='devserver_wrapper',
                                       dir=self.log_dir,
                                       delete=False).name

  def _GetPID(self):
    """Returns the pid read from the pid file."""
    # Pid file was passed into the chroot.
    return osutils.ReadFile(self._pid_file).rstrip()

  def _WaitUntilStarted(self):
    """Wait until the devserver has started."""
    if not self.port:
      self._ReadPortNumber()

    try:
      timeout_util.WaitForReturnTrue(self.IsReady,
                                     timeout=self.DEV_SERVER_TIMEOUT,
                                     period=5)
    except timeout_util.TimeoutError:
      self.terminate()
      raise DevServerStartupError('Devserver did not start')

  def run(self):
    """Kicks off devserver in a separate process and waits for it to finish."""
    # Truncate the log file if it already exists.
    if os.path.exists(self.log_file):
      osutils.SafeUnlink(self.log_file, sudo=True)

    path_resolver = path_util.ChrootPathResolver()

    # Port 0 tells the devserver to pick any available port.
    port = self.port if self.port else 0
    cmd = [self.devserver_bin,
           '--pidfile', path_resolver.ToChroot(self._pid_file),
           '--logfile', path_resolver.ToChroot(self.log_file),
           '--port=%d' % port]

    if not self.port:
      # Ask the devserver to report the port it picked via the port file.
      cmd.append('--portfile=%s' % path_resolver.ToChroot(self.port_file))

    if self.static_dir:
      cmd.append(
          '--static_dir=%s' % path_resolver.ToChroot(self.static_dir))

    if self.src_image:
      cmd.append('--src_image=%s' % path_resolver.ToChroot(self.src_image))

    if self.board:
      cmd.append('--board=%s' % self.board)

    # --no-ns-pid so the pid we read from the pid file is killable from
    # outside the chroot in Stop().
    chroot_args = ['--no-ns-pid']
    result = self._RunCommand(
        cmd, enter_chroot=True, chroot_args=chroot_args,
        cwd=constants.SOURCE_ROOT, error_code_ok=True,
        redirect_stdout=True, combine_stdout_stderr=True)
    if result.returncode != 0:
      # NOTE(review): failure is only logged here; Start() surfaces it via
      # the _WaitUntilStarted timeout path.
      msg = (('Devserver failed to start!\n'
              '--- Start output from the devserver startup command ---\n'
              '%s'
              '--- End output from the devserver startup command ---') %
             (result.output))
      logging.error(msg)

  def Start(self):
    """Starts a background devserver and waits for it to start.

    Starts a background devserver and waits for it to start. Will only return
    once devserver has started and running pid has been read.
    """
    self.start()
    self._WaitUntilStarted()
    self._pid = self._GetPID()

  def Stop(self):
    """Kills the devserver instance with SIGTERM and SIGKILL if SIGTERM fails"""
    if not self._pid:
      logging.debug('No devserver running.')
      return

    logging.debug('Stopping devserver instance with pid %s', self._pid)
    if self.is_alive():
      self._RunCommand(['kill', self._pid], error_code_ok=True)
    else:
      logging.debug('Devserver not running!')
      return

    # Give the process KILL_TIMEOUT seconds to exit before force-killing.
    self.join(self.KILL_TIMEOUT)
    if self.is_alive():
      logging.warning('Devserver is unstoppable. Killing with SIGKILL')
      try:
        self._RunCommand(['kill', '-9', self._pid])
      except cros_build_lib.RunCommandError as e:
        raise DevServerStopError('Unable to stop devserver: %s' % e)

  def PrintLog(self):
    """Print devserver output to stdout."""
    # '+1' makes tail print from the first line, i.e. the whole log.
    print(self.TailLog(num_lines='+1'))

  def TailLog(self, num_lines=50):
    """Returns the most recent |num_lines| lines of the devserver log file.

    Returns None when the log file does not exist.
    """
    fname = self.log_file
    # We use self._RunCommand here to check the existence of the log
    # file, so it works for RemoteDevserverWrapper as well.
    if self._RunCommand(
        ['test', '-f', fname], error_code_ok=True).returncode == 0:
      result = self._RunCommand(['tail', '-n', str(num_lines), fname],
                                capture_output=True)
      # NOTE(review): no newline is inserted between the header/footer
      # markers and the log content.
      output = '--- Start output from %s ---' % fname
      output += result.output
      output += '--- End output from %s ---' % fname
      return output

  def _RunCommand(self, *args, **kwargs):
    """Runs a shell commmand."""
    kwargs.setdefault('debug_level', logging.DEBUG)
    return cros_build_lib.SudoRunCommand(*args, **kwargs)
+
+
class RemoteDevServerWrapper(DevServerWrapper):
  """A wrapper of a devserver on a remote device.

  Devserver wrapper for RemoteDevice. This wrapper kills all existing
  running devserver instances before startup, thus allowing one
  devserver running at a time.

  We assume there is no chroot on the device, thus we do not launch
  devserver inside chroot.
  """

  # Shorter timeout because the remote devserver instance does not
  # need to generate payloads.
  DEV_SERVER_TIMEOUT = 30
  KILL_TIMEOUT = 10
  # Fixed path on the device; only one remote devserver runs per device.
  PID_FILE_PATH = '/tmp/devserver_wrapper.pid'

  # User-facing help shown when the device lacks cherrypy. Fixed the typos
  # in the original text ('cherrpy' -> 'cherrypy', 'parition' ->
  # 'partition') and the misaligned continuation line.
  CHERRYPY_ERROR_MSG = """
Your device does not have cherrypy package installed; cherrypy is
necessary for launching devserver on the device. Your device may be
running an older image (<R33-4986.0.0), where cherrypy is not
installed by default.

You can fix this with one of the following three options:
  1. Update the device to a newer image with a USB stick.
  2. Run 'cros deploy device cherrypy' to install cherrypy.
  3. Run cros flash with --no-rootfs-update to update only the stateful
     partition to a newer image (with the risk that the rootfs/stateful version
     mismatch may cause some problems).
  """

  def __init__(self, remote_device, devserver_bin, **kwargs):
    """Initializes a RemoteDevServerWrapper with the remote device.

    Args:
      remote_device: A RemoteDevice object.
      devserver_bin: The path to the devserver script on the device.
      **kwargs: See DevServerWrapper documentation.
    """
    super(RemoteDevServerWrapper, self).__init__(**kwargs)
    self.device = remote_device
    self.devserver_bin = devserver_bin
    self.hostname = remote_device.hostname

  def _GetPID(self):
    """Returns the pid read from pid file."""
    result = self._RunCommand(['cat', self._pid_file])
    return result.output

  def _GetPIDFilePath(self):
    """Returns the pid filename."""
    return self.PID_FILE_PATH

  def _RunCommand(self, *args, **kwargs):
    """Runs a remote shell command.

    Args:
      *args: See RemoteAccess.RemoteDevice documentation.
      **kwargs: See RemoteAccess.RemoteDevice documentation.
    """
    kwargs.setdefault('debug_level', logging.DEBUG)
    return self.device.RunCommand(*args, **kwargs)

  def _ReadPortNumber(self):
    """Read port number from file."""
    if not self.is_alive():
      raise DevServerStartupError('Devserver terminated unexpectedly!')

    def PortFileExists():
      # Probe over ssh; 'test -f' exits 0 iff the port file is present.
      result = self._RunCommand(['test', '-f', self.port_file],
                                error_code_ok=True)
      return result.returncode == 0

    try:
      timeout_util.WaitForReturnTrue(PortFileExists,
                                     timeout=self.DEV_SERVER_TIMEOUT,
                                     period=5)
    except timeout_util.TimeoutError:
      self.terminate()
      raise DevServerStartupError('Devserver portfile does not exist!')

    self.port = int(self._RunCommand(
        ['cat', self.port_file], capture_output=True).output.strip())

  def IsReady(self):
    """Returns True if devserver is ready to accept requests."""
    if not self.is_alive():
      raise DevServerStartupError('Devserver terminated unexpectedly!')

    url = os.path.join('http://127.0.0.1:%d' % self.port, 'check_health')
    # Running wget through ssh because the port on the device is not
    # accessible by default.
    result = self.device.RunCommand(
        ['wget', url, '-q', '-O', '/dev/null'], error_code_ok=True)
    return result.returncode == 0

  def run(self):
    """Launches a devserver process on the device."""
    # '>|' truncates the log file even if the remote shell has noclobber set.
    self._RunCommand(['cat', '/dev/null', '>|', self.log_file])

    # Port 0 tells the devserver to pick any available port.
    port = self.port if self.port else 0
    cmd = ['python2', self.devserver_bin,
           '--logfile=%s' % self.log_file,
           '--pidfile', self._pid_file,
           '--port=%d' % port]

    if not self.port:
      # Ask the devserver to report the port it picked via the port file.
      cmd.append('--portfile=%s' % self.port_file)

    if self.static_dir:
      cmd.append('--static_dir=%s' % self.static_dir)

    logging.info('Starting devserver on %s', self.hostname)
    result = self._RunCommand(cmd, error_code_ok=True, redirect_stdout=True,
                              combine_stdout_stderr=True)
    if result.returncode != 0:
      msg = (('Remote devserver failed to start!\n'
              '--- Start output from the devserver startup command ---\n'
              '%s'
              '--- End output from the devserver startup command ---') %
             (result.output))
      logging.error(msg)
      # A missing cherrypy module is a common, recognizable failure; give
      # the user actionable guidance.
      if 'ImportError: No module named cherrypy' in result.output:
        logging.error(self.CHERRYPY_ERROR_MSG)

  def GetURL(self, sub_dir=None):
    """Returns the URL of this devserver instance."""
    return self.GetDevServerURL(ip=self.hostname, port=self.port,
                                sub_dir=sub_dir)

  @classmethod
  def WipePayloadCache(cls, devserver_bin='start_devserver', static_dir=None):
    """Cleans up devserver cache of payloads. Not supported remotely."""
    raise NotImplementedError()

  @classmethod
  def WipeStaticDirectory(cls, static_dir):
    """Cleans up |static_dir|. Not supported remotely."""
    raise NotImplementedError()
diff --git a/lib/dev_server_wrapper_stress_test b/lib/dev_server_wrapper_stress_test
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/dev_server_wrapper_stress_test
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/dev_server_wrapper_stress_test.py b/lib/dev_server_wrapper_stress_test.py
new file mode 100644
index 0000000..1784e4f
--- /dev/null
+++ b/lib/dev_server_wrapper_stress_test.py
@@ -0,0 +1,28 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Stress test for dev_server_wrapper.
+
+Test script runs a long time stressing the ability to start and stop the
+dev_server_wrapper. Even very rare hangs will cause significant build flake.
+"""
+
+from __future__ import print_function
+
+from chromite.lib import cros_logging as logging
+from chromite.lib import dev_server_wrapper
+
+
+_ITERATIONS = 10000
+
+
def main(_argv):
  """Repeatedly starts and stops a DevServerWrapper, looking for hangs."""
  # Show all debug output so the last action before a hang is visible.
  logging.getLogger().setLevel(logging.DEBUG)
  for i in range(_ITERATIONS):
    print('Iteration {}'.format(i))
    wrapper = dev_server_wrapper.DevServerWrapper()
    print('Starting')
    wrapper.Start()
    print('Stopping')
    wrapper.Stop()
diff --git a/lib/dev_server_wrapper_unittest b/lib/dev_server_wrapper_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/dev_server_wrapper_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/dev_server_wrapper_unittest.py b/lib/dev_server_wrapper_unittest.py
new file mode 100644
index 0000000..b359b68
--- /dev/null
+++ b/lib/dev_server_wrapper_unittest.py
@@ -0,0 +1,96 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module tests helpers in devserver_wrapper."""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from cbuildbot import constants
+from chromite.lib import cros_test_lib
+from chromite.lib import dev_server_wrapper
+
+
# pylint: disable=W0212
class TestXbuddyHelpers(cros_test_lib.MockTempDirTestCase):
  """Test xbuddy helper functions."""
  def testGenerateXbuddyRequestForUpdate(self):
    """Test we generate correct xbuddy requests."""
    # Use the latest build when 'latest' is given.
    req = 'xbuddy/latest?for_update=true&return_dir=true'
    self.assertEqual(
        dev_server_wrapper.GenerateXbuddyRequest('latest', 'update'), req)

    # Convert the path starting with 'xbuddy://' to 'xbuddy/'
    path = 'xbuddy://remote/stumpy/version'
    req = 'xbuddy/remote/stumpy/version?for_update=true&return_dir=true'
    self.assertEqual(
        dev_server_wrapper.GenerateXbuddyRequest(path, 'update'), req)

  def testGenerateXbuddyRequestForImage(self):
    """Tests that we generate correct requests to get images."""
    # Bare path: used as-is in the request.
    image_path = 'foo/bar/taco'
    self.assertEqual(dev_server_wrapper.GenerateXbuddyRequest(image_path,
                                                              'image'),
                     'xbuddy/foo/bar/taco?return_dir=true')

    # xbuddy:// scheme: stripped before building the request.
    image_path = 'xbuddy://foo/bar/taco'
    self.assertEqual(dev_server_wrapper.GenerateXbuddyRequest(image_path,
                                                              'image'),
                     'xbuddy/foo/bar/taco?return_dir=true')

  def testGenerateXbuddyRequestForTranslate(self):
    """Tests that we generate correct requests for translation."""
    image_path = 'foo/bar/taco'
    self.assertEqual(dev_server_wrapper.GenerateXbuddyRequest(image_path,
                                                              'translate'),
                     'xbuddy_translate/foo/bar/taco')

    image_path = 'xbuddy://foo/bar/taco'
    self.assertEqual(dev_server_wrapper.GenerateXbuddyRequest(image_path,
                                                              'translate'),
                     'xbuddy_translate/foo/bar/taco')

  def testConvertTranslatedPath(self):
    """Tests that we convert a translated path to a usable xbuddy path."""
    # Remote alias: keeps the 'remote' prefix and maps the image name to
    # its short type ('test').
    path = 'remote/latest-canary'
    translated_path = 'taco-release/R36-5761.0.0/chromiumos_test_image.bin'
    self.assertEqual(dev_server_wrapper.ConvertTranslatedPath(path,
                                                              translated_path),
                     'remote/taco-release/R36-5761.0.0/test')

    # Local alias: prefixed with 'local', image name mapped to 'dev'.
    path = 'latest'
    translated_path = 'taco/R36-5600.0.0/chromiumos_image.bin'
    self.assertEqual(dev_server_wrapper.ConvertTranslatedPath(path,
                                                              translated_path),
                     'local/taco/R36-5600.0.0/dev')

  @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=True)
  def testTranslatedPathToLocalPath(self, _mock1):
    """Tests that we convert a translated path to a local path correctly."""
    # Inside the chroot the path resolves directly under the static dir.
    translated_path = 'peppy-release/R33-5116.87.0/chromiumos_image.bin'
    base_path = os.path.join(self.tempdir, 'peppy-release/R33-5116.87.0')

    local_path = os.path.join(base_path, 'chromiumos_image.bin')
    self.assertEqual(
        dev_server_wrapper.TranslatedPathToLocalPath(translated_path,
                                                     self.tempdir),
        local_path)

  @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False)
  def testTranslatedPathToLocalPathOutsideChroot(self, _mock1):
    """Tests that we convert a translated path when outside the chroot."""
    translated_path = 'peppy-release/R33-5116.87.0/chromiumos_image.bin'
    chroot_dir = os.path.join(constants.SOURCE_ROOT,
                              constants.DEFAULT_CHROOT_DIR)
    static_dir = os.path.join('devserver', 'static')
    chroot_static_dir = os.path.join('/', static_dir)

    # Outside the chroot the in-chroot static dir must be rebased onto the
    # host's chroot directory.
    local_path = os.path.join(chroot_dir, static_dir, translated_path)
    self.assertEqual(
        dev_server_wrapper.TranslatedPathToLocalPath(
            translated_path, chroot_static_dir),
        local_path)
diff --git a/lib/dot_helper.py b/lib/dot_helper.py
new file mode 100644
index 0000000..efb3fe7
--- /dev/null
+++ b/lib/dot_helper.py
@@ -0,0 +1,108 @@
+# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions for building graphs with dot."""
+
+from __future__ import print_function
+
+from chromite.lib import cros_build_lib
+from chromite.lib import osutils
+
+
class Subgraph(object):
  """A subgraph in dot. Contains nodes, arcs, and other subgraphs."""

  # Rank constraints understood by dot; None means no constraint.
  # NOTE(review): currently informational only -- |rank| is not validated
  # against this set.
  _valid_ranks = set(['source', 'sink', 'same', 'min', 'max', None])

  def __init__(self, rank=None):
    """Initializes the subgraph.

    Args:
      rank: Optional dot rank constraint ('source', 'sink', 'same', 'min'
        or 'max') applied to this subgraph's nodes.
    """
    # Bug fix: the constructor previously re-assigned self._rank = None
    # after storing |rank|, silently discarding the argument and making
    # the 'rank=...;' output in _GenInner() unreachable.
    self._rank = rank
    self._nodes = []
    self._subgraphs = []
    self._arcs = set()

  def AddNode(self, node_id, name=None, color=None, href=None):
    """Adds a node to the subgraph.

    Args:
      node_id: Unique identifier for the node.
      name: Optional label displayed for the node.
      color: Optional color used for both the node and its font.
      href: Optional hyperlink attached to the node.
    """
    tags = {}
    if name:
      tags['label'] = name
    if color:
      tags['color'] = color
      tags['fontcolor'] = color
    if href:
      tags['href'] = href
    self._nodes.append({'id': node_id, 'tags': tags})

  def AddSubgraph(self, subgraph):
    """Adds a subgraph to the subgraph."""
    self._subgraphs.append(subgraph)

  def AddNewSubgraph(self, rank=None):
    """Adds a new subgraph to the subgraph. The new subgraph is returned."""
    subgraph = Subgraph(rank)
    self.AddSubgraph(subgraph)
    return subgraph

  def AddArc(self, node_from, node_to):
    """Adds an arc between two nodes."""
    self._arcs.add((node_from, node_to))

  def _GenNodes(self):
    """Generates the code for all the nodes."""
    lines = []
    for node in self._nodes:
      # items() (rather than Python-2-only iteritems()) keeps this working
      # under both Python 2 and 3.
      tags = ['%s="%s"' % (k, v) for (k, v) in node['tags'].items()]
      lines.append('"%s" [%s];' % (node['id'], ', '.join(tags)))
    return lines

  def _GenSubgraphs(self):
    """Generates the code for all the subgraphs contained in this subgraph."""
    lines = []
    for subgraph in self._subgraphs:
      lines += subgraph.Gen()
    return lines

  def _GenArcs(self):
    """Generates the code for all the arcs."""
    lines = []
    for node_from, node_to in self._arcs:
      lines.append('"%s" -> "%s";' % (node_from, node_to))
    return lines

  def _GenInner(self):
    """Generates the code for the inner contents of the subgraph."""
    lines = []
    if self._rank:
      lines.append('rank=%s;' % self._rank)
    lines += self._GenSubgraphs()
    lines += self._GenNodes()
    lines += self._GenArcs()
    return lines

  def Gen(self):
    """Generates the code for the subgraph."""
    return ['subgraph {'] + self._GenInner() + ['}']
+
+
class Graph(Subgraph):
  """A top-level graph in dot. It's basically a subgraph with a name."""

  def __init__(self, name):
    """Initializes the graph with its dot name."""
    super(Graph, self).__init__()
    self._name = name

  def Gen(self):
    """Generates the code for the graph."""
    header = ['digraph "%s" {' % self._name,
              'graph [name="%s"];' % self._name]
    return header + self._GenInner() + ['}']
+
+
def GenerateImage(lines, filename, out_format='svg', save_dot_filename=None):
  """Generates the image by calling dot on the input lines.

  Args:
    lines: Iterable of dot source lines; joined with newlines.
    filename: Output image file to write.
    out_format: Output format for dot's -T flag (defaults to 'svg').
    save_dot_filename: If set, also save the joined dot source to this file.
  """
  dot_source = '\n'.join(lines)
  dot_cmd = ['dot', '-T%s' % out_format, '-o', filename]
  cros_build_lib.RunCommand(dot_cmd, input=dot_source)

  if save_dot_filename:
    osutils.WriteFile(save_dot_filename, dot_source)
diff --git a/lib/factory.py b/lib/factory.py
new file mode 100644
index 0000000..7141145
--- /dev/null
+++ b/lib/factory.py
@@ -0,0 +1,122 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Abstract ObjectFactory class used for injection of external dependencies."""
+
+from __future__ import print_function
+
+import functools
+
+
class ObjectFactoryIllegalOperation(Exception):
  """Raised when attempting an illegal ObjectFactory operation."""


# Module-private sentinel meaning "no singleton instance was supplied".
# Compared by identity so any caller-provided value (including None) is
# distinguishable from "nothing passed".
_NO_SINGLETON_INSTANCE = object()


class ObjectFactory(object):
  """Abstract object factory, used for injection of external dependencies.

  A call to Setup(...) is necessary before a call to GetInstance().
  """

  def __init__(self, object_name, setup_types, allowed_transitions=None):
    """ObjectFactory constructor.

    Args:
      object_name: Human readable name for the type of object that this factory
                   generates.
      setup_types: A (set up type name -> generator function) dictionary, which
                   teaches ObjectFactory how to construct instances after setup
                   has been called. For set up types where a singleton instance
                   is specified at setup(...) time, generator function should be
                   None.
      allowed_transitions: Optional function, where
                           allowed_transitions(from_type, to_type) specifies
                           whether transition from |from_type| to |to_type| is
                           allowed.

                           If unspecified, no transitions are allowed.
    """
    self._object_name = object_name
    self._types = setup_types
    self._allowed_transitions = allowed_transitions
    # Initialize per-instance state here rather than relying on class-level
    # attribute defaults, so factories never share mutable state.
    self._is_setup = False
    self._setup_type = None
    self._setup_instance = _NO_SINGLETON_INSTANCE

  def Setup(self, setup_type, singleton_instance=_NO_SINGLETON_INSTANCE):
    """Sets this factory up to produce instances of type |setup_type|.

    Args:
      setup_type: One of the type names given as |setup_types| to __init__.
      singleton_instance: Instance to be returned by every GetInstance() call.
          Must be supplied if and only if the generator function registered
          for |setup_type| is None.

    Raises:
      ObjectFactoryIllegalOperation: If |setup_type| is unknown, if the
          factory is already set up and this transition is not allowed, or if
          |singleton_instance| is inconsistent with |setup_type|'s generator.
    """
    # Prevent set up to unknown types.
    if setup_type not in self._types:
      raise ObjectFactoryIllegalOperation(
          'Unknown %s setup_type %s' % (self._object_name, setup_type))

    # Prevent illegal setup transitions.
    if self._is_setup:
      if self._allowed_transitions:
        if not self._allowed_transitions(self._setup_type, setup_type):
          raise ObjectFactoryIllegalOperation(
              'Illegal set up transition from %s to %s.' % (self._setup_type,
                                                            setup_type))
      else:
        raise ObjectFactoryIllegalOperation(
            '%s already set up.' % self._object_name)

    # Allow singleton_instance if and only if factory method for this type is
    # None. The sentinel is compared by identity so that None (or any other
    # equality-happy value) is a legal singleton instance.
    instance_supplied = singleton_instance is not _NO_SINGLETON_INSTANCE
    factory_is_none = self._types[setup_type] is None
    if instance_supplied != factory_is_none:
      raise ObjectFactoryIllegalOperation(
          'singleton_instance should be supplied if and only if setup_type has '
          'a factory that is None.')

    self._setup_type = setup_type
    self._setup_instance = singleton_instance
    self._is_setup = True

  @property
  def is_setup(self):
    """Returns True iff a call to GetInstance is expected to succeed."""
    return self._is_setup

  @property
  def setup_type(self):
    """Returns the setup_type, or None if not yet set up."""
    return self._setup_type

  def GetInstance(self):
    """Returns an object instance iff setup has been called.

    Raises:
      ObjectFactoryIllegalOperation: if setup has not yet been called.
    """
    if not self.is_setup:
      raise ObjectFactoryIllegalOperation(
          '%s is not set up.' % self._object_name)
    if self._setup_instance is not _NO_SINGLETON_INSTANCE:
      return self._setup_instance
    return self._types[self.setup_type]()

  def _clear_setup(self):
    """Clears setup state, for testing purposes only."""
    self._setup_type = None
    self._is_setup = False
    self._setup_instance = _NO_SINGLETON_INSTANCE
+
+
def CachedFunctionCall(function):
  """Wraps a parameterless |function| so it is invoked at most once."""
  cache = {}

  @functools.wraps(function)
  def wrapper():
    # First call populates the cache; every later call returns the same value.
    if 'value' not in cache:
      cache['value'] = function()
    return cache['value']

  return wrapper
diff --git a/lib/factory_unittest b/lib/factory_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/factory_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/factory_unittest.py b/lib/factory_unittest.py
new file mode 100644
index 0000000..3e0e77d
--- /dev/null
+++ b/lib/factory_unittest.py
@@ -0,0 +1,75 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for factory.py."""
+
+from __future__ import print_function
+
+from chromite.lib import cros_test_lib
+from chromite.lib import factory
+
def _GET_OBJECT():
  """Returns a new, unique object instance on every call."""
  return object()
+
class FactoryTest(cros_test_lib.TestCase):
  """Test that ObjectFactory behaves as expected."""

  # Human-readable object name handed to the factories under test.
  _OBJECT_NAME = 'Test Object Name'
  # t0 builds a fresh object per call; t1/t3 return cached singletons
  # (NOTE(review): the caches are class attributes, so they are shared across
  # test methods); t4 has a None factory, so an explicit singleton instance
  # must be supplied at Setup() time. There is deliberately no 't2'.
  _OBJECT_TYPES = {
      't0' : _GET_OBJECT,
      't1' : factory.CachedFunctionCall(_GET_OBJECT),
      't3' : factory.CachedFunctionCall(_GET_OBJECT),
      't4' : None,
  }

  def _allowed_transitions(self, from_setup, to_setup):
    """Transition policy used by self.of2: only t3 -> t4 is allowed."""
    return from_setup == 't3' and to_setup == 't4'

  def setUp(self):
    # self.of forbids all re-setup; self.of2 permits only the t3 -> t4
    # transition via _allowed_transitions.
    self.of = factory.ObjectFactory(self._OBJECT_NAME, self._OBJECT_TYPES)
    self.of2 = factory.ObjectFactory(self._OBJECT_NAME, self._OBJECT_TYPES,
                                     self._allowed_transitions)

  def testGetInstance(self):
    """Uncached types should produce a distinct instance per call."""
    self.of.Setup('t0')
    a = self.of.GetInstance()
    self.assertNotEqual(a, self.of.GetInstance())

  def testGetCachedInstance(self):
    """Cached types should return the same instance on every call."""
    self.of.Setup('t1')
    a = self.of.GetInstance()
    self.assertEqual(a, self.of.GetInstance())

  def testDuplicateSetupForbidden(self):
    """Without a transition policy, a second Setup must raise."""
    self.of.Setup('t0')
    with self.assertRaises(factory.ObjectFactoryIllegalOperation):
      self.of.Setup('t0')

  def testNotSetup(self):
    """GetInstance before Setup must raise."""
    with self.assertRaises(factory.ObjectFactoryIllegalOperation):
      self.of.GetInstance()

  def testUnknownSetupForbidden(self):
    """Setup with a type name not in _OBJECT_TYPES must raise."""
    with self.assertRaises(factory.ObjectFactoryIllegalOperation):
      self.of.Setup('unknown setup type')

  def testSetupWithInstanceForbidden(self):
    """Supplying an instance for a type with a real factory must raise."""
    with self.assertRaises(factory.ObjectFactoryIllegalOperation):
      self.of.Setup('t0', None)

  def testSetupWithInstanceAllowed(self):
    """t4's factory is None, so an explicit instance is required and legal."""
    self.of.Setup('t4', None)

  def testForbiddenTransition(self):
    """t0 -> t1 is not in the allowed-transition policy."""
    self.of2.Setup('t0')
    with self.assertRaises(factory.ObjectFactoryIllegalOperation):
      self.of2.Setup('t1')

  def testAllowedTransition(self):
    """t3 -> t4 is allowed; the new setup replaces the cached instance."""
    self.of2.Setup('t3')
    a = self.of2.GetInstance()
    self.of2.Setup('t4', None)
    self.assertNotEqual(a, self.of2.GetInstance())
+
+
diff --git a/lib/fake_cidb.py b/lib/fake_cidb.py
new file mode 100644
index 0000000..0efb4e2
--- /dev/null
+++ b/lib/fake_cidb.py
@@ -0,0 +1,236 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Fake CIDB for unit testing."""
+
+from __future__ import print_function
+
+import datetime
+import itertools
+
+from chromite.cbuildbot import constants
+from chromite.lib import clactions
+
+
class FakeCIDBConnection(object):
  """Fake connection to a Continuous Integration database.

  This class is a partial re-implementation of CIDBConnection, using
  in-memory lists rather than a backing database.
  """

  # Sentinel for |num_results| meaning "return all matching rows".
  NUM_RESULTS_NO_LIMIT = -1

  def __init__(self, fake_keyvals=None):
    """Initializes empty in-memory tables.

    Args:
      fake_keyvals: Optional dict returned verbatim by GetKeyVals().
    """
    self.buildTable = []
    self.clActionTable = []
    self.buildStageTable = {}
    self.failureTable = {}
    self.fake_time = None
    self.fake_keyvals = fake_keyvals or {}

  def SetTime(self, fake_time):
    """Sets a fake time to be retrieved by GetTime.

    Args:
      fake_time: datetime.datetime object.
    """
    self.fake_time = fake_time

  def GetTime(self):
    """Gets the current database time (the fake time, if one was set)."""
    return self.fake_time or datetime.datetime.now()

  def InsertBuild(self, builder_name, waterfall, build_number,
                  build_config, bot_hostname, master_build_id=None,
                  timeout_seconds=None, status=constants.BUILDER_STATUS_PASSED):
    """Insert a build row.

    Note this API slightly differs from cidb as we pass status to avoid having
    to have a later FinishBuild call in testing.

    Returns:
      The id of the new row. NOTE(review): this is the 0-based index into
      buildTable, while GetBuildStatus/UpdateMetadata index with
      |build_id - 1| as if ids were 1-based -- confirm which convention
      callers rely on before changing either side.
    """
    deadline = None
    if timeout_seconds is not None:
      timediff = datetime.timedelta(seconds=timeout_seconds)
      deadline = datetime.datetime.now() + timediff

    build_id = len(self.buildTable)
    row = {'id': build_id,
           'builder_name': builder_name,
           'buildbot_generation': constants.BUILDBOT_GENERATION,
           'waterfall': waterfall,
           'build_number': build_number,
           'build_config': build_config,
           'bot_hostname': bot_hostname,
           'start_time': datetime.datetime.now(),
           'master_build_id': master_build_id,
           'deadline': deadline,
           'status': status}
    self.buildTable.append(row)
    return build_id

  def UpdateMetadata(self, build_id, metadata):
    """See cidb.UpdateMetadata.

    Returns:
      Number of updated rows (always 1).
    """
    d = metadata.GetDict()
    versions = d.get('version') or {}
    # NOTE(review): uses the 1-based |build_id - 1| convention; see
    # InsertBuild.
    self.buildTable[build_id - 1].update(
        {'chrome_version': versions.get('chrome'),
         'milestone_version': versions.get('milestone'),
         'platform_version': versions.get('platform'),
         'full_version': versions.get('full'),
         'sdk_version': d.get('sdk-versions'),
         'toolchain_url': d.get('toolchain-url'),
         'build_type': d.get('build_type')})
    return 1

  def InsertCLActions(self, build_id, cl_actions, timestamp=None):
    """Insert a list of |cl_actions|.

    Returns:
      The number of rows inserted.
    """
    if not cl_actions:
      return 0

    rows = []
    for cl_action in cl_actions:
      change_number = int(cl_action.change_number)
      patch_number = int(cl_action.patch_number)
      change_source = cl_action.change_source
      action = cl_action.action
      reason = cl_action.reason
      rows.append({
          'build_id': build_id,
          'change_source': change_source,
          'change_number': change_number,
          'patch_number': patch_number,
          'action': action,
          'timestamp': timestamp or datetime.datetime.now(),
          'reason': reason})

    self.clActionTable.extend(rows)
    return len(rows)

  def InsertBuildStage(self, build_id, name, board=None,
                       status=constants.BUILDER_STATUS_PLANNED):
    """Inserts a build stage row and returns its 0-based stage id."""
    build_stage_id = len(self.buildStageTable)
    row = {'build_id': build_id,
           'name': name,
           'board': board,
           'status': status}
    self.buildStageTable[build_stage_id] = row
    return build_stage_id

  def InsertBoardPerBuild(self, build_id, board):
    """No-op placeholder for cidb's per-board build row."""
    # TODO(akeshet): Fill this placeholder.
    pass

  def InsertFailure(self, build_stage_id, exception_type, exception_message,
                    exception_category=constants.EXCEPTION_CATEGORY_UNKNOWN,
                    outer_failure_id=None,
                    extra_info=None):
    """Inserts a failure row and returns its 0-based failure id."""
    failure_id = len(self.failureTable)
    values = {'build_stage_id': build_stage_id,
              'exception_type': exception_type,
              'exception_message': exception_message,
              'exception_category': exception_category,
              'outer_failure_id': outer_failure_id,
              'extra_info': extra_info}
    self.failureTable[failure_id] = values
    return failure_id

  def StartBuildStage(self, build_stage_id):
    """Marks a stage inflight; silently ignores unknown stage ids."""
    # Valid ids are 0 .. len-1, so >= (not >) detects unknown ids; the
    # original > let id == len fall through to a KeyError.
    if build_stage_id >= len(self.buildStageTable):
      return

    self.buildStageTable[build_stage_id]['status'] = (
        constants.BUILDER_STATUS_INFLIGHT)

  def ExtendDeadline(self, build_id, timeout):
    """Moves a build's deadline to |timeout| seconds from now."""
    # No sanity checking in fake object.
    now = datetime.datetime.now()
    timediff = datetime.timedelta(seconds=timeout)
    # Deadlines live in buildTable (see InsertBuild/GetTimeToDeadline); the
    # original wrote to buildStageTable, which never holds deadlines.
    self.buildTable[build_id]['deadline'] = now + timediff

  def FinishBuildStage(self, build_stage_id, status):
    """Sets a stage's final status; silently ignores unknown stage ids."""
    # Same >= guard rationale as StartBuildStage.
    if build_stage_id >= len(self.buildStageTable):
      return

    self.buildStageTable[build_stage_id]['status'] = status

  def GetActionsForChanges(self, changes):
    """Gets all the actions for the given changes."""
    clauses = set()
    for change in changes:
      change_source = 'internal' if change.internal else 'external'
      clauses.add((int(change.gerrit_number), change_source))
    values = []
    for row in self.GetActionHistory():
      if (row.change_number, row.change_source) in clauses:
        values.append(row)
    return values

  def GetActionHistory(self, *args, **kwargs):
    """Get all the actions for all changes."""
    # pylint: disable=W0613
    values = []
    for item, action_id in zip(self.clActionTable, itertools.count()):
      row = (
          action_id,
          item['build_id'],
          item['action'],
          item['reason'],
          self.buildTable[item['build_id']]['build_config'],
          item['change_number'],
          item['patch_number'],
          item['change_source'],
          item['timestamp'])
      values.append(row)

    return clactions.CLActionHistory(clactions.CLAction(*row) for row in values)

  def GetBuildStatus(self, build_id):
    """Gets the status of the build."""
    # NOTE(review): 1-based |build_id - 1| convention; see InsertBuild.
    return self.buildTable[build_id - 1]

  def GetBuildStatuses(self, build_ids):
    """Gets the status of the builds."""
    return [self.buildTable[x - 1] for x in build_ids]

  def GetBuildHistory(self, build_config, num_results,
                      ignore_build_id=None, start_date=None, end_date=None,
                      starting_build_number=None):
    """Returns the build history for the given |build_config|.

    Args:
      build_config: Config name to match.
      num_results: Maximum number of rows to return, or NUM_RESULTS_NO_LIMIT
          for no limit.
      ignore_build_id: Optional build id to exclude from the results.
      start_date: Optional datetime.date; keep builds started on/after it.
      end_date: Optional datetime.date; keep finished builds on/before it.
      starting_build_number: Optional minimum build_number to keep.

    Returns:
      A list of build row dicts, newest (highest id) first.
    """
    builds = [b for b in self.buildTable
              if b['build_config'] == build_config]

    # The limit is applied before the filters below, matching cidb's
    # behavior of limiting the query itself. NUM_RESULTS_NO_LIMIT must be
    # special-cased: slicing with -(-1) would silently drop the first row.
    if num_results != self.NUM_RESULTS_NO_LIMIT:
      builds = builds[-num_results:]

    # Reverse sort as that's what's expected. Sort on the row id rather than
    # on the dicts themselves, which have no meaningful ordering.
    builds.sort(key=lambda b: b['id'], reverse=True)

    # Filter results.
    if ignore_build_id is not None:
      builds = [b for b in builds if b['id'] != ignore_build_id]
    if start_date is not None:
      builds = [b for b in builds
                if b['start_time'].date() >= start_date]
    if end_date is not None:
      builds = [b for b in builds
                if 'finish_time' in b and
                b['finish_time'].date() <= end_date]
    if starting_build_number is not None:
      builds = [b for b in builds
                if b['build_number'] >= starting_build_number]

    return builds

  def GetTimeToDeadline(self, build_id):
    """Gets the time remaining until deadline, in seconds (floored at 0)."""
    now = datetime.datetime.now()
    deadline = self.buildTable[build_id]['deadline']
    return max(0, (deadline - now).total_seconds())

  def GetKeyVals(self):
    """Gets contents of keyvalTable."""
    return self.fake_keyvals
diff --git a/lib/filetype.py b/lib/filetype.py
new file mode 100644
index 0000000..2d63b02
--- /dev/null
+++ b/lib/filetype.py
@@ -0,0 +1,297 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""File type decoding class for Chromium OS rootfs file bucketing.
+
+This file decodes the type of file based on the contents, filename and other
+metadata. The result is a string that represents the file type and subtypes
+of the file, separated by slashes (/). The first level is one of the following:
+"text", "binary" and "inode". The first two refer to the contents of the file
+for regular files, while the third one is used for special files such as
+directories, symlinks, block devices, etc.
+
+The file type can have more than one level, for example "binary/elf/static",
+"binary/image/png", or "text/conf". See the filetype_unittest.py file for more
+examples.
+
+The purpose of this module is to provide a file type that splits the contents
+of a Chromium OS build into small buckets, separating cases that other standard
+classifications would keep in the same set.
+"""
+
+from __future__ import print_function
+
+import itertools
+import magic
+import mmap
+import os
+import re
+import stat
+
+from chromite.lib import parseelf
+
+
+# The buffer size we would use to read files from the disk.
+FILE_BUFFER_SIZE = 32 * 1024
+
+
def SplitShebang(header):
  """Splits a shebang (#!) line into command and arguments.

  Args:
    header: The first line of a shebang file, for example
        "#!/usr/bin/env -uPWD python foo.py\n". The referenced command must be
        an absolute path with optionally some arguments.

  Returns:
    A tuple of strings (command, args) where the first string is the called
    command and the second is the list of arguments as passed in the header.

  Raises:
    ValueError: if the passed header is not a valid shebang line.
  """
  # Interpreter paths may also contain upper-case letters and underscores
  # (e.g. /usr/bin/Rscript); the original pattern only accepted lower-case.
  m = re.match(r'#!\s*(/[A-Za-z_/0-9\.-]+)\s*(.*)$', header)
  if m:
    return m.group(1), m.group(2).strip()
  raise ValueError('shebang (#!) line expected')
+
+
class FileTypeDecoder(object):
  """Class to help decode the type of a file.

  This class implements a single GetType() method that decodes the type of a
  file based on the contents and metadata. This class holds some global data
  shared between several calls to that method.
  """

  # Whitelist of mime types and their mapping to file type.
  MIME_TYPE_MAPPING = {
      'application/x-gzip': 'binary/compressed/gzip',
      'application/x-bzip2': 'binary/compressed/bzip2',
      'application/x-xz': 'binary/compressed/xz',

      # Goobuntu magic database returns 'gzip' instead of 'x-gzip'. This
      # supports running dep_tracker outside the chroot for development.
      'application/gzip': 'binary/compressed/gzip',
  }

  def __init__(self, root='/'):
    """Initializes the internal state.

    Args:
      root: Path to the root directory where all the files live. This will be
      assumed as the root directory for absolute symlinks.
    """
    self._root = root
    # libmagic handle used for mime-type detection of binary contents in
    # _GetTypeFromContent(); loaded once and shared across GetType() calls.
    self._mime = magic.open(magic.MIME_TYPE)
    self._mime.load()

  def __del__(self):
    # Frees the libmagic handle. NOTE(review): relies on the garbage
    # collector invoking the destructor; a close() method or context manager
    # would be more deterministic -- confirm before changing the interface.
    self._mime.close()

  def GetType(self, rel_path, st=None, elf=None):
    """Return the file type of the passed file.

    Does a best-effort attempt to infer the file type of the passed file. If
    only rel_path is provided, the stat_struct information and parsed ELF data
    will be computed. If the information is already available, such as if the
    ELF file is already parsed, passing st and elf will speed up the file
    detection.

    Args:
      rel_path: The path to the file, used to detect the filetype from the
          contents of the file.
      st: The stat_result struct of the file.
      elf: The result of parseelf.ParseELF().

    Returns:
      A string with the file type classified in categories separated by /. For
      example, a dynamic library will return 'binary/elf/dynamic-so'. If the
      type can't be inferred it returns None.
    """
    # Analysis based on inode data.
    if st is None:
      st = os.lstat(os.path.join(self._root, rel_path))
    if stat.S_ISDIR(st.st_mode):
      return 'inode/directory'
    if stat.S_ISLNK(st.st_mode):
      return 'inode/symlink'
    if not stat.S_ISREG(st.st_mode):
      return 'inode/special'
    if st.st_size == 0:
      return 'inode/empty'

    # Analysis based on the ELF header and contents.
    if elf:
      return self._GetELFType(elf)

    # Analysis based on the file contents.
    try:
      # NOTE(review): mmap.PROT_READ is POSIX-only (fine for Chromium OS
      # builds, but not portable to Windows).
      with open(os.path.join(self._root, rel_path), 'rb') as fobj:
        fmap = mmap.mmap(fobj.fileno(), 0, prot=mmap.PROT_READ)
        result = self._GetTypeFromContent(rel_path, fobj, fmap)
        fmap.close()
        return result
    except IOError:
      # Unreadable file: the type can't be inferred, return None.
      return

  def _GetTypeFromContent(self, rel_path, fobj, fmap):
    """Return the file path based on the file contents.

    This helper function detects the file type based on the contents of the
    file.

    Args:
      rel_path: The path to the file, used to detect the filetype from the
          contents of the file.
      fobj: a file() object for random access to rel_path.
      fmap: a mmap object mapping the whole rel_path file for reading.

    Returns:
      A slash-separated file type string (e.g. 'text/script/bash'), or the
      generic 'binary' / 'text' buckets if nothing more specific matches.
    """

    # Detect if the file is binary based on the presence of non-ASCII chars. We
    # include some of the first 32 chars often used in text files but we
    # exclude the rest.
    ascii_chars = '\x07\x08\t\n\x0c\r\x1b' + ''.join(map(chr, range(32, 128)))
    is_binary = any(bool(chunk.translate(None, ascii_chars))
                    for chunk in iter(lambda: fmap.read(FILE_BUFFER_SIZE), ''))

    # We use the first part of the file in several checks.
    fmap.seek(0)
    first_kib = fmap.read(1024)

    # Binary files.
    if is_binary:
      # The elf argument was not passed, so compute it now if the file is an
      # ELF.
      if first_kib.startswith('\x7fELF'):
        return self._GetELFType(parseelf.ParseELF(self._root, rel_path,
                                                  parse_symbols=False))

      if first_kib.startswith('MZ\x90\0'):
        return 'binary/dos-bin'

      # 0x55AA at offset 510 is the classic x86 boot-sector signature.
      if len(first_kib) >= 512 and first_kib[510:512] == '\x55\xaa':
        return 'binary/bootsector/x86'

      # Firmware file depend on the technical details of the device they run on,
      # so there's no easy way to detect them. We use the filename to guess that
      # case.
      if '/firmware/' in rel_path and (
          rel_path.endswith('.fw') or
          rel_path[-4:] in ('.bin', '.cis', '.csp', '.dsp')):
        return 'binary/firmware'

      # TZif (timezone) files. See tzfile(5) for details.
      if (first_kib.startswith('TZif' + '\0' * 16) or
          first_kib.startswith('TZif2' + '\0' * 15) or
          first_kib.startswith('TZif3' + '\0' * 15)):
        return 'binary/tzfile'

      # Whitelist some binary mime types.
      fobj.seek(0)
      # _mime.descriptor() will close the passed file descriptor.
      mime_type = self._mime.descriptor(os.dup(fobj.fileno()))
      if mime_type.startswith('image/'):
        return 'binary/' + mime_type
      if mime_type in self.MIME_TYPE_MAPPING:
        return self.MIME_TYPE_MAPPING[mime_type]

      # Other binary files.
      return 'binary'

    # Text files.
    # Read the first couple of lines used in the following checks. This will
    # only read the required lines, with the '\n' char at the end of each line
    # except on the last one if it is not present on that line. At this point
    # we know that the file is not empty, so at least one line exists.
    fmap.seek(0)
    first_lines = list(itertools.islice(iter(fmap.readline, ''), 0, 10))
    head_line = first_lines[0]

    # #! or "shebangs". Only those files with a single line are considered
    # shebangs. Some files start with "#!" but are other kind of files, such
    # as python or bash scripts.
    try:
      prog_name, args = SplitShebang(head_line)
      if len(first_lines) == 1:
        return 'text/shebang'

      prog_name = os.path.basename(prog_name)
      args = args.split()
      if prog_name == 'env':
        # If "env" is called, we skip all the arguments passed to env (flags,
        # VAR=value) and treat the program name as the program to use.
        for i, arg in enumerate(args):
          if arg == '--' and (i + 1) < len(args):
            prog_name = args[i + 1]
            break
          if not arg or arg[0] == '-' or '=' in arg:
            continue
          prog_name = arg
          break

      # Strip the version number from common programs like "python2.7".
      prog_name = prog_name.rstrip('0123456789-.')

      if prog_name in ('awk', 'bash', 'dash', 'ksh', 'perl', 'python', 'sh'):
        return 'text/script/' + prog_name
      # Other unknown script.
      return 'text/script'
    except ValueError:
      # Not a shebang line; fall through to the remaining text checks.
      pass

    # PEM files.
    if head_line.strip() == '-----BEGIN CERTIFICATE-----':
      return 'text/pem/cert'
    if head_line.strip() == '-----BEGIN RSA PRIVATE KEY-----':
      return 'text/pem/rsa-private'

    # Linker script.
    if head_line.strip() == '/* GNU ld script':
      return 'text/ld-script'

    # Protobuf files.
    if rel_path.endswith('.proto'):
      return 'text/proto'

    if len(first_lines) == 1:
      if re.match(r'[0-9\.]+$', head_line):
        return 'text/oneline/number'
      return 'text/oneline'

    return 'text'

  @staticmethod
  def _GetELFType(elf):
    """Returns the file type for ELF files.

    Args:
      elf: The result of parseelf.ParseELF().

    Returns:
      'binary/elf/' followed by the subtype: 'object' for relocatables,
      'static' when no dynamic section/segment is present, otherwise
      'dynamic-so' or 'dynamic-bin'.
    """
    if elf['type'] == 'ET_REL':
      elf_type = 'object'
    elif (not '.dynamic' in elf['sections'] and
          not 'PT_DYNAMIC' in elf['segments']):
      elf_type = 'static'
    else:
      if elf['is_lib']:
        elf_type = 'dynamic-so'
      else:
        elf_type = 'dynamic-bin'
    return 'binary/elf/' + elf_type

  @classmethod
  def DecodeFile(cls, path):
    """Decodes the file type of the passed file.

    This function is a wrapper to the FileTypeDecoder class to decode the type
    of a single file. If you need to decode multiple files please use
    FileTypeDecoder class instead.

    Args:
      path: The path to the file or directory.

    Returns:
      A string with the decoded file type or None if it couldn't be decoded.
    """
    return cls('.').GetType(path)
diff --git a/lib/filetype_unittest b/lib/filetype_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/filetype_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/filetype_unittest.py b/lib/filetype_unittest.py
new file mode 100644
index 0000000..4ba0c66
--- /dev/null
+++ b/lib/filetype_unittest.py
@@ -0,0 +1,220 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the filetype.py module."""
+
+from __future__ import print_function
+
+import os
+import stat
+
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import filetype
+from chromite.lib import unittest_lib
+
+
class SplitShebangTest(cros_test_lib.TestCase):
  """Test the SplitShebang function."""

  def testSimpleCase(self):
    """Test a simple case."""
    self.assertEquals(('/bin/sh', ''), filetype.SplitShebang('#!/bin/sh'))

  def testCaseWithArguments(self):
    """Test a case with arguments."""
    self.assertEquals(('/bin/sh', '-i -c "ls"'),
                      filetype.SplitShebang('#!/bin/sh  -i -c "ls"'))

  def testCaseWithEndline(self):
    """Test a case finished with a newline char."""
    self.assertEquals(('/bin/sh', '-i'),
                      filetype.SplitShebang('#!/bin/sh  -i\n'))

  def testCaseWithSpaces(self):
    """Test a case with several spaces in the line."""
    self.assertEquals(('/bin/sh', '-i'),
                      filetype.SplitShebang('#!  /bin/sh  -i   \n'))

  def testInvalidCases(self):
    """Test invalid cases: no shebang marker, or no absolute command path."""
    self.assertRaises(ValueError, filetype.SplitShebang, '/bin/sh -i')
    self.assertRaises(ValueError, filetype.SplitShebang, '#!')
    self.assertRaises(ValueError, filetype.SplitShebang, '#!env python')
+
+
+class FileTypeDecoderTest(cros_test_lib.TempDirTestCase):
+  """Test the FileTypeDecoder class."""
+
+  def testSpecialFiles(self):
+    """Tests special files, such as symlinks, directories and named pipes."""
+    somedir = os.path.join(self.tempdir, 'somedir')
+    osutils.SafeMakedirs(somedir)
+    self.assertEquals('inode/directory',
+                      filetype.FileTypeDecoder.DecodeFile(somedir))
+
+    a_fifo = os.path.join(self.tempdir, 'a_fifo')
+    os.mknod(a_fifo, stat.S_IFIFO)
+    self.assertEquals('inode/special',
+                      filetype.FileTypeDecoder.DecodeFile(a_fifo))
+
+    empty_file = os.path.join(self.tempdir, 'empty_file')
+    osutils.WriteFile(empty_file, '')
+    self.assertEquals('inode/empty',
+                      filetype.FileTypeDecoder.DecodeFile(empty_file))
+
+    a_link = os.path.join(self.tempdir, 'a_link')
+    os.symlink('somewhere', a_link)
+    self.assertEquals('inode/symlink',
+                      filetype.FileTypeDecoder.DecodeFile(a_link))
+
+  def testTextShebangFiles(self):
+    """Test shebangs (#!) file decoding based on the executed path."""
+    # If the file has only one line is considered a "shebang" rather than a
+    # script.
+    shebang = os.path.join(self.tempdir, 'shebang')
+    osutils.WriteFile(shebang, "#!/bin/python --foo --bar\n")
+    self.assertEquals('text/shebang',
+                      filetype.FileTypeDecoder.DecodeFile(shebang))
+
+    # A shebang with contents is considered a script.
+    script = os.path.join(self.tempdir, 'script')
+    osutils.WriteFile(script, "#!/bin/foobar --foo --bar\n\nexit 1\n")
+    self.assertEquals('text/script',
+                      filetype.FileTypeDecoder.DecodeFile(script))
+
+    bash_script = os.path.join(self.tempdir, 'bash_script')
+    osutils.WriteFile(bash_script,
+                      "#!/bin/bash --debug\n# Copyright\nexit 42\n")
+    self.assertEquals('text/script/bash',
+                      filetype.FileTypeDecoder.DecodeFile(bash_script))
+
+    pyscript = os.path.join(self.tempdir, 'pyscript')
+    osutils.WriteFile(pyscript,
+                      "#!/usr/bin/env PYTHONPATH=/foo python-2.7 -3\n# foo\n")
+    self.assertEquals('text/script/python',
+                      filetype.FileTypeDecoder.DecodeFile(pyscript))
+
+    perlscript = os.path.join(self.tempdir, 'perlscript')
+    osutils.WriteFile(perlscript, "#!/usr/local/bin/perl\n#\n")
+    self.assertEquals('text/script/perl',
+                      filetype.FileTypeDecoder.DecodeFile(perlscript))
+
  def testTextPEMFiles(self):
    """Test decoding various PEM files."""
    # An X.509 certificate (sample from update_engine unittest).
    some_cert = os.path.join(self.tempdir, 'some_cert')
    osutils.WriteFile(some_cert,
                      """-----BEGIN CERTIFICATE-----
MIIDJTCCAo6gAwIBAgIJAP6IycaMXlqsMA0GCSqGSIb3DQEBBQUAMIGLMQswCQYD
VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTETMBEGA1UEChMKR29vZ2xlIElu
YzESMBAGA1UECxMJQ2hyb21lIE9TMRgwFgYDVQQDEw9PcGVuU1NMIFRlc3QgQ0Ex
JDAiBgkqhkiG9w0BCQEWFXNlY3VyaXR5QGNocm9taXVtLm9yZzAgFw0xMjA1MTcx
OTQ1MjJaGA8yMTEyMDExNDE5NDUyMlowgZ0xCzAJBgNVBAYTAlVTMRMwEQYDVQQI
EwpDYWxpZm9ybmlhMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MRMwEQYDVQQKEwpH
b29nbGUgSW5jMRIwEAYDVQQLEwlDaHJvbWUgT1MxEjAQBgNVBAMTCTEyNy4wLjAu
MTEkMCIGCSqGSIb3DQEJARYVc2VjdXJpdHlAY2hyb21pdW0ub3JnMIGfMA0GCSqG
SIb3DQEBAQUAA4GNADCBiQKBgQC5bxzyvNJFDmyThIGoFoZkN3rlQB8QoR80rS1u
8pLyqW5Vk2A0pNOvcxPrUHAUTgWhikqzymz4a4XoLxat53H/t/XmRYwZ9GVNZocz
Q4naWxtPyPqIBosMLnWu6FHUVO1lTdvhC6Pjw2i1S9Rq3dMsANU1IER4NR8XM+v6
qBg1XQIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NM
IEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQU+biqy5tbRGlUNLsEtjmy
7v1eYqowHwYDVR0jBBgwFoAUC0E889mD7bn2fXcEtA3HvUimV+0wDQYJKoZIhvcN
AQEFBQADgYEA2fJxpGwDbnUF5Z3mvZ81/pk8fVQdJvV5U93EA29VE1SaFA5S5qYS
zP1Ue0MX+RqMLKjnH+E6yEoo+kYD9rzagnvORefbJeM92SiHgHPeSm8F1nQtGclj
p8izLBlcKgPHwQLKxELmbS/xvt4cyHaLSIy50lLrdJeKtXjqq4PbH3Y=
-----END CERTIFICATE-----
""")
    self.assertEquals('text/pem/cert',
                      filetype.FileTypeDecoder.DecodeFile(some_cert))

    # A RSA private key (sample from vboot_reference unittest).
    rsa_key = os.path.join(self.tempdir, 'rsa_key')
    osutils.WriteFile(rsa_key,
                      """-----BEGIN RSA PRIVATE KEY-----
MIICXAIBAAKBgQCdYBOJIJvGX9vC4E5XD1jb9zJ99FzR4G0n8HNyWy5ZKyy/hi80
ibXpy6QdWcm4wqTvmVjU+20sP4AgzKC65fKyFvvAHUiD4yGr1qWtg4YFUcBbUiXO
CQ66W3AC4g2Ju9C16AzMpBk043bQsUQvxILEumQqQ1VS33uM7Kq8dWpL6QIDAQAB
AoGAb12y1WIu+gWRtWkX5wHkRty6bWmEWbzwYcgFWlJuDQnBg9MICqy8/7Js85w7
ZLTRFQC2XRmDW0GggRVtVHUu9X2jwkHR9+TWza4xAtYcSwDl6VJTHX2ygptrG/n9
qPFinfvnpiP7b2WNjC53V3cnjg3m+1B5zrmFxsVLDMVLQhECQQDN7i1NWZFVNfYa
GT2GSgMpD0nPXA1HHUvFFgnI9xJkBCewHzega+PrrrpMKZZWLpc4YCm3PK9nI8Nk
EmJE5HwNAkEAw6OpiOgWdRaJWx3+XBsFOhz6K86xwV0NpVb6ocrBKU/t0OqP+gZh
B/YBDfwXPr2w5FCwozUs/MrBdoYR3WnsTQJABNn/pzrc+azzx1mg4XEM8gKyMnhw
t6QxDMugH2Pywvh2FuglX1orXHoZWYIBULZ4SZO6Z96+IyfsiocEWasoYQJBALZ/
onO7BM/+0Oz1osSq1Aps45Yf/0OAmW0mITDyIZR3IkJjvSEf+D3j5wHzqn91lmC1
QMFOpoO+ZBA7asjfuXUCQGmHgpC0BuD4S1QlcF0nrVHTG7Y8KZ18s9qPJS3csuGf
or10mrNRF3tyGy8e/sw88a74Q/6v/PgChZHmq6QjOOU=
-----END RSA PRIVATE KEY-----
""")
    self.assertEquals('text/pem/rsa-private',
                      filetype.FileTypeDecoder.DecodeFile(rsa_key))
+
  def testBinaryELFFiles(self):
    """Test decoding ELF files."""
    # unittest_lib.BuildELF presumably emits a minimal ELF with the given
    # defined/undefined symbols -- TODO confirm against unittest_lib.
    # A shared object exporting one symbol.
    liba_so = os.path.join(self.tempdir, 'liba.so')
    unittest_lib.BuildELF(liba_so, ['func_a'])
    self.assertEquals('binary/elf/dynamic-so',
                      filetype.FileTypeDecoder.DecodeFile(liba_so))

    # A dynamically linked executable that imports func_a from liba.
    prog = os.path.join(self.tempdir, 'prog')
    unittest_lib.BuildELF(prog,
                          undefined_symbols=['func_a'],
                          used_libs=['a'],
                          executable=True)
    self.assertEquals('binary/elf/dynamic-bin',
                      filetype.FileTypeDecoder.DecodeFile(prog))

    # A statically linked executable.
    prog_static = os.path.join(self.tempdir, 'prog_static')
    unittest_lib.BuildELF(prog_static, executable=True, static=True)
    self.assertEquals('binary/elf/static',
                      filetype.FileTypeDecoder.DecodeFile(prog_static))
+
  def testBinaryCompressedFiles(self):
    """Test decoding compressed files."""
    # The same path is reused for each fixture; each WriteFile overwrites
    # the previous contents.
    compressed = os.path.join(self.tempdir, 'compressed')

    # `echo hola | gzip -9`
    osutils.WriteFile(compressed,
                      '\x1f\x8b\x08\x00<\xce\x07T\x02\x03\xcb\xc8\xcfI\xe4\x02'
                      '\x00x\xad\xdb\xd1\x05\x00\x00\x00')
    self.assertEquals('binary/compressed/gzip',
                      filetype.FileTypeDecoder.DecodeFile(compressed))

    # `echo hola | bzip2 -9`
    osutils.WriteFile(compressed,
                      'BZh91AY&SY\xfa\xd4\xdb5\x00\x00\x01A\x00\x00\x10 D\xa0'
                      '\x00!\x83A\x9a\t\xa8qw$S\x85\t\x0f\xadM\xb3P')
    self.assertEquals('binary/compressed/bzip2',
                      filetype.FileTypeDecoder.DecodeFile(compressed))

    # `echo hola | xz -9`
    osutils.WriteFile(
        compressed,
        '\xfd7zXZ\x00\x00\x04\xe6\xd6\xb4F\x02\x00!\x01\x16\x00\x00\x00t/\xe5'
        '\xa3\x01\x00\x04hola\n\x00\x00\x00\x00\xdd\xb0\x00\xac6w~\x9d\x00\x01'
        '\x1d\x05\xb8-\x80\xaf\x1f\xb6\xf3}\x01\x00\x00\x00\x00\x04YZ')
    self.assertEquals('binary/compressed/xz',
                      filetype.FileTypeDecoder.DecodeFile(compressed))
+
  def testBinaryMiscFiles(self):
    """Test for various binary file formats."""
    # A timezone file (starts with the 'TZif2' magic; this one encodes UTC).
    some_timezone = os.path.join(self.tempdir, 'some_timezone')
    osutils.WriteFile(
        some_timezone,
        'TZif2\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
        '\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
        '\x00\x01\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00UTC\x00\x00\x00TZif2'
        '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
        '\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
        '\x01\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00UTC\x00\x00\x00\nUTC0\n')
    self.assertEquals('binary/tzfile',
                      filetype.FileTypeDecoder.DecodeFile(some_timezone))

    # A x86 boot sector with just nops: 510 NOP (0x90) bytes followed by the
    # mandatory 0x55 0xAA boot signature at offset 510.
    bootsec = os.path.join(self.tempdir, 'bootsec')
    osutils.WriteFile(bootsec, '\x90' * 510 + '\x55\xaa')
    self.assertEquals('binary/bootsector/x86',
                      filetype.FileTypeDecoder.DecodeFile(bootsec))
diff --git a/lib/gclient.py b/lib/gclient.py
new file mode 100644
index 0000000..8f8385e
--- /dev/null
+++ b/lib/gclient.py
@@ -0,0 +1,241 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common functions used for syncing Chrome."""
+
+from __future__ import print_function
+
+import os
+import pprint
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import git
+from chromite.lib import osutils
+
+
# Site configuration, loaded once at module import time.
site_config = config_lib.GetConfig()


# Endpoints for the Chromium source tree and its tree-status service.
CHROME_COMMITTER_URL = 'https://chromium.googlesource.com/chromium/src'
STATUS_URL = 'https://chromium-status.appspot.com/current?format=json'

# Last release for each milestone where a '.DEPS.git' was emitted. After this,
# a Git-only DEPS is emitted as 'DEPS' and '.DEPS.git' is no longer created.
# Keyed by milestone number; values are full version tuples (see
# BuildspecUsesDepsGit for how these thresholds are applied).
_DEPS_GIT_TRANSITION_MAP = {
    45: (45, 0, 2430, 3),
    44: (44, 0, 2403, 48),
    43: (43, 0, 2357, 125),
}
+
+
def FindGclientFile(path):
  """Locate the closest '.gclient' file at or above |path|.

  Args:
    path: The path to use. Defaults to cwd.

  Returns:
    Path to the '.gclient' file, or a falsy value when none is found
    in |path| or any of its parents.
  """
  return osutils.FindInPathParents('.gclient', path,
                                   test_func=os.path.isfile)
+
+
def FindGclientCheckoutRoot(path):
  """Get the root of your gclient managed checkout.

  Args:
    path: Directory to start searching from.

  Returns:
    The directory containing the checkout's '.gclient' file, or None when
    |path| is not inside a gclient checkout.
  """
  found = FindGclientFile(path)
  return os.path.dirname(found) if found else None
+
+
+def _LoadGclientFile(path):
+  """Load a gclient file and return the solutions defined by the gclient file.
+
+  Args:
+    path: The gclient file to load.
+
+  Returns:
+    A list of solutions defined by the gclient file or an empty list if no
+    solutions exists.
+  """
+  global_scope = {}
+  # Similar to depot_tools, we use execfile() to evaluate the gclient file,
+  # which is essentially a Python script, and then extract the solutions
+  # defined by the gclient file from the 'solutions' variable in the global
+  # scope.
+  execfile(path, global_scope)
+  return global_scope.get('solutions', [])
+
+
+def _FindOrAddSolution(solutions, name):
+  """Find a solution of the specified name from the given list of solutions.
+
+  If no solution with the specified name is found, a solution with the
+  specified name is appended to the given list of solutions. This function thus
+  always returns a solution.
+
+  Args:
+    solutions: The list of solutions to search from.
+    name: The solution name to search for.
+
+  Returns:
+    The solution with the specified name.
+  """
+  for solution in solutions:
+    if solution['name'] == name:
+      return solution
+
+  solution = {'name': name}
+  solutions.append(solution)
+  return solution
+
+
def BuildspecUsesDepsGit(rev):
  """Tests if a given buildspec revision uses .DEPS.git or DEPS.

  Previously, Chromium emitted two dependency files: DEPS and .DEPS.git, the
  latter being a Git-only construction of DEPS. Recently a switch was thrown,
  causing .DEPS.git to be emitted exclusively as DEPS.

  To support past buildspec checkouts, this logic tests a given Chromium
  buildspec revision against the transition thresholds, using .DEPS.git prior
  to transition and DEPS after.

  Args:
    rev: A Chromium version string, e.g. '45.0.2430.3'.

  Returns:
    True if the buildspec at |rev| still provides a '.DEPS.git' file.
  """
  rev = tuple(int(d) for d in rev.split('.'))
  milestone = rev[0]
  threshold = _DEPS_GIT_TRANSITION_MAP.get(milestone)
  if threshold:
    # Version tuples up to and including the threshold still use .DEPS.git.
    return rev <= threshold
  # Milestones absent from the map: anything older than every mapped
  # milestone predates the transition entirely (.DEPS.git); anything newer
  # is Git-only (DEPS).  Iterating the dict directly replaces the
  # Python 2-only iterkeys() with the portable equivalent.
  return all(milestone < k for k in _DEPS_GIT_TRANSITION_MAP)
+
+
def _GetGclientURLs(internal, rev):
  """Get the URLs and deps_file values to use in gclient file.

  See WriteConfigFile below.
  """
  if rev is None or git.IsSHA1(rev):
    # Regular chromium checkout; src may float to origin/master or be pinned.
    src_url = constants.CHROMIUM_GOB_URL
    if rev:
      src_url = src_url + '@' + rev
    entries = [('src', src_url, '.DEPS.git')]
    if internal:
      entries.append(
          ('src-internal', constants.CHROME_INTERNAL_GOB_URL, '.DEPS.git'))
    return entries

  if internal:
    # Internal buildspec: check out the buildspec repo and set deps_file to
    # the path to the desired release spec.
    buildspec_url = (site_config.params.INTERNAL_GOB_URL +
                     '/chrome/tools/buildspec.git')
    # Older buildspecs ship a '.DEPS.git'; post-transition ones only 'DEPS'.
    deps_file = '.DEPS.git' if BuildspecUsesDepsGit(rev) else 'DEPS'
    return [('CHROME_DEPS', buildspec_url,
             'releases/%s/%s' % (rev, deps_file))]

  # External buildspec: use the main chromium src repository, pinned to the
  # release tag, with deps_file set to .DEPS.git (which is created by
  # publish_deps.py).
  return [('src', constants.CHROMIUM_GOB_URL + '@refs/tags/' + rev,
           '.DEPS.git')]
+
+
def _GetGclientSolutions(internal, rev, template):
  """Get the solutions array to write to the gclient file.

  See WriteConfigFile below.
  """
  solutions = [] if template is None else _LoadGclientFile(template)
  for name, url, deps_file in _GetGclientURLs(internal, rev):
    solution = _FindOrAddSolution(solutions, name)
    # 'url' and 'deps_file' always come from the revision information, even
    # when the template already listed this solution.
    solution['url'] = url
    if deps_file:
      solution['deps_file'] = deps_file

    # Honor 'custom_deps' and 'custom_vars' from the template gclient file,
    # defaulting to empty dicts when absent.
    solution.setdefault('custom_deps', {})
    solution.setdefault('custom_vars', {})

  return solutions
+
+
def _GetGclientSpec(internal, rev, template, use_cache):
  """Return a formatted gclient spec.

  See WriteConfigFile below.
  """
  spec = 'solutions = %s\n' % pprint.pformat(
      _GetGclientSolutions(internal=internal, rev=rev, template=template))

  # Horrible hack, I will go to hell for this.  The bots need to have a git
  # cache set up; but how can we tell whether this code is running on a bot
  # or a developer's machine?
  if cros_build_lib.HostIsCIBuilder() and use_cache:
    spec += "cache_dir = '/b/git-cache'\n"

  return spec
+
def WriteConfigFile(gclient, cwd, internal, rev, template=None,
                    use_cache=True):
  """Initialize the specified directory as a gclient checkout.

  For gclient documentation, see:
    http://src.chromium.org/svn/trunk/tools/depot_tools/README.gclient

  Args:
    gclient: Path to gclient.
    cwd: Directory to sync.
    internal: Whether you want an internal checkout.
    rev: Revision or tag to use.
        - If None, use the latest from trunk.
        - If this is a sha1, use the specified revision.
        - Otherwise, treat this as a chrome version string.
    template: An optional file to provide a template of gclient solutions.
              _GetGclientSolutions iterates through the solutions specified
              by the template and performs appropriate modifications such as
              filling information like url and revision and adding extra
              solutions.
    use_cache: An optional Boolean flag to indicate if the git cache should
               be used when available (on a continuous-integration builder).
  """
  cmd = [gclient, 'config', '--spec',
         _GetGclientSpec(internal=internal, rev=rev, template=template,
                         use_cache=use_cache)]
  cros_build_lib.RunCommand(cmd, cwd=cwd)
+
+
def Revert(gclient, cwd):
  """Revert all local changes.

  Args:
    gclient: Path to gclient.
    cwd: Directory to revert.
  """
  cmd = [gclient, 'revert', '--nohooks']
  cros_build_lib.RunCommand(cmd, cwd=cwd)
+
+
def Sync(gclient, cwd, reset=False):
  """Sync the specified directory using gclient.

  Args:
    gclient: Path to gclient.
    cwd: Directory to sync.
    reset: Reset to pristine version of the source code.
  """
  cmd = [gclient, 'sync', '--verbose', '--nohooks', '--transitive',
         '--with_branch_heads', '--with_tags']
  if reset:
    # A reset discards local modifications and unversioned trees entirely.
    cmd.extend(['--reset', '--force', '--delete_unversioned_trees'])
  cros_build_lib.RunCommand(cmd, cwd=cwd)
diff --git a/lib/gclient_unittest b/lib/gclient_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/gclient_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/gclient_unittest.py b/lib/gclient_unittest.py
new file mode 100644
index 0000000..045adf6
--- /dev/null
+++ b/lib/gclient_unittest.py
@@ -0,0 +1,166 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for gclient.py."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import gclient
+from chromite.lib import osutils
+
+
class TestGclientWriteConfigFile(
    cros_build_lib_unittest.RunCommandTempDirTestCase):
  """Unit tests for gclient.WriteConfigFile."""

  # Working directory passed to gclient in every test.
  _TEST_CWD = '/work/chrome'

  def _AssertGclientConfigSpec(self, expected_spec, use_cache=True):
    """Assert gclient was invoked with |expected_spec| in _TEST_CWD."""
    # On a CI builder WriteConfigFile appends a cache_dir line, so the
    # expected spec must match.
    if cros_build_lib.HostIsCIBuilder() and use_cache:
      expected_spec += "cache_dir = '/b/git-cache'\n"
    self.rc.assertCommandContains(('gclient', 'config', '--spec',
                                   expected_spec),
                                  cwd=self._TEST_CWD)

  def _CreateGclientTemplate(self, template_content):
    """Write |template_content| to a temp gclient file; return its path."""
    template_path = os.path.join(self.tempdir, 'gclient_template')
    osutils.WriteFile(template_path, template_content)
    return template_path

  def testChromiumSpec(self):
    """Test WriteConfigFile with chromium checkout and no revision."""
    gclient.WriteConfigFile('gclient', self._TEST_CWD, False, None)
    self._AssertGclientConfigSpec("""solutions = [{'custom_deps': {},
  'custom_vars': {},
  'deps_file': '.DEPS.git',
  'name': 'src',
  'url': 'https://chromium.googlesource.com/chromium/src.git'}]
""")

  def testChromiumSpecNotUseCache(self):
    """Test WriteConfigFile with chromium checkout and the cache disabled."""
    gclient.WriteConfigFile('gclient', self._TEST_CWD, False, None,
                            use_cache=False)
    self._AssertGclientConfigSpec("""solutions = [{'custom_deps': {},
  'custom_vars': {},
  'deps_file': '.DEPS.git',
  'name': 'src',
  'url': 'https://chromium.googlesource.com/chromium/src.git'}]
""", use_cache=False)

  def testChromeSpec(self):
    """Test WriteConfigFile with chrome checkout and no revision."""
    gclient.WriteConfigFile('gclient', self._TEST_CWD, True, None)
    self._AssertGclientConfigSpec("""solutions = [{'custom_deps': {},
  'custom_vars': {},
  'deps_file': '.DEPS.git',
  'name': 'src',
  'url': 'https://chromium.googlesource.com/chromium/src.git'},
 {'custom_deps': {},
  'custom_vars': {},
  'deps_file': '.DEPS.git',
  'name': 'src-internal',
  'url': 'https://chrome-internal.googlesource.com/chrome/src-internal.git'}]
""")

  def testChromiumSpecWithGitHash(self):
    """Test WriteConfigFile with chromium checkout at a given git revision."""
    gclient.WriteConfigFile('gclient', self._TEST_CWD, False,
                            '7becbe4afb42b3301d42149d7d1cade017f150ff')
    self._AssertGclientConfigSpec("""solutions = [{'custom_deps': {},
  'custom_vars': {},
  'deps_file': '.DEPS.git',
  'name': 'src',
  'url': 'https://chromium.googlesource.com/chromium/src.git@7becbe4afb42b3301d42149d7d1cade017f150ff'}]
""")

  def testChromeSpecWithGitHash(self):
    """Test WriteConfigFile with chrome checkout at a given git revision."""
    gclient.WriteConfigFile('gclient', self._TEST_CWD, True,
                            '7becbe4afb42b3301d42149d7d1cade017f150ff')
    self._AssertGclientConfigSpec("""solutions = [{'custom_deps': {},
  'custom_vars': {},
  'deps_file': '.DEPS.git',
  'name': 'src',
  'url': 'https://chromium.googlesource.com/chromium/src.git@7becbe4afb42b3301d42149d7d1cade017f150ff'},
 {'custom_deps': {},
  'custom_vars': {},
  'deps_file': '.DEPS.git',
  'name': 'src-internal',
  'url': 'https://chrome-internal.googlesource.com/chrome/src-internal.git'}]
""")

  def testChromeSpecWithReleaseTag(self):
    """Test WriteConfigFile with chrome checkout at a given release tag."""
    # 45.0.2431.1 is past the .DEPS.git transition, so plain 'DEPS' is used.
    gclient.WriteConfigFile('gclient', self._TEST_CWD, True, '45.0.2431.1')
    self._AssertGclientConfigSpec("""solutions = [{'custom_deps': {},
  'custom_vars': {},
  'deps_file': 'releases/45.0.2431.1/DEPS',
  'name': 'CHROME_DEPS',
  'url': 'https://chrome-internal.googlesource.com/chrome/tools/buildspec.git'}]
""")

  def testChromiumSpecWithReleaseTag(self):
    """Test WriteConfigFile with chromium checkout at a given release tag."""
    gclient.WriteConfigFile('gclient', self._TEST_CWD, False, '41.0.2270.0')
    self._AssertGclientConfigSpec("""solutions = [{'custom_deps': {},
  'custom_vars': {},
  'deps_file': '.DEPS.git',
  'name': 'src',
  'url': 'https://chromium.googlesource.com/chromium/src.git@refs/tags/41.0.2270.0'}]
""")

  def testChromeSpecWithReleaseTagDepsGit(self):
    """Test WriteConfigFile with chrome at a pre-transition release tag."""
    # 41.0.2270.0 predates the transition, so '.DEPS.git' is still used.
    gclient.WriteConfigFile('gclient', self._TEST_CWD, True, '41.0.2270.0')
    self._AssertGclientConfigSpec("""solutions = [{'custom_deps': {},
  'custom_vars': {},
  'deps_file': 'releases/41.0.2270.0/.DEPS.git',
  'name': 'CHROME_DEPS',
  'url': 'https://chrome-internal.googlesource.com/chrome/tools/buildspec.git'}]
""")

  def testChromeSpecDepsResolution(self):
    """Test BuildspecUsesDepsGit at release thresholds."""
    # Versions at or below each milestone's threshold use .DEPS.git; the
    # next version after the threshold switches to DEPS.
    for rev, uses_deps_git in (
        ('41.0.2270.0', True),
        ('45.0.2430.3', True),
        ('45.0.2431.0', False),
        ('44.0.2403.48', True),
        ('44.0.2404.0', False),
        ('43.0.2357.125', True),
        ('43.0.2357.126', False)):
      self.assertEqual(gclient.BuildspecUsesDepsGit(rev), uses_deps_git)

  def testChromeSpecWithGclientTemplate(self):
    """Test WriteConfigFile with chrome checkout with a gclient template."""
    template_path = self._CreateGclientTemplate("""solutions = [
  {
    'name': 'src',
    'custom_deps': {'dep1': '1'},
    'custom_vars': {'var1': 'test1', 'var2': 'test2'},
  },
  { 'name': 'no-vars', 'custom_deps': {'dep2': '2', 'dep3': '3'} },
  { 'name': 'no-deps', 'custom_vars': {'var3': 'a', 'var4': 'b'} }
]""")
    gclient.WriteConfigFile('gclient', self._TEST_CWD, True,
                            '7becbe4afb42b3301d42149d7d1cade017f150ff',
                            template=template_path)
    self._AssertGclientConfigSpec("""solutions = [{'custom_deps': {'dep1': '1'},
  'custom_vars': {'var1': 'test1', 'var2': 'test2'},
  'deps_file': '.DEPS.git',
  'name': 'src',
  'url': 'https://chromium.googlesource.com/chromium/src.git@7becbe4afb42b3301d42149d7d1cade017f150ff'},
 {'custom_deps': {'dep2': '2', 'dep3': '3'}, 'name': 'no-vars'},
 {'custom_vars': {'var3': 'a', 'var4': 'b'}, 'name': 'no-deps'},
 {'custom_deps': {},
  'custom_vars': {},
  'deps_file': '.DEPS.git',
  'name': 'src-internal',
  'url': 'https://chrome-internal.googlesource.com/chrome/src-internal.git'}]
""")
diff --git a/lib/gdata_lib.py b/lib/gdata_lib.py
new file mode 100644
index 0000000..c026b9e
--- /dev/null
+++ b/lib/gdata_lib.py
@@ -0,0 +1,741 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Library for interacting with gdata (i.e. Google Docs, Tracker, etc)."""
+
+from __future__ import print_function
+
+import functools
+import getpass
+import os
+import pickle
+import re
+import urllib
+import xml.dom.minidom
+
+import gdata.projecthosting.client
+import gdata.service
+import gdata.spreadsheet
+import gdata.spreadsheet.service
+
+from chromite.lib import operation
+
+
+# pylint: disable=attribute-defined-outside-init,access-member-before-definition
+
+
# Default locations for the cached auth token and login credential files.
TOKEN_FILE = os.path.join(os.environ['HOME'], '.gdata_token')
CRED_FILE = os.path.join(os.environ['HOME'], '.gdata_cred.txt')

# Module-wide handle used for user-facing Notice/Error output.
oper = operation.Operation('gdata_lib')
+
# Characters the spreadsheet interface cannot accept in column names.
_BAD_COL_CHARS_REGEX = re.compile(r'[ /_]')
def PrepColNameForSS(col):
  """Translate a column name for spreadsheet interface.

  The spreadsheet interface requires column names to be all lowercase with
  no spaces or other special characters.
  """
  lowered = col.lower()
  return _BAD_COL_CHARS_REGEX.sub('', lowered)
+
+
# TODO(mtennant): Rename PrepRowValuesForSS
def PrepRowForSS(row):
  """Make sure spreadsheet handles all values in row as strings."""
  return {key: PrepValForSS(val) for key, val in row.items()}
+
+
# Regex to detect values that the spreadsheet will auto-format as numbers.
_NUM_REGEX = re.compile(r'^[\d\.]+$')
def PrepValForSS(val):
  """Make sure spreadsheet handles this value as a string.

  Version strings (e.g. for portage packages) look numeric, and Sheets
  would automatically interpret and mangle them; a leading apostrophe
  forces string treatment.
  """
  if not val or not _NUM_REGEX.match(val):
    return val
  return "'" + val
+
+
def ScrubValFromSS(val):
  """Remove string indicator prefix if found.

  Inverse of PrepValForSS: strips the leading apostrophe that marks a
  value as a forced string.
  """
  if val and val.startswith("'"):
    return val[1:]
  return val
+
+
class Creds(object):
  """Class to manage user/password credentials."""

  __slots__ = (
      'docs_auth_token',    # Docs Client auth token string
      'creds_dirty',        # True if user/password set and not, yet, saved
      'password',           # User password
      'token_dirty',        # True if auth token(s) set and not, yet, saved
      'tracker_auth_token', # Tracker Client auth token string
      'user',               # User account (foo@chromium.org)
  )

  # Attributes persisted to (and restored from) the auth token file.
  SAVED_TOKEN_ATTRS = ('docs_auth_token', 'tracker_auth_token', 'user')

  def __init__(self):
    self.user = None
    self.password = None

    self.docs_auth_token = None
    self.tracker_auth_token = None

    self.token_dirty = False
    self.creds_dirty = False

  def SetDocsAuthToken(self, auth_token):
    """Set the Docs auth_token string."""
    self.docs_auth_token = auth_token
    self.token_dirty = True

  def SetTrackerAuthToken(self, auth_token):
    """Set the Tracker auth_token string."""
    self.tracker_auth_token = auth_token
    self.token_dirty = True

  def LoadAuthToken(self, filepath):
    """Load previously saved auth token(s) from |filepath|.

    This first clears both docs_auth_token and tracker_auth_token.
    """
    self.docs_auth_token = None
    self.tracker_auth_token = None
    try:
      # 'with' guarantees the file is closed even if unpickling fails.
      with open(filepath, 'r') as f:
        obj = pickle.load(f)
      # 'key in dict' replaces the Python 2-only dict.has_key().
      if 'auth_token' in obj:
        # Backwards compatability.  Default 'auth_token' is what
        # docs_auth_token used to be saved as.
        self.docs_auth_token = obj['auth_token']
        self.token_dirty = True
      for attr in self.SAVED_TOKEN_ATTRS:
        if attr in obj:
          setattr(self, attr, obj[attr])
      oper.Notice('Loaded Docs/Tracker auth token(s) from "%s"' % filepath)
    except IOError:
      oper.Error('Unable to load auth token file at "%s"' % filepath)

  def StoreAuthTokenIfNeeded(self, filepath):
    """Store auth token(s) to |filepath| if anything changed."""
    if self.token_dirty:
      self.StoreAuthToken(filepath)

  def StoreAuthToken(self, filepath):
    """Store auth token(s) to |filepath|."""
    obj = {}

    for attr in self.SAVED_TOKEN_ATTRS:
      val = getattr(self, attr)
      if val:
        obj[attr] = val

    try:
      oper.Notice('Storing Docs and/or Tracker auth token to "%s"' % filepath)
      with open(filepath, 'w') as f:
        pickle.dump(obj, f)

      self.token_dirty = False
    except IOError:
      oper.Error('Unable to store auth token to file at "%s"' % filepath)

  def SetCreds(self, user, password=None):
    """Set the |user|/|password| credentials, marking them dirty.

    A bare username is expanded to a full chromium.org account; when
    |password| is not given it is prompted for interactively.
    """
    if '@' not in user:
      user = '%s@chromium.org' % user

    if not password:
      password = getpass.getpass('Docs password for %s:' % user)

    self.user = user
    self.password = password
    self.creds_dirty = True

  def LoadCreds(self, filepath):
    """Load email/password credentials from |filepath|."""
    # Read email from first line and password from second.
    with open(filepath, 'r') as f:
      (self.user, self.password) = (l.strip() for l in f.readlines())
    oper.Notice('Loaded Docs/Tracker login credentials from "%s"' % filepath)

  def StoreCredsIfNeeded(self, filepath):
    """Store email/password credentials to |filepath| if anything changed."""
    if self.creds_dirty:
      self.StoreCreds(filepath)

  def StoreCreds(self, filepath):
    """Store email/password credentials to |filepath|."""
    oper.Notice('Storing Docs/Tracker login credentials to "%s"' % filepath)
    # Simply write email on first line and password on second.
    with open(filepath, 'w') as f:
      f.write(self.user + '\n')
      f.write(self.password + '\n')

    self.creds_dirty = False
+
+
class IssueComment(object):
  """Represent a Tracker issue comment."""

  __slots__ = ['title', 'text']

  def __init__(self, title, text):
    self.title = title
    self.text = text

  def __str__(self):
    # Empty/missing text is rendered as a placeholder; otherwise each line
    # of the comment body is indented under the title.
    if not self.text:
      return '%s:\n  <no comment>' % self.title
    indented = '\n  '.join(self.text.split('\n'))
    return '%s:\n  %s' % (self.title, indented)
+
+
class Issue(object):
  """Represents one Tracker Issue."""

  # Default value for each slot.  List defaults are copied per instance in
  # __init__ (see below).
  SlotDefaults = {
      'comments': [], # List of IssueComment objects
      'id': 0,        # Issue id number (int)
      'labels': [],   # List of text labels
      'owner': None,  # Current owner (text, chromium.org account)
      'status': None, # Current issue status (text) (e.g. Assigned)
      'summary': None,# Issue summary (first comment)
      'title': None,  # Title text
      'ccs': [],      # Cc list
  }

  __slots__ = SlotDefaults.keys()

  def __init__(self, **kwargs):
    """Init for one Issue object.

    |kwargs| - key/value arguments to give initial values to
    any additional attributes on |self|.

    Raises:
      ValueError: if |kwargs| contains keys not listed in SlotDefaults.
    """
    # Start from SlotDefaults, copying the list defaults so that separate
    # Issue instances never share one mutable list.  (A plain
    # SlotDefaults.copy() is shallow and would alias them, letting one
    # instance's append() show up on every other instance.)
    slotvals = {}
    for slot, default in self.SlotDefaults.items():
      slotvals[slot] = list(default) if isinstance(default, list) else default
    slotvals.update(kwargs)
    for slot in self.__slots__:
      setattr(self, slot, slotvals.pop(slot))
    if slotvals:
      raise ValueError('I do not know what to do with %r' % slotvals)

  def __str__(self):
    """Pretty print of issue."""
    lines = [
        'Issue %d - %s' % (self.id, self.title),
        'Status: %s, Owner: %s' % (self.status, self.owner),
        'Labels: %s' % ', '.join(self.labels),
    ]

    if self.summary:
      lines.append('Summary: %s' % self.summary)

    # str.join() below requires strings; comments are typically
    # IssueComment objects, so convert each explicitly.
    lines.extend(str(comment) for comment in self.comments)

    return '\n'.join(lines)

  def InitFromTracker(self, t_issue, project_name):
    """Initialize |self| from tracker issue |t_issue|"""
    # The __slots__ logic above confuses pylint.
    # https://bitbucket.org/logilab/pylint/issue/380/
    # pylint: disable=assigning-non-slot

    self.id = int(t_issue.id.text.split('/')[-1])
    self.labels = [label.text for label in t_issue.label]
    if t_issue.owner:
      self.owner = t_issue.owner.username.text
    self.status = t_issue.status.text
    self.summary = t_issue.content.text
    self.title = t_issue.title.text
    self.comments = self.GetTrackerIssueComments(self.id, project_name)

  def GetTrackerIssueComments(self, issue_id, project_name):
    """Retrieve comments for |issue_id| from comments URL"""
    comments = []

    feeds = 'http://code.google.com/feeds'
    url = '%s/issues/p/%s/issues/%d/comments/full' % (feeds, project_name,
                                                      issue_id)
    doc = xml.dom.minidom.parse(urllib.urlopen(url))
    entries = doc.getElementsByTagName('entry')
    for entry in entries:
      title_text_list = []
      for key in ('title', 'content'):
        child = entry.getElementsByTagName(key)[0].firstChild
        title_text_list.append(child.nodeValue if child else None)
      comments.append(IssueComment(*title_text_list))

    return comments

  def __eq__(self, other):
    # Note: comments and ccs are deliberately excluded from equality.
    return (self.id == other.id and self.labels == other.labels and
            self.owner == other.owner and self.status == other.status and
            self.summary == other.summary and self.title == other.title)

  def __ne__(self, other):
    return not self == other
+
+
# Base type so callers can catch all Tracker-related failures in one place.
class TrackerError(RuntimeError):
  """Error class for tracker communication errors."""
+
+
# Raised by TrackerComm when Tracker rejects an issue owner (unknown user
# or user not a project member).
class TrackerInvalidUserError(TrackerError):
  """Error class for when user not recognized by Tracker."""
+
+
+class TrackerComm(object):
+  """Class to manage communication with Tracker."""
+
+  __slots__ = (
+      'author',       # Author when creating/editing Tracker issues
+      'it_client',    # Issue Tracker client
+      'project_name', # Tracker project name
+  )
+
+  def __init__(self):
+    self.author = None
+    self.it_client = None
+    self.project_name = None
+
+  def Connect(self, creds, project_name, source='chromiumos'):
+    self.project_name = project_name
+
+    it_client = gdata.projecthosting.client.ProjectHostingClient()
+    it_client.source = source
+
+    if creds.tracker_auth_token:
+      oper.Notice('Logging into Tracker using previous auth token.')
+      it_client.auth_token = gdata.gauth.ClientLoginToken(
+          creds.tracker_auth_token)
+    else:
+      oper.Notice('Logging into Tracker as "%s".' % creds.user)
+      it_client.ClientLogin(creds.user, creds.password,
+                            source=source, service='code',
+                            account_type='GOOGLE')
+      creds.SetTrackerAuthToken(it_client.auth_token.token_string)
+
+    self.author = creds.user
+    self.it_client = it_client
+
+  def _QueryTracker(self, query):
+    """Query the tracker for a list of issues. Return |None| on failure."""
+    try:
+      return self.it_client.get_issues(self.project_name, query=query)
+    except gdata.client.RequestError:
+      return None
+
+  def _CreateIssue(self, t_issue):
+    """Create an Issue from a Tracker Issue."""
+    issue = Issue()
+    issue.InitFromTracker(t_issue, self.project_name)
+    return issue
+
+  # TODO(mtennant): This method works today, but is not being actively used.
+  # Leaving it in, because a logical use of the method is for to verify
+  # that a Tracker issue in the package spreadsheet is open, and to add
+  # comments to it when new upstream versions become available.
+  def GetTrackerIssueById(self, tid):
+    """Get tracker issue given |tid| number.  Return Issue object if found."""
+
+    query = gdata.projecthosting.client.Query(issue_id=str(tid))
+    feed = self._QueryTracker(query)
+
+    if feed.entry:
+      return self._CreateIssue(feed.entry[0])
+    return None
+
+  def GetTrackerIssuesByText(self, search_text, full_text=True,
+                             only_open=True):
+    """Find all Tracker Issues that contain the text search_text."""
+    if not full_text:
+      search_text = 'summary:"%s"' % search_text
+    if only_open:
+      search_text += ' is:open'
+    query = gdata.projecthosting.client.Query(text_query=search_text)
+    feed = self._QueryTracker(query)
+    if feed:
+      return [self._CreateIssue(tissue) for tissue in feed.entry]
+    else:
+      return []
+
  def CreateTrackerIssue(self, issue):
    """Create a new issue in Tracker according to |issue|.

    |issue| supplies the title, summary, status, owner, labels and ccs
    for the new Tracker issue.  On success the new Tracker issue id is
    stored into |issue|.id and returned.

    Raises:
      TrackerInvalidUserError: if Tracker rejects the issue owner.
      gdata.client.RequestError: for any other Tracker request failure.
    """
    try:
      created = self.it_client.add_issue(project_name=self.project_name,
                                         title=issue.title,
                                         content=issue.summary,
                                         author=self.author,
                                         status=issue.status,
                                         owner=issue.owner,
                                         labels=issue.labels,
                                         ccs=issue.ccs)
      # The issue id is the last path component of the returned issue URL.
      issue.id = int(created.id.text.split('/')[-1])
      return issue.id
    except gdata.client.RequestError as ex:
      # Translate the two known owner-related error bodies into a more
      # specific exception; re-raise anything else unchanged.
      if ex.body and ex.body.lower() == 'user not found':
        raise TrackerInvalidUserError('Tracker user %s not found' % issue.owner)
      if ex.body and ex.body.lower() == 'issue owner must be a member':
        raise TrackerInvalidUserError('Tracker user %s not a member' %
                                      issue.owner)
      raise
+
+  def AppendTrackerIssueById(self, issue_id, comment, owner=None):
+    """Append |comment| to issue |issue_id| in Tracker"""
+    self.it_client.update_issue(project_name=self.project_name,
+                                issue_id=issue_id,
+                                author=self.author,
+                                comment=comment,
+                                owner=owner)
+    return issue_id
+
+
class SpreadsheetRow(dict):
  """Semi-immutable dict subclass holding one row of spreadsheet data.

  The original spreadsheet row object and its spreadsheet row number are
  kept available as the |ss_row_obj| and |ss_row_num| attributes.

  All read operations and equality checks behave exactly like a plain
  dict, so client code may treat an instance as a pure dict.  Item
  assignment and deletion are disabled.
  """

  def __init__(self, ss_row_obj, ss_row_num, mapping=None):
    # Populate the dict contents only when initial values were supplied.
    dict.__init__(self, mapping or ())

    self.ss_row_obj = ss_row_obj
    self.ss_row_num = ss_row_num

  def __setitem__(self, key, val):
    raise TypeError('setting item in SpreadsheetRow not supported')

  def __delitem__(self, key):
    raise TypeError('deleting item in SpreadsheetRow not supported')
+
+
class SpreadsheetError(RuntimeError):
  """Raised when communication with the spreadsheet service fails."""
+
+
def ReadWriteDecorator(func):
  """Wrap |func| to convert gdata request failures into SpreadsheetError.

  Any gdata.service.RequestError raised by the wrapped method is
  re-raised as a SpreadsheetError carrying the original error text.
  """

  # functools.wraps preserves __name__ (as before) and additionally
  # __doc__/__module__, so decorated methods keep their docstrings.
  @functools.wraps(func)
  def f(self, *args, **kwargs):
    try:
      return func(self, *args, **kwargs)
    except gdata.service.RequestError as ex:
      raise SpreadsheetError(str(ex))

  return f
+
+
class SpreadsheetComm(object):
  """Class to manage communication with one Google Spreadsheet worksheet."""

  # Row numbering in spreadsheets effectively starts at 2 because row 1
  # has the column headers.
  ROW_NUMBER_OFFSET = 2

  # Spreadsheet column numbers start at 1.
  COLUMN_NUMBER_OFFSET = 1

  __slots__ = (
      '_columns',    # Tuple of translated column names, filled in as needed
      '_rows',       # Tuple of Row dicts in order, filled in as needed
      'gd_client',   # Google Data client
      'ss_key',      # Spreadsheet key
      'ws_name',     # Worksheet name
      'ws_key',      # Worksheet key
  )

  @property
  def columns(self):
    """The columns property is filled in on demand.

    It is a tuple of column names, each run through PrepColNameForSS.
    """
    if self._columns is None:
      query = gdata.spreadsheet.service.CellQuery()
      query['max-row'] = '1'
      feed = self.gd_client.GetCellsFeed(self.ss_key, self.ws_key, query=query)

      # The use of PrepColNameForSS here looks weird, but the values
      # in row 1 are the unaltered column names, rather than the restricted
      # column names used for interface purposes.  In other words, if the
      # spreadsheet looks like it has a column called "Foo Bar", then the
      # first row will have a value "Foo Bar" but all interaction with that
      # column for other rows will use column key "foobar".  Translate to
      # restricted names now with PrepColNameForSS.
      cols = [PrepColNameForSS(entry.content.text) for entry in feed.entry]

      self._columns = tuple(cols)

    return self._columns

  @property
  def rows(self):
    """The rows property is filled in on demand.

    It is a tuple of SpreadsheetRow objects.
    """
    if self._rows is None:
      rows = []

      feed = self.gd_client.GetListFeed(self.ss_key, self.ws_key)
      for rowIx, rowObj in enumerate(feed.entry, start=self.ROW_NUMBER_OFFSET):
        row_dict = dict((key, ScrubValFromSS(val.text))
                        for key, val in rowObj.custom.iteritems())
        rows.append(SpreadsheetRow(rowObj, rowIx, row_dict))

      self._rows = tuple(rows)

    return self._rows

  def __init__(self):
    # Initialize every slot to None; real values arrive via Connect().
    for slot in self.__slots__:
      setattr(self, slot, None)

  def Connect(self, creds, ss_key, ws_name, source='chromiumos'):
    """Login to spreadsheet service and set current worksheet.

    |creds| Credentials object for Google Docs
    |ss_key| Spreadsheet key
    |ws_name| Worksheet name
    |source| Name to associate with connecting service
    """
    self._Login(creds, source)
    self.SetCurrentWorksheet(ws_name, ss_key=ss_key)

  def SetCurrentWorksheet(self, ws_name, ss_key=None):
    """Change the current worksheet.  This clears all caches."""
    if ss_key and ss_key != self.ss_key:
      self.ss_key = ss_key
      self._ClearCache()

    self.ws_name = ws_name

    ws_key = self._GetWorksheetKey(self.ss_key, self.ws_name)
    if ws_key != self.ws_key:
      self.ws_key = ws_key
      self._ClearCache()

  def _ClearCache(self, keep_columns=False):
    """Called whenever column/row data might be stale."""
    self._rows = None
    if not keep_columns:
      self._columns = None

  def _Login(self, creds, source):
    """Login to Google doc client using given |creds|."""
    gd_client = RetrySpreadsheetsService()
    gd_client.source = source

    # Login using previous auth token if available, otherwise
    # use email/password from creds.
    if creds.docs_auth_token:
      oper.Notice('Logging into Docs using previous auth token.')
      gd_client.SetClientLoginToken(creds.docs_auth_token)
    else:
      oper.Notice('Logging into Docs as "%s".' % creds.user)
      gd_client.email = creds.user
      gd_client.password = creds.password
      gd_client.ProgrammaticLogin()
      # Save the token so later logins can skip ProgrammaticLogin().
      creds.SetDocsAuthToken(gd_client.GetClientLoginToken())

    self.gd_client = gd_client

  def _GetWorksheetKey(self, ss_key, ws_name):
    """Get the worksheet key with name |ws_name| in spreadsheet |ss_key|."""
    feed = self.gd_client.GetWorksheetsFeed(ss_key)
    # The worksheet key is the last component in the URL (after last '/')
    for entry in feed.entry:
      if ws_name == entry.title.text:
        return entry.id.text.split('/')[-1]

    # NOTE(review): oper.Die presumably terminates the process; if it does
    # not, this method falls through returning None -- confirm.
    oper.Die('Unable to find worksheet "%s" in spreadsheet "%s"' %
             (ws_name, ss_key))

  @ReadWriteDecorator
  def GetColumns(self):
    """Return tuple of column names in worksheet.

    Note that each returned name has been run through PrepColNameForSS.
    """
    return self.columns

  @ReadWriteDecorator
  def GetColumnIndex(self, colName):
    """Get the column index (starting at 1) for column |colName|"""
    try:
      # Spreadsheet column indices start at 1, so +1.
      return self.columns.index(colName) + self.COLUMN_NUMBER_OFFSET
    except ValueError:
      return None

  @ReadWriteDecorator
  def GetRows(self):
    """Return tuple of SpreadsheetRow objects in order."""
    return self.rows

  @ReadWriteDecorator
  def GetRowCacheByCol(self, column):
    """Return a dict for looking up rows by value in |column|.

    Each row value is a SpreadsheetRow object.
    If more than one row has the same value for |column|, then the
    row objects will be in a list in the returned dict.
    """
    row_cache = {}

    for row in self.GetRows():
      col_val = row[column]

      # Use isinstance and an explicit None check so that a falsy row
      # (e.g. an empty SpreadsheetRow) is not silently overwritten.
      current_entry = row_cache.get(col_val)
      if isinstance(current_entry, list):
        current_entry.append(row)
      elif current_entry is not None:
        current_entry = [current_entry, row]
      else:
        current_entry = row

      row_cache[col_val] = current_entry

    return row_cache

  @ReadWriteDecorator
  def InsertRow(self, row):
    """Insert |row| at end of spreadsheet."""
    self.gd_client.InsertRow(row, self.ss_key, self.ws_key)
    self._ClearCache(keep_columns=True)

  @ReadWriteDecorator
  def UpdateRowCellByCell(self, rowIx, row):
    """Replace cell values in row at |rowIx| with those in |row| dict."""
    for colName in row:
      colIx = self.GetColumnIndex(colName)
      if colIx is not None:
        self.ReplaceCellValue(rowIx, colIx, row[colName])
    self._ClearCache(keep_columns=True)

  @ReadWriteDecorator
  def DeleteRow(self, ss_row):
    """Delete the given |ss_row| (must be original spreadsheet row object)."""
    self.gd_client.DeleteRow(ss_row)
    self._ClearCache(keep_columns=True)

  @ReadWriteDecorator
  def ReplaceCellValue(self, rowIx, colIx, val):
    """Replace cell value at |rowIx| and |colIx| with |val|"""
    self.gd_client.UpdateCell(rowIx, colIx, val, self.ss_key, self.ws_key)
    self._ClearCache(keep_columns=True)

  @ReadWriteDecorator
  def ClearCellValue(self, rowIx, colIx):
    """Clear cell value at |rowIx| and |colIx|"""
    self.ReplaceCellValue(rowIx, colIx, None)

  @ReadWriteDecorator
  def ClearColumnWorksheet(self, colIx):
    """Clear column with index |colIx| from current worksheet."""
    query = gdata.spreadsheet.service.CellQuery()
    query.min_col = str(colIx)
    query.max_col = str(colIx)

    cells = self.gd_client.GetCellsFeed(self.ss_key, wksht_id=self.ws_key,
                                        query=query)
    batchRequest = gdata.spreadsheet.SpreadsheetsCellsFeed()

    # Batch the updates so the column is cleared in one round trip.
    for entry in cells.entry:
      entry.cell.inputValue = None
      batchRequest.AddUpdate(entry)

    self.gd_client.ExecuteBatch(batchRequest, cells.GetBatchLink().href)

  @ReadWriteDecorator
  def WriteColumnToWorksheet(self, colIx, data):
    """Clear column index |colIx| from worksheet and write |data| to it."""
    self.ClearColumnWorksheet(colIx)

    # return_empty makes the feed include empty cells so every value in
    # |data| has a target cell to write into.
    query = gdata.spreadsheet.service.CellQuery()
    query.min_col = str(colIx)
    query.max_col = str(colIx)
    query.min_row = '1'
    query.max_row = str(len(data))
    query.return_empty = 'true'

    cells = self.gd_client.GetCellsFeed(self.ss_key, wksht_id=self.ws_key,
                                        query=query)
    batchRequest = gdata.spreadsheet.SpreadsheetsCellsFeed()

    for entry, value in zip(cells.entry, data):
      entry.cell.inputValue = str(value)
      batchRequest.AddUpdate(entry)

    self.gd_client.ExecuteBatch(batchRequest, cells.GetBatchLink().href)
+
+
class RetrySpreadsheetsService(gdata.spreadsheet.service.SpreadsheetsService):
  """Extend SpreadsheetsService to put retry logic around http request method.

  The entire purpose of this class is to remove some flakiness from
  interactions with Google Drive spreadsheet service, in the form of
  certain 40* and 50* http error responses to http requests.  This is
  documented in https://code.google.com/p/chromium/issues/detail?id=206798.
  There are two "request" methods that need to be wrapped in retry logic.
  1) The request method on self.  Original implementation is in
     base class atom.service.AtomService.
  2) The request method on self.http_client.  The class of self.http_client
     can actually vary, so the original implementation of the request
     method can also vary.
  """

  # Maximum number of attempts per request.
  TRY_MAX = 5
  RETRYABLE_STATUSES = (
      403,  # Forbidden (but retries still seem to help).
      500,  # Internal server error.
  )

  def __init__(self, *args, **kwargs):
    gdata.spreadsheet.service.SpreadsheetsService.__init__(self, *args,
                                                           **kwargs)

    # Wrap self.http_client.request with retry wrapper.  This request method
    # is used by ProgrammaticLogin(), at least.
    if hasattr(self, 'http_client'):
      self.http_client.request = functools.partial(self._RetryRequest,
                                                   self.http_client.request)

    self.request = functools.partial(self._RetryRequest, self.request)

  def _RetryRequest(self, func, *args, **kwargs):
    """Retry wrapper for bound |func|, passing |args| and |kwargs|.

    This retry wrapper can be used for any http request |func| that provides
    an http status code via the .status attribute of the returned value.

    Retry when the status value on the return object is in RETRYABLE_STATUSES,
    and run up to TRY_MAX times.  If successful (whether or not retries
    were necessary) return the last return value returned from base method.
    If unsuccessful return the first return value returned from base method.
    """
    first_retval = None
    for try_ix in xrange(1, self.TRY_MAX + 1):
      retval = func(*args, **kwargs)
      if retval.status not in self.RETRYABLE_STATUSES:
        return retval

      oper.Warning('Retry-able HTTP request failure (status=%d), try %d/%d' %
                   (retval.status, try_ix, self.TRY_MAX))
      # Record the first failing response.  Compare against None explicitly
      # so that a falsy response object is not re-recorded on later tries
      # (previously 'if not first_retval' could return a later failure
      # instead of the first).
      if first_retval is None:
        first_retval = retval

    oper.Warning('Giving up on HTTP request after %d tries' % self.TRY_MAX)
    return first_retval
diff --git a/lib/gdata_lib_unittest b/lib/gdata_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/gdata_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/gdata_lib_unittest.py b/lib/gdata_lib_unittest.py
new file mode 100644
index 0000000..b0ec48b
--- /dev/null
+++ b/lib/gdata_lib_unittest.py
@@ -0,0 +1,1108 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the gdata_lib module."""
+
+from __future__ import print_function
+
+import getpass
+import mox
+import re
+
+import atom.service
+import gdata.projecthosting.client as gd_ph_client
+import gdata.spreadsheet.service
+
+from chromite.lib import cros_test_lib
+from chromite.lib import gdata_lib
+from chromite.lib import osutils
+
+
+# pylint: disable=W0212,E1101
+
+
class GdataLibTest(cros_test_lib.OutputTestCase):
  """Tests for methods that escape/unescape strings for spreadsheets."""

  def testPrepColNameForSS(self):
    tests = {
        'foo': 'foo',
        'Foo': 'foo',
        'FOO': 'foo',
        'foo bar': 'foobar',
        'Foo Bar': 'foobar',
        'F O O B A R': 'foobar',
        'Foo/Bar': 'foobar',
        'Foo Bar/Dude': 'foobardude',
        'foo/bar': 'foobar',
    }

    for col, expected in tests.items():
      self.assertEquals(expected, gdata_lib.PrepColNameForSS(col))
      # Prepping an already-prepped name should be a no-op.
      self.assertEquals(expected, gdata_lib.PrepColNameForSS(expected))

  def testPrepValForSS(self):
    tests = {
        None: None,
        '': '',
        'foo': 'foo',
        'foo123': 'foo123',
        '123': "'123",
        '1.2': "'1.2",
    }

    for val, expected in tests.items():
      self.assertEquals(expected, gdata_lib.PrepValForSS(val))

  def testPrepRowForSS(self):
    vals = {
        None: None,
        '': '',
        'foo': 'foo',
        'foo123': 'foo123',
        '123': "'123",
        '1.2': "'1.2",
    }

    # Build parallel input/expected rows, giving each value pair its own
    # single-letter column name (the names themselves are unimportant).
    rowIn = {}
    rowOut = {}
    for colIx, (valIn, valOut) in enumerate(vals.items()):
      col = chr(ord('a') + colIx)
      rowIn[col] = valIn
      rowOut[col] = valOut

    self.assertEquals(rowOut, gdata_lib.PrepRowForSS(rowIn))

  def testScrubValFromSS(self):
    tests = {
        None: None,
        'foo': 'foo',
        '123': '123',
        "'123": '123',
    }

    for val, expected in tests.items():
      self.assertEquals(expected, gdata_lib.ScrubValFromSS(val))
+
+
class CredsTest(cros_test_lib.MockOutputTestCase):
  """Tests related to user credentials."""

  # Canned credential values shared by all test methods.
  USER = 'somedude@chromium.org'
  PASSWORD = 'worldsbestpassword'
  DOCS_TOKEN = 'SomeDocsAuthToken'
  TRACKER_TOKEN = 'SomeTrackerAuthToken'

  @osutils.TempFileDecorator
  def testStoreLoadCreds(self):
    """Round-trip user/password through StoreCreds then LoadCreds."""
    # This is the replay script for the test.
    creds = gdata_lib.Creds()

    # This is the test verification.
    with self.OutputCapturer():
      creds.SetCreds(self.USER, self.PASSWORD)
      self.assertEquals(self.USER, creds.user)
      self.assertEquals(self.PASSWORD, creds.password)
      self.assertTrue(creds.creds_dirty)

      # Storing should leave values intact and clear the dirty flag.
      creds.StoreCreds(self.tempfile)
      self.assertEquals(self.USER, creds.user)
      self.assertEquals(self.PASSWORD, creds.password)
      self.assertFalse(creds.creds_dirty)

      # Clear user/password before loading from just-created file.
      creds.user = None
      creds.password = None
      self.assertEquals(None, creds.user)
      self.assertEquals(None, creds.password)

      creds.LoadCreds(self.tempfile)
      self.assertEquals(self.USER, creds.user)
      self.assertEquals(self.PASSWORD, creds.password)
      self.assertFalse(creds.creds_dirty)

  @osutils.TempFileDecorator
  def testStoreLoadToken(self):
    """Round-trip auth tokens through StoreAuthToken then LoadAuthToken."""
    # This is the replay script for the test.
    creds = gdata_lib.Creds()
    creds.user = self.USER

    # This is the test verification.
    with self.OutputCapturer():
      creds.SetDocsAuthToken(self.DOCS_TOKEN)
      self.assertEquals(self.DOCS_TOKEN, creds.docs_auth_token)
      self.assertTrue(creds.token_dirty)
      creds.SetTrackerAuthToken(self.TRACKER_TOKEN)
      self.assertEquals(self.TRACKER_TOKEN, creds.tracker_auth_token)
      self.assertTrue(creds.token_dirty)

      # Storing should leave tokens intact and clear the dirty flag.
      creds.StoreAuthToken(self.tempfile)
      self.assertEquals(self.DOCS_TOKEN, creds.docs_auth_token)
      self.assertEquals(self.TRACKER_TOKEN, creds.tracker_auth_token)
      self.assertFalse(creds.token_dirty)

      # Clear auth_tokens before loading from just-created file.
      creds.docs_auth_token = None
      creds.tracker_auth_token = None
      creds.user = None

      creds.LoadAuthToken(self.tempfile)
      self.assertEquals(self.DOCS_TOKEN, creds.docs_auth_token)
      self.assertEquals(self.TRACKER_TOKEN, creds.tracker_auth_token)
      self.assertFalse(creds.token_dirty)
      self.assertEquals(self.USER, creds.user)

  def testSetCreds(self):
    """SetCreds with explicit password stores both values and marks dirty."""
    # This is the replay script for the test.
    creds = gdata_lib.Creds()

    # This is the test verification.
    creds.SetCreds(self.USER, password=self.PASSWORD)
    self.assertEquals(self.USER, creds.user)
    self.assertEquals(self.PASSWORD, creds.password)
    self.assertTrue(creds.creds_dirty)

  def testSetCredsNoPassword(self):
    """SetCreds without a password should prompt for one via getpass."""
    # Add test-specific mocks/stubs
    self.PatchObject(getpass, 'getpass', return_value=self.PASSWORD)

    # This is the replay script for the test.
    creds = gdata_lib.Creds()

    # This is the test verification.
    creds.SetCreds(self.USER)
    self.assertEquals(self.USER, creds.user)
    self.assertEquals(self.PASSWORD, creds.password)
    self.assertTrue(creds.creds_dirty)

  def testSetDocsToken(self):
    """SetDocsAuthToken stores the token and marks token state dirty."""
    # This is the replay script for the test.
    creds = gdata_lib.Creds()

    # This is the test verification.
    creds.SetDocsAuthToken(self.DOCS_TOKEN)
    self.assertEquals(self.DOCS_TOKEN, creds.docs_auth_token)
    self.assertTrue(creds.token_dirty)

  def testSetTrackerToken(self):
    """SetTrackerAuthToken stores the token and marks token state dirty."""
    # This is the replay script for the test.
    creds = gdata_lib.Creds()

    # This is the test verification.
    creds.SetTrackerAuthToken(self.TRACKER_TOKEN)
    self.assertEquals(self.TRACKER_TOKEN, creds.tracker_auth_token)
    self.assertTrue(creds.token_dirty)
+
+
class SpreadsheetRowTest(cros_test_lib.OutputTestCase):
  """Tests related to spreadsheet row interaction."""

  SS_ROW_OBJ = 'SSRowObj'
  SS_ROW_NUM = 5

  def testEmpty(self):
    """An empty row exposes its attrs and rejects item assignment."""
    row = gdata_lib.SpreadsheetRow(self.SS_ROW_OBJ, self.SS_ROW_NUM)

    self.assertEquals(0, len(row))
    self.assertEquals(self.SS_ROW_OBJ, row.ss_row_obj)
    self.assertEquals(self.SS_ROW_NUM, row.ss_row_num)

    # Fix: previously |row| itself was passed as the callable, which raised
    # TypeError only because a dict is not callable, never exercising
    # __setitem__.  Pass the bound method instead.
    self.assertRaises(TypeError, row.__setitem__, 'abc', 'xyz')
    self.assertEquals(0, len(row))
    self.assertFalse('abc' in row)

  def testInit(self):
    """A pre-populated row reads like a dict but rejects deletion."""
    starting_vals = {'abc': 'xyz', 'foo': 'bar'}
    row = gdata_lib.SpreadsheetRow(self.SS_ROW_OBJ, self.SS_ROW_NUM,
                                   starting_vals)

    self.assertEquals(len(starting_vals), len(row))
    self.assertEquals(starting_vals, row)
    self.assertEquals(row['abc'], 'xyz')
    self.assertTrue('abc' in row)
    self.assertEquals(row['foo'], 'bar')
    self.assertTrue('foo' in row)

    self.assertEquals(self.SS_ROW_OBJ, row.ss_row_obj)
    self.assertEquals(self.SS_ROW_NUM, row.ss_row_num)

    # Exercise __delitem__ directly (see note in testEmpty).
    self.assertRaises(TypeError, row.__delitem__, 'abc')
    self.assertEquals(len(starting_vals), len(row))
    self.assertTrue('abc' in row)
+
+
+class SpreadsheetCommTest(cros_test_lib.MoxOutputTestCase):
+  """Test Speadsheet communication."""
+
+  SS_KEY = 'TheSSKey'
+  WS_NAME = 'TheWSName'
+  WS_KEY = 'TheWSKey'
+
+  USER = 'dude'
+  PASSWORD = 'shhh'
+  TOKEN = 'authtoken'
+
+  COLUMNS = ('greeting', 'name', 'title')
+  ROWS = (
+      {'greeting': 'Hi', 'name': 'George', 'title': 'Mr.'},
+      {'greeting': 'Howdy', 'name': 'Billy Bob', 'title': 'Mr.'},
+      {'greeting': 'Yo', 'name': 'Adriane', 'title': 'Ms.'},
+  )
+
+  def MockScomm(self, connect=True):
+    """Return a mocked SpreadsheetComm"""
+    mocked_scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
+
+    mocked_scomm._columns = None
+    mocked_scomm._rows = None
+
+    if connect:
+      mocked_gdclient = self.mox.CreateMock(gdata_lib.RetrySpreadsheetsService)
+      mocked_scomm.gd_client = mocked_gdclient
+      mocked_scomm.ss_key = self.SS_KEY
+      mocked_scomm.ws_name = self.WS_NAME
+      mocked_scomm.ws_key = self.WS_KEY
+    else:
+      mocked_scomm.gd_client = None
+      mocked_scomm.ss_key = None
+      mocked_scomm.ws_name = None
+      mocked_scomm.ws_key = None
+
+    return mocked_scomm
+
+  def NewScomm(self, gd_client=None, connect=True):
+    """Return a non-mocked SpreadsheetComm."""
+    scomm = gdata_lib.SpreadsheetComm()
+    scomm.gd_client = gd_client
+
+    if connect:
+      scomm.ss_key = self.SS_KEY
+      scomm.ws_name = self.WS_NAME
+      scomm.ws_key = self.WS_KEY
+    else:
+      scomm.ss_key = None
+      scomm.ws_name = None
+      scomm.ws_key = None
+
+    return scomm
+
+  def GenerateCreds(self, skip_user=False, skip_token=False):
+    creds = gdata_lib.Creds()
+    if not skip_user:
+      creds.user = self.USER
+      creds.password = self.PASSWORD
+
+    if not skip_token:
+      creds.docs_auth_token = self.TOKEN
+
+    return creds
+
+  def testConnect(self):
+    mocked_scomm = self.MockScomm(connect=False)
+    creds = self.GenerateCreds()
+
+    # This is the replay script for the test.
+    mocked_scomm._Login(creds, 'chromiumos')
+    mocked_scomm.SetCurrentWorksheet(self.WS_NAME, ss_key=self.SS_KEY)
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    gdata_lib.SpreadsheetComm.Connect(mocked_scomm, creds,
+                                      self.SS_KEY, self.WS_NAME)
+    self.mox.VerifyAll()
+
+  def testColumns(self):
+    """Test the .columns property.  Testing a property gets ugly."""
+    self.mox.StubOutWithMock(gdata.spreadsheet.service, 'CellQuery')
+    mocked_gdclient = self.mox.CreateMock(gdata_lib.RetrySpreadsheetsService)
+    scomm = self.NewScomm(gd_client=mocked_gdclient, connect=True)
+
+    query = {'max-row': '1'}
+
+    # Simulate a Cells feed from spreadsheet for the column row.
+    cols = [c[0].upper() + c[1:] for c in self.COLUMNS]
+    entry = [cros_test_lib.EasyAttr(
+        content=cros_test_lib.EasyAttr(text=c)) for c in cols]
+    feed = cros_test_lib.EasyAttr(entry=entry)
+
+    # This is the replay script for the test.
+    gdata.spreadsheet.service.CellQuery().AndReturn(query)
+    mocked_gdclient.GetCellsFeed(
+        self.SS_KEY, self.WS_KEY, query=query).AndReturn(feed)
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    result = scomm.columns
+    del scomm # Force deletion now before VerifyAll.
+    self.mox.VerifyAll()
+
+    expected_result = self.COLUMNS
+    self.assertEquals(expected_result, result)
+
+  def testRows(self):
+    """Test the .rows property.  Testing a property gets ugly."""
+    mocked_gdclient = self.mox.CreateMock(gdata_lib.RetrySpreadsheetsService)
+    scomm = self.NewScomm(gd_client=mocked_gdclient, connect=True)
+
+    # Simulate a List feed from spreadsheet for all rows.
+    rows = [
+        {'col_name': 'Joe', 'col_age': '12', 'col_zip': '12345'},
+        {'col_name': 'Bob', 'col_age': '15', 'col_zip': '54321'},
+    ]
+    entry = []
+    for row in rows:
+      custom = dict((k, cros_test_lib.EasyAttr(text=v))
+                    for (k, v) in row.iteritems())
+      entry.append(cros_test_lib.EasyAttr(custom=custom))
+    feed = cros_test_lib.EasyAttr(entry=entry)
+
+    # This is the replay script for the test.
+    mocked_gdclient.GetListFeed(self.SS_KEY, self.WS_KEY).AndReturn(feed)
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    result = scomm.rows
+    del scomm # Force deletion now before VerifyAll.
+    self.mox.VerifyAll()
+    self.assertEquals(tuple(rows), result)
+
+    # Result tuple should have spreadsheet row num as attribute on each row.
+    self.assertEquals(2, result[0].ss_row_num)
+    self.assertEquals(3, result[1].ss_row_num)
+
+    # Result tuple should have spreadsheet row obj as attribute on each row.
+    self.assertEquals(entry[0], result[0].ss_row_obj)
+    self.assertEquals(entry[1], result[1].ss_row_obj)
+
+  def testSetCurrentWorksheetStart(self):
+    mocked_scomm = self.MockScomm(connect=True)
+
+    # Undo worksheet settings.
+    mocked_scomm.ss_key = None
+    mocked_scomm.ws_name = None
+    mocked_scomm.ws_key = None
+
+    # This is the replay script for the test.
+    mocked_scomm._ClearCache()
+    mocked_scomm._GetWorksheetKey(
+        self.SS_KEY, self.WS_NAME).AndReturn(self.WS_KEY)
+    mocked_scomm._ClearCache()
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    gdata_lib.SpreadsheetComm.SetCurrentWorksheet(mocked_scomm, self.WS_NAME,
+                                                  ss_key=self.SS_KEY)
+    self.mox.VerifyAll()
+
+    self.assertEquals(self.SS_KEY, mocked_scomm.ss_key)
+    self.assertEquals(self.WS_KEY, mocked_scomm.ws_key)
+    self.assertEquals(self.WS_NAME, mocked_scomm.ws_name)
+
+  def testSetCurrentWorksheetRestart(self):
+    mocked_scomm = self.MockScomm(connect=True)
+
+    other_ws_name = 'OtherWSName'
+    other_ws_key = 'OtherWSKey'
+
+    # This is the replay script for the test.
+    mocked_scomm._GetWorksheetKey(
+        self.SS_KEY, other_ws_name).AndReturn(other_ws_key)
+    mocked_scomm._ClearCache()
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    gdata_lib.SpreadsheetComm.SetCurrentWorksheet(mocked_scomm, other_ws_name)
+    self.mox.VerifyAll()
+
+    self.assertEquals(self.SS_KEY, mocked_scomm.ss_key)
+    self.assertEquals(other_ws_key, mocked_scomm.ws_key)
+    self.assertEquals(other_ws_name, mocked_scomm.ws_name)
+
+  def testClearCache(self):
+    rows = 'SomeRows'
+    cols = 'SomeColumns'
+
+    scomm = self.NewScomm()
+    scomm._rows = rows
+    scomm._columns = cols
+
+    scomm._ClearCache(keep_columns=True)
+    self.assertTrue(scomm._rows is None)
+    self.assertEquals(cols, scomm._columns)
+
+    scomm._rows = rows
+    scomm._columns = cols
+
+    scomm._ClearCache(keep_columns=False)
+    self.assertTrue(scomm._rows is None)
+    self.assertTrue(scomm._columns is None)
+
+    scomm._rows = rows
+    scomm._columns = cols
+
+    scomm._ClearCache()
+    self.assertTrue(scomm._rows is None)
+    self.assertTrue(scomm._columns is None)
+
+  def testLoginWithUserPassword(self):
+    mocked_scomm = self.MockScomm(connect=False)
+    creds = self.GenerateCreds(skip_token=True)
+
+    self.mox.StubOutClassWithMocks(gdata_lib, 'RetrySpreadsheetsService')
+
+    source = 'SomeSource'
+
+    # This is the replay script for the test.
+    mocked_gdclient = gdata_lib.RetrySpreadsheetsService()
+    mocked_gdclient.ProgrammaticLogin()
+    mocked_gdclient.GetClientLoginToken().AndReturn(self.TOKEN)
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    with self.OutputCapturer():
+      gdata_lib.SpreadsheetComm._Login(mocked_scomm, creds, source)
+    self.mox.VerifyAll()
+    self.assertEquals(self.USER, mocked_gdclient.email)
+    self.assertEquals(self.PASSWORD, mocked_gdclient.password)
+    self.assertEquals(self.TOKEN, creds.docs_auth_token)
+    self.assertEquals(source, mocked_gdclient.source)
+    self.assertEquals(mocked_gdclient, mocked_scomm.gd_client)
+
+  def testLoginWithToken(self):
+    mocked_scomm = self.MockScomm(connect=False)
+    creds = self.GenerateCreds(skip_user=True)
+
+    self.mox.StubOutClassWithMocks(gdata_lib, 'RetrySpreadsheetsService')
+
+    source = 'SomeSource'
+
+    # This is the replay script for the test.
+    mocked_gdclient = gdata_lib.RetrySpreadsheetsService()
+    mocked_gdclient.SetClientLoginToken(creds.docs_auth_token)
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    with self.OutputCapturer():
+      gdata_lib.SpreadsheetComm._Login(mocked_scomm, creds, source)
+    self.mox.VerifyAll()
+    self.assertFalse(hasattr(mocked_gdclient, 'email'))
+    self.assertFalse(hasattr(mocked_gdclient, 'password'))
+    self.assertEquals(source, mocked_gdclient.source)
+    self.assertEquals(mocked_gdclient, mocked_scomm.gd_client)
+
+  def testGetWorksheetKey(self):
+    mocked_scomm = self.MockScomm()
+
+    entrylist = [
+        cros_test_lib.EasyAttr(
+            title=cros_test_lib.EasyAttr(text='Foo'), id='NotImportant'),
+        cros_test_lib.EasyAttr(
+            title=cros_test_lib.EasyAttr(text=self.WS_NAME),
+            id=cros_test_lib.EasyAttr(text='/some/path/%s' % self.WS_KEY)),
+        cros_test_lib.EasyAttr(
+            title=cros_test_lib.EasyAttr(text='Bar'), id='NotImportant'),
+    ]
+    feed = cros_test_lib.EasyAttr(entry=entrylist)
+
+    # This is the replay script for the test.
+    mocked_scomm.gd_client.GetWorksheetsFeed(self.SS_KEY).AndReturn(feed)
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    gdata_lib.SpreadsheetComm._GetWorksheetKey(mocked_scomm,
+                                               self.SS_KEY, self.WS_NAME)
+    self.mox.VerifyAll()
+
+  def testGetColumns(self):
+    mocked_scomm = self.MockScomm()
+    mocked_scomm.columns = 'SomeColumns'
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    result = gdata_lib.SpreadsheetComm.GetColumns(mocked_scomm)
+    self.mox.VerifyAll()
+    self.assertEquals('SomeColumns', result)
+
+  def testGetColumnIndex(self):
+    """Test GetColumnIndex mapping a column name to its 1-based index."""
+    # Note that spreadsheet column indices start at 1.
+    mocked_scomm = self.MockScomm()
+    mocked_scomm.columns = ['these', 'are', 'column', 'names']
+
+    # This is the replay script for the test.
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    result = gdata_lib.SpreadsheetComm.GetColumnIndex(mocked_scomm, 'are')
+    self.mox.VerifyAll()
+    # 'are' is the second column, so index 2 with 1-based numbering.
+    self.assertEquals(2, result)
+
+  def testGetRows(self):
+    """Test that GetRows returns cached rows with row metadata intact."""
+    mocked_scomm = self.MockScomm()
+    rows = []
+    # Spreadsheet row numbers start at 2 here — presumably row 1 is the
+    # column header row; TODO confirm against SpreadsheetComm.
+    for row_ix, row_dict in enumerate(self.ROWS):
+      rows.append(gdata_lib.SpreadsheetRow('SSRowObj%d' % (row_ix + 2),
+                                           (row_ix + 2), row_dict))
+    mocked_scomm.rows = tuple(rows)
+
+    # This is the replay script for the test.
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    result = gdata_lib.SpreadsheetComm.GetRows(mocked_scomm)
+    self.mox.VerifyAll()
+    self.assertEquals(self.ROWS, result)
+    # Each returned row keeps its spreadsheet row number and row object.
+    for row_ix in xrange(len(self.ROWS)):
+      self.assertEquals(row_ix + 2, result[row_ix].ss_row_num)
+      self.assertEquals('SSRowObj%d' % (row_ix + 2), result[row_ix].ss_row_obj)
+
+  def testGetRowCacheByCol(self):
+    """Test GetRowCacheByCol indexing rows by a column with unique values."""
+    mocked_scomm = self.MockScomm()
+
+    # This is the replay script for the test.
+    mocked_scomm.GetRows().AndReturn(self.ROWS)
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    result = gdata_lib.SpreadsheetComm.GetRowCacheByCol(mocked_scomm, 'name')
+    self.mox.VerifyAll()
+
+    # Result is a dict of rows by the 'name' column.
+    for row in self.ROWS:
+      name = row['name']
+      self.assertEquals(row, result[name])
+
+  def testGetRowCacheByColDuplicates(self):
+    """Test GetRowCacheByCol when multiple rows share the same column value."""
+    mocked_scomm = self.MockScomm()
+
+    # Create new row list with duplicates by name column.
+    # Each copied row differs from its original only in 'greeting' so the
+    # pair can be distinguished in the result lists below.
+    rows = []
+    for row in self.ROWS:
+      new_row = dict(row)
+      new_row['greeting'] = row['greeting'] + ' there'
+      rows.append(new_row)
+
+    rows.extend(self.ROWS)
+
+    # This is the replay script for the test.
+    mocked_scomm.GetRows().AndReturn(tuple(rows))
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    result = gdata_lib.SpreadsheetComm.GetRowCacheByCol(mocked_scomm, 'name')
+    self.mox.VerifyAll()
+
+    # Result is a dict of rows by the 'name' column.  In this
+    # test each result should be a list of the rows with the same
+    # value in the 'name' column.
+    # The first half of |rows| are the modified copies; the second half are
+    # the originals, so rows ix and ix + num_rows/2 form a duplicate pair.
+    num_rows = len(rows)
+    for ix in xrange(num_rows / 2):
+      row1 = rows[ix]
+      row2 = rows[ix + (num_rows / 2)]
+
+      name = row1['name']
+      self.assertEquals(name, row2['name'])
+
+      expected_rows = [row1, row2]
+      self.assertEquals(expected_rows, result[name])
+
+  def testInsertRow(self):
+    """Test InsertRow delegating to gd_client and clearing the row cache."""
+    mocked_scomm = self.MockScomm()
+
+    row = 'TheRow'
+
+    # Replay script
+    mocked_scomm.gd_client.InsertRow(row, mocked_scomm.ss_key,
+                                     mocked_scomm.ws_key)
+    # Columns are unchanged by a row insert, so the column cache is kept.
+    mocked_scomm._ClearCache(keep_columns=True)
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    gdata_lib.SpreadsheetComm.InsertRow(mocked_scomm, row)
+    self.mox.VerifyAll()
+
+  def testUpdateRowCellByCell(self):
+    """Test UpdateRowCellByCell, skipping columns with no known index."""
+    mocked_scomm = self.MockScomm()
+
+    rowIx = 5
+    row = {'a': 123, 'b': 234, 'c': 345}
+    # Column 'b' has no index (None), so no ReplaceCellValue is expected
+    # for it — only the lookup.
+    colIndices = {'a': 1, 'b': None, 'c': 4}
+
+    # Replay script
+    # Iterating |row| here and in the code under test uses the same dict,
+    # so the expectation order matches the call order within this run.
+    for colName in row:
+      colIx = colIndices[colName]
+      mocked_scomm.GetColumnIndex(colName).AndReturn(colIx)
+      if colIx is not None:
+        mocked_scomm.ReplaceCellValue(rowIx, colIx, row[colName])
+    mocked_scomm._ClearCache(keep_columns=True)
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    gdata_lib.SpreadsheetComm.UpdateRowCellByCell(mocked_scomm, rowIx, row)
+    self.mox.VerifyAll()
+
+  def testDeleteRow(self):
+    """Test DeleteRow delegating to gd_client and clearing the row cache."""
+    mocked_scomm = self.MockScomm()
+
+    ss_row = 'TheRow'
+
+    # Replay script
+    mocked_scomm.gd_client.DeleteRow(ss_row)
+    mocked_scomm._ClearCache(keep_columns=True)
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    gdata_lib.SpreadsheetComm.DeleteRow(mocked_scomm, ss_row)
+    self.mox.VerifyAll()
+
+  def testReplaceCellValue(self):
+    """Test ReplaceCellValue calling UpdateCell and clearing the row cache."""
+    mocked_scomm = self.MockScomm()
+
+    rowIx = 14
+    colIx = 4
+    val = 'TheValue'
+
+    # Replay script
+    mocked_scomm.gd_client.UpdateCell(rowIx, colIx, val,
+                                      mocked_scomm.ss_key, mocked_scomm.ws_key)
+    mocked_scomm._ClearCache(keep_columns=True)
+    self.mox.ReplayAll()
+
+    # Verify
+    gdata_lib.SpreadsheetComm.ReplaceCellValue(mocked_scomm, rowIx, colIx, val)
+    self.mox.VerifyAll()
+
+  def testClearCellValue(self):
+    """Test that ClearCellValue replaces the cell value with None."""
+    mocked_scomm = self.MockScomm()
+
+    rowIx = 14
+    colIx = 4
+
+    # Replay script
+    mocked_scomm.ReplaceCellValue(rowIx, colIx, None)
+    self.mox.ReplayAll()
+
+    # Verify
+    gdata_lib.SpreadsheetComm.ClearCellValue(mocked_scomm, rowIx, colIx)
+    self.mox.VerifyAll()
+
+
+class IssueCommentTest(cros_test_lib.TestCase):
+  """Test creating comments."""
+
+  def testInit(self):
+    title = 'Greetings, Earthlings'
+    text = 'I come in peace.'
+    ic = gdata_lib.IssueComment(title, text)
+
+    self.assertEquals(title, ic.title)
+    self.assertEquals(text, ic.text)
+
+
+def createTrackerIssue(tid, labels, owner, status, content, title):
+  """Build a fake Tracker issue object with the attribute layout tests expect.
+
+  Args:
+    tid: Issue number (int); embedded in the fake issue URL.
+    labels: Iterable of label strings.
+    owner: Owner username string.
+    status: Status string.
+    content: Issue body text.
+    title: Issue title string.
+
+  Returns:
+    A cros_test_lib.EasyAttr mimicking a gdata tracker issue entry.
+  """
+  tissue = cros_test_lib.EasyAttr()
+  tissue.id = cros_test_lib.EasyAttr(
+      text='http://www/some/path/%d' % tid)
+  tissue.label = [cros_test_lib.EasyAttr(text=l) for l in labels]
+  tissue.owner = cros_test_lib.EasyAttr(
+      username=cros_test_lib.EasyAttr(text=owner))
+  tissue.status = cros_test_lib.EasyAttr(text=status)
+  tissue.content = cros_test_lib.EasyAttr(text=content)
+  tissue.title = cros_test_lib.EasyAttr(text=title)
+  return tissue
+
+
+class IssueTest(cros_test_lib.MoxTestCase):
+  """Test creating a bug."""
+
+  def testInitOverride(self):
+    """Test that keyword overrides are stored as Issue attributes."""
+    owner = 'somedude@chromium.org'
+    status = 'Assigned'
+    issue = gdata_lib.Issue(owner=owner, status=status)
+
+    self.assertEquals(owner, issue.owner)
+    self.assertEquals(status, issue.status)
+
+  def testInitInvalidOverride(self):
+    """Test that an unrecognized keyword raises ValueError."""
+    self.assertRaises(ValueError, gdata_lib.Issue,
+                      foobar='NotARealAttr')
+
+  def testInitFromTracker(self):
+    """Test populating an Issue from a (fake) tracker issue object."""
+    # Need to create a dummy Tracker Issue object.
+    tissue_id = 123
+    tissue_labels = ['Iteration-10', 'Effort-2']
+    tissue_owner = 'thedude@chromium.org'
+    tissue_status = 'Available'
+    tissue_content = 'The summary message'
+    tissue_title = 'The Big Title'
+
+    tissue = createTrackerIssue(tid=tissue_id, labels=tissue_labels,
+                                owner=tissue_owner, status=tissue_status,
+                                content=tissue_content, title=tissue_title)
+
+    mocked_issue = self.mox.CreateMock(gdata_lib.Issue)
+
+    # Replay script
+    mocked_issue.GetTrackerIssueComments(tissue_id, 'TheProject').AndReturn([])
+    self.mox.ReplayAll()
+
+    # Verify
+    gdata_lib.Issue.InitFromTracker(mocked_issue, tissue, 'TheProject')
+    self.mox.VerifyAll()
+    # Each tracker field should be unpacked onto the Issue attributes.
+    self.assertEquals(tissue_id, mocked_issue.id)
+    self.assertEquals(tissue_labels, mocked_issue.labels)
+    self.assertEquals(tissue_owner, mocked_issue.owner)
+    self.assertEquals(tissue_status, mocked_issue.status)
+    self.assertEquals(tissue_content, mocked_issue.summary)
+    self.assertEquals(tissue_title, mocked_issue.title)
+    self.assertEquals([], mocked_issue.comments)
+
+
+class TrackerCommTest(cros_test_lib.MoxOutputTestCase):
+  """Test bug tracker communication."""
+
+  def testConnectEmail(self):
+    source = 'TheSource'
+    token = 'TheToken'
+    creds = gdata_lib.Creds()
+    creds.user = 'dude'
+    creds.password = 'shhh'
+    creds.tracker_auth_token = None
+    self.mox.StubOutClassWithMocks(gd_ph_client, 'ProjectHostingClient')
+    mocked_tcomm = self.mox.CreateMock(gdata_lib.TrackerComm)
+
+    def set_token(*_args, **_kwargs):
+      mocked_itclient.auth_token = cros_test_lib.EasyAttr(token_string=token)
+
+    # Replay script
+    mocked_itclient = gd_ph_client.ProjectHostingClient()
+    mocked_itclient.ClientLogin(
+        creds.user, creds.password, source=source, service='code',
+        account_type='GOOGLE').WithSideEffects(set_token)
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      gdata_lib.TrackerComm.Connect(mocked_tcomm, creds, 'TheProject',
+                                    source=source)
+    self.mox.VerifyAll()
+    self.assertEquals(mocked_tcomm.it_client, mocked_itclient)
+
+  def testConnectToken(self):
+    source = 'TheSource'
+    token = 'TheToken'
+    creds = gdata_lib.Creds()
+    creds.user = 'dude'
+    creds.password = 'shhh'
+    creds.tracker_auth_token = token
+    mocked_tcomm = self.mox.CreateMock(gdata_lib.TrackerComm)
+
+    self.mox.StubOutClassWithMocks(gd_ph_client, 'ProjectHostingClient')
+    self.mox.StubOutClassWithMocks(gdata.gauth, 'ClientLoginToken')
+
+    # Replay script
+    mocked_itclient = gd_ph_client.ProjectHostingClient()
+    mocked_token = gdata.gauth.ClientLoginToken(token)
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      gdata_lib.TrackerComm.Connect(mocked_tcomm, creds, 'TheProject',
+                                    source=source)
+    self.mox.VerifyAll()
+    self.assertEquals(mocked_tcomm.it_client, mocked_itclient)
+    self.assertEquals(mocked_itclient.auth_token, mocked_token)
+
+  def testGetTrackerIssueById(self):
+    mocked_itclient = self.mox.CreateMock(gd_ph_client.ProjectHostingClient)
+    tcomm = gdata_lib.TrackerComm()
+    tcomm.it_client = mocked_itclient
+    tcomm.project_name = 'TheProject'
+
+    self.mox.StubOutClassWithMocks(gd_ph_client, 'Query')
+    self.mox.StubOutClassWithMocks(gdata_lib, 'Issue')
+    self.mox.StubOutWithMock(gdata_lib.Issue, 'InitFromTracker')
+
+    issue_id = 12345
+    feed = cros_test_lib.EasyAttr(entry=['hi', 'there'])
+
+    # Replay script
+    mocked_query = gd_ph_client.Query(issue_id=str(issue_id))
+    mocked_itclient.get_issues(
+        'TheProject', query=mocked_query).AndReturn(feed)
+    mocked_issue = gdata_lib.Issue()
+    mocked_issue.InitFromTracker(feed.entry[0], 'TheProject')
+    self.mox.ReplayAll()
+
+    # Verify
+    issue = tcomm.GetTrackerIssueById(issue_id)
+    self.mox.VerifyAll()
+    self.assertEquals(mocked_issue, issue)
+
+  def testGetTrackerIssuesByText(self):
+    author = 'TheAuthor'
+    project = 'TheProject'
+    text = "find me"
+
+    # Set up the fake tracker issue.
+    tissue_id = 1
+    tissue_labels = ['auto-filed']
+    tissue_owner = 'someone@chromium.org'
+    tissue_status = 'Available'
+    tissue_content = 'find me in body'
+    tissue_title = 'find me in title'
+
+    tissue = createTrackerIssue(tid=tissue_id, labels=tissue_labels,
+                                owner=tissue_owner, status=tissue_status,
+                                content=tissue_content, title=tissue_title)
+
+    issue = gdata_lib.Issue(id=tissue_id, labels=tissue_labels,
+                            owner=tissue_owner, status=tissue_status,
+                            title=tissue_title, summary=tissue_content)
+
+    # This will get called as part of Issue.InitFromTracker.
+    self.mox.StubOutWithMock(gdata_lib.Issue, 'GetTrackerIssueComments')
+
+    mocked_itclient = self.mox.CreateMock(gd_ph_client.ProjectHostingClient)
+
+    tcomm = gdata_lib.TrackerComm()
+    tcomm.author = author
+    tcomm.it_client = mocked_itclient
+    tcomm.project_name = project
+
+    # We expect a Query instance to be passed into get_issues.
+    # pylint: disable=E1120
+    self.mox.StubOutClassWithMocks(gd_ph_client, 'Query')
+
+    mocked_query = gd_ph_client.Query(text_query='%s is:open' % text)
+    feed = cros_test_lib.EasyAttr(entry=[tissue])
+    mocked_itclient.get_issues(project, query=mocked_query).AndReturn(feed)
+    gdata_lib.Issue.GetTrackerIssueComments(1, project).AndReturn([])
+
+    self.mox.ReplayAll()
+
+    issues = tcomm.GetTrackerIssuesByText(text)
+    self.assertEquals(issues, [issue])
+
+  def testCreateTrackerIssue(self):
+    author = 'TheAuthor'
+    mocked_itclient = self.mox.CreateMock(gd_ph_client.ProjectHostingClient)
+    mocked_tcomm = self.mox.CreateMock(gdata_lib.TrackerComm)
+    mocked_tcomm.author = author
+    mocked_tcomm.it_client = mocked_itclient
+    mocked_tcomm.project_name = 'TheProject'
+
+    issue = cros_test_lib.EasyAttr(title='TheTitle',
+                                   summary='TheSummary',
+                                   status='TheStatus',
+                                   owner='TheOwner',
+                                   labels='TheLabels',
+                                   ccs=[])
+
+    # Replay script
+    issue_id = cros_test_lib.EasyAttr(
+        id=cros_test_lib.EasyAttr(text='foo/bar/123'))
+    mocked_itclient.add_issue(
+        project_name='TheProject',
+        title=issue.title,
+        content=issue.summary,
+        author=author,
+        status=issue.status,
+        owner=issue.owner,
+        labels=issue.labels,
+        ccs=issue.ccs).AndReturn(issue_id)
+    self.mox.ReplayAll()
+
+    # Verify
+    result = gdata_lib.TrackerComm.CreateTrackerIssue(mocked_tcomm, issue)
+    self.mox.VerifyAll()
+    self.assertEquals(123, result)
+
+  def testAppendTrackerIssueById(self):
+    author = 'TheAuthor'
+    project_name = 'TheProject'
+    mocked_itclient = self.mox.CreateMock(gd_ph_client.ProjectHostingClient)
+    mocked_tcomm = self.mox.CreateMock(gdata_lib.TrackerComm)
+    mocked_tcomm.author = author
+    mocked_tcomm.it_client = mocked_itclient
+    mocked_tcomm.project_name = project_name
+
+    issue_id = 54321
+    comment = 'TheComment'
+
+    # Replay script
+    mocked_itclient.update_issue(project_name=project_name,
+                                 issue_id=issue_id,
+                                 author=author,
+                                 comment=comment,
+                                 owner=None)
+    self.mox.ReplayAll()
+
+    # Verify
+    result = gdata_lib.TrackerComm.AppendTrackerIssueById(mocked_tcomm,
+                                                          issue_id, comment)
+    self.mox.VerifyAll()
+    self.assertEquals(issue_id, result)
+
+
+class RetrySpreadsheetsServiceTest(cros_test_lib.MoxOutputTestCase):
+  """Test Spreadsheet server retry helper."""
+
+  def testRequest(self):
+    """Test that calling request method invokes _RetryRequest wrapper."""
+    # pylint: disable=W0212
+
+    self.mox.StubOutWithMock(gdata_lib.RetrySpreadsheetsService,
+                             '_RetryRequest')
+
+    # Use a real RetrySpreadsheetsService object rather than a mocked
+    # one, because the .request method only exists if __init__ is run.
+    # Also split up __new__ and __init__ in order to grab the original
+    # rss.request method (inherited from base class at that point).
+    rss = gdata_lib.RetrySpreadsheetsService.__new__(
+        gdata_lib.RetrySpreadsheetsService)
+    orig_request = rss.request
+    rss.__init__()
+
+    args = ('GET', 'http://foo.bar')
+
+    # This is the replay script for the test.
+    # _RetryRequest should be called with the original (unwrapped) method.
+    gdata_lib.RetrySpreadsheetsService._RetryRequest(
+        orig_request, *args).AndReturn('wrapped')
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    retval = rss.request(*args)
+    self.mox.VerifyAll()
+    self.assertEquals('wrapped', retval)
+
+  def _TestHttpClientRetryRequest(self, statuses):
+    """Test retry logic in http_client request during ProgrammaticLogin.
+
+    |statuses| is list of http codes to simulate, where 200 means success.
+    """
+    # Only the final status determines overall success/failure.
+    expect_success = statuses[-1] == 200
+
+    self.mox.StubOutWithMock(atom.http.ProxiedHttpClient, 'request')
+    rss = gdata_lib.RetrySpreadsheetsService()
+
+    args = ('POST', 'https://www.google.com/accounts/ClientLogin')
+    def _read():
+      return 'Some response text'
+
+    # This is the replay script for the test.
+    # Simulate the return codes in statuses.
+    for status in statuses:
+      retstatus = cros_test_lib.EasyAttr(status=status, read=_read)
+      atom.http.ProxiedHttpClient.request(
+          *args, data=mox.IgnoreArg(),
+          headers=mox.IgnoreArg()).AndReturn(retstatus)
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    with self.OutputCapturer():
+      if expect_success:
+        rss.ProgrammaticLogin()
+      else:
+        self.assertRaises(gdata.service.Error, rss.ProgrammaticLogin)
+      self.mox.VerifyAll()
+
+    if not expect_success:
+      # Retries did not help, request still failed.
+      regexp = re.compile(r'^Giving up on HTTP request')
+      self.AssertOutputContainsWarning(regexp=regexp)
+    elif len(statuses) > 1:
+      # Warning expected if retries were needed.
+      self.AssertOutputContainsWarning()
+    else:
+      # First try worked, expect no warnings.
+      self.AssertOutputContainsWarning(invert=True)
+
+  def testHttpClientRetryRequest(self):
+    """Success on first try; no retries, no warnings."""
+    self._TestHttpClientRetryRequest([200])
+
+  def testHttpClientRetryRequest403(self):
+    """One 403 then success; a retry warning is expected."""
+    self._TestHttpClientRetryRequest([403, 200])
+
+  def testHttpClientRetryRequest403x2(self):
+    """Two 403s then success."""
+    self._TestHttpClientRetryRequest([403, 403, 200])
+
+  def testHttpClientRetryRequest403x3(self):
+    """Three 403s then success."""
+    self._TestHttpClientRetryRequest([403, 403, 403, 200])
+
+  def testHttpClientRetryRequest403x4(self):
+    """Four 403s then success."""
+    self._TestHttpClientRetryRequest([403, 403, 403, 403, 200])
+
+  def testHttpClientRetryRequest403x5(self):
+    # This one should exhaust the retries.
+    self._TestHttpClientRetryRequest([403, 403, 403, 403, 403])
+
+  def _TestRetryRequest(self, statuses):
+    """Test retry logic for request method.
+
+    |statuses| is list of http codes to simulate, where 200 means success.
+    """
+    expect_success = statuses[-1] == 200
+    expected_status_index = len(statuses) - 1 if expect_success else 0
+
+    mocked_ss = self.mox.CreateMock(gdata_lib.RetrySpreadsheetsService)
+    args = ('GET', 'http://foo.bar')
+
+    # This is the replay script for the test.
+    for ix, status in enumerate(statuses):
+      # Add index of status to track which status the request function is
+      # returning.  It is expected to return the last return status if
+      # successful (retries or not), but first return status if failed.
+      retval = cros_test_lib.EasyAttr(status=status, index=ix)
+      mocked_ss.request(*args).AndReturn(retval)
+
+    self.mox.ReplayAll()
+
+    # This is the test verification.
+    with self.OutputCapturer():
+      # pylint: disable=W0212
+      rval = gdata_lib.RetrySpreadsheetsService._RetryRequest(mocked_ss,
+                                                              mocked_ss.request,
+                                                              *args)
+      self.mox.VerifyAll()
+      self.assertEquals(statuses[expected_status_index], rval.status)
+      self.assertEquals(expected_status_index, rval.index)
+
+    if not expect_success:
+      # Retries did not help, request still failed.
+      regexp = re.compile(r'^Giving up on HTTP request')
+      self.AssertOutputContainsWarning(regexp=regexp)
+    elif expected_status_index > 0:
+      # Warning expected if retries were needed.
+      self.AssertOutputContainsWarning()
+    else:
+      # First try worked, expect no warnings.
+      self.AssertOutputContainsWarning(invert=True)
+
+  def testRetryRequest(self):
+    """Success on first try; no retries, no warnings."""
+    self._TestRetryRequest([200])
+
+  def testRetryRequest403(self):
+    """One 403 then success; a retry warning is expected."""
+    self._TestRetryRequest([403, 200])
+
+  def testRetryRequest403x2(self):
+    """Two 403s then success."""
+    self._TestRetryRequest([403, 403, 200])
+
+  def testRetryRequest403x3(self):
+    """Three 403s then success."""
+    self._TestRetryRequest([403, 403, 403, 200])
+
+  def testRetryRequest403x4(self):
+    """Four 403s then success."""
+    self._TestRetryRequest([403, 403, 403, 403, 200])
+
+  def testRetryRequest403x5(self):
+    # This one should exhaust the retries.
+    self._TestRetryRequest([403, 403, 403, 403, 403])
diff --git a/lib/gerrit.py b/lib/gerrit.py
new file mode 100644
index 0000000..2a1155d
--- /dev/null
+++ b/lib/gerrit.py
@@ -0,0 +1,503 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing helper class and methods for interacting with Gerrit."""
+
+from __future__ import print_function
+
+import operator
+
+from chromite.cbuildbot import config_lib
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import gob_util
+from chromite.lib import parallel
+from chromite.lib import patch as cros_patch
+
+
+site_config = config_lib.GetConfig()
+
+
+class GerritException(Exception):
+  """Base exception for all gerrit failures raised by this module."""
+
+
+class QueryHasNoResults(GerritException):
+  """Exception thrown when a query returns no results."""
+
+
+class QueryNotSpecific(GerritException):
+  """Thrown when a query needs to identify one CL, but matched multiple."""
+
+
+class GerritHelper(object):
+  """Helper class to manage interaction with the gerrit-on-borg service."""
+
+  # Maximum number of results to return per query.
+  _GERRIT_MAX_QUERY_RETURN = 500
+
+  # Number of processes to run in parallel when fetching from Gerrit. The
+  # Gerrit team recommended keeping this small to avoid putting too much
+  # load on the server.
+  _NUM_PROCESSES = 10
+
+  # Fields that appear in gerrit change query results.
+  MORE_CHANGES = '_more_changes'
+
+  def __init__(self, host, remote, print_cmd=True):
+    """Initialize.
+
+    Args:
+      host: Hostname (without protocol prefix) of the gerrit server.
+      remote: The symbolic name of a known remote git host,
+          taken from cbuildbot.constants.
+      print_cmd: Determines whether all RunCommand invocations will be echoed.
+          Set to False for quiet operation.
+    """
+    self.host = host
+    self.remote = remote
+    # Coerce to a real bool so any truthy/falsy value works for callers.
+    self.print_cmd = bool(print_cmd)
+    # Cached gerrit server version; populated lazily elsewhere (not in this
+    # view) -- TODO confirm where it is filled in.
+    self._version = None
+
+  @classmethod
+  def FromRemote(cls, remote, **kwargs):
+    """Return a GerritHelper for a known remote (internal or external).
+
+    Args:
+      remote: site_config.params.INTERNAL_REMOTE or EXTERNAL_REMOTE.
+      **kwargs: Passed through to the GerritHelper constructor.
+
+    Raises:
+      ValueError: If |remote| is not one of the two known remotes.
+    """
+    if remote == site_config.params.INTERNAL_REMOTE:
+      host = site_config.params.INTERNAL_GERRIT_HOST
+    elif remote == site_config.params.EXTERNAL_REMOTE:
+      host = site_config.params.EXTERNAL_GERRIT_HOST
+    else:
+      raise ValueError('Remote %s not supported.' % remote)
+    return cls(host, remote, **kwargs)
+
+  @classmethod
+  def FromGob(cls, gob, **kwargs):
+    """Return a helper for a GoB instance.
+
+    Args:
+      gob: Short name of the gerrit-on-borg instance; its review host is
+          derived via site_config.params.GOB_HOST.
+      **kwargs: Passed through to the GerritHelper constructor.
+    """
+    host = site_config.params.GOB_HOST % ('%s-review' % gob)
+    # TODO(phobbs) this will be wrong when "gob" isn't in GOB_REMOTES.
+    # We should get rid of remotes altogether and just use the host.
+    return cls(host, site_config.params.GOB_REMOTES.get(gob, gob), **kwargs)
+
+  def SetReviewers(self, change, add=(), remove=(), dryrun=False):
+    """Modify the list of reviewers on a gerrit change.
+
+    Args:
+      change: ChangeId or change number for a gerrit review.
+      add: Sequence of email addresses of reviewers to add.
+      remove: Sequence of email addresses of reviewers to remove.
+      dryrun: If True, only print what would have been done.
+    """
+    if add:
+      if dryrun:
+        logging.info('Would have added %s to "%s"', add, change)
+      else:
+        gob_util.AddReviewers(self.host, change, add)
+    if remove:
+      if dryrun:
+        logging.info('Would have removed %s to "%s"', remove, change)
+      else:
+        gob_util.RemoveReviewers(self.host, change, remove)
+
+  def GetChangeDetail(self, change_num):
+    """Return detailed information about a gerrit change.
+
+    Args:
+      change_num: A gerrit change number.
+
+    Returns:
+      Whatever gob_util.GetChangeDetail returns for this change, requested
+      with the CURRENT_REVISION and CURRENT_COMMIT output options.
+    """
+    return gob_util.GetChangeDetail(
+        self.host, change_num, o_params=('CURRENT_REVISION', 'CURRENT_COMMIT'))
+
+  def GrabPatchFromGerrit(self, project, change, commit, must_match=True):
+    """Return a cros_patch.GerritPatch representing a gerrit change.
+
+    Args:
+      project: The name of the gerrit project for the change.
+      change: A ChangeId or gerrit number for the change.
+      commit: The git commit hash for a patch associated with the change.
+      must_match: Raise an exception if the change is not found.
+
+    Returns:
+      The single matching record from QuerySingleRecord (a GerritPatch,
+      or None if not found and must_match is False).
+    """
+    # Narrow the search by project and exact commit hash.
+    query = {'project': project, 'commit': commit, 'must_match': must_match}
+    return self.QuerySingleRecord(change, **query)
+
+  def IsChangeCommitted(self, change, must_match=False):
+    """Check whether a gerrit change has been merged.
+
+    Args:
+      change: A gerrit change number.
+      must_match: Raise an exception if the change is not found.  If this is
+          False, then a missing change will return None.
+    """
+    change = gob_util.GetChange(self.host, change)
+    if not change:
+      if must_match:
+        raise QueryHasNoResults('Could not query for change %s' % change)
+      return
+    return change.get('status') == 'MERGED'
+
+  def GetLatestSHA1ForBranch(self, project, branch):
+    """Return the git hash at the tip of a branch.
+
+    Args:
+      project: The gerrit project name.
+      branch: Branch name (without the refs/heads/ prefix).
+
+    Returns:
+      The SHA1 string at the branch tip, or None if the ls-remote command
+      fails or produces no output (the error is logged, not raised).
+    """
+    url = '%s://%s/%s' % (gob_util.GIT_PROTOCOL, self.host, project)
+    cmd = ['ls-remote', url, 'refs/heads/%s' % branch]
+    try:
+      result = git.RunGit('.', cmd, print_cmd=self.print_cmd)
+      if result:
+        # ls-remote output is "<sha1>\t<ref>"; take the hash.
+        return result.output.split()[0]
+    except cros_build_lib.RunCommandError:
+      logging.error('Command "%s" failed.', cros_build_lib.CmdToStr(cmd),
+                    exc_info=True)
+
+  def QuerySingleRecord(self, change=None, **kwargs):
+    """Free-form query of a gerrit change that expects a single result.
+
+    Args:
+      change: A gerrit change number.
+      **kwargs:
+        dryrun: Don't query the gerrit server; just return None.
+        must_match: Raise an exception if the query comes back empty.  If this
+            is False, an unsatisfied query will return None.
+        Refer to Query() docstring for remaining arguments.
+
+    Returns:
+      If kwargs['raw'] == True, return a python dict representing the
+      change; otherwise, return a cros_patch.GerritPatch object.
+    """
+    query_kwds = kwargs
+    dryrun = query_kwds.get('dryrun')
+    must_match = query_kwds.pop('must_match', True)
+    results = self.Query(change, **query_kwds)
+    if dryrun:
+      return None
+    elif not results:
+      if must_match:
+        raise QueryHasNoResults('Query %s had no results' % (change,))
+      return None
+    elif len(results) != 1:
+      raise QueryNotSpecific('Query %s returned too many results: %s'
+                             % (change, results))
+    return results[0]
+
+  def Query(self, change=None, sort=None, current_patch=True, options=(),
+            dryrun=False, raw=False, start=None, bypass_cache=True, **kwargs):
+    """Free-form query for gerrit changes.
+
+    Args:
+      change: ChangeId, git commit hash, or gerrit number for a change.
+      sort: A functor to extract a sort key from a cros_patch.GerritChange
+          object, for sorting results..  If this is None, results will not be
+          sorted.
+      current_patch: If True, ask the gerrit server for extra information about
+          the latest uploaded patch.
+      options: Deprecated.
+      dryrun: If True, don't query the gerrit server; return an empty list.
+      raw: If True, return a list of python dict's representing the query
+          results.  Otherwise, return a list of cros_patch.GerritPatch.
+      start: Offset in the result set to start at.
+      bypass_cache: Query each change to make sure data is up to date.
+      kwargs: A dict of query parameters, as described here:
+        https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
+
+    Returns:
+      A list of python dicts or cros_patch.GerritChange.
+    """
+    query_kwds = kwargs
+    if options:
+      raise GerritException('"options" argument unsupported on gerrit-on-borg.')
+    url_prefix = gob_util.GetGerritFetchUrl(self.host)
+    # All possible params are documented at
+    # pylint: disable=C0301
+    # https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
+    o_params = ['DETAILED_ACCOUNTS', 'ALL_REVISIONS', 'DETAILED_LABELS']
+    if current_patch:
+      o_params.extend(['CURRENT_COMMIT', 'CURRENT_REVISION'])
+
+    if change and cros_patch.ParseGerritNumber(change) and not query_kwds:
+      if dryrun:
+        logging.info('Would have run gob_util.GetChangeDetail(%s, %s)',
+                     self.host, change)
+        return []
+      change = self.GetChangeDetail(change)
+      if change is None:
+        return []
+      patch_dict = cros_patch.GerritPatch.ConvertQueryResults(change, self.host)
+      if raw:
+        return [patch_dict]
+      return [cros_patch.GerritPatch(patch_dict, self.remote, url_prefix)]
+
+    # TODO: We should allow querying using a cros_patch.PatchQuery
+    # object directly.
+    if change and cros_patch.ParseSHA1(change):
+      # Use commit:sha1 for accurate query results (crbug.com/358381).
+      kwargs['commit'] = change
+      change = None
+    elif change and cros_patch.ParseChangeID(change):
+      # Use change:change-id for accurate query results (crbug.com/357876).
+      kwargs['change'] = change
+      change = None
+    elif change and cros_patch.ParseFullChangeID(change):
+      change = cros_patch.ParseFullChangeID(change)
+      kwargs['change'] = change.change_id
+      kwargs['project'] = change.project
+      kwargs['branch'] = change.branch
+      change = None
+
+    if change and query_kwds.get('change'):
+      raise GerritException('Bad query params: provided a change-id-like query,'
+                            ' and a "change" search parameter')
+
+    if dryrun:
+      logging.info('Would have run gob_util.QueryChanges(%s, %s, '
+                   'first_param=%s, limit=%d)', self.host, repr(query_kwds),
+                   change, self._GERRIT_MAX_QUERY_RETURN)
+      return []
+
+    start = 0
+    moar = gob_util.QueryChanges(
+        self.host, query_kwds, first_param=change, start=start,
+        limit=self._GERRIT_MAX_QUERY_RETURN, o_params=o_params)
+    result = list(moar)
+    while moar and self.MORE_CHANGES in moar[-1]:
+      start += len(moar)
+      moar = gob_util.QueryChanges(
+          self.host, query_kwds, first_param=change, start=start,
+          limit=self._GERRIT_MAX_QUERY_RETURN, o_params=o_params)
+      result.extend(moar)
+
+    # NOTE: Query results are served from the gerrit cache, which may be stale.
+    # To make sure the patch information is accurate, re-request each query
+    # result directly, circumventing the cache.  For reference:
+    #   https://code.google.com/p/chromium/issues/detail?id=302072
+    if bypass_cache:
+      result = self.GetMultipleChangeDetail([x['_number'] for x in result])
+
+    result = [cros_patch.GerritPatch.ConvertQueryResults(
+        x, self.host) for x in result]
+    if sort:
+      result = sorted(result, key=operator.itemgetter(sort))
+    if raw:
+      return result
+    return [cros_patch.GerritPatch(x, self.remote, url_prefix) for x in result]
+
+  def GetMultipleChangeDetail(self, changes):
+    """Query the gerrit server for multiple changes using GetChangeDetail.
+
+    Fetches are fanned out across a small process pool (_NUM_PROCESSES) to
+    limit load on the gerrit server.
+
+    Args:
+      changes: A sequence of gerrit change numbers.
+
+    Returns:
+      A list of the raw output of GetChangeDetail.
+    """
+    # Each task gets a single-element argument list for GetChangeDetail.
+    inputs = [[change] for change in changes]
+    return parallel.RunTasksInProcessPool(self.GetChangeDetail, inputs,
+                                          processes=self._NUM_PROCESSES)
+
+  def QueryMultipleCurrentPatchset(self, changes):
+    """Query the gerrit server for multiple changes.
+
+    This is a generator (note the yield below): nothing is fetched until
+    the result is iterated.
+
+    Args:
+      changes: A sequence of gerrit change numbers.
+
+    Yields:
+      Tuples of (change number, cros_patch.GerritPatch), one per input
+      change.  (The original docstring claimed a list of GerritPatch.)
+
+    Raises:
+      GerritException: If any change could not be found on the server.
+    """
+    if not changes:
+      return
+
+    url_prefix = gob_util.GetGerritFetchUrl(self.host)
+    results = self.GetMultipleChangeDetail(changes)
+    for change, change_detail in zip(changes, results):
+      if not change_detail:
+        raise GerritException('Change %s not found on server %s.'
+                              % (change, self.host))
+      patch_dict = cros_patch.GerritPatch.ConvertQueryResults(
+          change_detail, self.host)
+      yield change, cros_patch.GerritPatch(patch_dict, self.remote, url_prefix)
+
+  @staticmethod
+  def _to_changenum(change):
+    """Unequivocally return a gerrit change number.
+
+    The argument may either be an number, which will be returned unchanged;
+    or an instance of GerritPatch, in which case its gerrit number will be
+    returned.
+    """
+    # TODO(davidjames): Deprecate the ability to pass in strings to these
+    # functions -- API users should just pass in a GerritPatch instead or use
+    # the gob_util APIs directly.
+    if isinstance(change, cros_patch.GerritPatch):
+      return change.gerrit_number
+
+    return change
+
+  def SetReview(self, change, msg=None, labels=None, dryrun=False):
+    """Update the review labels on a gerrit change.
+
+    Args:
+      change: A gerrit change number.
+      msg: A text comment to post to the review.
+      labels: A dict of label/value to set on the review.
+      dryrun: If True, don't actually update the review.
+    """
+    if not msg and not labels:
+      return
+    if dryrun:
+      if msg:
+        logging.info('Would have added message "%s" to change "%s".', msg,
+                     change)
+      if labels:
+        for key, val in labels.iteritems():
+          logging.info('Would have set label "%s" to "%s" for change "%s".',
+                       key, val, change)
+      return
+    gob_util.SetReview(self.host, self._to_changenum(change),
+                       msg=msg, labels=labels, notify='ALL')
+
+  def SetTopic(self, change, topic, dryrun=False):
+    """Update the topic on a gerrit change.
+
+    Args:
+      change: A gerrit change number.
+      topic: The topic to set the review to.
+      dryrun: If True, don't actually set the topic.
+    """
+    if dryrun:
+      logging.info('Would have set topic "%s" for change "%s".', topic, change)
+      return
+    gob_util.SetTopic(self.host, self._to_changenum(change), topic=topic)
+
+  def RemoveReady(self, change, dryrun=False):
+    """Set the 'Commit-Queue' and 'Trybot-Ready' labels on a |change| to '0'."""
+    if dryrun:
+      logging.info('Would have reset Commit-Queue label for %s', change)
+      return
+    gob_util.ResetReviewLabels(self.host, self._to_changenum(change),
+                               label='Commit-Queue', notify='OWNER')
+    gob_util.ResetReviewLabels(self.host, self._to_changenum(change),
+                               label='Trybot-Ready', notify='OWNER')
+
+  def SubmitChange(self, change, dryrun=False):
+    """Land (merge) a gerrit change using the JSON API.
+
+    Args:
+      change: A cros_patch.GerritPatch.  Unlike most methods here this
+        needs the patch object (not a bare change number), because the
+        submit is pinned to the patch's exact sha1 revision.
+      dryrun: If True, don't actually submit the change.
+    """
+    if dryrun:
+      logging.info('Would have submitted change %s', change)
+      return
+    gob_util.SubmitChange(self.host, change.gerrit_number, revision=change.sha1)
+
+  def AbandonChange(self, change, dryrun=False):
+    """Mark a gerrit change as 'Abandoned'."""
+    if dryrun:
+      logging.info('Would have abandoned change %s', change)
+      return
+    gob_util.AbandonChange(self.host, self._to_changenum(change))
+
+  def RestoreChange(self, change, dryrun=False):
+    """Re-activate a previously abandoned gerrit change."""
+    if dryrun:
+      logging.info('Would have restored change %s', change)
+      return
+    gob_util.RestoreChange(self.host, self._to_changenum(change))
+
+  def DeleteDraft(self, change, dryrun=False):
+    """Delete a draft patch set."""
+    if dryrun:
+      logging.info('Would have deleted draft patch set %s', change)
+      return
+    gob_util.DeleteDraft(self.host, self._to_changenum(change))
+
+  def GetAccount(self):
+    """Get information about the user account.
+
+    Returns:
+      The raw account info returned by gob_util.GetAccount for self.host.
+    """
+    return gob_util.GetAccount(self.host)
+
+
+def GetGerritPatchInfo(patches):
+  """Query Gerrit server for patch information using string queries.
+
+  Args:
+    patches: A list of patch IDs to query. Internal patches start with a '*'.
+
+  Returns:
+    A list of GerritPatch objects describing each patch.  Only the first
+    instance of a requested patch is returned.
+
+  Raises:
+    PatchException if a patch can't be found.
+    ValueError if a query string cannot be converted to a PatchQuery object.
+  """
+  return GetGerritPatchInfoWithPatchQueries(
+      [cros_patch.ParsePatchDep(p) for p in patches])
+
+
+def GetGerritPatchInfoWithPatchQueries(patches):
+  """Query Gerrit server for patch information using PatchQuery objects.
+
+  Args:
+    patches: A list of PatchQuery objects to query.
+
+  Returns:
+    A list of GerritPatch objects describing each patch, in the order the
+    patches were requested.  Only the first instance of a requested patch
+    is returned.
+
+  Raises:
+    PatchException if a patch can't be found.
+  """
+  seen = set()
+  results = []
+  # Record each query string's position in |patches| so results gathered
+  # remote-by-remote below can be restored to the caller's order at the end.
+  order = {k.ToGerritQueryText(): idx for (idx, k) in enumerate(patches)}
+  for remote in site_config.params.CHANGE_PREFIX.keys():
+    helper = GetGerritHelper(remote)
+    raw_ids = [x.ToGerritQueryText() for x in patches if x.remote == remote]
+    for k, change in helper.QueryMultipleCurrentPatchset(raw_ids):
+      # return a unique list, while maintaining the ordering of the first
+      # seen instance of each patch.  Do this to ensure whatever ordering
+      # the user is trying to enforce, we honor; lest it break on
+      # cherry-picking.
+      if change.id not in seen:
+        results.append((order[k], change))
+        seen.add(change.id)
+
+  return [change for _idx, change in sorted(results)]
+
+
+def GetGerritHelper(remote=None, gob=None, **kwargs):
+  """Return a GerritHelper instance for interacting with the given remote."""
+  if gob:
+    return GerritHelper.FromGob(gob, **kwargs)
+  else:
+    return GerritHelper.FromRemote(remote, **kwargs)
+
+
+def GetGerritHelperForChange(change):
+  """Return a usable GerritHelper instance for this change.
+
+  If you need a GerritHelper for a specific change, get it via this
+  function.
+
+  Args:
+    change: An object with a .remote attribute (e.g. cros_patch.GerritPatch).
+
+  Returns:
+    A GerritHelper for the change's remote.
+  """
+  return GetGerritHelper(change.remote)
+
+
+def GetCrosInternal(**kwargs):
+  """Convenience method for accessing private ChromeOS gerrit.
+
+  Args:
+    **kwargs: Extra keyword arguments forwarded to GetGerritHelper.
+  """
+  return GetGerritHelper(site_config.params.INTERNAL_REMOTE, **kwargs)
+
+
+def GetCrosExternal(**kwargs):
+  """Convenience method for accessing public ChromiumOS gerrit.
+
+  Args:
+    **kwargs: Extra keyword arguments forwarded to GetGerritHelper.
+  """
+  return GetGerritHelper(site_config.params.EXTERNAL_REMOTE, **kwargs)
+
+
+def GetChangeRef(change_number, patchset=None):
+  """Given a change number, return the refs/changes/* space for it.
+
+  Args:
+    change_number: The gerrit change number you want a refspec for.
+    patchset: If given it must either be an integer or '*'.  When given,
+      the returned refspec is for that exact patchset.  If '*' is given, it's
+      used for pulling down all patchsets for that change.
+
+  Returns:
+    A git refspec.
+  """
+  change_number = int(change_number)
+  s = 'refs/changes/%02i/%i' % (change_number % 100, change_number)
+  if patchset is not None:
+    s += '/%s' % ('*' if patchset == '*' else int(patchset))
+  return s
diff --git a/lib/gerrit_unittest b/lib/gerrit_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/gerrit_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/gerrit_unittest.py b/lib/gerrit_unittest.py
new file mode 100644
index 0000000..3b22106
--- /dev/null
+++ b/lib/gerrit_unittest.py
@@ -0,0 +1,388 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for GerritHelper."""
+
+from __future__ import print_function
+
+import getpass
+import httplib
+import os
+import mock
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import validation_pool
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import gerrit
+from chromite.lib import git
+from chromite.lib import gob_util
+from chromite.lib import osutils
+from chromite.lib import retry_util
+from chromite.lib import timeout_util
+
+
+site_config = config_lib.GetConfig()
+
+
+# NOTE: The following test cases are designed to run as part of the release
+# qualification process for the googlesource.com servers:
+#   GerritHelperTest
+# Any new test cases must be manually added to the qualification test suite.
+
+
+# pylint: disable=W0212,R0904
+@cros_test_lib.NetworkTest()
+class GerritHelperTest(cros_test_lib.GerritTestCase):
+  """Unittests for GerritHelper."""
+
+  def _GetHelper(self, remote=site_config.params.EXTERNAL_REMOTE):
+    # Helper factory; defaults to the external gerrit remote.
+    return gerrit.GetGerritHelper(remote)
+
+  def createPatch(self, clone_path, project, **kwargs):
+    """Create a patch in the given git checkout and upload it to gerrit.
+
+    Args:
+      clone_path: The directory on disk of the git clone.
+      project: The associated project.
+      **kwargs: Additional keyword arguments to pass to createCommit.
+
+    Returns:
+      A GerritPatch object.
+    """
+    (revision, changeid) = self.createCommit(clone_path, **kwargs)
+    self.uploadChange(clone_path)
+    def PatchQuery():
+      return self._GetHelper().QuerySingleRecord(
+          change=changeid, project=project, branch='master')
+    # 'RetryException' is needed because there is a race condition between
+    # uploading the change and querying for the change.
+    gpatch = retry_util.RetryException(
+        gerrit.QueryHasNoResults,
+        5,
+        PatchQuery,
+        sleep=1)
+    self.assertEqual(gpatch.change_id, changeid)
+    self.assertEqual(gpatch.revision, revision)
+    return gpatch
+
+  def test001SimpleQuery(self):
+    """Create one independent and three dependent changes, then query them."""
+    project = self.createProject('test001')
+    clone_path = self.cloneProject(project)
+    (head_sha1, head_changeid) = self.createCommit(clone_path)
+    for idx in xrange(3):
+      cros_build_lib.RunCommand(
+          ['git', 'checkout', head_sha1], cwd=clone_path, quiet=True)
+      self.createCommit(clone_path, filename='test-file-%d.txt' % idx)
+      self.uploadChange(clone_path)
+    helper = self._GetHelper()
+    changes = helper.Query(owner='self', project=project)
+    self.assertEqual(len(changes), 4)
+    changes = helper.Query(head_changeid, project=project, branch='master')
+    self.assertEqual(len(changes), 1)
+    self.assertEqual(changes[0].change_id, head_changeid)
+    self.assertEqual(changes[0].sha1, head_sha1)
+    change = helper.QuerySingleRecord(
+        head_changeid, project=project, branch='master')
+    self.assertTrue(change)
+    self.assertEqual(change.change_id, head_changeid)
+    self.assertEqual(change.sha1, head_sha1)
+    change = helper.GrabPatchFromGerrit(project, head_changeid, head_sha1)
+    self.assertTrue(change)
+    self.assertEqual(change.change_id, head_changeid)
+    self.assertEqual(change.sha1, head_sha1)
+
+  @mock.patch.object(gerrit.GerritHelper, '_GERRIT_MAX_QUERY_RETURN', 2)
+  def test002GerritQueryTruncation(self):
+    """Verify that we detect gerrit truncating our query, and handle it."""
+    project = self.createProject('test002')
+    clone_path = self.cloneProject(project)
+    # Using a shell loop is markedly faster than running a python loop.
+    num_changes = 5
+    cmd = ('for ((i=0; i<%i; i=i+1)); do '
+           'echo "Another day, another dollar." > test-file-$i.txt; '
+           'git add test-file-$i.txt; '
+           'git commit -m "Test commit $i."; '
+           'done' % num_changes)
+    cros_build_lib.RunCommand(cmd, shell=True, cwd=clone_path, quiet=True)
+    self.uploadChange(clone_path)
+    helper = self._GetHelper()
+    changes = helper.Query(project=project)
+    self.assertEqual(len(changes), num_changes)
+
+  def test003IsChangeCommitted(self):
+    """Tests that we can parse a json to check if a change is committed."""
+    project = self.createProject('test003')
+    clone_path = self.cloneProject(project)
+    gpatch = self.createPatch(clone_path, project)
+    helper = self._GetHelper()
+    helper.SetReview(gpatch.gerrit_number, labels={'Code-Review':'+2'})
+    helper.SubmitChange(gpatch)
+    self.assertTrue(helper.IsChangeCommitted(gpatch.gerrit_number))
+
+    gpatch = self.createPatch(clone_path, project)
+    self.assertFalse(helper.IsChangeCommitted(gpatch.gerrit_number))
+
+  def test004GetLatestSHA1ForBranch(self):
+    """Verifies that we can query the tip-of-tree commit in a git repository."""
+    project = self.createProject('test004')
+    clone_path = self.cloneProject(project)
+    for _ in xrange(5):
+      (master_sha1, _) = self.createCommit(clone_path)
+    self.pushBranch(clone_path, 'master')
+    for _ in xrange(5):
+      (testbranch_sha1, _) = self.createCommit(clone_path)
+    self.pushBranch(clone_path, 'testbranch')
+    helper = self._GetHelper()
+    self.assertEqual(
+        helper.GetLatestSHA1ForBranch(project, 'master'),
+        master_sha1)
+    self.assertEqual(
+        helper.GetLatestSHA1ForBranch(project, 'testbranch'),
+        testbranch_sha1)
+
+  def _ChooseReviewers(self):
+    # Pick two real accounts that are not the current user.
+    all_reviewers = set(['dborowitz@google.com', 'sop@google.com',
+                         'jrn@google.com'])
+    ret = list(all_reviewers.difference(['%s@google.com' % getpass.getuser()]))
+    self.assertGreaterEqual(len(ret), 2)
+    return ret
+
+  def test005SetReviewers(self):
+    """Verify that we can set reviewers on a CL."""
+    project = self.createProject('test005')
+    clone_path = self.cloneProject(project)
+    gpatch = self.createPatch(clone_path, project)
+    emails = self._ChooseReviewers()
+    helper = self._GetHelper()
+    helper.SetReviewers(gpatch.gerrit_number, add=(
+        emails[0], emails[1]))
+    reviewers = gob_util.GetReviewers(helper.host, gpatch.gerrit_number)
+    self.assertEqual(len(reviewers), 2)
+    self.assertItemsEqual(
+        [r['email'] for r in reviewers],
+        [emails[0], emails[1]])
+    helper.SetReviewers(gpatch.gerrit_number,
+                        remove=(emails[0],))
+    reviewers = gob_util.GetReviewers(helper.host, gpatch.gerrit_number)
+    self.assertEqual(len(reviewers), 1)
+    self.assertEqual(reviewers[0]['email'], emails[1])
+
+  def test006PatchNotFound(self):
+    """Test case where ChangeID isn't found on the server."""
+    changeids = ['I' + ('deadbeef' * 5), 'I' + ('beadface' * 5)]
+    self.assertRaises(gerrit.GerritException, gerrit.GetGerritPatchInfo,
+                      changeids)
+    self.assertRaises(gerrit.GerritException, gerrit.GetGerritPatchInfo,
+                      ['*' + cid for cid in changeids])
+    # Change ID sequence starts at 1000.
+    gerrit_numbers = ['123', '543']
+    self.assertRaises(gerrit.GerritException, gerrit.GetGerritPatchInfo,
+                      gerrit_numbers)
+    self.assertRaises(gerrit.GerritException, gerrit.GetGerritPatchInfo,
+                      ['*' + num for num in gerrit_numbers])
+
+  def test007VagueQuery(self):
+    """Verify GerritHelper complains if an ID matches multiple changes."""
+    project = self.createProject('test007')
+    clone_path = self.cloneProject(project)
+    (sha1, _) = self.createCommit(clone_path)
+    (_, changeid) = self.createCommit(clone_path)
+    self.uploadChange(clone_path, 'master')
+    cros_build_lib.RunCommand(
+        ['git', 'checkout', sha1], cwd=clone_path, quiet=True)
+    self.createCommit(clone_path)
+    self.pushBranch(clone_path, 'testbranch')
+    # Re-use the same Change-Id on a second branch so the bare ID is
+    # ambiguous on the server.
+    self.createCommit(
+        clone_path, msg='Test commit.\n\nChange-Id: %s' % changeid)
+    self.uploadChange(clone_path, 'testbranch')
+    self.assertRaises(gerrit.GerritException, gerrit.GetGerritPatchInfo,
+                      [changeid])
+
+  def test008Queries(self):
+    """Verify assorted query operations."""
+    project = self.createProject('test008')
+    clone_path = self.cloneProject(project)
+    gpatch = self.createPatch(clone_path, project)
+    helper = self._GetHelper()
+
+    # Multi-queries with one valid and one invalid term should raise.
+    invalid_change_id = 'I1234567890123456789012345678901234567890'
+    self.assertRaises(gerrit.GerritException, gerrit.GetGerritPatchInfo,
+                      [invalid_change_id, gpatch.change_id])
+    self.assertRaises(gerrit.GerritException, gerrit.GetGerritPatchInfo,
+                      [gpatch.change_id, invalid_change_id])
+    self.assertRaises(gerrit.GerritException, gerrit.GetGerritPatchInfo,
+                      ['9876543', gpatch.gerrit_number])
+    self.assertRaises(gerrit.GerritException, gerrit.GetGerritPatchInfo,
+                      [gpatch.gerrit_number, '9876543'])
+
+    # Simple query by project/changeid/sha1.
+    patch_info = helper.GrabPatchFromGerrit(gpatch.project, gpatch.change_id,
+                                            gpatch.sha1)
+    self.assertEqual(patch_info.gerrit_number, gpatch.gerrit_number)
+    self.assertEqual(patch_info.remote, site_config.params.EXTERNAL_REMOTE)
+
+    # Simple query by gerrit number to external remote.
+    patch_info = gerrit.GetGerritPatchInfo([gpatch.gerrit_number])
+    self.assertEqual(patch_info[0].gerrit_number, gpatch.gerrit_number)
+    self.assertEqual(patch_info[0].remote, site_config.params.EXTERNAL_REMOTE)
+
+    # Simple query by gerrit number to internal remote.
+    patch_info = gerrit.GetGerritPatchInfo(['*' + gpatch.gerrit_number])
+    self.assertEqual(patch_info[0].gerrit_number, gpatch.gerrit_number)
+    self.assertEqual(patch_info[0].remote, site_config.params.INTERNAL_REMOTE)
+
+    # Query to external server by gerrit number and change-id which refer to
+    # the same change should return one result.
+    fq_changeid = '~'.join((gpatch.project, 'master', gpatch.change_id))
+    patch_info = gerrit.GetGerritPatchInfo([gpatch.gerrit_number, fq_changeid])
+    self.assertEqual(len(patch_info), 1)
+    self.assertEqual(patch_info[0].gerrit_number, gpatch.gerrit_number)
+    self.assertEqual(patch_info[0].remote, site_config.params.EXTERNAL_REMOTE)
+
+    # Query to internal server by gerrit number and change-id which refer to
+    # the same change should return one result.
+    patch_info = gerrit.GetGerritPatchInfo(
+        ['*' + gpatch.gerrit_number, '*' + fq_changeid])
+    self.assertEqual(len(patch_info), 1)
+    self.assertEqual(patch_info[0].gerrit_number, gpatch.gerrit_number)
+    self.assertEqual(patch_info[0].remote, site_config.params.INTERNAL_REMOTE)
+
+  def test009SubmitOutdatedCommit(self):
+    """Tests submitting an outdated patchset is rejected with a conflict."""
+    project = self.createProject('test009')
+    clone_path = self.cloneProject(project, 'p1')
+
+    # Create a change.
+    gpatch1 = self.createPatch(clone_path, project)
+
+    # Update the change.
+    gpatch2 = self.createPatch(clone_path, project, amend=True)
+
+    # Make sure we're talking about the same change.
+    self.assertEqual(gpatch1.change_id, gpatch2.change_id)
+
+    # Try submitting the out-of-date change.
+    helper = self._GetHelper()
+    helper.SetReview(gpatch1.gerrit_number, labels={'Code-Review':'+2'})
+    with self.assertRaises(gob_util.GOBError) as ex:
+      helper.SubmitChange(gpatch1)
+    self.assertEqual(ex.exception.http_status, httplib.CONFLICT)
+    self.assertFalse(helper.IsChangeCommitted(gpatch1.gerrit_number))
+
+    # Try submitting the up-to-date change.
+    helper.SubmitChange(gpatch2)
+    helper.IsChangeCommitted(gpatch2.gerrit_number)
+
+  def test010SubmitBatchUsingGit(self):
+    """Tests submitting changes in batch via Git."""
+    # NOTE(review): project name 'test012' does not match test010 --
+    # confirm this is intentional (e.g. avoiding a name collision).
+    project = self.createProject('test012')
+
+    helper = self._GetHelper()
+    repo = self.cloneProject(project, 'p1')
+    initial_patch = self.createPatch(repo, project, msg='Init')
+    helper.SetReview(initial_patch.gerrit_number, labels={'Code-Review':'+2'})
+    helper.SubmitChange(initial_patch)
+    # GoB does not guarantee that the change will be in "merged" state
+    # atomically after the /Submit endpoint is called.
+    timeout_util.WaitForReturnTrue(
+        lambda: helper.IsChangeCommitted(initial_patch.gerrit_number),
+        timeout=60)
+
+    patchA = self.createPatch(repo, project,
+                              msg='Change A',
+                              filename='a.txt')
+
+    # Amend patchA locally so its uploaded patchset is out of date.
+    osutils.WriteFile(os.path.join(repo, 'aoeu.txt'), 'asdf')
+    git.RunGit(repo, ['add', 'aoeu.txt'])
+    git.RunGit(repo, ['commit', '--amend', '--reuse-message=HEAD'])
+    sha1 = git.RunGit(repo,
+                      ['rev-list', '-n1', 'HEAD']).output.strip()
+
+    patchA.sha1 = sha1
+    patchA.revision = sha1
+
+    patchB = self.createPatch(repo, project,
+                              msg='Change B',
+                              filename='b.txt')
+
+    pool = validation_pool.ValidationPool(
+        overlays=constants.PUBLIC,
+        build_root='',
+        build_number=0,
+        builder_name='',
+        is_master=False,
+        dryrun=False)
+
+    by_repo = {repo: [patchA, patchB]}
+    pool.SubmitLocalChanges(
+        by_repo,
+        reason="Testing submitting changes in batch via Git.")
+
+    self.assertTrue(helper.IsChangeCommitted(patchB.gerrit_number))
+    self.assertTrue(helper.IsChangeCommitted(patchA.gerrit_number))
+    for patch in [patchA, patchB]:
+      self.assertTrue(helper.IsChangeCommitted(patch.gerrit_number))
+
+  def test011ResetReviewLabels(self):
+    """Tests that we can remove a code review label."""
+    project = self.createProject('test011')
+    helper = self._GetHelper()
+    clone_path = self.cloneProject(project, 'p1')
+    gpatch = self.createPatch(clone_path, project, msg='Init')
+    helper.SetReview(gpatch.gerrit_number, labels={'Code-Review':'+2'})
+    gob_util.ResetReviewLabels(helper.host, gpatch.gerrit_number,
+                               label='Code-Review', notify='OWNER')
+
+  def test012ApprovalTime(self):
+    """Approval timestamp should be reset when a new patchset is created."""
+    # Create a change.
+    # NOTE(review): project name 'test013' does not match test012 --
+    # confirm this is intentional.
+    project = self.createProject('test013')
+    helper = self._GetHelper()
+    clone_path = self.cloneProject(project, 'p1')
+    gpatch = self.createPatch(clone_path, project, msg='Init')
+    helper.SetReview(gpatch.gerrit_number, labels={'Code-Review':'+2'})
+
+    # Update the change.
+    new_msg = 'New %s' % gpatch.commit_message
+    cros_build_lib.RunCommand(
+        ['git', 'commit', '--amend', '-m', new_msg], cwd=clone_path, quiet=True)
+    self.uploadChange(clone_path)
+    gpatch2 = self._GetHelper().QuerySingleRecord(
+        change=gpatch.change_id, project=gpatch.project, branch='master')
+    self.assertNotEqual(gpatch2.approval_timestamp, 0)
+    self.assertNotEqual(gpatch2.commit_timestamp, 0)
+    self.assertEqual(gpatch2.approval_timestamp, gpatch2.commit_timestamp)
+
+
+@cros_test_lib.NetworkTest()
+class DirectGerritHelperTest(cros_test_lib.TestCase):
+  """Unittests for GerritHelper that use the real Chromium instance."""
+
+  # A big list of real changes.
+  # Entries prefixed with '*' refer to the internal gerrit instance; the
+  # tests below filter them out before querying the external host.
+  CHANGES = ['235893', '*189165', '231790', '*190026', '231647', '234645']
+
+  def testMultipleChangeDetail(self):
+    """Test ordering of results in GetMultipleChangeDetail"""
+    changes = [x for x in self.CHANGES if not x.startswith('*')]
+    helper = gerrit.GetCrosExternal()
+    results = list(helper.GetMultipleChangeDetail([str(x) for x in changes]))
+    gerrit_numbers = [str(x['_number']) for x in results]
+    self.assertEqual(changes, gerrit_numbers)
+
+  def testQueryMultipleCurrentPatchset(self):
+    """Test ordering of results in QueryMultipleCurrentPatchset"""
+    changes = [x for x in self.CHANGES if not x.startswith('*')]
+    helper = gerrit.GetCrosExternal()
+    results = list(helper.QueryMultipleCurrentPatchset(changes))
+    self.assertEqual(changes, [x.gerrit_number for _, x in results])
+    self.assertEqual(changes, [x for x, _ in results])
+
+  def testGetGerritPatchInfo(self):
+    """Test ordering of results in GetGerritPatchInfo"""
+    changes = self.CHANGES
+    results = list(gerrit.GetGerritPatchInfo(changes))
+    self.assertEqual(changes, [x.gerrit_number_str for x in results])
diff --git a/lib/git.py b/lib/git.py
new file mode 100644
index 0000000..694dc80
--- /dev/null
+++ b/lib/git.py
@@ -0,0 +1,1419 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common functions for interacting with git and repo."""
+
+from __future__ import print_function
+
+import collections
+import errno
+import hashlib
+import os
+import re
+import string
+import time
+from xml import sax
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import retry_util
+
+
+# Site parameters (remotes, branchable projects, etc.) used below.
+site_config = config_lib.GetConfig()
+
+
+# Retry a git operation if git returns an error response with any of these
+# messages. It's all observed 'bad' GoB responses so far.
+GIT_TRANSIENT_ERRORS = (
+    # crbug.com/285832
+    r'! \[remote rejected\].*\(error in hook\)',
+
+    # crbug.com/289932
+    r'! \[remote rejected\].*\(failed to lock\)',
+
+    # crbug.com/307156
+    r'! \[remote rejected\].*\(error in Gerrit backend\)',
+
+    # crbug.com/285832
+    r'remote error: Internal Server Error',
+
+    # crbug.com/294449
+    r'fatal: Couldn\'t find remote ref ',
+
+    # crbug.com/220543
+    r'git fetch_pack: expected ACK/NAK, got',
+
+    # crbug.com/189455
+    r'protocol error: bad pack header',
+
+    # crbug.com/202807
+    r'The remote end hung up unexpectedly',
+
+    # crbug.com/298189
+    r'TLS packet with unexpected length was received',
+
+    # crbug.com/187444
+    r'RPC failed; result=\d+, HTTP code = \d+',
+
+    # crbug.com/315421, b2/18249316
+    r'The requested URL returned error: 5',
+
+    # crbug.com/388876
+    r'Connection timed out',
+
+    # crbug.com/451458, b/19202011
+    r'repository cannot accept new pushes; contact support',
+)
+
+# Compiled once at import time; matched case-insensitively against git output.
+GIT_TRANSIENT_ERRORS_RE = re.compile('|'.join(GIT_TRANSIENT_ERRORS),
+                                     re.IGNORECASE)
+
+# Default knobs for retrying transient git failures.
+DEFAULT_RETRY_INTERVAL = 3
+DEFAULT_RETRIES = 10
+
+
+class GitException(Exception):
+  """An exception related to git operations in this module."""
+
+
+class RemoteRef(object):
+  """Object representing a remote ref.
+
+  A remote ref encapsulates both a remote (e.g., 'origin',
+  'https://chromium.googlesource.com/chromiumos/chromite.git', etc.) and a ref
+  name (e.g., 'refs/heads/master').
+  """
+
+  def __init__(self, remote, ref):
+    self.remote = remote
+    self.ref = ref
+
+
+def FindRepoDir(path):
+  """Returns the nearest higher-level repo dir from the specified path.
+
+  Args:
+    path: The path to use.  (NOTE(review): the old docstring claimed this
+      "defaults to cwd", but there is no default value here -- callers must
+      pass a path; confirm whether osutils.FindInPathParents handles None.)
+
+  Returns:
+    The result of osutils.FindInPathParents for a '.repo' directory at or
+    above |path| (presumably the matching path, or a falsy value when no
+    .repo directory is found -- see osutils).
+  """
+  return osutils.FindInPathParents(
+      '.repo', path, test_func=os.path.isdir)
+
+
+def FindRepoCheckoutRoot(path):
+  """Get the root of your repo managed checkout."""
+  repo_dir = FindRepoDir(path)
+  if repo_dir:
+    return os.path.dirname(repo_dir)
+  else:
+    return None
+
+
+def IsSubmoduleCheckoutRoot(path, remote, url):
+  """Tests to see if a directory is the root of a git submodule checkout.
+
+  Args:
+    path: The directory to test.
+    remote: The remote to compare the |url| with.
+    url: The exact URL the |remote| needs to be pointed at.
+  """
+  if os.path.isdir(path):
+    remote_url = cros_build_lib.RunCommand(
+        ['git', '--git-dir', path, 'config', 'remote.%s.url' % remote],
+        redirect_stdout=True, debug_level=logging.DEBUG,
+        error_code_ok=True).output.strip()
+    if remote_url == url:
+      return True
+  return False
+
+
+def IsGitRepo(cwd):
+  """Checks if there's a git repo rooted at a directory."""
+  return os.path.isdir(os.path.join(cwd, '.git'))
+
+
+def IsGitRepositoryCorrupted(cwd):
+  """Verify that the specified git repository is not corrupted.
+
+  Args:
+    cwd: The git repository to verify.
+
+  Returns:
+    True if the repository is corrupted.
+  """
+  # --no-dangling suppresses (harmless) dangling-object reports.
+  cmd = ['fsck', '--no-progress', '--no-dangling']
+  try:
+    RunGit(cwd, cmd)
+    return False
+  except cros_build_lib.RunCommandError as ex:
+    # NOTE(review): any RunGit failure (not only fsck corruption findings)
+    # is reported as corruption here.
+    logging.warning(str(ex))
+    return True
+
+
+# Characters allowed in a hexadecimal (sha1) string.
+_HEX_CHARS = frozenset(string.hexdigits)
+
+
+def IsSHA1(value, full=True):
+  """Returns True if the given value looks like a sha1.
+
+  If full is True, then it must be full length- 40 chars.  If False, >=6, and
+  <40.
+  """
+  if not all(x in _HEX_CHARS for x in value):
+    return False
+  l = len(value)
+  if full:
+    return l == 40
+  return l >= 6 and l <= 40
+
+
+def IsRefsTags(value):
+  """Return True if the given value looks like a tag.
+
+  Currently this is identified via refs/tags/ prefixing.
+  """
+  return value.startswith('refs/tags/')
+
+
+def GetGitRepoRevision(cwd, branch='HEAD'):
+  """Find the revision of a branch.
+
+  Defaults to current branch.
+
+  Args:
+    cwd: The git repository to query.
+    branch: The branch or ref to resolve (defaults to 'HEAD').
+
+  Returns:
+    The revision the ref resolves to, as printed by 'git rev-parse'.
+  """
+  return RunGit(cwd, ['rev-parse', branch]).output.strip()
+
+
+def DoesCommitExistInRepo(cwd, commit):
+  """Determine whether a commit (SHA1 or ref) exists in a repo.
+
+  Args:
+    cwd: A directory within the project repo.
+    commit: The commit to look for. This can be a SHA1 or it can be a ref.
+
+  Returns:
+    True if the commit exists in the repo.
+  """
+  try:
+    # The trailing '--' disambiguates |commit| from a path name.
+    RunGit(cwd, ['rev-list', '-n1', commit, '--'])
+  except cros_build_lib.RunCommandError as e:
+    # git exits with 128 on fatal errors, such as an unknown revision.
+    if e.result.returncode == 128:
+      return False
+    raise
+  return True
+
+
+def GetCurrentBranch(cwd):
+  """Returns current branch of a repo, and None if repo is on detached HEAD."""
+  try:
+    # With -q, 'git symbolic-ref' exits 1 (quietly) when HEAD is detached.
+    ret = RunGit(cwd, ['symbolic-ref', '-q', 'HEAD'])
+    return StripRefsHeads(ret.output.strip(), False)
+  except cros_build_lib.RunCommandError as e:
+    if e.result.returncode != 1:
+      raise
+    return None
+
+
+def StripRefsHeads(ref, strict=True):
+  """Remove leading 'refs/heads/' from a ref name.
+
+  If strict is True, an Exception is thrown if the ref doesn't start with
+  refs/heads.  If strict is False, the original ref is returned.
+  """
+  if not ref.startswith('refs/heads/') and strict:
+    raise Exception('Ref name %s does not start with refs/heads/' % ref)
+
+  return ref.replace('refs/heads/', '')
+
+
+def StripRefs(ref):
+  """Remove leading 'refs/heads', 'refs/remotes/[^/]+/' from a ref name."""
+  ref = StripRefsHeads(ref, False)
+  if ref.startswith('refs/remotes/'):
+    return ref.split('/', 3)[-1]
+  return ref
+
+
+def NormalizeRef(ref):
+  """Convert git branch refs into fully qualified form."""
+  if ref and not ref.startswith('refs/'):
+    ref = 'refs/heads/%s' % ref
+  return ref
+
+
+def NormalizeRemoteRef(remote, ref):
+  """Convert git branch refs into fully qualified remote form."""
+  if ref:
+    # Support changing local ref to remote ref, or changing the remote
+    # for a remote ref.
+    ref = StripRefs(ref)
+
+    if not ref.startswith('refs/'):
+      ref = 'refs/remotes/%s/%s' % (remote, ref)
+
+  return ref
+
+
+class ProjectCheckout(dict):
+  """Attributes of a given project in the manifest checkout.
+
+  TODO(davidjames): Convert this into an ordinary object instead of a dict.
+  """
+
+  def __init__(self, attrs):
+    """Constructor.
+
+    Args:
+      attrs: The attributes associated with this checkout, as a dictionary.
+    """
+    # ProjectCheckout is a dict subclass; seed it with the manifest attrs.
+    dict.__init__(self, attrs)
+
+  def AssertPushable(self):
+    """Verify that it is safe to push changes to this repository."""
+    if not self['pushable']:
+      remote = self['remote']
+      raise AssertionError('Remote %s is not pushable.' % (remote,))
+
+  def IsBranchableProject(self):
+    """Return whether we can create a branch in the repo for this project.
+
+    Returns:
+      Truthy when branch creation is allowed: either the explicit 'create'
+      branching attribute is set, or (legacy path) the project name matches
+      the remote's BRANCHABLE_PROJECTS pattern (an re.match result).
+    """
+    # Backwards compatibility is an issue here. Older manifests used a heuristic
+    # based on where the project is hosted. We must continue supporting it.
+    # (crbug.com/470690)
+    # Prefer explicit tagging.
+    if (self[constants.MANIFEST_ATTR_BRANCHING] ==
+        constants.MANIFEST_ATTR_BRANCHING_CREATE):
+      return True
+    if self[constants.MANIFEST_ATTR_BRANCHING] in (
+        constants.MANIFEST_ATTR_BRANCHING_PIN,
+        constants.MANIFEST_ATTR_BRANCHING_TOT):
+      return False
+
+    # Old heuristic.
+    if (self['remote'] not in site_config.params.CROS_REMOTES or
+        self['remote'] not in site_config.params.BRANCHABLE_PROJECTS):
+      return False
+    return re.match(site_config.params.BRANCHABLE_PROJECTS[self['remote']],
+                    self['name'])
+
+  def IsPinnableProject(self):
+    """Return whether we should pin to a revision on the CrOS branch."""
+    # Backwards compatibility is an issue here. Older manifests used a different
+    # tag to spcify pinning behaviour. Support both for now. (crbug.com/470690)
+    # Prefer explicit tagging.
+    if self[constants.MANIFEST_ATTR_BRANCHING] != '':
+      return (self[constants.MANIFEST_ATTR_BRANCHING] ==
+              constants.MANIFEST_ATTR_BRANCHING_PIN)
+
+    # Old heuristic.
+    return cros_build_lib.BooleanShellValue(self.get('pin'), True)
+
+  def IsPatchable(self):
+    """Returns whether this project is patchable.
+
+    For projects that get checked out at multiple paths and/or branches,
+    this method can be used to determine which project path a patch
+    should be applied to.
+
+    Returns:
+      True if the project corresponding to the checkout is patchable.
+    """
+    # There are 2 ways we determine if a project is patchable.
+    # - For an unversioned manifest, if the 'revision' is a raw SHA1 hash
+    #   and not a named branch, assume it is a pinned project path and can not
+    #   be patched.
+    # - For a versioned manifest (generated via repo -r), repo will set
+    #   revision to the actual git sha1 ref, and add an 'upstream'
+    #   field corresponding to branch name in the original manifest. For
+    #   a project with a SHA1 'revision' but no named branch in the
+    #   'upstream' field, assume it can not be patched.
+    return not IsSHA1(self.get('upstream', self['revision']))
+
+  def GetPath(self, absolute=False):
+    """Get the path to the checkout.
+
+    Args:
+      absolute: If True, return an absolute path. If False,
+        return a path relative to the repo root.
+    """
+    return self['local_path'] if absolute else self['path']
+
+
class Manifest(object):
  """SAX handler that parses the manifest document.

  Properties:
    checkouts_by_name: A dictionary mapping the names for <project> tags to a
      list of ProjectCheckout objects.
    checkouts_by_path: A dictionary mapping paths for <project> tags to a single
      ProjectCheckout object.
    default: The attributes of the <default> tag.
    includes: A list of XML files that should be pulled in to the manifest.
      These includes are represented as a list of (name, path) tuples.
    manifest_include_dir: If given, this is where to start looking for
      include targets.
    projects: DEPRECATED. A dictionary mapping the names for <project> tags to
      a single ProjectCheckout object. This is now deprecated, since each
      project can map to multiple ProjectCheckout objects.
    remotes: A dictionary mapping <remote> tags to the associated attributes.
    revision: The revision of the manifest repository. If not specified, this
      will be TOT.
  """

  # Class-wide cache of parsed instances, keyed by the md5 of the manifest
  # content; populated and consulted by Cached() below.
  _instance_cache = {}

  def __init__(self, source, manifest_include_dir=None):
    """Initialize this instance.

    Args:
      source: The path to the manifest to parse.  May be a file handle.
      manifest_include_dir: If given, this is where to start looking for
        include targets.
    """
    self.source = source
    self.default = {}
    # Parser state: the <project> element currently being processed, so that
    # child <annotation> tags can be folded into it in _EndElement.
    self._current_project_path = None
    self._current_project_name = None
    self._annotations = {}
    self.checkouts_by_path = {}
    self.checkouts_by_name = {}
    self.remotes = {}
    self.includes = []
    self.revision = None
    self.manifest_include_dir = manifest_include_dir
    self._RunParser(source)
    # Freeze the include list once parsing (and any nested includes) is done.
    self.includes = tuple(self.includes)

  def _RequireAttr(self, attr, attrs):
    """Assert that |attr| is present in the element attributes |attrs|."""
    name = attrs.get('name')
    assert attr in attrs, ('%s is missing a "%s" attribute; attrs: %r' %
                           (name, attr, attrs))

  def _RunParser(self, source, finalize=True):
    """Parse |source| with SAX; finalize project data unless told not to.

    finalize=False is used for nested <include> parsing so that defaults
    are only mixed in once, after the top-level document is fully read.
    """
    parser = sax.make_parser()
    handler = sax.handler.ContentHandler()
    handler.startElement = self._StartElement
    handler.endElement = self._EndElement
    parser.setContentHandler(handler)
    parser.parse(source)
    if finalize:
      self._FinalizeAllProjectData()

  def _StartElement(self, name, attrs):
    """Stores the default manifest properties and per-project overrides."""
    # Copy the SAX attribute object into a plain mutable dict.
    attrs = dict(attrs.items())
    if name == 'default':
      self.default = attrs
    elif name == 'remote':
      self._RequireAttr('name', attrs)
      attrs.setdefault('alias', attrs['name'])
      self.remotes[attrs['name']] = attrs
    elif name == 'project':
      self._RequireAttr('name', attrs)
      self._current_project_path = attrs.get('path', attrs['name'])
      self._current_project_name = attrs['name']
      self.checkouts_by_path[self._current_project_path] = attrs
      checkout = self.checkouts_by_name.setdefault(self._current_project_name,
                                                   [])
      checkout.append(attrs)
      # Reset annotation accumulation for this new <project> element.
      self._annotations = {}
    elif name == 'annotation':
      self._RequireAttr('name', attrs)
      self._RequireAttr('value', attrs)
      self._annotations[attrs['name']] = attrs['value']
    elif name == 'manifest':
      self.revision = attrs.get('revision')
    elif name == 'include':
      if self.manifest_include_dir is None:
        raise OSError(
            errno.ENOENT, 'No manifest_include_dir given, but an include was '
            'encountered; attrs=%r' % (attrs,))
      # Include is calculated relative to the manifest that has the include;
      # thus set the path temporarily to the dirname of the target.
      original_include_dir = self.manifest_include_dir
      include_path = os.path.realpath(
          os.path.join(original_include_dir, attrs['name']))
      self.includes.append((attrs['name'], include_path))
      # Recurse into the included manifest; finalization happens once at the
      # top level.
      self._RunParser(include_path, finalize=False)

  def _EndElement(self, name):
    """Store any child element properties into the parent element."""
    if name == 'project':
      assert (self._current_project_name is not None and
              self._current_project_path is not None), (
                  'Malformed xml: Encountered unmatched </project>')
      # Fold accumulated <annotation> key/values into every checkout entry
      # recorded for this project.
      self.checkouts_by_path[self._current_project_path].update(
          self._annotations)
      for checkout in self.checkouts_by_name[self._current_project_name]:
        checkout.update(self._annotations)
      self._current_project_path = None
      self._current_project_name = None

  def _FinalizeAllProjectData(self):
    """Rewrite projects mixing defaults in and adding our attributes."""
    for path_data in self.checkouts_by_path.itervalues():
      self._FinalizeProjectData(path_data)

  def _FinalizeProjectData(self, attrs):
    """Sets up useful properties for a project.

    Args:
      attrs: The attribute dictionary of a <project> tag.
    """
    # Mix in <default> tag values for anything the project didn't specify.
    for key in ('remote', 'revision'):
      attrs.setdefault(key, self.default.get(key))

    remote = attrs['remote']
    assert remote in self.remotes, ('%s: %s not in %s' %
                                    (self.source, remote, self.remotes))
    remote_name = attrs['remote_alias'] = self.remotes[remote]['alias']

    # 'repo manifest -r' adds an 'upstream' attribute to the project tag for the
    # manifests it generates.  We can use the attribute to get a valid branch
    # instead of a sha1 for these types of manifests.
    upstream = attrs.get('upstream', attrs['revision'])
    if IsSHA1(upstream):
      # The current version of repo we use has a bug: When you create a new
      # repo checkout from a revlocked manifest, the 'upstream' attribute will
      # just point at a SHA1. The default revision will still be correct,
      # however. For now, return the default revision as our best guess as to
      # what the upstream branch for this repository would be. This guess may
      # sometimes be wrong, but it's correct for all of the repositories where
      # we need to push changes (e.g., the overlays).
      # TODO(davidjames): Either fix the repo bug, or update our logic here to
      # check the manifest repository to find the right tracking branch.
      upstream = self.default.get('revision', 'refs/heads/master')

    attrs['tracking_branch'] = 'refs/remotes/%s/%s' % (
        remote_name, StripRefs(upstream),
    )

    # A project is pushable only if its remote is one we have push URLs for.
    attrs['pushable'] = remote in site_config.params.GIT_REMOTES
    if attrs['pushable']:
      attrs['push_remote'] = remote
      attrs['push_remote_url'] = site_config.params.GIT_REMOTES[remote]
      attrs['push_url'] = '%s/%s' % (attrs['push_remote_url'], attrs['name'])
    # Normalize the 'groups' attribute (comma or space separated) into a
    # frozenset; every project is implicitly in the 'default' group.
    groups = set(attrs.get('groups', 'default').replace(',', ' ').split())
    groups.add('default')
    attrs['groups'] = frozenset(groups)

    # Compute the local ref space.
    # Sanitize a couple path fragments to simplify assumptions in this
    # class, and in consuming code.
    attrs.setdefault('path', attrs['name'])
    for key in ('name', 'path'):
      attrs[key] = os.path.normpath(attrs[key])

    if constants.MANIFEST_ATTR_BRANCHING in attrs:
      assert (attrs[constants.MANIFEST_ATTR_BRANCHING] in
              constants.MANIFEST_ATTR_BRANCHING_ALL)
    else:
      # '' means "no explicit branching mode"; consumers fall back to
      # heuristics (see ProjectCheckout.IsBranchableProject).
      attrs[constants.MANIFEST_ATTR_BRANCHING] = ''

  @staticmethod
  def _GetManifestHash(source, ignore_missing=False):
    """Return the md5 hexdigest of |source| (a filepath or file handle).

    Args:
      source: Path to the manifest, or a seekable file handle.
      ignore_missing: If True and |source| is a missing path, fall through
        (returns None implicitly for the path case).
    """
    if isinstance(source, basestring):
      try:
        # TODO(build): convert this to osutils.ReadFile once these
        # classes are moved out into their own module (if possible;
        # may still be cyclic).
        with open(source, 'rb') as f:
          return hashlib.md5(f.read()).hexdigest()
      except EnvironmentError as e:
        if e.errno != errno.ENOENT or not ignore_missing:
          raise
    # File-handle case: hash the full content, restoring the position so the
    # handle can still be parsed afterwards.
    source.seek(0)
    md5 = hashlib.md5(source.read()).hexdigest()
    source.seek(0)
    return md5

  @classmethod
  def Cached(cls, source, manifest_include_dir=None):
    """Return an instance, reusing an existing one if possible.

    May be a seekable filehandle, or a filepath.
    See __init__ for an explanation of these arguments.
    """

    md5 = cls._GetManifestHash(source)
    obj, sources = cls._instance_cache.get(md5, (None, ()))
    if manifest_include_dir is None and sources:
      # We're being invoked in a different way than the orignal
      # caching; disregard the cached entry.
      # Most likely, the instantiation will explode; let it fly.
      obj, sources = None, ()
    # Invalidate the cached object if any include target changed on disk.
    for include_target, target_md5 in sources:
      if cls._GetManifestHash(include_target, True) != target_md5:
        obj = None
        break
    if obj is None:
      obj = cls(source, manifest_include_dir=manifest_include_dir)
      # Record the hash of every include so future lookups can detect staleness.
      sources = tuple((abspath, cls._GetManifestHash(abspath))
                      for (target, abspath) in obj.includes)
      cls._instance_cache[md5] = (obj, sources)

    return obj
+
+
class ManifestCheckout(Manifest):
  """A Manifest Handler for a specific manifest checkout."""

  # Separate cache from the base class; keyed by (repo root, manifest md5).
  _instance_cache = {}

  def __init__(self, path, manifest_path=None, search=True):
    """Initialize this instance.

    Args:
      path: Path into a manifest checkout (doesn't have to be the root).
      manifest_path: If supplied, the manifest to use.  Else the manifest
        in the root of the checkout is used.  May be a seekable file handle.
      search: If True, the path can point into the repo, and the root will
        be found automatically.  If False, the path *must* be the root, else
        an OSError ENOENT will be thrown.

    Raises:
      OSError: if a failure occurs.
    """
    self.root, manifest_path = self._NormalizeArgs(
        path, manifest_path, search=search)

    self.manifest_path = os.path.realpath(manifest_path)
    # Includes are resolved relative to the manifest's own directory.
    manifest_include_dir = os.path.dirname(self.manifest_path)
    self.manifest_branch = self._GetManifestsBranch(self.root)
    self._content_merging = {}
    Manifest.__init__(self, self.manifest_path,
                      manifest_include_dir=manifest_include_dir)

  @staticmethod
  def _NormalizeArgs(path, manifest_path=None, search=True):
    """Resolve |path| to the repo root and pick the manifest to parse.

    Returns:
      A (root, manifest_path) tuple of realpath'd values.

    Raises:
      OSError: ENOENT if no repo root can be found, or if search is disabled
        and |path| is not itself the root.
    """
    root = FindRepoCheckoutRoot(path)
    if root is None:
      raise OSError(errno.ENOENT, "Couldn't find repo root: %s" % (path,))
    root = os.path.normpath(os.path.realpath(root))
    if not search:
      if os.path.normpath(os.path.realpath(path)) != root:
        raise OSError(errno.ENOENT, 'Path %s is not a repo root, and search '
                      'is disabled.' % path)
    if manifest_path is None:
      manifest_path = os.path.join(root, '.repo', 'manifest.xml')
    return root, manifest_path

  @staticmethod
  def IsFullManifest(checkout_root):
    """Returns True iff the given checkout is using a full manifest.

    This method should go away as part of the cleanup related to brbug.com/854.

    Args:
      checkout_root: path to the root of an SDK checkout.

    Returns:
      True iff the manifest selected for the given SDK is a full manifest.
      In this context we'll accept any manifest for which there are no groups
      defined.
    """
    manifests_git_repo = os.path.join(checkout_root, '.repo', 'manifests.git')
    cmd = ['config', '--local', '--get', 'manifest.groups']
    # error_code_ok: the config key may simply not exist (full manifest case).
    result = RunGit(manifests_git_repo, cmd, error_code_ok=True)

    if result.output.strip():
      # Full layouts don't define groups.
      return False

    return True

  def FindCheckouts(self, project, branch=None, only_patchable=False):
    """Returns the list of checkouts for a given |project|/|branch|.

    Args:
      project: Project name to search for.
      branch: Branch to use.
      only_patchable: Restrict search to patchable project paths.

    Returns:
      A list of ProjectCheckout objects.
    """
    checkouts = []
    for checkout in self.checkouts_by_name.get(project, []):
      if project == checkout['name']:
        if only_patchable and not checkout.IsPatchable():
          continue
        tracking_branch = checkout['tracking_branch']
        # Compare branches with refs prefixes stripped so callers may pass
        # either a short or fully-qualified branch name.
        if branch is None or StripRefs(branch) == StripRefs(tracking_branch):
          checkouts.append(checkout)
    return checkouts

  def FindCheckout(self, project, branch=None, strict=True):
    """Returns the checkout associated with a given project/branch.

    Args:
      project: The project to look for.
      branch: The branch that the project is tracking.
      strict: Raise AssertionError if a checkout cannot be found.

    Returns:
      A ProjectCheckout object.

    Raises:
      AssertionError if there is more than one checkout associated with the
      given project/branch combination.
    """
    checkouts = self.FindCheckouts(project, branch)
    if len(checkouts) < 1:
      if strict:
        raise AssertionError('Could not find checkout of %s' % (project,))
      return None
    elif len(checkouts) > 1:
      raise AssertionError('Too many checkouts found for %s' % project)
    return checkouts[0]

  def ListCheckouts(self):
    """List the checkouts in the manifest.

    Returns:
      A list of ProjectCheckout objects.
    """
    return self.checkouts_by_path.values()

  def FindCheckoutFromPath(self, path, strict=True):
    """Find the associated checkouts for a given |path|.

    The |path| can either be to the root of a project, or within the
    project itself (chromite.cbuildbot for example).  It may be relative
    to the repo root, or an absolute path.  If |path| is not within a
    checkout, return None.

    Args:
      path: Path to examine.
      strict: If True, fail when no checkout is found.

    Returns:
      None if no checkout is found, else the checkout.
    """
    # Realpath everything sans the target to keep people happy about
    # how symlinks are handled; exempt the final node since following
    # through that is unlikely even remotely desired.
    tmp = os.path.join(self.root, os.path.dirname(path))
    path = os.path.join(os.path.realpath(tmp), os.path.basename(path))
    # Trailing '/' ensures prefix matching stops at path-component
    # boundaries (so 'src/foo' doesn't match checkout 'src/foobar').
    path = os.path.normpath(path) + '/'
    candidates = []
    for checkout in self.ListCheckouts():
      if path.startswith(checkout['local_path'] + '/'):
        candidates.append((checkout['path'], checkout))

    if not candidates:
      if strict:
        raise AssertionError('Could not find repo project at %s' % (path,))
      return None

    # The checkout with the greatest common path prefix is the owner of
    # the given pathway. Return that.
    return max(candidates)[1]

  def _FinalizeAllProjectData(self):
    """Rewrite projects mixing defaults in and adding our attributes."""
    Manifest._FinalizeAllProjectData(self)
    # Wrap the raw attribute dicts in ProjectCheckout objects for consumers.
    for key, value in self.checkouts_by_path.iteritems():
      self.checkouts_by_path[key] = ProjectCheckout(value)
    for key, value in self.checkouts_by_name.iteritems():
      self.checkouts_by_name[key] = \
          [ProjectCheckout(x) for x in value]

  def _FinalizeProjectData(self, attrs):
    """Extend base finalization with the absolute on-disk checkout path."""
    Manifest._FinalizeProjectData(self, attrs)
    attrs['local_path'] = os.path.join(self.root, attrs['path'])

  @staticmethod
  def _GetManifestsBranch(root):
    """Get the tracking branch of the manifest repository.

    Returns:
      The branch name.
    """
    # Suppress the normal "if it ain't refs/heads, we don't want none o' that"
    # check for the merge target; repo writes the ambigious form of the branch
    # target for `repo init -u url -b some-branch` usages (aka, 'master'
    # instead of 'refs/heads/master').
    path = os.path.join(root, '.repo', 'manifests')
    current_branch = GetCurrentBranch(path)
    if current_branch != 'default':
      raise OSError(errno.ENOENT,
                    'Manifest repository at %s is checked out to %s.  '
                    "It should be checked out to 'default'."
                    % (root, 'detached HEAD' if current_branch is None
                       else current_branch))

    result = GetTrackingBranchViaGitConfig(
        path, 'default', allow_broken_merge_settings=True, for_checkout=False)

    if result is not None:
      return StripRefsHeads(result.ref, False)

    raise OSError(errno.ENOENT,
                  "Manifest repository at %s is checked out to 'default', but "
                  'the git tracking configuration for that branch is broken; '
                  'failing due to that.' % (root,))

  # pylint: disable=arguments-differ
  @classmethod
  def Cached(cls, path, manifest_path=None, search=True):
    """Return an instance, reusing an existing one if possible.

    Args:
      path: The pathway into a checkout; the root will be found automatically.
      manifest_path: if given, the manifest.xml to use instead of the
        checkouts internal manifest.  Use with care.
      search: If True, the path can point into the repo, and the root will
        be found automatically.  If False, the path *must* be the root, else
        an OSError ENOENT will be thrown.
    """
    root, manifest_path = cls._NormalizeArgs(path, manifest_path,
                                             search=search)

    md5 = cls._GetManifestHash(manifest_path)
    obj, sources = cls._instance_cache.get((root, md5), (None, ()))
    # Invalidate the cached object if any include target changed on disk.
    for include_target, target_md5 in sources:
      if cls._GetManifestHash(include_target, True) != target_md5:
        obj = None
        break
    if obj is None:
      obj = cls(root, manifest_path=manifest_path)
      sources = tuple((abspath, cls._GetManifestHash(abspath))
                      for (target, abspath) in obj.includes)
      cls._instance_cache[(root, md5)] = (obj, sources)
    return obj
+
+
def RunGit(git_repo, cmd, retry=True, **kwargs):
  """RunCommand wrapper for git commands.

  This suppresses print_cmd, and suppresses output by default.  Git
  functionality w/in this module should use this unless otherwise
  warranted, to standardize git output (primarily, keeping it quiet
  and being able to throw useful errors for it).

  Args:
    git_repo: Pathway to the git repo to operate on.
    cmd: A sequence of the git subcommand to run.  The 'git' prefix is
      added automatically.  If you wished to run 'git remote update',
      this would be ['remote', 'update'] for example.
    retry: If set, retry on transient errors. Defaults to True.
    kwargs: Any RunCommand or GenericRetry options/overrides to use.

  Returns:
    A CommandResult object.
  """

  def _ShouldRetry(exc):
    """Returns True if push operation failed with a transient error."""
    transient = (isinstance(exc, cros_build_lib.RunCommandError) and
                 exc.result and exc.result.error and
                 GIT_TRANSIENT_ERRORS_RE.search(exc.result.error))
    if not transient:
      return False
    logging.warning('git reported transient error (cmd=%s); retrying',
                    cros_build_lib.CmdToStr(cmd), exc_info=True)
    return True

  max_retry = kwargs.pop('max_retry', DEFAULT_RETRIES if retry else 0)
  # Standardized defaults; callers may override any of these via kwargs.
  for key, value in (('print_cmd', False),
                     ('sleep', DEFAULT_RETRY_INTERVAL),
                     ('cwd', git_repo),
                     ('capture_output', True)):
    kwargs.setdefault(key, value)
  return retry_util.GenericRetry(
      _ShouldRetry, max_retry, cros_build_lib.RunCommand,
      ['git'] + cmd, **kwargs)
+
+
def Init(git_repo):
  """Create a new git repository, in the given location.

  Args:
    git_repo: Path for where to create a git repo. The directory is created
      if it doesn't already exist.
  """
  # Ensure the target directory exists before running 'git init' in it.
  osutils.SafeMakedirs(git_repo)
  RunGit(git_repo, ['init'])
+
+
def Clone(git_repo, git_url):
  """Clone a git repository, into the given directory.

  Args:
    git_repo: Path for where to create a git repo. The directory is created
      if it doesn't already exist.
    git_url: Url to clone the git repository from.
  """
  # Ensure the destination directory exists before cloning into it.
  osutils.SafeMakedirs(git_repo)
  RunGit(git_repo, ['clone', git_url, git_repo])
+
+
def GetProjectUserEmail(git_repo):
  """Get the email configured for the project."""
  ident = RunGit(git_repo, ['var', 'GIT_COMMITTER_IDENT']).output
  # The ident looks like 'Name <email> timestamp tz'; pull out the email.
  match = re.search(r'<([^>]*)>', ident.strip())
  if match:
    return match.group(1)
  return None
+
+
def MatchBranchName(git_repo, pattern, namespace=''):
  """Return branches who match the specified regular expression.

  Args:
    git_repo: The git repository to operate upon.
    pattern: The regexp to search with.
    namespace: The namespace to restrict search to (e.g. 'refs/heads/').

  Returns:
    List of matching branch names (with |namespace| trimmed).
  """
  matcher = re.compile(pattern, flags=re.I)
  output = RunGit(git_repo, ['ls-remote', git_repo, namespace + '*']).output
  results = []
  for line in output.splitlines():
    # ls-remote lines are '<sha1>\t<ref>'; we only care about the ref.
    ref = line.split()[1]
    if not ref.startswith(namespace):
      continue
    trimmed = ref[len(namespace):]
    if matcher.search(trimmed):
      results.append(trimmed)
  return results
+
+
class AmbiguousBranchName(Exception):
  """Error if given branch name matches too many branches."""
+
+
def MatchSingleBranchName(*args, **kwargs):
  """Match exactly one branch name, else throw an exception.

  Args:
    See MatchBranchName for more details; all args are passed on.

  Returns:
    The branch name.

  Raises:
    raise AmbiguousBranchName if we did not match exactly one branch.
  """
  matches = MatchBranchName(*args, **kwargs)
  if len(matches) == 1:
    return matches[0]
  raise AmbiguousBranchName('Did not match exactly 1 branch: %r' % matches)
+
+
def GetTrackingBranchViaGitConfig(git_repo, branch, for_checkout=True,
                                  allow_broken_merge_settings=False,
                                  recurse=10):
  """Pull the remote and upstream branch of a local branch

  Args:
    git_repo: The git repository to operate upon.
    branch: The branch to inspect.
    for_checkout: Whether to return localized refspecs, or the remote's
      view of it.
    allow_broken_merge_settings: Repo in a couple of spots writes invalid
      branch.mybranch.merge settings; if these are encountered, they're
      normally treated as an error and this function returns None.  If
      this option is set to True, it suppresses this check.
    recurse: If given and the target is local, then recurse through any
      remote=. (aka locals).  This is enabled by default, and is what allows
      developers to have multiple local branches of development dependent
      on one another; disabling this makes that work flow impossible,
      thus disable it only with good reason.  The value given controls how
      deeply to recurse.  Defaults to tracing through 10 levels of local
      remotes. Disabling it is a matter of passing 0.

  Returns:
    A RemoteRef, or None.  If for_checkout, then it returns the localized
    version of it.
  """
  try:
    # Pull both branch.<name>.remote and branch.<name>.merge in one call.
    cmd = ['config', '--get-regexp',
           r'branch\.%s\.(remote|merge)' % re.escape(branch)]
    data = RunGit(git_repo, cmd).output.splitlines()

    # Each line is 'branch.<name>.<key> <value>'; build a {key: value} map.
    prefix = 'branch.%s.' % (branch,)
    data = [x.split() for x in data]
    vals = dict((x[0][len(prefix):], x[1]) for x in data)
    if len(vals) != 2:
      # We expect exactly 'remote' and 'merge'; anything else is a broken
      # or partial configuration.
      if not allow_broken_merge_settings:
        return None
      elif 'merge' not in vals:
        # There isn't anything we can do here.
        return None
      elif 'remote' not in vals:
        # Repo v1.9.4 and up occasionally invalidly leave the remote out.
        # Only occurs for the manifest repo fortunately.
        vals['remote'] = 'origin'
    remote, rev = vals['remote'], vals['merge']
    # Suppress non branches; repo likes to write revisions and tags here,
    # which is wrong (git hates it, nor will it honor it).
    if rev.startswith('refs/remotes/'):
      if for_checkout:
        return RemoteRef(remote, rev)
      # We can't backtrack from here, or at least don't want to.
      # This is likely refs/remotes/m/ which repo writes when dealing
      # with a revision locked manifest.
      return None
    if not rev.startswith('refs/heads/'):
      # We explicitly don't allow pushing to tags, nor can one push
      # to a sha1 remotely (makes no sense).
      if not allow_broken_merge_settings:
        return None
    elif remote == '.':
      # remote='.' means the branch tracks another *local* branch; follow
      # the chain, bounded by |recurse|.
      if recurse == 0:
        raise Exception(
            'While tracing out tracking branches, we recursed too deeply: '
            'bailing at %s' % branch)
      return GetTrackingBranchViaGitConfig(
          git_repo, StripRefsHeads(rev), for_checkout=for_checkout,
          allow_broken_merge_settings=allow_broken_merge_settings,
          recurse=recurse - 1)
    elif for_checkout:
      # Localize the ref into the remote-tracking namespace.
      rev = 'refs/remotes/%s/%s' % (remote, StripRefsHeads(rev))
    return RemoteRef(remote, rev)
  except cros_build_lib.RunCommandError as e:
    # 1 is the retcode for no matches.
    if e.result.returncode != 1:
      raise
  return None
+
+
def GetTrackingBranchViaManifest(git_repo, for_checkout=True, for_push=False,
                                 manifest=None):
  """Gets the appropriate push branch via the manifest if possible.

  Args:
    git_repo: The git repo to operate upon.
    for_checkout: Whether to return localized refspecs, or the remote's
      view of it.  Note that depending on the remote, the remote may differ
      if for_push is True or set to False.
    for_push: Controls whether the remote and refspec returned is explicitly
      for pushing.
    manifest: A Manifest instance if one is available, else a
      ManifestCheckout is created and used.

  Returns:
    A RemoteRef, or None.  If for_checkout, then it returns the localized
    version of it.
  """
  try:
    if manifest is None:
      manifest = ManifestCheckout.Cached(git_repo)

    checkout = manifest.FindCheckoutFromPath(git_repo, strict=False)
    if checkout is None:
      return None

    if for_push:
      # Verify push safety before handing back a push remote.
      checkout.AssertPushable()
      remote = checkout['push_remote']
    else:
      remote = checkout['remote']

    if for_checkout:
      revision = checkout['tracking_branch']
    else:
      revision = checkout['revision']
      # Only named branches make sense as a remote-side target.
      if not revision.startswith('refs/heads/'):
        return None

    return RemoteRef(remote, revision)
  except EnvironmentError as e:
    if e.errno != errno.ENOENT:
      raise
  return None
+
+
def GetTrackingBranch(git_repo, branch=None, for_checkout=True, fallback=True,
                      manifest=None, for_push=False):
  """Gets the appropriate push branch for the specified directory.

  This function works on both repo projects and regular git checkouts.

  Assumptions:
   1. We assume the manifest defined upstream is desirable.
   2. No manifest?  Assume tracking if configured is accurate.
   3. If none of the above apply, you get 'origin', 'master' or None,
      depending on fallback.

  Args:
    git_repo: Git repository to operate upon.
    branch: Find the tracking branch for this branch.  Defaults to the
      current branch for |git_repo|.
    for_checkout: Whether to return localized refspecs, or the remotes
      view of it.
    fallback: If true and no remote/branch could be discerned, return
      'origin', 'master'.  If False, you get None.
      Note that depending on the remote, the remote may differ
      if for_push is True or set to False.
    for_push: Controls whether the remote and refspec returned is explicitly
      for pushing.
    manifest: A Manifest instance if one is available, else a
      ManifestCheckout is created and used.

  Returns:
    A RemoteRef, or None.
  """
  # Highest priority: the manifest's notion of upstream.
  via_manifest = GetTrackingBranchViaManifest(
      git_repo, for_checkout=for_checkout, manifest=manifest,
      for_push=for_push)
  if via_manifest is not None:
    return via_manifest

  # Next: whatever tracking config git itself has for the branch.
  local_branch = branch if branch is not None else GetCurrentBranch(git_repo)
  if local_branch:
    via_config = GetTrackingBranchViaGitConfig(git_repo, local_branch,
                                               for_checkout=for_checkout)
    if via_config is not None:
      if via_config.ref.startswith(('refs/heads/', 'refs/remotes/')):
        return via_config

  # Last resort: the conventional origin/master, if allowed.
  if not fallback:
    return None
  if for_checkout:
    return RemoteRef('origin', 'refs/remotes/origin/master')
  return RemoteRef('origin', 'master')
+
+
def CreateBranch(git_repo, branch, branch_point='HEAD', track=False):
  """Create a branch.

  Args:
    git_repo: Git repository to act on.
    branch: Name of the branch to create.
    branch_point: The ref to branch from.  Defaults to 'HEAD'.
    track: Whether to setup the branch to track its starting ref.
  """
  # 'checkout -B' creates the branch (resetting it if it exists) and
  # switches to it in one step.
  cmd = ['checkout', '-B', branch, branch_point]
  if track:
    cmd += ['--track']
  RunGit(git_repo, cmd)
+
+
def AddPath(path):
  """Use 'git add' on a path.

  Args:
    path: Path to the git repository and the path to add.
  """
  # Run from the containing directory so the filename is repo-relative.
  parent, base = os.path.split(path)
  RunGit(parent, ['add', '--', base])
+
+
def RmPath(path):
  """Use 'git rm' on a file.

  Args:
    path: Path to the git repository and the path to rm.
  """
  # Run from the containing directory so the filename is repo-relative.
  parent, base = os.path.split(path)
  RunGit(parent, ['rm', '--', base])
+
+
def GetObjectAtRev(git_repo, obj, rev):
  """Return the contents of a git object at a particular revision.

  This could be used to look at an old version of a file or directory, for
  instance, without modifying the working directory.

  Args:
    git_repo: Path to a directory in the git repository to query.
    obj: The name of the object to read.
    rev: The revision to retrieve.

  Returns:
    The content of the object.
  """
  # 'git show <rev>:<path>' prints the object as it existed at that revision.
  return RunGit(git_repo, ['show', '%s:%s' % (rev, obj)]).output
+
+
def RevertPath(git_repo, filename, rev):
  """Revert a single file back to a particular revision and 'add' it with git.

  Args:
    git_repo: Path to the directory holding the file.
    filename: Name of the file to revert.
    rev: Revision to revert the file to.
  """
  # 'git checkout <rev> -- <file>' both restores the content and stages it.
  revert_cmd = ['checkout', rev, '--', filename]
  RunGit(git_repo, revert_cmd)
+
+
def Commit(git_repo, message, amend=False, allow_empty=False):
  """Commit with git.

  Args:
    git_repo: Path to the git repository to commit in.
    message: Commit message to use.
    amend: Whether to 'amend' the CL, default False
    allow_empty: Whether to allow an empty commit. Default False.

  Returns:
    The Gerrit Change-ID assigned to the CL if it exists.
  """
  args = ['commit', '-m', message]
  for flag, enabled in (('--amend', amend), ('--allow-empty', allow_empty)):
    if enabled:
      args.append(flag)
  RunGit(git_repo, args)

  # Pull the Change-Id (if any) out of the freshly created commit message.
  head_log = RunGit(git_repo, ['log', '-n', '1', '--format=format:%B']).output
  found = re.search('Change-Id: (?P<ID>I[a-fA-F0-9]*)', head_log)
  if found:
    return found.group('ID')
  return None
+
+
+_raw_diff_components = ('src_mode', 'dst_mode', 'src_sha', 'dst_sha',
+                        'status', 'score', 'src_file', 'dst_file')
+# RawDiffEntry represents a line of raw formatted git diff output.
+RawDiffEntry = collections.namedtuple('RawDiffEntry', _raw_diff_components)
+
+
# This regular expression pulls apart a line of raw formatted git diff output.
# A raw line looks like (rename/copy statuses carry a numeric score and a
# second, tab-separated destination path):
#   :100644 100644 abc123... def456... R099<TAB>old/path<TAB>new/path
# The '(\.)*' groups consume the trailing dots git appends to abbreviated SHAs,
# so the named sha groups capture only the hex digits.
DIFF_RE = re.compile(
    r':(?P<src_mode>[0-7]*) (?P<dst_mode>[0-7]*) '
    r'(?P<src_sha>[0-9a-f]*)(\.)* (?P<dst_sha>[0-9a-f]*)(\.)* '
    r'(?P<status>[ACDMRTUX])(?P<score>[0-9]+)?\t'
    r'(?P<src_file>[^\t]+)\t?(?P<dst_file>[^\t]+)?')
+
+
def RawDiff(path, target):
  """Return the parsed raw format diff of target.

  Args:
    path: Path to the git repository to diff in.
    target: The target to diff.

  Returns:
    A list of RawDiffEntry's.  Empty if there are no differences.

  Raises:
    GitException: If a line of diff output cannot be parsed.
  """
  entries = []

  cmd = ['diff', '-M', '--raw', target]
  diff = RunGit(path, cmd).output
  # splitlines() (unlike "strip().split('\n')") yields nothing for empty
  # output, so a diff with no changes returns [] instead of raising below.
  for line in diff.splitlines():
    line = line.strip()
    if not line:
      continue
    match = DIFF_RE.match(line)
    if not match:
      raise GitException('Failed to parse diff output: %s' % line)
    entries.append(RawDiffEntry(*match.group(*_raw_diff_components)))

  return entries
+
+
def UploadCL(git_repo, remote, branch, local_branch='HEAD', draft=False,
             **kwargs):
  """Upload a CL to gerrit. The CL should be checked out currently.

  Args:
    git_repo: Path to the git repository with the CL to upload checked out.
    remote: The remote to upload the CL to.
    branch: Branch to upload to.
    local_branch: Branch to upload.
    draft: Whether to upload as a draft.
    kwargs: Extra options for GitPush. capture_output defaults to False so
      that the URL for new or updated CLs is shown to the user.
  """
  namespace = 'refs/drafts/%s' if draft else 'refs/for/%s'
  push_ref = RemoteRef(remote, namespace % branch)
  if 'capture_output' not in kwargs:
    # Let the push output through so gerrit's CL URL reaches the user.
    kwargs['capture_output'] = False
  GitPush(git_repo, local_branch, push_ref, **kwargs)
+
+
def GitPush(git_repo, refspec, push_to, force=False, retry=True,
            capture_output=True, skip=False):
  """Wrapper for pushing to a branch.

  Args:
    git_repo: Git repository to act on.
    refspec: The local ref to push to the remote.
    push_to: A RemoteRef object representing the remote ref to push to.
    force: Whether to bypass non-fastforward checks.
    retry: Retry a push in case of transient errors.
    capture_output: Whether to capture output for this command.
    skip: Do not actually push anything.
  """
  push_cmd = ['push', push_to.remote, '%s:%s' % (refspec, push_to.ref)]
  if force:
    push_cmd += ['--force']

  if not skip:
    RunGit(git_repo, push_cmd, retry=retry, capture_output=capture_output)
    return

  # git-push has a --dry-run option but we can't use it because that still
  # runs push-access checks, and we want the skip mode to be available to
  # users who can't really push to remote.
  logging.info('Would have run "%s"', push_cmd)
+
+
# TODO(build): Switch callers of this function to use CreateBranch instead.
def CreatePushBranch(branch, git_repo, sync=True, remote_push_branch=None):
  """Create a local branch for pushing changes inside a repo repository.

  Args:
    branch: Local branch to create.
    git_repo: Git repository to create the branch in.
    sync: Update remote before creating push branch.
    remote_push_branch: A RemoteRef to push to. i.e.,
                        RemoteRef('cros', 'master').  By default it tries to
                        automatically determine which tracking branch to use
                        (see GetTrackingBranch()).
  """
  tracking = remote_push_branch or GetTrackingBranch(git_repo, for_push=True)

  if sync:
    RunGit(git_repo, ['remote', 'update', tracking.remote])

  # -B resets the branch if it already exists; -t makes it track upstream.
  RunGit(git_repo, ['checkout', '-B', branch, '-t', tracking.ref])
+
+
def SyncPushBranch(git_repo, remote, rebase_target):
  """Sync and rebase a local push branch to the latest remote version.

  Args:
    git_repo: Git repository to rebase in.
    remote: The remote returned by GetTrackingBranch(for_push=True)
    rebase_target: The branch name returned by GetTrackingBranch().  Must
      start with refs/remotes/ (specifically must be a proper remote
      target rather than an ambiguous name).

  Raises:
    Exception: If |rebase_target| is not a refs/remotes/ ref.
    cros_build_lib.RunCommandError: If the rebase hits conflicts (the failed
      rebase is aborted before re-raising).
  """
  if not rebase_target.startswith('refs/remotes/'):
    raise Exception(
        'Was asked to rebase to a non branch target w/in the push pathways.  '
        'This is highly indicative of an internal bug.  remote %s, rebase %s'
        % (remote, rebase_target))

  RunGit(git_repo, ['remote', 'update', remote])

  try:
    RunGit(git_repo, ['rebase', rebase_target])
  except cros_build_lib.RunCommandError:
    # Our change conflicts with upstream; leave the tree clean by undoing
    # the half-applied rebase before propagating the error.
    RunGit(git_repo, ['rebase', '--abort'], error_code_ok=True)
    raise
+
+
# TODO(build): Switch this to use the GitPush function.
def PushWithRetry(branch, git_repo, dryrun=False, retries=5):
  """General method to push local git changes.

  This method only works with branches created via the CreatePushBranch
  function.

  Args:
    branch: Local branch to push.  Branch should have already been created
      with a local change committed ready to push to the remote branch.  Must
      also already be checked out to that branch.
    git_repo: Git repository to push from.
    dryrun: Git push --dry-run if set to True.
    retries: The number of times to retry before giving up, default: 5

  Raises:
    cros_build_lib.RunCommandError: If the push still fails after all retries.
      NOTE(review): this docstring previously named 'GitPushFailed', but no
      such exception is raised anywhere in this function.
  """
  remote_ref = GetTrackingBranch(git_repo, branch, for_checkout=False,
                                 for_push=True)
  # Don't like invoking this twice, but there is a bit of API
  # impedance here; cros_mark_as_stable
  local_ref = GetTrackingBranch(git_repo, branch, for_push=True)

  if not remote_ref.ref.startswith('refs/heads/'):
    raise Exception('Was asked to push to a non branch namespace: %s' %
                    remote_ref.ref)

  push_command = ['push', remote_ref.remote, '%s:%s' %
                  (branch, remote_ref.ref)]
  logging.debug('Trying to push %s to %s:%s',
                git_repo, branch, remote_ref.ref)

  if dryrun:
    push_command.append('--dry-run')
  # Each attempt re-syncs/rebases onto the remote first, and the sleep grows
  # linearly (5s, 10s, ...) between attempts.  The final failure re-raises.
  for retry in range(1, retries + 1):
    SyncPushBranch(git_repo, remote_ref.remote, local_ref.ref)
    try:
      RunGit(git_repo, push_command)
      break
    except cros_build_lib.RunCommandError:
      if retry < retries:
        logging.warning('Error pushing changes trying again (%s/%s)',
                        retry, retries)
        time.sleep(5 * retry)
        continue
      raise

  logging.info('Successfully pushed %s to %s:%s',
               git_repo, branch, remote_ref.ref)
+
+
def CleanAndDetachHead(git_repo):
  """Remove all local changes and checkout a detached head.

  Args:
    git_repo: Directory of git repository.
  """
  # Bail out of any half-finished am/rebase first (failures are fine if
  # none is in progress), then scrub the tree and detach from the branch.
  for recovery in (['am', '--abort'], ['rebase', '--abort']):
    RunGit(git_repo, recovery, error_code_ok=True)
  RunGit(git_repo, ['clean', '-dfx'])
  RunGit(git_repo, ['checkout', '--detach', '-f', 'HEAD'])
+
+
def CleanAndCheckoutUpstream(git_repo, refresh_upstream=True):
  """Remove all local changes and checkout the latest origin.

  All local changes in the supplied repo will be removed. The branch will
  also be switched to a detached head pointing at the latest origin.

  Args:
    git_repo: Directory of git repository.
    refresh_upstream: If True, run a remote update prior to checking it out.
  """
  upstream = GetTrackingBranch(git_repo, for_push=refresh_upstream)
  CleanAndDetachHead(git_repo)
  if refresh_upstream:
    RunGit(git_repo, ['remote', 'update', upstream.remote])
  RunGit(git_repo, ['checkout', upstream.ref])
+
+
+def GetChromiteTrackingBranch():
+  """Returns the remote branch associated with chromite."""
+  cwd = os.path.dirname(os.path.realpath(__file__))
+  result_ref = GetTrackingBranch(cwd, for_checkout=False, fallback=False)
+  if result_ref:
+    branch = result_ref.ref
+    if branch.startswith('refs/heads/'):
+      # Normal scenario.
+      return StripRefsHeads(branch)
+    # Reaching here means it was refs/remotes/m/blah, or just plain invalid,
+    # or that we're on a detached head in a repo not managed by chromite.
+
+  # Manually try the manifest next.
+  try:
+    manifest = ManifestCheckout.Cached(cwd)
+    # Ensure the manifest knows of this checkout.
+    if manifest.FindCheckoutFromPath(cwd, strict=False):
+      return manifest.manifest_branch
+  except EnvironmentError as e:
+    if e.errno != errno.ENOENT:
+      raise
+
+  # Not a manifest checkout.
+  logging.warning(
+      "Chromite checkout at %s isn't controlled by repo, nor is it on a "
+      'branch (or if it is, the tracking configuration is missing or broken).  '
+      'Falling back to assuming the chromite checkout is derived from '
+      "'master'; this *may* result in breakage." % cwd)
+  return 'master'
+
+
def GarbageCollection(git_repo):
  """Cleanup unnecessary files and optimize the local repository.

  Args:
    git_repo: Directory of git repository.
  """
  # Use --auto so it only runs if housekeeping is necessary.
  RunGit(git_repo, ['gc', '--auto'])
diff --git a/lib/git_unittest b/lib/git_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/git_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/git_unittest.py b/lib/git_unittest.py
new file mode 100644
index 0000000..466c7fa
--- /dev/null
+++ b/lib/git_unittest.py
@@ -0,0 +1,353 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for chromite.lib.git and helpers for testing that module."""
+
+from __future__ import print_function
+
+import functools
+import mock
+import os
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+from chromite.lib import partial_mock
+from chromite.lib import patch_unittest
+
+
class ManifestMock(partial_mock.PartialMock):
  """Partial mock for git.Manifest."""
  TARGET = 'chromite.lib.git.Manifest'
  ATTRS = ('_RunParser',)

  def _RunParser(self, *_args):
    # Stub out manifest parsing entirely; tests never need real XML.
    pass
+
+
class ManifestCheckoutMock(partial_mock.PartialMock):
  """Partial mock for git.ManifestCheckout."""
  TARGET = 'chromite.lib.git.ManifestCheckout'
  ATTRS = ('_GetManifestsBranch',)

  def _GetManifestsBranch(self, _root):
    # Always report the 'default' manifest branch, avoiding real git calls.
    return 'default'
+
+
class NormalizeRefTest(cros_test_lib.TestCase):
  """Test the Normalize*Ref functions."""

  def _TestNormalize(self, functor, tests):
    """Helper function for testing Normalize*Ref functions.

    Args:
      functor: Normalize*Ref functor that only needs the input
        ref argument.
      tests: Dict of test inputs to expected test outputs.
    """
    for test_input, test_output in tests.items():
      result = functor(test_input)
      msg = ('Expected %s to translate %r to %r, but got %r.' %
             (functor.__name__, test_input, test_output, result))
      # assertEqual, not the deprecated assertEquals alias.
      self.assertEqual(test_output, result, msg)

  def testNormalizeRef(self):
    """Test git.NormalizeRef function."""
    tests = {
        # These should all get 'refs/heads/' prefix.
        'foo': 'refs/heads/foo',
        'foo-bar-123': 'refs/heads/foo-bar-123',

        # If input starts with 'refs/' it should be left alone.
        'refs/foo/bar': 'refs/foo/bar',
        'refs/heads/foo': 'refs/heads/foo',

        # Plain 'refs' is nothing special.
        'refs': 'refs/heads/refs',

        None: None,
    }
    self._TestNormalize(git.NormalizeRef, tests)

  def testNormalizeRemoteRef(self):
    """Test git.NormalizeRemoteRef function."""
    remote = 'TheRemote'
    tests = {
        # These should all get 'refs/remotes/TheRemote' prefix.
        'foo': 'refs/remotes/%s/foo' % remote,
        'foo-bar-123': 'refs/remotes/%s/foo-bar-123' % remote,

        # These should be translated from local to remote ref.
        'refs/heads/foo': 'refs/remotes/%s/foo' % remote,
        'refs/heads/foo-bar-123': 'refs/remotes/%s/foo-bar-123' % remote,

        # These should be moved from one remote to another.
        'refs/remotes/OtherRemote/foo': 'refs/remotes/%s/foo' % remote,

        # These should be left alone.
        'refs/remotes/%s/foo' % remote: 'refs/remotes/%s/foo' % remote,
        'refs/foo/bar': 'refs/foo/bar',

        # Plain 'refs' is nothing special.
        'refs': 'refs/remotes/%s/refs' % remote,

        None: None,
    }

    # Add remote arg to git.NormalizeRemoteRef.
    functor = functools.partial(git.NormalizeRemoteRef, remote)
    functor.__name__ = git.NormalizeRemoteRef.__name__

    self._TestNormalize(functor, tests)
+
+
class GitWrappersTest(cros_build_lib_unittest.RunCommandTempDirTestCase):
  """Tests for small git wrappers"""

  CHANGE_ID = 'I0da12ef6d2c670305f0281641bc53db22faf5c1a'
  COMMIT_LOG = '''
  foo: Change to foo.

  Change-Id: %s
  ''' % CHANGE_ID

  PUSH_REMOTE = 'fake_remote'
  PUSH_BRANCH = 'fake_branch'
  PUSH_LOCAL = 'fake_local_branch'

  def setUp(self):
    self.fake_git_dir = os.path.join(self.tempdir, 'foo/bar')
    self.fake_file = 'baz'
    self.fake_path = os.path.join(self.fake_git_dir, self.fake_file)

  def testInit(self):
    """Init creates the repo directory and runs 'git init'."""
    git.Init(self.fake_path)

    # Should have created the git repo directory, if it didn't exist.
    self.assertExists(self.fake_git_dir)
    self.assertCommandContains(['init'])

  def testClone(self):
    """Clone creates the repo directory and runs 'git clone'."""
    url = 'http://happy/git/repo'

    git.Clone(self.fake_git_dir, url)

    # Should have created the git repo directory, if it didn't exist.
    self.assertExists(self.fake_git_dir)
    self.assertCommandContains(['clone', url, self.fake_git_dir])

  def testAddPath(self):
    """AddPath stages the file relative to its repo directory."""
    git.AddPath(self.fake_path)
    self.assertCommandContains(['add'])
    self.assertCommandContains([self.fake_file])

  def testRmPath(self):
    """RmPath removes the file relative to its repo directory."""
    git.RmPath(self.fake_path)
    self.assertCommandContains(['rm'])
    self.assertCommandContains([self.fake_file])

  def testGetObjectAtRev(self):
    """GetObjectAtRev issues 'git show'."""
    git.GetObjectAtRev(self.fake_git_dir, '.', '1234')
    self.assertCommandContains(['show'])

  def testRevertPath(self):
    """RevertPath checks the file out at the given revision."""
    git.RevertPath(self.fake_git_dir, self.fake_file, '1234')
    self.assertCommandContains(['checkout'])
    self.assertCommandContains([self.fake_file])

  def testCommit(self):
    """Commit passes the right flags and returns the Change-Id."""
    self.rc.AddCmdResult(partial_mock.In('log'), output=self.COMMIT_LOG)
    git.Commit(self.fake_git_dir, 'bar')
    self.assertCommandContains(['--amend'], expected=False)
    cid = git.Commit(self.fake_git_dir, 'bar', amend=True)
    self.assertCommandContains(['--amend'])
    self.assertCommandContains(['--allow-empty'], expected=False)
    self.assertEqual(cid, self.CHANGE_ID)
    cid = git.Commit(self.fake_git_dir, 'new', allow_empty=True)
    self.assertCommandContains(['--allow-empty'])
    # The Change-Id must be returned for empty commits too (previously this
    # return value was silently dropped).
    self.assertEqual(cid, self.CHANGE_ID)

  def testUploadCLNormal(self):
    """Non-draft uploads push to refs/for/ and show output."""
    git.UploadCL(self.fake_git_dir, self.PUSH_REMOTE, self.PUSH_BRANCH,
                 local_branch=self.PUSH_LOCAL)
    self.assertCommandContains(['%s:refs/for/%s' % (self.PUSH_LOCAL,
                                                    self.PUSH_BRANCH)],
                               capture_output=False)

  def testUploadCLDraft(self):
    """Draft uploads push to refs/drafts/ and show output."""
    git.UploadCL(self.fake_git_dir, self.PUSH_REMOTE, self.PUSH_BRANCH,
                 local_branch=self.PUSH_LOCAL, draft=True)
    self.assertCommandContains(['%s:refs/drafts/%s' % (self.PUSH_LOCAL,
                                                       self.PUSH_BRANCH)],
                               capture_output=False)

  def testUploadCLCaptured(self):
    """An explicit capture_output=True overrides the UploadCL default."""
    git.UploadCL(self.fake_git_dir, self.PUSH_REMOTE, self.PUSH_BRANCH,
                 local_branch=self.PUSH_LOCAL, draft=True, capture_output=True)
    self.assertCommandContains(['%s:refs/drafts/%s' % (self.PUSH_LOCAL,
                                                       self.PUSH_BRANCH)],
                               capture_output=True)
+
+
class ProjectCheckoutTest(cros_test_lib.TestCase):
  """Tests for git.ProjectCheckout"""

  def setUp(self):
    # Four combinations of (pinned revision?, patchable upstream?).
    self.fake_unversioned_patchable = git.ProjectCheckout({
        'name': 'chromite',
        'path': 'src/chromite',
        'revision': 'remotes/for/master',
    })
    self.fake_unversioned_unpatchable = git.ProjectCheckout({
        'name': 'chromite',
        'path': 'src/platform/somethingsomething/chromite',
        # Pinned to a SHA1.
        'revision': '1deadbeeaf1deadbeeaf1deadbeeaf1deadbeeaf',
    })
    self.fake_versioned_patchable = git.ProjectCheckout({
        'name': 'chromite',
        'path': 'src/chromite',
        'revision': '1deadbeeaf1deadbeeaf1deadbeeaf1deadbeeaf',
        'upstream': 'remotes/for/master',
    })
    self.fake_versioned_unpatchable = git.ProjectCheckout({
        'name': 'chromite',
        'path': 'src/chromite',
        'revision': '1deadbeeaf1deadbeeaf1deadbeeaf1deadbeeaf',
        'upstream': '1deadbeeaf1deadbeeaf1deadbeeaf1deadbeeaf',
    })

  def testIsPatchable(self):
    """Only checkouts tracking a branch (not a SHA1) are patchable."""
    self.assertTrue(self.fake_unversioned_patchable.IsPatchable())
    self.assertFalse(self.fake_unversioned_unpatchable.IsPatchable())
    self.assertTrue(self.fake_versioned_patchable.IsPatchable())
    self.assertFalse(self.fake_versioned_unpatchable.IsPatchable())
+
+
class RawDiffTest(cros_test_lib.MockTestCase):
  """Tests for git.RawDiff function."""

  def testRawDiff(self):
    """Test the parsing of the git.RawDiff function."""

    diff_output = '''
:100644 100644 ac234b2... 077d1f8... M\tchromeos-base/chromeos-chrome/Manifest
:100644 100644 9e5d11b... 806bf9b... R099\tchromeos-base/chromeos-chrome/chromeos-chrome-40.0.2197.0_rc-r1.ebuild\tchromeos-base/chromeos-chrome/chromeos-chrome-40.0.2197.2_rc-r1.ebuild
:100644 100644 70d6e94... 821c642... M\tchromeos-base/chromeos-chrome/chromeos-chrome-9999.ebuild
:100644 100644 be445f9... be445f9... R100\tchromeos-base/chromium-source/chromium-source-40.0.2197.0_rc-r1.ebuild\tchromeos-base/chromium-source/chromium-source-40.0.2197.2_rc-r1.ebuild
'''
    fake_result = cros_build_lib.CommandResult(output=diff_output)
    self.PatchObject(git, 'RunGit', return_value=fake_result)

    # Modified entries have no score/dst_file; renames carry both.
    expected = [
        ('100644', '100644', 'ac234b2', '077d1f8', 'M', None,
         'chromeos-base/chromeos-chrome/Manifest', None),
        ('100644', '100644', '9e5d11b', '806bf9b', 'R', '099',
         'chromeos-base/chromeos-chrome/'
         'chromeos-chrome-40.0.2197.0_rc-r1.ebuild',
         'chromeos-base/chromeos-chrome/'
         'chromeos-chrome-40.0.2197.2_rc-r1.ebuild'),
        ('100644', '100644', '70d6e94', '821c642', 'M', None,
         'chromeos-base/chromeos-chrome/chromeos-chrome-9999.ebuild', None),
        ('100644', '100644', 'be445f9', 'be445f9', 'R', '100',
         'chromeos-base/chromium-source/'
         'chromium-source-40.0.2197.0_rc-r1.ebuild',
         'chromeos-base/chromium-source/'
         'chromium-source-40.0.2197.2_rc-r1.ebuild')
    ]
    self.assertEqual(git.RawDiff('foo', 'bar'), expected)
+
+
class GitPushTest(cros_test_lib.MockTestCase):
  """Tests for git.GitPush function."""

  # Non fast-forward push error message.
  NON_FF_PUSH_ERROR = (
      'To https://localhost/repo.git\n'
      '! [remote rejected] master -> master (non-fast-forward)\n'
      'error: failed to push some refs to \'https://localhost/repo.git\'\n')

  # List of possible GoB transient errors.
  TRANSIENT_ERRORS = (
      # Hook error when creating a new branch from SHA1 ref.
      ('remote: Processing changes: (-)To https://localhost/repo.git\n'
       '! [remote rejected] 6c78ca083c3a9d64068c945fd9998eb1e0a3e739 -> '
       'stabilize-4636.B (error in hook)\n'
       'error: failed to push some refs to \'https://localhost/repo.git\'\n'),

      # 'failed to lock' error when creating a new branch from SHA1 ref.
      ('remote: Processing changes: done\nTo https://localhost/repo.git\n'
       '! [remote rejected] 4ea09c129b5fedb261bae2431ce2511e35ac3923 -> '
       'stabilize-daisy-4319.96.B (failed to lock)\n'
       'error: failed to push some refs to \'https://localhost/repo.git\'\n'),

      # Hook error when pushing branch.
      ('remote: Processing changes: (\\)To https://localhost/repo.git\n'
       '! [remote rejected] temp_auto_checkin_branch -> '
       'master (error in hook)\n'
       'error: failed to push some refs to \'https://localhost/repo.git\'\n'),

      # Another kind of error when pushing a branch.
      'fatal: remote error: Internal Server Error',

      # crbug.com/298189
      ('error: gnutls_handshake() failed: A TLS packet with unexpected length '
       'was received. while accessing '
       'http://localhost/repo.git/info/refs?service=git-upload-pack\n'
       'fatal: HTTP request failed'),

      # crbug.com/298189
      ('fatal: unable to access \'https://localhost/repo.git\': GnuTLS recv '
       'error (-9): A TLS packet with unexpected length was received.'),
  )

  def setUp(self):
    # Neutralize the retry back-off so the retry tests below run instantly.
    self.StartPatcher(mock.patch('time.sleep'))

  @staticmethod
  def _RunGitPush():
    """Runs git.GitPush with some default arguments."""
    git.GitPush('some_repo_path', 'local-ref',
                git.RemoteRef('some-remote', 'remote-ref'),
                retry=True, skip=False)

  def testPushSuccess(self):
    """Test handling of successful git push."""
    with cros_build_lib_unittest.RunCommandMock() as rc_mock:
      rc_mock.AddCmdResult(partial_mock.In('push'), returncode=0)
      self._RunGitPush()

  def testNonFFPush(self):
    """Non fast-forward push error propagates to the caller."""
    with cros_build_lib_unittest.RunCommandMock() as rc_mock:
      rc_mock.AddCmdResult(partial_mock.In('push'), returncode=128,
                           error=self.NON_FF_PUSH_ERROR)
      self.assertRaises(cros_build_lib.RunCommandError, self._RunGitPush)

  def testPersistentTransientError(self):
    """GitPush fails if transient error occurs multiple times."""
    for error in self.TRANSIENT_ERRORS:
      with cros_build_lib_unittest.RunCommandMock() as rc_mock:
        rc_mock.AddCmdResult(partial_mock.In('push'), returncode=128,
                             error=error)
        self.assertRaises(cros_build_lib.RunCommandError, self._RunGitPush)

  def testOneTimeTransientError(self):
    """GitPush retries transient errors."""
    for error in self.TRANSIENT_ERRORS:
      with cros_build_lib_unittest.RunCommandMock() as rc_mock:
        # First attempt fails with the transient error; the retry succeeds.
        results = [
            rc_mock.CmdResult(128, '', error),
            rc_mock.CmdResult(0, 'success', ''),
        ]
        # pylint: disable=cell-var-from-loop
        side_effect = lambda *_args, **_kwargs: results.pop(0)
        rc_mock.AddCmdResult(partial_mock.In('push'), side_effect=side_effect)
        self._RunGitPush()
+
+
class GitBranchDetectionTest(patch_unittest.GitRepoPatchTestCase):
  """Tests that git library functions related to branch detection work."""

  def testDoesCommitExistInRepoWithAmbiguousBranchName(self):
    """A name that is both a branch and a file must still resolve."""
    repo = self._MakeRepo('git1', self.source)
    git.CreateBranch(repo, 'peach', track=True)
    self.CommitFile(repo, 'peach', 'Keep me.')
    self.assertTrue(git.DoesCommitExistInRepo(repo, 'peach'))
diff --git a/lib/gob_util.py b/lib/gob_util.py
new file mode 100644
index 0000000..2fe6753
--- /dev/null
+++ b/lib/gob_util.py
@@ -0,0 +1,579 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for requesting information for a gerrit server via https.
+
+https://gerrit-review.googlesource.com/Documentation/rest-api.html
+"""
+
+from __future__ import print_function
+
+import base64
+import cookielib
+import datetime
+import httplib
+import json
+import netrc
+import os
+import socket
+import sys
+import urllib
+import urlparse
+from cStringIO import StringIO
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import retry_util
+
+
# Load the user's netrc for authenticating requests; fall back to an empty
# netrc (parsed from /dev/null) if it is missing or malformed.
try:
  NETRC = netrc.netrc()
except (IOError, netrc.NetrcParseError):
  NETRC = netrc.netrc(os.devnull)
# Maximum number of attempts for a single request.
TRY_LIMIT = 10
# NOTE(review): presumably the base delay (seconds) between retry attempts —
# confirm against the retry_util.RetryException call sites.
SLEEP = 0.5

# Controls the transport protocol used to communicate with Gerrit servers using
# git. This is parameterized primarily to enable cros_test_lib.GerritTestCase.
GIT_PROTOCOL = 'https'
+
+
class GOBError(Exception):
  """Exception class for errors communicating with the gerrit-on-borg service."""
  def __init__(self, http_status, *args, **kwargs):
    super(GOBError, self).__init__(*args, **kwargs)
    # HTTP status code that triggered the failure (e.g. 404, 500).
    self.http_status = http_status
    # Prefix the status onto the human-readable message.  NOTE(review): this
    # relies on the Python 2-only BaseException.message attribute.
    self.message = '(%d) %s' % (self.http_status, self.message)
+
+
class InternalGOBError(GOBError):
  """Exception class for GOB errors with status >= 500 (treated as retryable)."""
+
+
+def _QueryString(param_dict, first_param=None):
+  """Encodes query parameters in the key:val[+key:val...] format specified here:
+
+  https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
+  """
+  q = [urllib.quote(first_param)] if first_param else []
+  q.extend(['%s:%s' % (key, val) for key, val in param_dict.iteritems()])
+  return '+'.join(q)
+
+
def GetCookies(host, path, cookie_paths=None):
  """Returns cookies that should be set on a request.

  Used by CreateHttpConn for any requests that do not already specify a Cookie
  header. All requests made by this library are HTTPS.

  Args:
    host: The hostname of the Gerrit service.
    path: The path on the Gerrit service, already including /a/ if applicable.
    cookie_paths: Files to look in for cookies. Defaults to looking in the
      standard places where GoB places cookies.

  Returns:
    A dict of cookie name to value, with no URL encoding applied.
  """
  if cookie_paths is None:
    cookie_paths = (constants.GOB_COOKIE_PATH, constants.GITCOOKIES_PATH)

  cookies = {}
  for cookie_path in cookie_paths:
    if not os.path.isfile(cookie_path):
      continue
    with open(cookie_path) as f:
      for line in f:
        fields = line.strip().split('\t')
        # Skip comments and anything that is not a 7-field Netscape row.
        if line.strip().startswith('#') or len(fields) != 7:
          continue
        domain, xpath, key, value = fields[0], fields[2], fields[5], fields[6]
        if cookielib.domain_match(host, domain) and path.startswith(xpath):
          cookies[key] = value
  return cookies
+
+
def CreateHttpConn(host, path, reqtype='GET', headers=None, body=None):
  """Opens an https connection to a gerrit service, and sends a request.

  Args:
    host: The hostname (and optional ':port') of the Gerrit service.
    path: The path on the Gerrit service; '/a/' is prepended automatically.
    reqtype: The HTTP method, e.g. 'GET' or 'POST'.
    headers: Optional dict of extra HTTP headers to send.
    body: Optional request body; JSON-encoded here if provided.

  Returns:
    An httplib.HTTPSConnection with the request already sent; the caller
    is expected to call getresponse() on it.  The request parameters are
    stashed on the connection as req_host/req_params for later use.
  """
  headers = headers or {}
  # Strip any ':port' suffix; netrc entries are keyed by bare hostname.
  bare_host = host.partition(':')[0]
  auth = NETRC.authenticators(bare_host)
  if auth:
    headers.setdefault('Authorization', 'Basic %s' % (
        base64.b64encode('%s:%s' % (auth[0], auth[2]))))
  else:
    logging.debug('No netrc file found')

  if 'Cookie' not in headers:
    cookies = GetCookies(host, '/a/%s' % path)
    headers['Cookie'] = '; '.join('%s=%s' % (n, v) for n, v in cookies.items())

  if 'User-Agent' not in headers:
    # Identify ourselves by library, entry-point script and chromite revision.
    headers['User-Agent'] = ' '.join((
        'chromite.lib.gob_util',
        os.path.basename(sys.argv[0]),
        git.GetGitRepoRevision(os.path.dirname(os.path.realpath(__file__))),
    ))

  if body:
    body = json.JSONEncoder().encode(body)
    headers.setdefault('Content-Type', 'application/json')
  if logging.getLogger().isEnabledFor(logging.DEBUG):
    logging.debug('%s https://%s/a/%s', reqtype, host, path)
    for key, val in headers.iteritems():
      # Never log credentials.
      if key.lower() in ('authorization', 'cookie'):
        val = 'HIDDEN'
      logging.debug('%s: %s', key, val)
    if body:
      logging.debug(body)
  conn = httplib.HTTPSConnection(host)
  conn.req_host = host
  conn.req_params = {
      'url': '/a/%s' % path,
      'method': reqtype,
      'headers': headers,
      'body': body,
  }
  conn.request(**conn.req_params)
  return conn
+
+
def FetchUrl(host, path, reqtype='GET', headers=None, body=None,
             ignore_204=False, ignore_404=True):
  """Fetches the http response from the specified URL into a string buffer.

  Args:
    host: The hostname of the Gerrit service.
    path: The path on the Gerrit service. This will be prefixed with '/a'
          automatically.
    reqtype: The request type. Can be GET or POST.
    headers: A mapping of extra HTTP headers to pass in with the request.
    body: A string of data to send after the headers are finished.
    ignore_204: for some requests gerrit-on-borg will return 204 to confirm
                proper processing of the request. When processing responses to
                these requests we should expect this status.
    ignore_404: For many requests, gerrit-on-borg will return 404 if the request
                doesn't match the database contents.  In most such cases, we
                want the API to return None rather than raise an Exception.

  Returns:
    A StringIO buffer containing the connection's reply.

  Raises:
    GOBError: On a non-retryable HTTP error (or a 204 when ignore_204 is set,
        which callers use to confirm the expected status).
    InternalGOBError: After retries on transient (>=500) errors are exhausted.
  """
  def _FetchUrlHelper():
    # Typo fix: 'occured' -> 'occurred'.
    err_prefix = 'A transient error occurred while querying %s:\n' % (host,)
    try:
      conn = CreateHttpConn(host, path, reqtype=reqtype, headers=headers,
                            body=body)
      response = conn.getresponse()
    except socket.error as ex:
      logging.warning('%s%s', err_prefix, str(ex))
      raise

    # Normal/good responses.
    response_body = response.read()
    if response.status == 204 and ignore_204:
      # This exception is used to confirm expected response status.
      raise GOBError(response.status, response.reason)
    if response.status == 404 and ignore_404:
      return StringIO()
    elif response.status == 200:
      return StringIO(response_body)

    # Bad responses.
    logging.debug('response msg:\n%s', response.msg)
    http_version = 'HTTP/%s' % ('1.1' if response.version == 11 else '1.0')
    msg = ('%s %s %s\n%s %d %s\nResponse body: %r' %
           (reqtype, conn.req_params['url'], http_version,
            http_version, response.status, response.reason,
            response_body))

    # Ones we can retry.
    if response.status >= 500:
      # A status >=500 is assumed to be a possible transient error; retry.
      logging.warning('%s%s', err_prefix, msg)
      raise InternalGOBError(response.status, response.reason)

    # Ones we cannot retry.
    home = os.environ.get('HOME', '~')
    url = 'https://%s/new-password' % host
    if response.status in (302, 303, 307):
      err_prefix = ('Redirect found; missing/bad %s/.netrc credentials or '
                    'permissions (0600)?\n See %s' % (home, url))
    elif response.status in (400,):
      err_prefix = 'Permission error; talk to the admins of the GoB instance'
    elif response.status in (401,):
      err_prefix = ('Authorization error; missing/bad %s/.netrc credentials or '
                    'permissions (0600)?\n See %s' % (home, url))
    elif response.status in (422,):
      err_prefix = 'Bad request body?'

    if response.status >= 400:
      # The 'X-ErrorId' header is set only on >= 400 response code.
      logging.warning('%s\n%s\nX-ErrorId: %s', err_prefix, msg,
                      response.getheader('X-ErrorId'))
    else:
      logging.warning('%s\n%s', err_prefix, msg)

    try:
      logging.warning('conn.sock.getpeername(): %s', conn.sock.getpeername())
    except AttributeError:
      logging.warning('peer name unavailable')
    raise GOBError(response.status, response.reason)

  return retry_util.RetryException((socket.error, InternalGOBError), TRY_LIMIT,
                                   _FetchUrlHelper, sleep=SLEEP)
+
+
def FetchUrlJson(*args, **kwargs):
  """Fetch the specified URL and parse it as JSON.

  See FetchUrl for arguments.
  """
  fh = FetchUrl(*args, **kwargs)
  # Gerrit prefixes JSON replies with )]}' to defeat XSSI; verify and skip it.
  first_line = fh.readline()
  if first_line and first_line.rstrip() != ")]}'":
    raise GOBError(200, 'Unexpected json output: %s' % first_line)
  remainder = fh.read()
  return json.loads(remainder) if remainder else None
+
+
def QueryChanges(host, param_dict, first_param=None, limit=None, o_params=None,
                 start=None):
  """Queries a gerrit-on-borg server for changes matching query terms.

  Args:
    host: The Gerrit server hostname.
    param_dict: A dictionary of search parameters, as documented here:
        http://gerrit-documentation.googlecode.com/svn/Documentation/2.6/user-search.html
    first_param: A change identifier
    limit: Maximum number of results to return.
    o_params: A list of additional output specifiers, as documented here:
        https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
    start: Offset in the result set to start at.

  Returns:
    A list of json-decoded query results.
  """
  # Note that no attempt is made to escape special characters; YMMV.
  if not param_dict and not first_param:
    raise RuntimeError('QueryChanges requires search parameters')
  terms = ['q=%s' % _QueryString(param_dict, first_param)]
  if start:
    terms.append('S=%d' % start)
  if limit:
    terms.append('n=%d' % limit)
  if o_params:
    terms.extend('o=%s' % p for p in o_params)
  # Don't ignore 404; a query should always return a list, even if it's empty.
  return FetchUrlJson(host, 'changes/?%s' % '&'.join(terms), ignore_404=False)
+
+
def MultiQueryChanges(host, param_dict, change_list, limit=None, o_params=None,
                      start=None):
  """Initiate a query composed of multiple sets of query parameters."""
  if not change_list:
    raise RuntimeError(
        "MultiQueryChanges requires a list of change numbers/id's")
  terms = ['q=%s' % '+OR+'.join(urllib.quote(str(c)) for c in change_list)]
  if param_dict:
    terms.append(_QueryString(param_dict))
  if limit:
    terms.append('n=%d' % limit)
  if start:
    terms.append('S=%s' % start)
  if o_params:
    terms.extend('o=%s' % p for p in o_params)
  path = 'changes/?%s' % '&'.join(terms)
  try:
    return FetchUrlJson(host, path, ignore_404=False)
  except GOBError as e:
    # Append the query path so failures are easier to diagnose.
    raise GOBError(e.http_status, '%s:\n%s' % (e.message, path))
+
+
def GetGerritFetchUrl(host):
  """Given a gerrit host name returns URL of a gerrit instance to fetch from."""
  return 'https://' + host + '/'
+
+
def GetChangePageUrl(host, change_number):
  """Given a gerrit host name and change number, return change page url."""
  template = 'https://%s/#/c/%d/'
  return template % (host, change_number)
+
+
+def _GetChangePath(change):
+  """Given a change id, return a path prefix for the change."""
+  return 'changes/%s' % str(change).replace('/', '%2F')
+
+
def GetChangeUrl(host, change):
  """Given a gerrit host name and change id, return an url for the change."""
  change_path = _GetChangePath(change)
  return 'https://{0}/a/{1}'.format(host, change_path)
+
+
def GetChange(host, change):
  """Query a gerrit server for information about a single change."""
  path = _GetChangePath(change)
  return FetchUrlJson(host, path)
+
+
def GetChangeReview(host, change, revision='current'):
  """Get the current review information for a change."""
  return FetchUrlJson(
      host, '%s/revisions/%s/review' % (_GetChangePath(change), revision))
+
+
def GetChangeCommit(host, change, revision='current'):
  """Get the commit info for a revision of a change.

  (The previous docstring said 'review information', a copy/paste from
  GetChangeReview; this endpoint returns the commit object.)
  """
  path = '%s/revisions/%s/commit' % (_GetChangePath(change), revision)
  return FetchUrlJson(host, path)
+
+
def GetChangeCurrentRevision(host, change):
  """Get information about the latest revision for a given change."""
  jmsg = GetChangeReview(host, change)
  # Falls through to None when the change could not be fetched.
  return jmsg.get('current_revision') if jmsg else None
+
+
def GetChangeDetail(host, change, o_params=None):
  """Query a gerrit server for extended information about a single change."""
  path = '%s/detail' % _GetChangePath(change)
  if o_params:
    path += '?%s' % '&'.join('o=%s' % p for p in o_params)
  return FetchUrlJson(host, path)
+
+
def GetChangeReviewers(host, change):
  """Get information about all reviewers attached to a change."""
  return FetchUrlJson(host, '%s/reviewers' % _GetChangePath(change))
+
+
def AbandonChange(host, change, msg=''):
  """Abandon a gerrit change."""
  return FetchUrlJson(host, '%s/abandon' % _GetChangePath(change),
                      reqtype='POST', body={'message': msg}, ignore_404=False)
+
+
def RestoreChange(host, change, msg=''):
  """Restore a previously abandoned change."""
  return FetchUrlJson(host, '%s/restore' % _GetChangePath(change),
                      reqtype='POST', body={'message': msg}, ignore_404=False)
+
+
def DeleteDraft(host, change):
  """Delete a gerrit draft patch set."""
  try:
    FetchUrl(host, _GetChangePath(change), reqtype='DELETE',
             ignore_204=True, ignore_404=False)
  except GOBError as e:
    if e.http_status == 204:
      # Gerrit acknowledges a successful delete with 204, which FetchUrl
      # surfaces as a GOBError because ignore_204 is set.
      return
    raise
  # Reaching here means gerrit replied 200, which it should never do here.
  raise GOBError(
      200, 'Unexpectedly received a 200 http status while deleting draft %r'
      % change)
+
+
def SubmitChange(host, change, revision='current', wait_for_merge=True):
  """Submits a gerrit change via Gerrit."""
  path = '%s/revisions/%s/submit' % (_GetChangePath(change), revision)
  return FetchUrlJson(host, path, reqtype='POST',
                      body={'wait_for_merge': wait_for_merge},
                      ignore_404=False)
+
+
def CheckChange(host, change, sha1=None):
  """Performs consistency checks on the change, and fixes inconsistencies.

  This is useful for forcing Gerrit to check whether a change has already been
  merged into the git repo. Namely, if |sha1| is provided and the change is in
  'NEW' status, Gerrit will check if a change with that |sha1| is in the repo
  and mark the change as 'MERGED' if it exists.

  Args:
    host: The Gerrit host to interact with.
    change: The Gerrit change ID.
    sha1: An optional hint of the commit's SHA1 in Git.
  """
  path = '%s/check' % (_GetChangePath(change),)
  if sha1:
    body = {'expect_merged_as': sha1}
    headers = {}
  else:
    body = {}
    # An empty POST body needs an explicit zero Content-Length.
    headers = {'Content-Length': '0'}
  return FetchUrlJson(host, path, reqtype='POST', body=body,
                      headers=headers, ignore_404=False)
+
+
def GetReviewers(host, change):
  """Get information about all reviewers attached to a change.

  NOTE: this duplicates GetChangeReviewers() earlier in this module.
  """
  path = '%s/reviewers' % _GetChangePath(change)
  return FetchUrlJson(host, path)
+
+
def AddReviewers(host, change, add=None):
  """Add reviewers to a change.

  Args:
    host: The Gerrit host to interact with.
    change: The Gerrit change ID.
    add: A single reviewer name, or a sequence of them.

  Returns:
    The json-decoded response for the last reviewer added, or None when
    |add| is empty.
  """
  if not add:
    return
  if isinstance(add, basestring):
    # Accept a bare string as a one-element sequence.
    add = (add,)
  path = '%s/reviewers' % _GetChangePath(change)
  for reviewer in add:
    jmsg = FetchUrlJson(host, path, reqtype='POST',
                        body={'reviewer': reviewer}, ignore_404=False)
  return jmsg
+
+
def RemoveReviewers(host, change, remove=None):
  """Remove reviewers from a change."""
  if not remove:
    return
  if isinstance(remove, basestring):
    # Accept a bare string as a one-element sequence.
    remove = (remove,)
  base = _GetChangePath(change)
  for reviewer in remove:
    try:
      FetchUrl(host, '%s/reviewers/%s' % (base, reviewer),
               reqtype='DELETE', ignore_404=False)
    except GOBError as e:
      # On success, gerrit returns status 204; anything else is an error.
      if e.http_status != 204:
        raise
    else:
      raise GOBError(
          200, 'Unexpectedly received a 200 http status while deleting'
               ' reviewer "%s" from change %s' % (reviewer, change))
+
+
def SetReview(host, change, revision='current', msg=None, labels=None,
              notify=None):
  """Set labels and/or add a message to a code review.

  Args:
    host: The Gerrit host to interact with.
    change: The Gerrit change ID.
    revision: The revision to review; defaults to the current patch set.
    msg: An optional review message.
    labels: An optional dict mapping label name to the value to set.
    notify: An optional notify-handling value to forward to Gerrit.

  Raises:
    GOBError: If the change was not found, or a label was not applied.
  """
  if not msg and not labels:
    return
  path = '%s/revisions/%s/review' % (_GetChangePath(change), revision)
  body = {}
  if msg:
    body['message'] = msg
  if labels:
    body['labels'] = labels
  if notify:
    body['notify'] = notify
  response = FetchUrlJson(host, path, reqtype='POST', body=body)
  if not response:
    raise GOBError(404, 'CL %s not found in %s' % (change, host))
  if labels:
    for key, val in labels.iteritems():
      # Bug fix: the original wrote int(response['labels'][key] != int(val)),
      # coercing the *comparison result* instead of the returned label value;
      # compare the two coerced integers instead.
      if ('labels' not in response or key not in response['labels'] or
          int(response['labels'][key]) != int(val)):
        raise GOBError(200, 'Unable to set "%s" label on change %s.' % (
            key, change))
+
+
def SetTopic(host, change, topic):
  """Set |topic| for a change. If |topic| is empty, it will be deleted"""
  return FetchUrlJson(host, '%s/topic' % _GetChangePath(change),
                      reqtype='PUT', body={'topic': topic}, ignore_404=False)
+
+
def ResetReviewLabels(host, change, label, value='0', revision='current',
                      message=None, notify=None):
  """Reset the value of a given label for all reviewers on a change.

  Args:
    host: The Gerrit host to interact with.
    change: The Gerrit change ID.
    label: Name of the label to reset (e.g. 'Code-Review').
    value: The value to reset each reviewer's vote to (default '0').
    revision: The revision to post reviews against; defaults to 'current'.
    message: Optional review message; a default is generated if omitted.
    notify: Optional notify-handling value forwarded in the review body.

  Raises:
    GOBError: If a label could not be set, or a new patchset appeared while
        labels were being reset.
  """
  # This is tricky when working on the "current" revision, because there's
  # always the risk that the "current" revision will change in between API
  # calls.  So, the code dereferences the "current" revision down to a literal
  # sha1 at the beginning and uses it for all subsequent calls.  As a sanity
  # check, the "current" revision is dereferenced again at the end, and if it
  # differs from the previous "current" revision, an exception is raised.
  current = (revision == 'current')
  jmsg = GetChangeDetail(
      host, change, o_params=['CURRENT_REVISION', 'CURRENT_COMMIT'])
  if current:
    revision = jmsg['current_revision']
  value = str(value)
  path = '%s/revisions/%s/review' % (_GetChangePath(change), revision)
  message = message or (
      '%s label set to %s programmatically by chromite.' % (label, value))
  # Only post a review for reviewers whose current vote differs from |value|.
  for review in jmsg.get('labels', {}).get(label, {}).get('all', []):
    if str(review.get('value', value)) != value:
      body = {
          'message': message,
          'labels': {label: value},
          'on_behalf_of': review['_account_id'],
      }
      if notify:
        body['notify'] = notify
      response = FetchUrlJson(host, path, reqtype='POST', body=body)
      if str(response['labels'][label]) != value:
        # NOTE(review): the fallback reads jmsg.get('name'), i.e. the change
        # detail, not the reviewer entry — possibly meant review.get('name');
        # confirm against the Gerrit detail schema.
        username = review.get('email', jmsg.get('name', ''))
        raise GOBError(200, 'Unable to set %s label for user "%s"'
                       ' on change %s.' % (label, username, change))
  if current:
    # Re-dereference 'current' to detect a patchset race (see comment above).
    new_revision = GetChangeCurrentRevision(host, change)
    if not new_revision:
      raise GOBError(
          200, 'Could not get review information for change "%s"' % change)
    elif new_revision != revision:
      raise GOBError(200, 'While resetting labels on change "%s", '
                     'a new patchset was uploaded.' % change)
+
+
def GetTipOfTrunkRevision(git_url):
  """Returns the current git revision on the master branch.

  Args:
    git_url: URL of the repository to query (gitiles +log JSON endpoint).

  Returns:
    The sha1 of the commit at the tip of the master branch.

  Raises:
    GOBError: If revision information could not be fetched or parsed.
  """
  parsed_url = urlparse.urlparse(git_url)
  path = parsed_url[2].rstrip('/') + '/+log/master?n=1&format=JSON'
  j = FetchUrlJson(parsed_url[1], path, ignore_404=False)
  if not j:
    # Bug fix: every other GOBError call site in this module passes
    # (http_status, message); the original passed only the message here,
    # leaving http_status set to the message string.
    raise GOBError(
        200, 'Could not find revision information from %s' % git_url)
  try:
    return j['log'][0]['commit']
  except (IndexError, KeyError, TypeError):
    msg = ('The json returned by https://%s%s has an unfamiliar structure:\n'
           '%s\n' % (parsed_url[1], path, j))
    raise GOBError(200, msg)
+
+
def GetCommitDate(git_url, commit):
  """Returns the date of a particular git commit.

  The returned object is naive in the sense that it doesn't carry any timezone
  information - you should assume UTC.

  Args:
    git_url: URL for the repository to get the commit date from.
    commit: A git commit identifier (e.g. a sha1).

  Returns:
     A datetime object.

  Raises:
    GOBError: If commit information could not be fetched or parsed.
  """
  parsed_url = urlparse.urlparse(git_url)
  path = '%s/+log/%s?n=1&format=JSON' % (parsed_url.path.rstrip('/'), commit)
  j = FetchUrlJson(parsed_url.netloc, path, ignore_404=False)
  if not j:
    # Bug fix: GOBError is constructed as (http_status, message) everywhere
    # else in this module; the original passed only the message here.
    raise GOBError(
        200, 'Could not find revision information from %s' % git_url)
  try:
    commit_timestr = j['log'][0]['committer']['time']
  except (IndexError, KeyError, TypeError):
    msg = ('The json returned by https://%s%s has an unfamiliar structure:\n'
           '%s\n' % (parsed_url.netloc, path, j))
    raise GOBError(200, msg)
  try:
    # We're parsing a string of the form 'Tue Dec 02 17:48:06 2014'.
    return datetime.datetime.strptime(commit_timestr,
                                      constants.GOB_COMMIT_TIME_FORMAT)
  except ValueError:
    raise GOBError(200, 'Failed parsing commit time "%s"' % commit_timestr)
+
+
def GetAccount(host):
  """Get information about the user account."""
  # 'self' resolves to the account that owns the credentials in use.
  path = 'accounts/self'
  return FetchUrlJson(host, path)
diff --git a/lib/gob_util_unittest b/lib/gob_util_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/gob_util_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/gob_util_unittest.py b/lib/gob_util_unittest.py
new file mode 100644
index 0000000..cfbf90e
--- /dev/null
+++ b/lib/gob_util_unittest.py
@@ -0,0 +1,116 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for gob_util.py"""
+
+from __future__ import print_function
+
+import httplib
+import mock
+import tempfile
+
+from chromite.cbuildbot import config_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import gob_util
+
+
+site_config = config_lib.GetConfig()
+
+
class FakeHTTPResponse(object):
  """Enough of a HTTPResponse for FetchUrl.

  See https://docs.python.org/2/library/httplib.html#httpresponse-objects
  for more details.
  """

  def __init__(self, body='', headers=(), reason=None, status=200, version=11):
    self.body = body
    self.headers = dict(headers)
    self.msg = None
    self.status = status
    self.version = version
    # Default the reason phrase from the standard status-code table.
    self.reason = httplib.responses[status] if reason is None else reason

  def read(self):
    return self.body

  def getheader(self, name, default=None):
    return self.headers.get(name, default)

  def getheaders(self):
    return tuple(self.headers.items())
+
+
class FakeHTTPConnection(object):
  """Enough of a HTTPConnection result for FetchUrl."""

  def __init__(self, req_url='/', req_method='GET', req_headers=None,
               req_body=None, **kwargs):
    # Remaining keyword args configure the FakeHTTPResponse we hand back.
    self.kwargs = dict(kwargs)
    self.req_params = dict(url=req_url, method=req_method,
                           headers=req_headers, body=req_body)

  def getresponse(self):
    return FakeHTTPResponse(**self.kwargs)
+
+
class GobTest(cros_test_lib.MockTestCase):
  """Unittests that use mocks."""

  def testUtf8Response(self):
    """Handle gerrit responses w/UTF8 in them."""
    utf8_data = 'That\xe2\x80\x99s an error. That\xe2\x80\x99s all we know.'
    with mock.patch.object(gob_util, 'CreateHttpConn', autospec=False) as m:
      # A 200 response with a UTF8 body should be read without raising.
      m.return_value = FakeHTTPConnection(body=utf8_data)
      gob_util.FetchUrl('', '')

      # A 5xx status is treated as transient; FetchUrl raises
      # InternalGOBError once retries are exhausted (main() sets
      # gob_util.TRY_LIMIT to 1 for these tests).
      m.return_value = FakeHTTPConnection(body=utf8_data, status=502)
      self.assertRaises(gob_util.InternalGOBError, gob_util.FetchUrl, '', '')
+
+
class GetCookieTests(cros_test_lib.TestCase):
  """Unittests for GetCookies()"""

  def testSimple(self):
    # Netscape cookie-jar line: domain, include-subdomains flag, path,
    # secure flag, expiry, name, value (tab-separated).
    f = tempfile.NamedTemporaryFile()
    f.write('.googlesource.com\tTRUE\t/f\tTRUE\t2147483647\to\tfoo=bar')
    f.flush()
    # Host and path match the cookie's domain/path, so the cookie is returned.
    cookies = gob_util.GetCookies('foo.googlesource.com', '/foo', [f.name])
    self.assertEqual(cookies, {'o': 'foo=bar'})
    # Host outside .googlesource.com: no cookie.
    cookies = gob_util.GetCookies('google.com', '/foo', [f.name])
    self.assertEqual(cookies, {})
    # Path '/' does not match the cookie path '/f': no cookie.
    cookies = gob_util.GetCookies('foo.googlesource.com', '/', [f.name])
    self.assertEqual(cookies, {})
+
+
@cros_test_lib.NetworkTest()
class NetworkGobTest(cros_test_lib.TestCase):
  """Unittests that talk to real Gerrit."""

  def test200(self):
    """Test successful loading of change."""
    gob_util.FetchUrlJson(site_config.params.EXTERNAL_GOB_HOST,
                          'changes/227254/detail')

  def test404(self):
    # A bad path returns None by default (ignore_404 defaults to True).
    gob_util.FetchUrlJson(site_config.params.EXTERNAL_GOB_HOST, 'foo/bar/baz')

  def test404Exception(self):
    # With ignore_404=False the 404 surfaces as a GOBError instead.
    with self.assertRaises(gob_util.GOBError) as ex:
      gob_util.FetchUrlJson(site_config.params.EXTERNAL_GOB_HOST, 'foo/bar/baz',
                            ignore_404=False)
    self.assertEqual(ex.exception.http_status, 404)
+
+
def main(_argv):
  # Disable retries so tests of failing responses don't loop/sleep through
  # the full TRY_LIMIT retry budget.
  gob_util.TRY_LIMIT = 1
  cros_test_lib.main(module=__name__)
diff --git a/lib/graphite.py b/lib/graphite.py
new file mode 100644
index 0000000..c108c7c
--- /dev/null
+++ b/lib/graphite.py
@@ -0,0 +1,108 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Entry point to stats reporting objects for cbuildbot.
+
+These factories setup the stats collection modules (es_utils, statsd) correctly
+so that cbuildbot stats from different sources (official builders, trybots,
+developer machines etc.) stay separate.
+"""
+
+from __future__ import print_function
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import topology
+from chromite.lib import factory
+from chromite.lib.graphite_lib import es_utils
+from chromite.lib.graphite_lib import stats
+from chromite.lib.graphite_lib import stats_es_mock
+
+
# Connection-type names; used as keys into the factories' type maps below.
CONNECTION_TYPE_DEBUG = 'debug'
CONNECTION_TYPE_MOCK = 'none'
CONNECTION_TYPE_PROD = 'prod'
CONNECTION_TYPE_READONLY = 'readonly'
+
+# The types definitions below make linter unhappy. The 'right' way of using
+# functools.partial makes functools.wraps (and hence our decorators) blow up.
+# pylint: disable=unnecessary-lambda
+
class ESMetadataFactoryClass(factory.ObjectFactory):
  """Factory class for setting up an Elastic Search connection."""

  # Maps connection-type name to a wrapped constructor.  CachedFunctionCall
  # presumably memoizes the built connection so repeated lookups share one
  # instance — confirm against chromite.lib.factory.
  _ELASTIC_SEARCH_TYPES = {
      CONNECTION_TYPE_PROD: factory.CachedFunctionCall(
          lambda: es_utils.ESMetadata(
              use_http=constants.ELASTIC_SEARCH_USE_HTTP,
              host=topology.topology.get(topology.ELASTIC_SEARCH_HOST_KEY),
              port=topology.topology.get(topology.ELASTIC_SEARCH_PORT_KEY),
              index=constants.ELASTIC_SEARCH_INDEX,
              udp_port=topology.topology.get(
                  topology.ELASTIC_SEARCH_UDP_PORT_KEY))),
      CONNECTION_TYPE_READONLY: factory.CachedFunctionCall(
          lambda: es_utils.ESMetadataRO(
              use_http=constants.ELASTIC_SEARCH_USE_HTTP,
              host=topology.topology.get(topology.ELASTIC_SEARCH_HOST_KEY),
              port=topology.topology.get(topology.ELASTIC_SEARCH_PORT_KEY),
              index=constants.ELASTIC_SEARCH_INDEX,
              udp_port=topology.topology.get(
                  topology.ELASTIC_SEARCH_UDP_PORT_KEY)))
      }

  def __init__(self):
    # The final lambda compares setups: two setups are compatible only when
    # they name the same connection type.
    super(ESMetadataFactoryClass, self).__init__(
        'elastic search connection', self._ELASTIC_SEARCH_TYPES,
        lambda from_setup, to_setup: from_setup == to_setup)

  def SetupProd(self):
    """Set up this factory to connect to the production Elastic Search."""
    self.Setup(CONNECTION_TYPE_PROD)

  def SetupReadOnly(self):
    """Set up this factory to allow querying the production Elastic Search."""
    self.Setup(CONNECTION_TYPE_READONLY)
+
+
# Module-level singleton instance used by callers of this module.
ESMetadataFactory = ESMetadataFactoryClass()
+
+
class StatsFactoryClass(factory.ObjectFactory):
  """Factory class for setting up a Statsd connection."""

  # Maps connection-type name to a wrapped constructor.  Prod and debug
  # differ only in the metric-name prefix; 'none' yields a do-nothing mock.
  _STATSD_TYPES = {
      CONNECTION_TYPE_PROD: factory.CachedFunctionCall(
          lambda: stats.Statsd(
              es=ESMetadataFactory.GetInstance(),
              host=topology.topology.get(topology.STATSD_HOST_KEY),
              port=topology.topology.get(topology.STATSD_PORT_KEY),
              prefix=constants.STATSD_PROD_PREFIX)),
      CONNECTION_TYPE_DEBUG: factory.CachedFunctionCall(
          lambda: stats.Statsd(
              es=ESMetadataFactory.GetInstance(),
              host=topology.topology.get(topology.STATSD_HOST_KEY),
              port=topology.topology.get(topology.STATSD_PORT_KEY),
              prefix=constants.STATSD_DEBUG_PREFIX)),
      CONNECTION_TYPE_MOCK: factory.CachedFunctionCall(
          lambda: stats_es_mock.Stats())
      }

  def __init__(self):
    # Setups are compatible only when they name the same connection type.
    super(StatsFactoryClass, self).__init__(
        'statsd connection', self._STATSD_TYPES,
        lambda from_setup, to_setup: from_setup == to_setup)

  def SetupProd(self):
    """Set up this factory to connect to the production Statsd."""
    self.Setup(CONNECTION_TYPE_PROD)

  def SetupDebug(self):
    """Set up this factory to connect to the debug Statsd."""
    self.Setup(CONNECTION_TYPE_DEBUG)

  def SetupMock(self):
    """Set up this factory to return a mock statsd object."""
    self.Setup(CONNECTION_TYPE_MOCK)
+
+
# Module-level singleton instance used by callers of this module.
StatsFactory = StatsFactoryClass()
diff --git a/lib/graphite_lib/__init__.py b/lib/graphite_lib/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/graphite_lib/__init__.py
diff --git a/lib/graphite_lib/elasticsearch_mock.py b/lib/graphite_lib/elasticsearch_mock.py
new file mode 100644
index 0000000..58998a6
--- /dev/null
+++ b/lib/graphite_lib/elasticsearch_mock.py
@@ -0,0 +1,19 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Mocks for ElasticSearch."""
+
+from __future__ import print_function
+
+from chromite.lib.graphite_lib import stats_es_mock
+
+
class Elasticsearch(stats_es_mock.mock_class_base):
  """Mock class for es_mock.

  Stand-in for elasticsearch.Elasticsearch when the real package is not
  installed; behavior is inherited entirely from
  stats_es_mock.mock_class_base (presumably swallows arbitrary constructor
  args and method calls — see that class).
  """
  pass
+
+
class ElasticsearchException(Exception):
  """Mock stand-in for elasticsearch.ElasticsearchException."""
diff --git a/lib/graphite_lib/elasticsearch_mock_unittest b/lib/graphite_lib/elasticsearch_mock_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/lib/graphite_lib/elasticsearch_mock_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/graphite_lib/elasticsearch_mock_unittest.py b/lib/graphite_lib/elasticsearch_mock_unittest.py
new file mode 100644
index 0000000..59608fa
--- /dev/null
+++ b/lib/graphite_lib/elasticsearch_mock_unittest.py
@@ -0,0 +1,47 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit test for ElasticSearch mock."""
+
+from __future__ import print_function
+
+import unittest
+
+from chromite.lib.graphite_lib import elasticsearch_mock as elasticsearch
+
class statsd_mock_test(unittest.TestCase):
  """Test statsd_mock"""
  # NOTE(review): despite the name/docstring, this case exercises the
  # elasticsearch mock, not a statsd mock.
  def setUp(self):
    # Construction must accept the same kwargs as the real client.
    self.es = elasticsearch.Elasticsearch(host='host',
                                          port=1,
                                          timeout=10)


  def test_index_call_mock(self):
    """Test mock Elasticsearch.index method"""
    self.es.index(index='blah', doc_type='blah blah', body='random')


  def test_index_exists_mock(self):
    """Test mock Elasticsearch.indices.exists method"""
    self.es.indices.exists(index='random index')


  def test_index_delete_mock(self):
    """Test mock Elasticsearch.indices.delete method"""
    self.es.indices.delete(index='random index')


  def test_search_mock(self):
    """Test mock Elasticsearch.search method"""
    self.es.search(index='index', body='query')


  def test_exception_mock(self):
    """Test mock elasticsearch.ElasticsearchException method"""
    # The mock exception must be raisable and catchable like the real one.
    try:
      raise elasticsearch.ElasticsearchException('error message')
    except elasticsearch.ElasticsearchException:
      pass
+
diff --git a/lib/graphite_lib/es_utils.py b/lib/graphite_lib/es_utils.py
new file mode 100644
index 0000000..b49ef7b
--- /dev/null
+++ b/lib/graphite_lib/es_utils.py
@@ -0,0 +1,425 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file defines helper functions for putting entries into elasticsearch.
+
+"""Utils for sending metadata to elasticsearch
+
+Elasticsearch is a key-value store NOSQL database.
+Source is here: https://github.com/elasticsearch/elasticsearch
+We will be using es to store our metadata.
+
+For example, if we wanted to store the following metadata:
+
+metadata = {
+    'host_id': 1
+    'job_id': 20
+    'time_start': 100000
+    'time_recorded': 100006
+}
+
+The following call will send metadata to the default es server.
+    es_utils.ESMetadata().post(index, metadata)
+We can also specify which port and host to use.
+
+Using for testing: Sometimes, when we choose a single index
+to put entries into, we want to clear that index of all
+entries before running our tests. Use clear_index function.
+(see es_utils_functionaltest.py for an example)
+
+This file also contains methods for sending queries to es. Currently,
+the query (json dict) we send to es is quite complicated (but flexible).
We've included several methods that compose queries that would be useful.
These methods are all named create_*_query().
+
+For example, the below query returns job_id, host_id, and job_start
+for all job_ids in [0, 99999] and host_id matching 10.
+
+range_eq_query = {
+    'fields': ['job_id', 'host_id', 'job_start'],
+    'query': {
+        'filtered': {
+            'query': {
+                'match': {
+                    'host_id': 10,
+                }
            },
            'filter': {
+                'range': {
+                    'job_id': {
+                        'gte': 0,
+                        'lte': 99999,
+                    }
+                }
+            }
+        }
+    }
+}
+
+To send a query once it is created, call execute_query() to send it to the
+intended elasticsearch server.
+
+"""
+
+from __future__ import print_function
+
+import collections
+import json
+import socket
+import time
+
+from chromite.lib import cros_logging as logging
+
+try:
+  import elasticsearch
+except ImportError:
+  from chromite.lib.graphite_lib import elasticsearch_mock as elasticsearch
+
+
+DEFAULT_TIMEOUT = 3
+
+
class EsUtilException(Exception):
  """Raised when helper functions in this module fail."""
+
+
# Result of an ES query: 'total' matching documents and the returned 'hits'.
QueryResult = collections.namedtuple('QueryResult', ['total', 'hits'])
+
+
+class ESMetadataRO(object):
+  """Class handling es connection for metadata."""
+
  @property
  def es(self):
    """Read only property, lazily initialized"""
    # Defer constructing the client until first access so merely creating
    # an ESMetadataRO object does not touch elasticsearch.
    if not self._es:
      self._es = elasticsearch.Elasticsearch(host=self.host,
                                             port=self.port,
                                             timeout=self.timeout)
    return self._es
+
+
  def __init__(self, use_http, host, port, index, udp_port,
               timeout=DEFAULT_TIMEOUT, forgiving=True):
    """Initialize ESMetadata object.

    Args:
      use_http: Whether to send data to ES using HTTP.
      host: Elasticsearch host.
      port: Elasticsearch port.
      index: What index the metadata is stored in.
      udp_port: What port to use for UDP data.
      timeout: How long to wait while connecting to es.
      forgiving: If set to true, this object is API compatible with the RW
                 version of ESMetadata, and silently ignores write requests.
                 Otherwise, it errors out on write requests.
    """
    self.use_http = use_http
    self.host = host
    self.port = port
    self.index = index
    self.udp_port = udp_port
    self.timeout = timeout
    self.forgiving = forgiving
    # Connection handle; created lazily by the `es` property.
    self._es = None
+
+
+  # TODO(akeshet) remove this pylint workaround.
+  # pylint: disable=dangerous-default-value
+  def _compose_query(self, equality_constraints=[], fields_returned=None,
+                     range_constraints=[], size=1000000, sort_specs=None,
+                     regex_constraints=[], batch_constraints=[]):
+    """Creates a dict. representing multple range and/or equality queries.
+
+    Example input:
+    _compose_query(
+        fields_returned = ['time_recorded', 'hostname',
+                           'status', 'dbg_str'],
+        equality_constraints = [
+            ('_type', 'host_history'),
+            ('hostname', '172.22.169.106'),
+        ],
+        range_constraints = [
+            ('time_recorded', 1405628341.904379, 1405700341.904379)
+        ],
+        size=20,
+        sort_specs=[
+            'hostname',
+            {'time_recorded': 'asc'},
+        ]
+    )
+
+    Output:
+    {
+        'fields': ['time_recorded', 'hostname', 'status', 'dbg_str'],
+        'query': {
+            'bool': {
+                'minimum_should_match': 3,
+                'should': [
+                    {
+                        'term':  {
+                            '_type': 'host_history'
+                        }
+                    },
+                    {
+                        'term': {
+                            'hostname': '172.22.169.106'
+                        }
+                    },
+                    {
+                        'range': {
+                            'time_recorded': {
+                                'gte': 1405628341.904379,
+                                'lte': 1405700341.904379
+                            }
+                        }
+                    }
+                ]
+            },
+        },
+        'size': 20
+        'sort': [
+            'hostname',
+            { 'time_recorded': 'asc'},
+        ]
+    }
+
+    Args:
+      equality_constraints: list of tuples of (field, value) pairs
+          representing what each field should equal to in the query.
+          e.g. [ ('field1', 1), ('field2', 'value') ]
+      fields_returned: list of fields that we should return when
+          the query is executed. Set it to None to return all fields. Note
+          that the key/vals will be stored in _source key of the hit object,
+          if fields_returned is set to None.
+      range_constraints: list of tuples of (field, low, high) pairs
+          representing what each field should be between (inclusive).
+          e.g. [ ('field1', 2, 10), ('field2', -1, 20) ]
+          If you want one side to be unbounded, you can use None.
+          e.g. [ ('field1', 2, None) ] means value of field1 >= 2.
+      size: max number of entries to return. Default is 1000000.
+      sort_specs: A list of fields to sort on, tiebreakers will be
+          broken by the next field(s).
+      regex_constraints: A list of regex constraints of tuples of
+          (field, value) pairs, e.g., [('filed1', '.*value.*')].
+      batch_constraints: list of tuples of (field, list) pairs
+          representing each field should be equal to one of the values
+          in the list.
+          e.g., [ ('job_id', [10, 11, 12, 13]) ]
+
+    Returns:
+      dictionary object that represents query to es. This will return
+      None if there are no equality constraints
+      and no range constraints.
+    """
+    if not equality_constraints and not range_constraints:
+      raise EsUtilException('No range or equality constraints specified.')
+
+    # Creates list of range dictionaries to put in the 'should' list.
+    range_list = []
+    if range_constraints:
+      for key, low, high in range_constraints:
+        if low is None and high is None:
+          continue
+        temp_dict = {}
+        if low is not None:
+          temp_dict['gte'] = low
+        if high is not None:
+          temp_dict['lte'] = high
+        range_list.append({'range': {key: temp_dict}})
+
+    # Creates the list of term dictionaries to put in the 'should' list.
+    eq_list = [{'term': {k: v}} for k, v in equality_constraints if k]
+    batch_list = [{'terms': {k: v}} for k, v in batch_constraints if k]
+    regex_list = [{'regexp': {k: v}} for k, v in regex_constraints if k]
+    constraints = eq_list + batch_list + range_list + regex_list
+    num_constraints = len(constraints)
+    query = {
+        'query': {
+            'bool': {
+                'should': constraints,
+                'minimum_should_match': num_constraints,
+            }
+        },
+    }
+    if fields_returned:
+      query['fields'] = fields_returned
+    query['size'] = size
+    if sort_specs:
+      query['sort'] = sort_specs
+    return query
+
+
+  def execute_query(self, query):
+    """Makes a query on the given index.
+
+    Args:
+      query: query dictionary (see _compose_query)
+
+    Returns:
+      A QueryResult instance describing the result.
+
+    Example output:
+    {
+        "took" : 5,
+        "timed_out" : false,
+        "_shards" : {
+            "total" : 16,
+            "successful" : 16,
+            "failed" : 0
+        },
+        "hits" : {
+            "total" : 4,
+            "max_score" : 1.0,
+            "hits" : [ {
+                "_index" : "graphite_metrics2",
+                "_type" : "metric",
+                "_id" : "rtntrjgdsafdsfdsfdsfdsfdssssssss",
+                "_score" : 1.0,
+                "_source":{"target_type": "timer",
+                           "host_id": 1,
+                           "job_id": 22,
+                           "time_start": 400}
+            }, {
+                "_index" : "graphite_metrics2",
+                "_type" : "metric",
+                "_id" : "dfgfddddddddddddddddddddddhhh",
+                "_score" : 1.0,
+                "_source":{"target_type": "timer",
+                    "host_id": 2,
+                    "job_id": 23,
+                    "time_start": 405}
+            }, {
+            "_index" : "graphite_metrics2",
+            "_type" : "metric",
+            "_id" : "erwerwerwewtrewgfednvfngfngfrhfd",
+            "_score" : 1.0,
+            "_source":{"target_type": "timer",
+                       "host_id": 3,
+                       "job_id": 24,
+                       "time_start": 4098}
+            }, {
+                "_index" : "graphite_metrics2",
+                "_type" : "metric",
+                "_id" : "dfherjgwetfrsupbretowegoegheorgsa",
+                "_score" : 1.0,
+                "_source":{"target_type": "timer",
+                           "host_id": 22,
+                           "job_id": 25,
+                           "time_start": 4200}
+            } ]
+        }
+    }
+    """
+    if not self.es.indices.exists(index=self.index):
+      logging.error('Index (%s) does not exist on %s:%s',
+                    self.index, self.host, self.port)
+      return None
+    result = self.es.search(index=self.index, body=query)
+    # Check if all matched records are returned. It could be the size is
+    # set too small. Special case for size set to 1, as that means that
+    # the query cares about the first matched entry.
+    # TODO: Use pagination in Elasticsearch. This needs major change on how
+    #       query results are iterated.
+    size = query.get('size', 1)
+    return_count = len(result['hits']['hits'])
+    total_match = result['hits']['total']
+    if total_match > return_count and size != 1:
+      logging.error('There are %d matched records, only %d entries are '
+                    'returned. Query size is set to %d.', total_match,
+                    return_count, size)
+
+    # Extract the actual results from the query.
+    output = QueryResult(total_match, [])
+    for hit in result['hits']['hits']:
+      converted = {}
+      if 'fields' in hit:
+        for key, value in hit['fields'].items():
+          converted[key] = value[0]
+      else:
+        converted = hit['_source'].copy()
+      output.hits.append(converted)
+    return output
+
+
+  def query(self, *args, **kwargs):
+    """The arguments to this function are the same as _compose_query."""
+    query = self._compose_query(*args, **kwargs)
+    return self.execute_query(query)
+
+  def post(self, *args, **kwargs):  # pylint: disable=unused-argument
+    """A default implementation of post.
+
+    This implementation is noop if the object is forgiving, errors out
+    otherwise.
+    """
+    if not self.forgiving:
+      raise AttributeError('%s can not post data.' % self.__class__.__name__)
+
+
class ESMetadata(ESMetadataRO):
  """Class handling read and write es connection for metadata."""

  def _send_data_http(self, type_str, metadata):
    """Sends data to insert into elasticsearch using HTTP.

    Args:
      type_str: sets the _type field in elasticsearch db.
      metadata: dictionary object containing metadata
    """
    self.es.index(index=self.index, doc_type=type_str, body=metadata)

  def _send_data_udp(self, type_str, metadata):
    """Sends data to insert into elasticsearch using UDP.

    Any socket error is logged and swallowed (best-effort delivery).

    Args:
      type_str: sets the _type field in elasticsearch db.
      metadata: dictionary object containing metadata
    """
    try:
      # Header line naming the index/type, then the document itself --
      # newline-delimited JSON, one action per pair of lines.
      message = json.dumps(
          {'index': {'_index': self.index, '_type': type_str}},
          separators=(', ', ' : '))
      message += '\n'
      # Metadata.
      message += json.dumps(metadata, separators=(', ', ' : '))
      message += '\n'

      sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
      try:
        sock.sendto(message, (self.host, self.udp_port))
      finally:
        # Close explicitly so file descriptors are not leaked; the
        # original code relied on garbage collection to do this.
        sock.close()
    except socket.error as e:
      logging.warning(e)

  def post(self, type_str, metadata, log_time_recorded=True, **kwargs):
    """Wraps call of send_data, inserts entry into elasticsearch.

    Empty metadata is silently ignored.  Elasticsearch errors are logged
    rather than raised.

    Args:
      type_str: Sets the _type field in elasticsearch db.
      metadata: Dictionary object containing metadata
      log_time_recorded: Whether to automatically record the time
                         this metadata is recorded. Default is True.
      kwargs: Additional metadata fields
    """
    if not metadata:
      return

    # Copy before mutating so the caller's dict is left untouched.
    metadata = metadata.copy()
    metadata.update(kwargs)
    # metadata should not contain anything with key '_type'; a '_type'
    # key embedded in the metadata overrides |type_str| and is stripped
    # from the document body.
    if '_type' in metadata:
      type_str = metadata['_type']
      del metadata['_type']
    if log_time_recorded:
      metadata['time_recorded'] = time.time()
    try:
      if self.use_http:
        self._send_data_http(type_str, metadata)
      else:
        self._send_data_udp(type_str, metadata)
    except elasticsearch.ElasticsearchException as e:
      logging.error(e)
diff --git a/lib/graphite_lib/stats.py b/lib/graphite_lib/stats.py
new file mode 100644
index 0000000..e772c06
--- /dev/null
+++ b/lib/graphite_lib/stats.py
@@ -0,0 +1,198 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module for sending statistics to statsd."""
+
+from __future__ import print_function
+
+from chromite.lib import cros_logging as logging
+
+
+# Autotest uses this library and can not guarantee existence of the statsd
+# module.
+try:
+  import statsd
+except ImportError:
+  from chromite.lib.graphite_lib import statsd_mock as statsd
+
+
+# This is _type for all metadata logged to elasticsearch from here.
+STATS_ES_TYPE = 'stats_metadata'
+
+
+# statsd logs details about what it is sending at the DEBUG level, and we
+# really don't want to see tons of stats in the logs, so all of these are
+# silenced by setting the logging level for all of statsd to WARNING.
+logging.getLogger('statsd').setLevel(logging.WARNING)
+
+
def _prepend_init(_es, _conn, _prefix):
  """Return a class decorator that bakes in es, connection and prefix.

  The decorated stat class gets an __init__ that prefixes the stat name
  (see _add_prefix), defaults the connection to |_conn|, and attaches the
  shared |_es| object and optional per-instance metadata.
  """
  def wrapper(original):
    """Decorator to override __init__."""

    class _Derived(original):
      """Derived stats class."""
      # pylint: disable=super-on-old-class
      def __init__(self, name, connection=None, bare=False,
                   metadata=None):
        name = self._add_prefix(name, _prefix, bare)
        # Fall back to the shared module connection when none is given.
        conn = connection if connection else _conn
        super(_Derived, self).__init__(name, conn)
        # metadata: optional dict forwarded to es on each send; may be None.
        self.metadata = metadata
        self.es = _es

      def _add_prefix(self, name, prefix, bare=False):
        """Add hostname prefix.

        Since many people run their own local AFE, stats from a local
        setup shouldn't get mixed into stats from prod.  Therefore,
        this function exists to add a prefix, nominally the name of
        the local server, if |name| doesn't already start with the
        server name, so that each person has their own "folder" of
        stats that they can look at.

        However, this functionality might not always be wanted, so we
        allow one to pass in |bare=True| to force us to not prepend
        the local server name. (I'm not sure when one would use this,
        but I don't see why I should disallow it...)

        >>> prefix = 'potato_nyc'
        >>> _add_prefix('rpc.create_job', prefix, bare=False)
        'potato_nyc.rpc.create_job'
        >>> _add_prefix('rpc.create_job', prefix, bare=True)
        'rpc.create_job'

        Args:
          name: Stat name.
          prefix: The string to prepend to |name| if it does not already
                  start with |prefix|.
          bare: If True, |name| will be returned un-altered.

        Returns:
          A string to use as the stat name.
        """
        if not bare and not name.startswith(prefix):
          name = '%s.%s' % (prefix, name)
        return name

    return _Derived
  return wrapper
+
+
class Statsd(object):
  """Parent class for recording stats to graphite.

  Instantiating this class builds a family of stat classes (Average,
  Counter, Gauge, Timer, Raw) bound to one statsd connection; each also
  mirrors sent values as metadata to the given es object.
  """
  def __init__(self, es, host, port, prefix):
    """Builds the bound stat classes.

    Args:
      es: Object with a post() method (metadata sink for every send).
      host: statsd server host.
      port: statsd server port.
      prefix: Name prefix applied to every stat (see _prepend_init).
    """
    # This is the connection that we're going to reuse for every client
    # that gets created. This should maximally reduce overhead of stats
    # logging.
    self.conn = statsd.Connection(host=host, port=port)

    @_prepend_init(es, self.conn, prefix)
    class Average(statsd.Average):
      """Wrapper around statsd.Average."""

      def send(self, subname, value):
        """Sends time-series data to graphite and metadata (if any) to es.

        Args:
          subname: The subname to report the data to (i.e.
                   'daisy.reboot')
          value: Value to be sent.
        """
        statsd.Average.send(self, subname, value)
        self.es.post(type_str=STATS_ES_TYPE, metadata=self.metadata,
                     subname=subname, value=value)

    self.Average = Average

    @_prepend_init(es, self.conn, prefix)
    class Counter(statsd.Counter):
      """Wrapper around statsd.Counter."""

      def _send(self, subname, value):
        """Sends time-series data to graphite and metadata (if any) to es.

        Args:
          subname: The subname to report the data to (i.e.
                   'daisy.reboot')
          value: Value to be sent.
        """
        statsd.Counter._send(self, subname, value)
        self.es.post(type_str=STATS_ES_TYPE, metadata=self.metadata,
                     subname=subname, value=value)

    self.Counter = Counter

    @_prepend_init(es, self.conn, prefix)
    class Gauge(statsd.Gauge):
      """Wrapper around statsd.Gauge."""

      def send(self, subname, value):
        """Sends time-series data to graphite and metadata (if any) to es.

        Args:
          subname: The subname to report the data to (i.e.
                   'daisy.reboot')
          value: Value to be sent.
        """
        statsd.Gauge.send(self, subname, value)
        self.es.post(type_str=STATS_ES_TYPE, metadata=self.metadata,
                     subname=subname, value=value)

    self.Gauge = Gauge

    @_prepend_init(es, self.conn, prefix)
    class Timer(statsd.Timer):
      """Wrapper around statsd.Timer; also usable as a context manager."""

      # To override subname to not implicitly append 'total'.
      def stop(self, subname=''):
        """Stops the timer, reporting under |subname| (default: no suffix)."""
        statsd.Timer.stop(self, subname)


      def send(self, subname, value):
        """Sends time-series data to graphite and metadata (if any) to es.

        NOTE(review): unlike the other wrappers, the es metadata here uses
        self.name rather than |subname| -- looks intentional (the timer's
        full name), but confirm before relying on it.

        Args:
          subname: The subname to report the data to (i.e.
                   'daisy.reboot')
          value: Value to be sent.
        """
        statsd.Timer.send(self, subname, value)
        self.es.post(type_str=STATS_ES_TYPE, metadata=self.metadata,
                     subname=self.name, value=value)


      def __enter__(self):
        """Starts the timer and yields it for 'with ... as t' use."""
        self.start()
        return self


      def __exit__(self, exc_type, exc_value, traceback):
        """Stops (and reports) the timer only when no exception occurred."""
        if exc_type is None:
          self.stop()

    self.Timer = Timer

    @_prepend_init(es, self.conn, prefix)
    class Raw(statsd.Raw):
      """Wrapper around statsd.Raw."""

      def send(self, subname, value, timestamp=None):
        """Sends time-series data to graphite and metadata (if any) to es.

        The datapoint we send is pretty much unchanged (will not be
        aggregated).

        Args:
          subname: The subname to report the data to (i.e.
                   'daisy.reboot')
          value: Value to be sent.
          timestamp: Time associated with when this stat was sent.
        """
        statsd.Raw.send(self, subname, value, timestamp)
        self.es.post(type_str=STATS_ES_TYPE, metadata=self.metadata,
                     subname=subname, value=value, timestamp=timestamp)

    self.Raw = Raw
diff --git a/lib/graphite_lib/stats_es_mock.py b/lib/graphite_lib/stats_es_mock.py
new file mode 100644
index 0000000..0090fb9
--- /dev/null
+++ b/lib/graphite_lib/stats_es_mock.py
@@ -0,0 +1,50 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=deprecated-pragma
+
+"""ElasticSearch mock."""
+
+from __future__ import print_function
+
class mock_class_base(object):
  """Base class for a mock statsd/es class.

  Every attribute access resolves to a harmless stand-in: arbitrary
  method calls succeed and return None.  A few names get special
  handling so common statsd/es call patterns keep working.
  """

  def __init__(self, *args, **kwargs):
    """Accepts and ignores any constructor arguments."""

  def __getattribute__(self, name):
    # A decorator must hand back the wrapped function unchanged.
    if name == 'decorate':
      return lambda func: func

    # TODO (dshi) crbug.com/256111 - Find better solution for mocking
    # statsd.
    if name == 'get_client':
      return lambda *args, **kwargs: self

    # es-style clients chain through '.indices'; give them another mock.
    if name == 'indices':
      return mock_class_base()

    # Everything else is a callable that swallows its arguments.
    def _noop(*args, **kwargs):
      pass
    return _noop
+
class Stats(object):
  """Stubbed out Stats class to replace stats.Statsd in unit tests.

  Each nested class is a silent no-op stand-in for the corresponding
  stats.Statsd wrapper (see mock_class_base).
  """
  class Average(mock_class_base):
    """No-op stand-in for stats.Statsd.Average."""
  class Counter(mock_class_base):
    """No-op stand-in for stats.Statsd.Counter."""
  class Gauge(mock_class_base):
    """No-op stand-in for stats.Statsd.Gauge."""
  class Timer(mock_class_base):
    """No-op stand-in for stats.Statsd.Timer."""
  class Raw(mock_class_base):
    """No-op stand-in for stats.Statsd.Raw."""
diff --git a/lib/graphite_lib/statsd_mock.py b/lib/graphite_lib/statsd_mock.py
new file mode 100644
index 0000000..6567873
--- /dev/null
+++ b/lib/graphite_lib/statsd_mock.py
@@ -0,0 +1,49 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Mock statsd class."""
+
+from __future__ import print_function
+
+from chromite.lib.graphite_lib import stats_es_mock
+
+
+# pylint: disable=old-style-class
class Connection:
  """Mock replacement for statsd.Connection.

  Mirrors the real construction API but never opens a connection.
  """

  def __init__(self, host, port):
    """Ignores |host| and |port|; no network activity occurs."""

  @classmethod
  def set_defaults(cls, host, port):
    """Ignores the default |host| and |port|; nothing is stored."""
+
+
class Average(stats_es_mock.mock_class_base):
  """Mock class for statsd.Average; every method call is a silent no-op."""
+
+
class Counter(stats_es_mock.mock_class_base):
  """Mock class for statsd.Counter; every method call is a silent no-op."""
+
+
class Gauge(stats_es_mock.mock_class_base):
  """Mock class for statsd.Gauge; every method call is a silent no-op."""
+
+
class Timer(stats_es_mock.mock_class_base):
  """Mock class for statsd.Timer; supports use as a context manager."""


  def __enter__(self):
    """Return self so "with Timer(...) as t:" binds a usable object.

    The real stats Timer wrapper yields itself from __enter__; returning
    None here (as this mock previously did) breaks callers that use the
    bound name inside the with-block.
    """
    return self


  def __exit__(self, exc_type, exc_value, traceback):
    """No-op exit; returns None so exceptions are never suppressed."""
+
+
class Raw(stats_es_mock.mock_class_base):
  """Mock class for statsd.Raw; every method call is a silent no-op."""
diff --git a/lib/graphite_lib/statsd_mock_unittest b/lib/graphite_lib/statsd_mock_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/lib/graphite_lib/statsd_mock_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/graphite_lib/statsd_mock_unittest.py b/lib/graphite_lib/statsd_mock_unittest.py
new file mode 100644
index 0000000..6cf8e00
--- /dev/null
+++ b/lib/graphite_lib/statsd_mock_unittest.py
@@ -0,0 +1,62 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittest for statsd mock."""
+
+from __future__ import print_function
+
+import unittest
+
+from chromite.lib.graphite_lib import statsd_mock as statsd
+
class statsd_mock_test(unittest.TestCase):
  """Exercises every statsd_mock class to verify the no-op API holds up."""

  def test_average_mock(self):
    """Average mock accepts send() without error."""
    statsd.Average('average').send('name', 1)

  def test_connection_mock(self):
    """Connection mock supports construction and set_defaults()."""
    statsd.Connection(host='host', port=1)
    statsd.Connection.set_defaults(host='host', port=1)

  def test_counter_mock(self):
    """Counter mock accepts increment()/decrement() without error."""
    mock_counter = statsd.Counter('counter')
    mock_counter.increment(subname='name', delta=1)
    mock_counter.decrement(subname='name', delta=1)

  def test_gauge_mock(self):
    """Gauge mock accepts send() without error."""
    statsd.Gauge('gauge').send('name', 1)

  def test_raw_mock(self):
    """Raw mock accepts send() without error."""
    statsd.Raw('raw').send(subname='name', value=1, timestamp=None)

  def test_timer_mock(self):
    """Timer mock supports start()/stop() and the decorate API."""
    mock_timer = statsd.Timer('timer')
    mock_timer.start()
    mock_timer.stop()

    class decorate_test(object):
      """Helper whose method is wrapped by the timer decorator."""
      test_timer = statsd.Timer('test')

      @test_timer.decorate
      def f(self):
        """Decorated method that simply reports success."""
        return True

    dt = decorate_test()
    self.assertTrue(dt.f(), 'timer decorator failed.')
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/lib/gs.py b/lib/gs.py
new file mode 100644
index 0000000..51c1752
--- /dev/null
+++ b/lib/gs.py
@@ -0,0 +1,1070 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Library to make common google storage operations more reliable."""
+
+from __future__ import print_function
+
+import collections
+import contextlib
+import datetime
+import errno
+import getpass
+import hashlib
+import os
+import re
+import tempfile
+import urlparse
+
+from chromite.cbuildbot import constants
+from chromite.lib import cache
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import path_util
+from chromite.lib import retry_stats
+from chromite.lib import retry_util
+from chromite.lib import timeout_util
+
+
# HTTPS endpoints that mirror gs:// URLs; consumed by CanonicalizeURL and
# GetGsURL below.
PUBLIC_BASE_HTTPS_URL = 'https://commondatastorage.googleapis.com/'
PRIVATE_BASE_HTTPS_URL = 'https://storage.cloud.google.com/'
BASE_GS_URL = 'gs://'

# Format used by "gsutil ls -l" when reporting modified time.
DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%SZ'

# Regexp for parsing each line of output from "gsutil ls -l".
# This regexp is prepared for the generation and meta_generation values,
# too, even though they are not expected until we use "-a".
#
# A detailed listing looks like:
#    99908  2014-03-01T05:50:08Z  gs://bucket/foo/abc#1234  metageneration=1
#                                 gs://bucket/foo/adir/
#    99908  2014-03-04T01:16:55Z  gs://bucket/foo/def#5678  metageneration=1
# TOTAL: 2 objects, 199816 bytes (495.36 KB)
LS_LA_RE = re.compile(
    r'^\s*(?P<content_length>\d*?)\s+'
    r'(?P<creation_time>\S*?)\s+'
    r'(?P<url>[^#$]+).*?'
    r'('
    r'#(?P<generation>\d+)\s+'
    r'meta_?generation=(?P<metageneration>\d+)'
    r')?\s*$')
# Plain "ls" output carries only the URL; the empty named groups keep the
# group interface identical to LS_LA_RE so callers can treat both alike.
LS_RE = re.compile(r'^\s*(?P<content_length>)(?P<creation_time>)(?P<url>.*)'
                   r'(?P<generation>)(?P<metageneration>)\s*$')
+
+
def PathIsGs(path):
  """Determine if a path is a Google Storage URI.

  Args:
    path: The path/URL string to test.

  Returns:
    True if |path| begins with the gs:// scheme prefix, else False.
  """
  return path.startswith(BASE_GS_URL)
+
+
def CanonicalizeURL(url, strict=False):
  """Convert provided URL to gs:// URL, if it follows a known format.

  Args:
    url: URL to canonicalize.
    strict: Raises exception if URL cannot be canonicalized.

  Returns:
    The gs:// form of |url| when it uses a recognized https prefix;
    otherwise |url| unchanged.

  Raises:
    ValueError: if |strict| and |url| is neither gs:// nor a known
      https mirror URL.
  """
  known_prefixes = (PUBLIC_BASE_HTTPS_URL, PRIVATE_BASE_HTTPS_URL)
  for prefix in known_prefixes:
    if url.startswith(prefix):
      return url.replace(prefix, BASE_GS_URL, 1)

  if strict and not PathIsGs(url):
    raise ValueError('Url %r cannot be canonicalized.' % url)

  return url
+
+
def GetGsURL(bucket, for_gsutil=False, public=True, suburl=''):
  """Construct a Google Storage URL

  Args:
    bucket: The Google Storage bucket to use
    for_gsutil: Do you want a URL for passing to `gsutil`?
    public: Do we want the public or private url
    suburl: A url fragment to tack onto the end

  Returns:
    The fully constructed URL
  """
  if for_gsutil:
    base = BASE_GS_URL
  elif public:
    base = PUBLIC_BASE_HTTPS_URL
  else:
    base = PRIVATE_BASE_HTTPS_URL
  return '%s%s/%s' % (base, bucket, suburl)
+
+
class GSContextException(Exception):
  """Base exception for all exceptions thrown by GSContext.

  Catch this type to handle any error raised by the GS wrappers here.
  """
+
+
+# Since the underlying code uses RunCommand, some callers might be trying to
+# catch cros_build_lib.RunCommandError themselves.  Extend that class so that
+# code continues to work.
class GSCommandError(GSContextException, cros_build_lib.RunCommandError):
  """Thrown when an error happened we couldn't decode.

  Also derives from RunCommandError so callers that catch
  cros_build_lib.RunCommandError keep working.
  """
+
+
class GSContextPreconditionFailed(GSContextException):
  """Thrown when google storage returns code=PreconditionFailed.

  E.g. raised when a generation-conditioned Copy loses a race against
  another writer (see GSCounter.AtomicCounterOperation).
  """
+
+
class GSNoSuchKey(GSContextException):
  """Thrown when google storage returns code=NoSuchKey.

  I.e. the requested object does not exist.
  """
+
+
+# Detailed results of GSContext.Stat.
+#
+# The fields directly correspond to gsutil stat results.
+#
+#  Field name        Type         Example
+#   creation_time     datetime     Sat, 23 Aug 2014 06:53:20 GMT
+#   content_length    int          74
+#   content_type      string       application/octet-stream
+#   hash_crc32c       string       BBPMPA==
+#   hash_md5          string       ms+qSYvgI9SjXn8tW/5UpQ==
+#   etag              string       CNCgocbmqMACEAE=
+#   generation        int          1408776800850000
+#   metageneration    int          1
+#
+# Note: We omit a few stat fields as they are not always available, and we
+# have no callers that want this currently.
+#
+#   content_language  string/None  en   # This field may be None.
GSStatResult = collections.namedtuple(
    'GSStatResult',
    ('creation_time', 'content_length', 'content_type', 'hash_crc32c',
     'hash_md5', 'etag', 'generation', 'metageneration'))


# Detailed results of GSContext.List.
# Field names match the named groups of LS_LA_RE/LS_RE, presumably so a
# parsed "gsutil ls -l" line can populate this tuple directly.
GSListResult = collections.namedtuple(
    'GSListResult',
    ('url', 'creation_time', 'content_length', 'generation', 'metageneration'))
+
+
class GSCounter(object):
  """A counter stored as an object in Google Storage."""

  def __init__(self, ctx, path):
    """Initialize the counter.

    Args:
      ctx: A GSContext object used for all GS operations.
      path: The path to the counter in Google Storage.
    """
    self.ctx = ctx
    self.path = path

  def Get(self):
    """Return the current counter value; a missing counter reads as 0."""
    try:
      return int(self.ctx.Cat(self.path))
    except GSNoSuchKey:
      return 0

  def AtomicCounterOperation(self, default_value, operation):
    """Atomically set the counter value using |operation|.

    Args:
      default_value: Value to store when the counter does not exist yet.
      operation: Callable mapping the current counter value to the new
                 desired value.

    Returns:
      The value that was stored.  None if every retry lost the race.
    """
    generation, _ = self.ctx.GetGeneration(self.path)
    for _attempt in xrange(self.ctx.retries + 1):
      try:
        new_value = (default_value if generation == 0
                     else operation(self.Get()))
        self.ctx.Copy('-', self.path, input=str(new_value),
                      version=generation)
        return new_value
      except (GSContextPreconditionFailed, GSNoSuchKey):
        # Another writer raced us: it either updated the counter
        # (PreconditionFailed) or deleted it (NoSuchKey).  Re-read the
        # generation; when it moved, retry against the fresh one,
        # otherwise give up and propagate.
        latest_generation, _ = self.ctx.GetGeneration(self.path)
        if latest_generation == generation:
          raise
        generation = latest_generation

  def Increment(self):
    """Add one to the counter (stores 1 when the counter is absent).

    Returns:
      The new counter value. None if value could not be set.
    """
    return self.AtomicCounterOperation(1, lambda value: value + 1)

  def Decrement(self):
    """Subtract one from the counter (stores -1 when absent).

    Returns:
      The new counter value. None if value could not be set.
    """
    return self.AtomicCounterOperation(-1, lambda value: value - 1)

  def Reset(self):
    """Set the counter back to zero.

    Returns:
      The new counter value. None if value could not be set.
    """
    return self.AtomicCounterOperation(0, lambda _value: 0)

  def StreakIncrement(self):
    """Extend a positive streak: bump if positive, else restart at 1.

    Returns:
      The new counter value. None if value could not be set.
    """
    return self.AtomicCounterOperation(
        1, lambda value: value + 1 if value > 0 else 1)

  def StreakDecrement(self):
    """Extend a negative streak: drop if negative, else restart at -1.

    Returns:
      The new counter value. None if value could not be set.
    """
    return self.AtomicCounterOperation(
        -1, lambda value: value - 1 if value < 0 else -1)
+
+
+class GSContext(object):
+  """A class to wrap common google storage operations."""
+
  # Error messages that indicate an invalid BOTO config.
  AUTHORIZATION_ERRORS = ('no configured', 'detail=Authorization')

  DEFAULT_BOTO_FILE = os.path.expanduser('~/.boto')
  DEFAULT_GSUTIL_TRACKER_DIR = os.path.expanduser('~/.gsutil/tracker-files')
  # This is set for ease of testing.  Populated lazily by
  # GetDefaultGSUtilBin() on first use.
  DEFAULT_GSUTIL_BIN = None
  DEFAULT_GSUTIL_BUILDER_BIN = '/b/build/third_party/gsutil/gsutil'
  # How many times to retry uploads.
  DEFAULT_RETRIES = 3

  # Multiplier for how long to sleep (in seconds) between retries; will delay
  # (1*sleep) the first time, then (2*sleep), continuing via attempt * sleep.
  DEFAULT_SLEEP_TIME = 60

  # Pinned gsutil version; the tarball is fetched from GSUTIL_URL and cached
  # (see GetDefaultGSUtilBin).
  GSUTIL_VERSION = '4.13'
  GSUTIL_TAR = 'gsutil_%s.tar.gz' % GSUTIL_VERSION
  GSUTIL_URL = (PUBLIC_BASE_HTTPS_URL +
                'chromeos-mirror/gentoo/distfiles/%s' % GSUTIL_TAR)
  # NOTE(review): presumably selects gsutil's JSON API transport -- confirm
  # against gsutil documentation.
  GSUTIL_API_SELECTOR = 'JSON'

  # NOTE(review): these look like gsutil's resumable-transfer failure
  # strings, presumably matched against command output elsewhere in this
  # class; keep the text in sync with gsutil.
  RESUMABLE_UPLOAD_ERROR = ('Too many resumable upload attempts failed without '
                            'progress')
  RESUMABLE_DOWNLOAD_ERROR = ('Too many resumable download attempts failed '
                              'without progress')
+
  @classmethod
  def GetDefaultGSUtilBin(cls, cache_dir=None):
    """Return the path of the gsutil binary to use, resolving it once.

    On first call this locates (or downloads and caches) gsutil and
    memoizes the result in cls.DEFAULT_GSUTIL_BIN; subsequent calls
    return the memoized path.

    Args:
      cache_dir: Cache directory to hold the downloaded gsutil tarball.
        Defaults to path_util.GetCacheDir(), which may itself be None.

    Returns:
      Path (string) of the gsutil executable to invoke.
    """
    if cls.DEFAULT_GSUTIL_BIN is None:
      if cache_dir is None:
        cache_dir = path_util.GetCacheDir()
      if cache_dir is not None:
        common_path = os.path.join(cache_dir, constants.COMMON_CACHE)
        tar_cache = cache.TarballCache(common_path)
        key = (cls.GSUTIL_TAR,)
        # The common cache will not be LRU, removing the need to hold a read
        # lock on the cached gsutil.
        ref = tar_cache.Lookup(key)
        ref.SetDefault(cls.GSUTIL_URL)
        cls.DEFAULT_GSUTIL_BIN = os.path.join(ref.path, 'gsutil', 'gsutil')
      else:
        # Check if the default gsutil path for builders exists. If
        # not, try locating gsutil. If none exists, simply use 'gsutil'.
        gsutil_bin = cls.DEFAULT_GSUTIL_BUILDER_BIN
        if not os.path.exists(gsutil_bin):
          gsutil_bin = osutils.Which('gsutil')
        if gsutil_bin is None:
          gsutil_bin = 'gsutil'
        cls.DEFAULT_GSUTIL_BIN = gsutil_bin

    return cls.DEFAULT_GSUTIL_BIN
+
  def __init__(self, boto_file=None, cache_dir=None, acl=None,
               dry_run=False, gsutil_bin=None, init_boto=False, retries=None,
               sleep=None):
    """Constructor.

    Args:
      boto_file: Fully qualified path to user's .boto credential file.
      cache_dir: The absolute path to the cache directory. Use the default
        fallback if not given.
      acl: If given, a canned ACL. It is not valid to pass in an ACL file
        here, because most gsutil commands do not accept ACL files. If you
        would like to use an ACL file, use the SetACL command instead.
      dry_run: Testing mode that prints commands that would be run.
      gsutil_bin: If given, the absolute path to the gsutil binary.  Else
        the default fallback will be used.
      init_boto: If set to True, GSContext will check during __init__ if a
        valid boto config is configured, and if not, will attempt to ask the
        user to interactively set up the boto config.
      retries: Number of times to retry a command before failing.
      sleep: Amount of time to sleep between failures.

    Raises:
      GSContextException: If an explicit |gsutil_bin| is given but does not
        name an existing file.
    """
    if gsutil_bin is None:
      gsutil_bin = self.GetDefaultGSUtilBin(cache_dir)
    else:
      # An explicitly requested binary must exist; fail fast here rather
      # than on the first command.
      self._CheckFile('gsutil not found', gsutil_bin)
    self.gsutil_bin = gsutil_bin

    # The version of gsutil is retrieved on demand and cached here.
    self._gsutil_version = None

    # Increase the number of retries. With 10 retries, Boto will try a total of
    # 11 times and wait up to 2**11 seconds (~30 minutes) in total, not
    # including the time spent actually uploading or downloading.
    self.gsutil_flags = ['-o', 'Boto:num_retries=10']

    # Set HTTP proxy if environment variable http_proxy is set
    # (crbug.com/325032).
    if 'http_proxy' in os.environ:
      url = urlparse.urlparse(os.environ['http_proxy'])
      # A proxy without a hostname, or with a password but no username, is
      # considered malformed and is ignored with a warning.
      if not url.hostname or (not url.username and url.password):
        logging.warning('GS_ERROR: Ignoring env variable http_proxy because it '
                        'is not properly set: %s', os.environ['http_proxy'])
      else:
        self.gsutil_flags += ['-o', 'Boto:proxy=%s' % url.hostname]
        if url.username:
          self.gsutil_flags += ['-o', 'Boto:proxy_user=%s' % url.username]
        if url.password:
          self.gsutil_flags += ['-o', 'Boto:proxy_pass=%s' % url.password]
        if url.port:
          self.gsutil_flags += ['-o', 'Boto:proxy_port=%d' % url.port]

    # Prefer boto_file if specified, else prefer the env then the default.
    if boto_file is None:
      boto_file = os.environ.get('BOTO_CONFIG')
    if boto_file is None and os.path.isfile(self.DEFAULT_BOTO_FILE):
      # Only set boto file to DEFAULT_BOTO_FILE if it exists.
      boto_file = self.DEFAULT_BOTO_FILE

    self.boto_file = boto_file

    self.acl = acl

    self.dry_run = dry_run
    self.retries = self.DEFAULT_RETRIES if retries is None else int(retries)
    self._sleep_time = self.DEFAULT_SLEEP_TIME if sleep is None else int(sleep)

    if init_boto and not dry_run:
      # We can't really expect gsutil to even be present in dry_run mode.
      self._InitBoto()
+
+  @property
+  def gsutil_version(self):
+    """Return the version of the gsutil in this context."""
+    if not self._gsutil_version:
+      if self.dry_run:
+        self._gsutil_version = self.GSUTIL_VERSION
+      else:
+        cmd = ['-q', 'version']
+
+        # gsutil has been known to return version to stderr in the past, so
+        # use combine_stdout_stderr=True.
+        result = self.DoCommand(cmd, combine_stdout_stderr=True,
+                                redirect_stdout=True)
+
+        # Expect output like: 'gsutil version 3.35' or 'gsutil version: 4.5'.
+        match = re.search(r'^\s*gsutil\s+version:?\s+([\d.]+)', result.output,
+                          re.IGNORECASE)
+        if match:
+          self._gsutil_version = match.group(1)
+        else:
+          raise GSContextException('Unexpected output format from "%s":\n%s.' %
+                                   (result.cmdstr, result.output))
+
+    return self._gsutil_version
+
+  def _CheckFile(self, errmsg, afile):
+    """Pre-flight check for valid inputs.
+
+    Args:
+      errmsg: Error message to display.
+      afile: Fully qualified path to test file existance.
+    """
+    if not os.path.isfile(afile):
+      raise GSContextException('%s, %s is not a file' % (errmsg, afile))
+
+  def _TestGSLs(self):
+    """Quick test of gsutil functionality."""
+    result = self.DoCommand(['ls'], retries=0, debug_level=logging.DEBUG,
+                            redirect_stderr=True, error_code_ok=True)
+    return not (result.returncode == 1 and
+                any(e in result.error for e in self.AUTHORIZATION_ERRORS))
+
+  def _ConfigureBotoConfig(self):
+    """Make sure we can access protected bits in GS."""
+    print('Configuring gsutil. **Please use your @google.com account.**')
+    try:
+      if not self.boto_file:
+        self.boto_file = self.DEFAULT_BOTO_FILE
+      self.DoCommand(['config'], retries=0, debug_level=logging.CRITICAL,
+                     print_cmd=False)
+    finally:
+      if (os.path.exists(self.boto_file) and not
+          os.path.getsize(self.boto_file)):
+        os.remove(self.boto_file)
+        raise GSContextException('GS config could not be set up.')
+
+  def _InitBoto(self):
+    if not self._TestGSLs():
+      self._ConfigureBotoConfig()
+
+  def Cat(self, path, **kwargs):
+    """Returns the contents of a GS object."""
+    kwargs.setdefault('redirect_stdout', True)
+    if not PathIsGs(path):
+      # gsutil doesn't support cat-ting a local path, so read it ourselves.
+      try:
+        return osutils.ReadFile(path)
+      except Exception as e:
+        if getattr(e, 'errno', None) == errno.ENOENT:
+          raise GSNoSuchKey('%s: file does not exist' % path)
+        else:
+          raise GSContextException(str(e))
+    elif self.dry_run:
+      return ''
+    else:
+      return self.DoCommand(['cat', path], **kwargs).output
+
+  def CopyInto(self, local_path, remote_dir, filename=None, **kwargs):
+    """Upload a local file into a directory in google storage.
+
+    Args:
+      local_path: Local file path to copy.
+      remote_dir: Full gs:// url of the directory to transfer the file into.
+      filename: If given, the filename to place the content at; if not given,
+        it's discerned from basename(local_path).
+      **kwargs: See Copy() for documentation.
+
+    Returns:
+      The generation of the remote file.
+    """
+    filename = filename if filename is not None else local_path
+    # Basename it even if an explicit filename was given; we don't want
+    # people using filename as a multi-directory path fragment.
+    return self.Copy(local_path,
+                     '%s/%s' % (remote_dir, os.path.basename(filename)),
+                     **kwargs)
+
  @staticmethod
  def GetTrackerFilenames(dest_path):
    """Returns a list of gsutil tracker filenames.

    Tracker files are used by gsutil to resume downloads/uploads. This
    function does not handle parallel uploads.

    Args:
      dest_path: Either a GS path or an absolute local path.

    Returns:
      The list of potential tracker filenames.
    """
    dest = urlparse.urlsplit(dest_path)
    filenames = []
    if dest.scheme == 'gs':
      # A gs:// destination means the transfer was an upload.
      prefix = 'upload'
      bucket_name = dest.netloc
      object_name = dest.path.lstrip('/')
      # Path separators are flattened to '_' in the raw tracker name.
      filenames.append(
          re.sub(r'[/\\]', '_', 'resumable_upload__%s__%s__%s.url' %
                 (bucket_name, object_name, GSContext.GSUTIL_API_SELECTOR)))
    else:
      # A non-gs destination means the transfer was a download.
      prefix = 'download'
      filenames.append(
          re.sub(r'[/\\]', '_', 'resumable_download__%s__%s.etag' %
                 (dest.path, GSContext.GSUTIL_API_SELECTOR)))

    # Tracker files are named <prefix>_TRACKER_<sha1 of raw name>.<last 16
    # chars of raw name>.  NOTE(review): this mirrors gsutil's own naming
    # scheme and must be kept in sync with the gsutil version in use.
    hashed_filenames = []
    for filename in filenames:
      if not isinstance(filename, unicode):
        # Python 2: normalize the byte string through UTF-8 before hashing
        # so the digest matches what gsutil computes.
        filename = unicode(filename, 'utf8').encode('utf-8')
      m = hashlib.sha1(filename)
      hashed_filenames.append('%s_TRACKER_%s.%s' %
                              (prefix, m.hexdigest(), filename[-16:]))

    return hashed_filenames
+
  def _RetryFilter(self, e):
    """Function to filter retry-able RunCommandError exceptions.

    Args:
      e: Exception object to filter. Exception may be re-raised as
         as different type, if _RetryFilter determines a more appropriate
         exception type based on the contents of e.

    Returns:
      True for exceptions thrown by a RunCommand gsutil that should be retried.

    Raises:
      GSContextPreconditionFailed: If the error output indicates a failed
        precondition (x-goog-if-generation-match mismatch).
      GSNoSuchKey: If the error output indicates a missing object.
    """
    if not retry_util.ShouldRetryCommandCommon(e):
      return False

    # e is guaranteed by above filter to be a RunCommandError

    if e.result.returncode < 0:
      # A negative returncode means the child was killed by a signal;
      # retrying would likely just be killed again.
      logging.info('Child process received signal %d; not retrying.',
                   -e.result.returncode)
      return False

    error = e.result.error
    if error:
      # gsutil usually prints PreconditionException when a precondition fails.
      # It may also print "ResumableUploadAbortException: 412 Precondition
      # Failed", so the logic needs to be a little more general.
      if 'PreconditionException' in error or '412 Precondition Failed' in error:
        raise GSContextPreconditionFailed(e)

      # If the file does not exist, one of the following errors occurs. The
      # "stat" command leaves off the "CommandException: " prefix, but it also
      # outputs to stdout instead of stderr and so will not be caught here
      # regardless.
      if ('CommandException: No URLs matched' in error or
          'NotFoundException:' in error or
          'One or more URLs matched no objects' in error):
        raise GSNoSuchKey(e)

      logging.warning('GS_ERROR: %s', error)

      # TODO: Below is a list of known flaky errors that we should
      # retry. The list needs to be extended.

      # Temporary fix: remove the gsutil tracker files so that our retry
      # can hit a different backend. This should be removed after the
      # bug is fixed by the Google Storage team (see crbug.com/308300).
      RESUMABLE_ERROR_MESSAGE = (
          self.RESUMABLE_DOWNLOAD_ERROR,
          self.RESUMABLE_UPLOAD_ERROR,
          'ResumableUploadException',
          'ResumableUploadAbortException',
          'ResumableDownloadException',
          'ssl.SSLError: The read operation timed out',
          'Unable to find the server',
          'doesn\'t match cloud-supplied digest',
      )
      if any(x in error for x in RESUMABLE_ERROR_MESSAGE):
        # Only remove the tracker files if we try to upload/download a file.
        # 'cp' is looked for anywhere except the last two args, which are
        # the src and dest paths of the copy.
        if 'cp' in e.result.cmd[:-2]:
          # Assume a command: gsutil [options] cp [options] src_path dest_path
          # dest_path needs to be a fully qualified local path, which is already
          # required for GSContext.Copy().
          tracker_filenames = self.GetTrackerFilenames(e.result.cmd[-1])
          logging.info('Potential list of tracker files: %s',
                       tracker_filenames)
          for tracker_filename in tracker_filenames:
            tracker_file_path = os.path.join(self.DEFAULT_GSUTIL_TRACKER_DIR,
                                             tracker_filename)
            if os.path.exists(tracker_file_path):
              logging.info('Deleting gsutil tracker file %s before retrying.',
                           tracker_file_path)
              logging.info('The content of the tracker file: %s',
                           osutils.ReadFile(tracker_file_path))
              osutils.SafeUnlink(tracker_file_path)
        return True

      # We have seen flaky errors with 5xx return codes
      # See b/17376491 for the "JSON decoding" error.
      # We have seen transient Oauth 2.0 credential errors (crbug.com/414345).
      TRANSIENT_ERROR_MESSAGE = (
          'ServiceException: 5',
          'Failure: No JSON object could be decoded',
          'Oauth 2.0 User Account',
          'InvalidAccessKeyId',
          'socket.error: [Errno 104] Connection reset by peer',
          'Received bad request from server',
      )
      if any(x in error for x in TRANSIENT_ERROR_MESSAGE):
        return True

    return False
+
  # TODO(mtennant): Make a private method.
  def DoCommand(self, gsutil_cmd, headers=(), retries=None, version=None,
                parallel=False, **kwargs):
    """Run a gsutil command, suppressing output, and setting retry/sleep.

    Args:
      gsutil_cmd: The (mostly) constructed gsutil subcommand to run.
      headers: A list of raw headers to pass down.
      parallel: Whether gsutil should enable parallel copy/update of multiple
        files. NOTE: This option causes gsutil to use significantly more
        memory, even if gsutil is only uploading one file.
      retries: How many times to retry this command (defaults to setting given
        at object creation).
      version: If given, the generation; essentially the timestamp of the last
        update.  Note this is not the same as sequence-number; it's
        monotonically increasing bucket wide rather than reset per file.
        The usage of this is if we intend to replace/update only if the version
        is what we expect.  This is useful for distributed reasons- for example,
        to ensure you don't overwrite someone else's creation, a version of
        0 states "only update if no version exists".
      **kwargs: Additional options passed through to RunCommand (stderr is
        redirected by default so _RetryFilter can inspect it).

    Returns:
      A RunCommandResult object, or None in dry_run mode.

    Raises:
      GSCommandError: If the command failed despite retries.
    """
    kwargs = kwargs.copy()
    kwargs.setdefault('redirect_stderr', True)

    cmd = [self.gsutil_bin]
    cmd += self.gsutil_flags
    for header in headers:
      cmd += ['-h', header]
    if version is not None:
      # Generation preconditions are passed as a raw goog header.
      cmd += ['-h', 'x-goog-if-generation-match:%d' % int(version)]

    # Enable parallel copy/update of multiple files if stdin is not to
    # be piped to the command. This does not split a single file into
    # smaller components for upload.
    if parallel and kwargs.get('input') is None:
      cmd += ['-m']

    cmd.extend(gsutil_cmd)

    if retries is None:
      retries = self.retries

    extra_env = kwargs.pop('extra_env', {})
    if self.boto_file:
      # Point gsutil at our credentials unless the caller overrode it.
      extra_env.setdefault('BOTO_CONFIG', self.boto_file)

    if self.dry_run:
      logging.debug("%s: would've run: %s", self.__class__.__name__,
                    cros_build_lib.CmdToStr(cmd))
    else:
      try:
        return retry_stats.RetryWithStats(retry_stats.GSUTIL,
                                          self._RetryFilter,
                                          retries, cros_build_lib.RunCommand,
                                          cmd, sleep=self._sleep_time,
                                          extra_env=extra_env, **kwargs)
      except cros_build_lib.RunCommandError as e:
        raise GSCommandError(e.msg, e.result, e.exception)
+
  def Copy(self, src_path, dest_path, acl=None, recursive=False,
           skip_symlinks=True, auto_compress=False, **kwargs):
    """Copy to/from GS bucket.

    Canned ACL permissions can be specified on the gsutil cp command line.

    More info:
    https://developers.google.com/storage/docs/accesscontrol#applyacls

    Args:
      src_path: Fully qualified local path or full gs:// path of the src file.
      dest_path: Fully qualified local path or full gs:// path of the dest
                 file.
      acl: One of the google storage canned_acls to apply.
      recursive: Whether to copy recursively.
      skip_symlinks: Skip symbolic links when copying recursively.
      auto_compress: Automatically compress with gzip when uploading.

    Returns:
      The generation of the remote file, or None if it could not be parsed
      from the gsutil output (or in dry_run mode).

    Raises:
      RunCommandError if the command failed despite retries.
      ValueError: If auto_compress is set but src_path has no extension.
    """
    # -v causes gs://bucket/path#generation to be listed in output.
    cmd = ['cp', '-v']

    # Certain versions of gsutil (at least 4.3) assume the source of a copy is
    # a directory if the -r option is used. If it's really a file, gsutil will
    # look like it's uploading it but not actually do anything. We'll work
    # around that problem by surpressing the -r flag if we detect the source
    # is a local file.
    if recursive and not os.path.isfile(src_path):
      cmd.append('-r')
      if skip_symlinks:
        cmd.append('-e')

    if auto_compress:
      # Pass the suffix without the '.' as that is what gsutil wants.
      suffix = os.path.splitext(src_path)[1]
      if not suffix:
        raise ValueError('src file "%s" needs an extension to compress' %
                         (src_path,))
      cmd += ['-z', suffix[1:]]

    acl = self.acl if acl is None else acl
    if acl is not None:
      cmd += ['-a', acl]

    with cros_build_lib.ContextManagerStack() as stack:
      # Write the input into a tempfile if possible. This is needed so that
      # gsutil can retry failed requests.  The tempfile lives until the
      # stack unwinds, i.e. after DoCommand finishes.
      if src_path == '-' and kwargs.get('input') is not None:
        f = stack.Add(tempfile.NamedTemporaryFile)
        f.write(kwargs['input'])
        f.flush()
        del kwargs['input']
        src_path = f.name

      # '--' ends option parsing so paths starting with '-' are safe.
      cmd += ['--', src_path, dest_path]

      if not (PathIsGs(src_path) or PathIsGs(dest_path)):
        # Don't retry on local copies.
        kwargs.setdefault('retries', 0)

      kwargs['capture_output'] = True
      try:
        result = self.DoCommand(cmd, **kwargs)
        if self.dry_run:
          return None

        # Now we parse the output for the current generation number.  Example:
        #   Created: gs://chromeos-throw-away-bucket/foo#1360630664537000.1
        m = re.search(r'Created: .*#(\d+)([.](\d+))?$', result.error)
        if m:
          return int(m.group(1))
        else:
          return None
      except GSNoSuchKey:
        # If the source was a local file, the error is a quirk of gsutil 4.5
        # and should be ignored. If the source was remote, there might
        # legitimately be no such file. See crbug.com/393419.
        if os.path.isfile(src_path):
          return None
        raise
+
  def CreateWithContents(self, gs_uri, contents, **kwargs):
    """Creates the specified file with specified contents.

    Args:
      gs_uri: The URI of a file on Google Storage.
      contents: String with contents to write to the file.
      kwargs: See additional options that Copy takes.

    Raises:
      See Copy.
    """
    # '-' makes Copy read from stdin; |contents| is piped in as input.
    self.Copy('-', gs_uri, input=contents, **kwargs)
+
+  # TODO: Merge LS() and List()?
+  def LS(self, path, **kwargs):
+    """Does a directory listing of the given gs path.
+
+    Args:
+      path: The path to get a listing of.
+      kwargs: See options that DoCommand takes.
+
+    Returns:
+      A list of paths that matched |path|.  Might be more than one if a
+      directory or path include wildcards/etc...
+    """
+    if self.dry_run:
+      return []
+
+    if not PathIsGs(path):
+      # gsutil doesn't support listing a local path, so just run 'ls'.
+      kwargs.pop('retries', None)
+      kwargs.pop('headers', None)
+      result = cros_build_lib.RunCommand(['ls', path], **kwargs)
+      return result.output.splitlines()
+    else:
+      return [x.url for x in self.List(path, **kwargs)]
+
  def List(self, path, details=False, **kwargs):
    """Does a directory listing of the given gs path.

    Args:
      path: The path to get a listing of.
      details: Whether to include size/timestamp info.
      kwargs: See options that DoCommand takes.

    Returns:
      A list of GSListResult objects that matched |path|.  Might be more
      than one if a directory or path include wildcards/etc...

    Raises:
      GSContextException: If a listing line cannot be parsed.
    """
    ret = []
    if self.dry_run:
      return ret

    cmd = ['ls']
    if details:
      # 'ls -l' adds size, timestamp, generation, and metageneration.
      cmd += ['-l']
    cmd += ['--', path]

    # Capture stdout so we can parse the listing lines below.
    kwargs['redirect_stdout'] = True
    lines = self.DoCommand(cmd, **kwargs).output.splitlines()

    if details:
      # The last line is expected to be a summary line.  Ignore it.
      lines = lines[:-1]
      ls_re = LS_LA_RE
    else:
      ls_re = LS_RE

    # Handle optional fields.
    intify = lambda x: int(x) if x else None

    # Parse out each result and build up the results list.
    for line in lines:
      match = ls_re.search(line)
      if not match:
        raise GSContextException('unable to parse line: %s' % line)
      if match.group('creation_time'):
        timestamp = datetime.datetime.strptime(match.group('creation_time'),
                                               DATETIME_FORMAT)
      else:
        timestamp = None

      ret.append(GSListResult(
          content_length=intify(match.group('content_length')),
          creation_time=timestamp,
          url=match.group('url'),
          generation=intify(match.group('generation')),
          metageneration=intify(match.group('metageneration'))))

    return ret
+
+  def GetSize(self, path, **kwargs):
+    """Returns size of a single object (local or GS)."""
+    if not PathIsGs(path):
+      return os.path.getsize(path)
+    else:
+      return self.Stat(path, **kwargs).content_length
+
+  def Move(self, src_path, dest_path, **kwargs):
+    """Move/rename to/from GS bucket.
+
+    Args:
+      src_path: Fully qualified local path or full gs:// path of the src file.
+      dest_path: Fully qualified local path or full gs:// path of the dest file.
+    """
+    cmd = ['mv', '--', src_path, dest_path]
+    return self.DoCommand(cmd, **kwargs)
+
+  def SetACL(self, upload_url, acl=None):
+    """Set access on a file already in google storage.
+
+    Args:
+      upload_url: gs:// url that will have acl applied to it.
+      acl: An ACL permissions file or canned ACL.
+    """
+    if acl is None:
+      if not self.acl:
+        raise GSContextException(
+            'SetAcl invoked w/out a specified acl, nor a default acl.')
+      acl = self.acl
+
+    self.DoCommand(['acl', 'set', acl, upload_url])
+
+  def ChangeACL(self, upload_url, acl_args_file=None, acl_args=None):
+    """Change access on a file already in google storage with "acl ch".
+
+    Args:
+      upload_url: gs:// url that will have acl applied to it.
+      acl_args_file: A file with arguments to the gsutil acl ch command. The
+                     arguments can be spread across multiple lines. Comments
+                     start with a # character and extend to the end of the
+                     line. Exactly one of this argument or acl_args must be
+                     set.
+      acl_args: A list of arguments for the gsutil acl ch command. Exactly
+                one of this argument or acl_args must be set.
+    """
+    if acl_args_file and acl_args:
+      raise GSContextException(
+          'ChangeACL invoked with both acl_args and acl_args set.')
+    if not acl_args_file and not acl_args:
+      raise GSContextException(
+          'ChangeACL invoked with neither acl_args nor acl_args set.')
+
+    if acl_args_file:
+      lines = osutils.ReadFile(acl_args_file).splitlines()
+      # Strip out comments.
+      lines = [x.split('#', 1)[0].strip() for x in lines]
+      acl_args = ' '.join([x for x in lines if x]).split()
+
+    self.DoCommand(['acl', 'ch'] + acl_args + [upload_url])
+
+  def Exists(self, path, **kwargs):
+    """Checks whether the given object exists.
+
+    Args:
+      path: Local path or gs:// url to check.
+      kwargs: Flags to pass to DoCommand.
+
+    Returns:
+      True if the path exists; otherwise returns False.
+    """
+    if not PathIsGs(path):
+      return os.path.exists(path)
+
+    try:
+      self.Stat(path, **kwargs)
+    except GSNoSuchKey:
+      return False
+
+    return True
+
+  def Remove(self, path, recursive=False, ignore_missing=False, **kwargs):
+    """Remove the specified file.
+
+    Args:
+      path: Full gs:// url of the file to delete.
+      recursive: Remove recursively starting at path.
+      ignore_missing: Whether to suppress errors about missing files.
+      kwargs: Flags to pass to DoCommand.
+    """
+    cmd = ['rm']
+    if 'recurse' in kwargs:
+      raise TypeError('"recurse" has been renamed to "recursive"')
+    if recursive:
+      cmd.append('-R')
+    cmd.append(path)
+    try:
+      self.DoCommand(cmd, **kwargs)
+    except GSNoSuchKey:
+      if not ignore_missing:
+        raise
+
+  def GetGeneration(self, path):
+    """Get the generation and metageneration of the given |path|.
+
+    Returns:
+      A tuple of the generation and metageneration.
+    """
+    try:
+      res = self.Stat(path)
+    except GSNoSuchKey:
+      return 0, 0
+
+    return res.generation, res.metageneration
+
  def Stat(self, path, **kwargs):
    """Stat a GS file, and get detailed information.

    Args:
      path: A GS path for files to Stat. Wildcards are NOT supported.
      kwargs: Flags to pass to DoCommand.

    Returns:
      A GSStatResult object with all fields populated.

    Raises:
      GSNoSuchKey: If the object does not exist.
      Assorted GSContextException exceptions.
    """
    try:
      res = self.DoCommand(['stat', path], redirect_stdout=True, **kwargs)
    except GSCommandError as e:
      # Because the 'gsutil stat' command logs errors itself (instead of
      # raising errors internally like other commands), we have to look
      # for errors ourselves.  See the bug report here:
      # https://github.com/GoogleCloudPlatform/gsutil/issues/288
      # Example line:
      # INFO 0713 05:58:12.451810 stat.py] No URLs matched gs://bucket/file
      if re.match(r'INFO [ 0-9:.]* stat.py\] No URLs matched', e.result.error):
        raise GSNoSuchKey(path)

      # No idea what this is, so just choke.
      raise

    # In dryrun mode, DoCommand doesn't return an object, so we need to fake
    # out the behavior ourselves.
    if self.dry_run:
      return GSStatResult(
          creation_time=datetime.datetime.now(),
          content_length=0,
          content_type='application/octet-stream',
          hash_crc32c='AAAAAA==',
          hash_md5='',
          etag='',
          generation=0,
          metageneration=0)

    # We expect Stat output like the following. However, the Content-Language
    # line appears to be optional based on how the file in question was
    # created.
    #
    # gs://bucket/path/file:
    #     Creation time:      Sat, 23 Aug 2014 06:53:20 GMT
    #     Content-Language:   en
    #     Content-Length:     74
    #     Content-Type:       application/octet-stream
    #     Hash (crc32c):      BBPMPA==
    #     Hash (md5):         ms+qSYvgI9SjXn8tW/5UpQ==
    #     ETag:               CNCgocbmqMACEAE=
    #     Generation:         1408776800850000
    #     Metageneration:     1

    # Sanity check: the first line of real stat output is the gs:// url.
    if not res.output.startswith('gs://'):
      raise GSContextException('Unexpected stat output: %s' % res.output)

    def _GetField(name):
      # Pull a single "Name: value" field out of the stat output above.
      m = re.search(r'%s:\s*(.+)' % re.escape(name), res.output)
      if m:
        return m.group(1)
      else:
        raise GSContextException('Field "%s" missing in "%s"' %
                                 (name, res.output))

    return GSStatResult(
        creation_time=datetime.datetime.strptime(
            _GetField('Creation time'), '%a, %d %b %Y %H:%M:%S %Z'),
        content_length=int(_GetField('Content-Length')),
        content_type=_GetField('Content-Type'),
        hash_crc32c=_GetField('Hash (crc32c)'),
        hash_md5=_GetField('Hash (md5)'),
        etag=_GetField('ETag'),
        generation=int(_GetField('Generation')),
        metageneration=int(_GetField('Metageneration')))
+
  def Counter(self, path):
    """Return a GSCounter object pointing at a |path| in Google Storage.

    Args:
      path: The path to the counter in Google Storage.

    Returns:
      A GSCounter bound to this context and |path|.
    """
    return GSCounter(self, path)
+
+  def WaitForGsPaths(self, paths, timeout, period=10):
+    """Wait until a list of files exist in GS.
+
+    Args:
+      paths: The list of files to wait for.
+      timeout: Max seconds to wait for file to appear.
+      period: How often to check for files while waiting.
+
+    Raises:
+      timeout_util.TimeoutError if the timeout is reached.
+    """
+    # Copy the list of URIs to wait for, so we don't modify the callers context.
+    pending_paths = paths[:]
+
+    def _CheckForExistence():
+      pending_paths[:] = [x for x in pending_paths if not self.Exists(x)]
+
+    def _Retry(_return_value):
+      # Retry, if there are any pending paths left.
+      return pending_paths
+
+    timeout_util.WaitForSuccess(_Retry, _CheckForExistence,
+                                timeout=timeout, period=period)
+
+
@contextlib.contextmanager
def TemporaryURL(prefix):
  """Context manager to generate a random URL.

  At the end, the URL will be deleted.
  """
  parts = (constants.TRASH_BUCKET, 'chromite-temp', prefix, getpass.getuser(),
           cros_build_lib.GetRandomString())
  url = '/'.join(parts)
  ctx = GSContext()
  # Clear out any leftovers from a previous run before handing out the URL.
  ctx.Remove(url, ignore_missing=True, recursive=True)
  try:
    yield url
  finally:
    ctx.Remove(url, ignore_missing=True, recursive=True)
diff --git a/lib/gs_unittest b/lib/gs_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/gs_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/gs_unittest.py b/lib/gs_unittest.py
new file mode 100644
index 0000000..af84d10
--- /dev/null
+++ b/lib/gs_unittest.py
@@ -0,0 +1,1550 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the gs.py module."""
+
+from __future__ import print_function
+
+import contextlib
+import functools
+import datetime
+import mock
+import os
+import string
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import partial_mock
+from chromite.lib import retry_stats
+
+
+def PatchGS(*args, **kwargs):
+  """Convenience method for patching GSContext."""
+  return mock.patch.object(gs.GSContext, *args, **kwargs)
+
+
+class GSContextMock(partial_mock.PartialCmdMock):
+  """Used to mock out the GSContext class."""
+  TARGET = 'chromite.lib.gs.GSContext'
+  # Class attributes of GSContext that this partial mock replaces.
+  ATTRS = ('__init__', 'DoCommand', 'DEFAULT_SLEEP_TIME',
+           'DEFAULT_RETRIES', 'DEFAULT_BOTO_FILE', 'DEFAULT_GSUTIL_BIN',
+           'DEFAULT_GSUTIL_BUILDER_BIN', 'GSUTIL_URL')
+  DEFAULT_ATTR = 'DoCommand'
+
+  # Canned gsutil stderr for a 412 Precondition Failed response.
+  GSResponsePreconditionFailed = """\
+Copying file:///dev/null [Content-Type=application/octet-stream]...
+Uploading   gs://chromeos-throw-away-bucket/vapier/null:         0 B    \r\
+Uploading   gs://chromeos-throw-away-bucket/vapier/null:         0 B    \r\
+PreconditionException: 412 Precondition Failed"""
+
+  # Keep tests fast: no sleeping between the (few) retries.
+  DEFAULT_SLEEP_TIME = 0
+  DEFAULT_RETRIES = 2
+  TMP_ROOT = '/tmp/cros_unittest'
+  DEFAULT_BOTO_FILE = '%s/boto_file' % TMP_ROOT
+  DEFAULT_GSUTIL_BIN = '%s/gsutil_bin' % TMP_ROOT
+  DEFAULT_GSUTIL_BUILDER_BIN = DEFAULT_GSUTIL_BIN
+  GSUTIL_URL = None
+
+  def __init__(self):
+    partial_mock.PartialCmdMock.__init__(self, create_tempdir=True)
+    # Every raw gsutil argv that passed through DoCommand, for assertions.
+    self.raw_gs_cmds = []
+
+  def _SetGSUtilUrl(self):
+    # Point GSUTIL_URL at a local tarball so no network fetch is needed.
+    tempfile = os.path.join(self.tempdir, 'tempfile')
+    osutils.WriteFile(tempfile, 'some content')
+    gsutil_path = os.path.join(self.tempdir, gs.GSContext.GSUTIL_TAR)
+    cros_build_lib.CreateTarball(gsutil_path, self.tempdir, inputs=[tempfile])
+    self.GSUTIL_URL = 'file://%s' % gsutil_path
+
+  def PreStart(self):
+    # Make sure the test environment doesn't leak a real boto config in.
+    os.environ.pop('BOTO_CONFIG', None)
+    # Set it here for now, instead of mocking out Cached() directly because
+    # python-mock has a bug with mocking out class methods with autospec=True.
+    # TODO(rcui): Change this when this is fixed in PartialMock.
+    self._SetGSUtilUrl()
+
+  def _target__init__(self, *args, **kwargs):
+    # Skip the on-disk existence checks when constructing the real context.
+    with PatchGS('_CheckFile', return_value=True):
+      self.backup['__init__'](*args, **kwargs)
+
+  def DoCommand(self, inst, gsutil_cmd, **kwargs):
+    # Look up the canned result registered for this gsutil command.
+    result = self._results['DoCommand'].LookupResult(
+        (gsutil_cmd,), hook_args=(inst, gsutil_cmd), hook_kwargs=kwargs)
+
+    # Feed the canned result through a RunCommand mock so the real
+    # DoCommand logic (retry/error parsing) still executes.
+    rc_mock = cros_build_lib_unittest.RunCommandMock()
+    rc_mock.AddCmdResult(
+        partial_mock.ListRegex('gsutil'), result.returncode, result.output,
+        result.error)
+
+    with rc_mock:
+      try:
+        return self.backup['DoCommand'](inst, gsutil_cmd, **kwargs)
+      finally:
+        # Record the argv of every invocation, even when DoCommand raises.
+        self.raw_gs_cmds.extend(args[0] for args, _ in rc_mock.call_args_list)
+
+
+class AbstractGSContextTest(cros_test_lib.MockTempDirTestCase):
+  """Base class for GSContext tests."""
+
+  def setUp(self):
+    # Route all gsutil invocations through the partial mock so subclasses
+    # can inject canned results and assert on the composed command lines.
+    self.gs_mock = self.StartPatcher(GSContextMock())
+    self.gs_mock.SetDefaultCmdResult()
+    self.ctx = gs.GSContext()
+
+
+class CanonicalizeURLTest(cros_test_lib.TestCase):
+  """Tests for the CanonicalizeURL function."""
+
+  def _checkit(self, in_url, exp_url):
+    self.assertEqual(gs.CanonicalizeURL(in_url), exp_url)
+
+  def testPublicUrl(self):
+    """Test public https URLs."""
+    self._checkit(
+        'https://commondatastorage.googleapis.com/releases/some/file/t.gz',
+        'gs://releases/some/file/t.gz')
+
+  def testPrivateUrl(self):
+    """Test private https URLs."""
+    self._checkit(
+        'https://storage.cloud.google.com/releases/some/file/t.gz',
+        'gs://releases/some/file/t.gz')
+
+  def testDuplicateBase(self):
+    """Test multiple prefixes in a single URL."""
+    self._checkit(
+        ('https://storage.cloud.google.com/releases/some/'
+         'https://storage.cloud.google.com/some/file/t.gz'),
+        ('gs://releases/some/'
+         'https://storage.cloud.google.com/some/file/t.gz'))
+
+
+class VersionTest(AbstractGSContextTest):
+  """Tests GSContext.gsutil_version functionality."""
+
+  LOCAL_PATH = '/tmp/file'
+  GIVEN_REMOTE = EXPECTED_REMOTE = 'gs://test/path/file'
+
+  def testGetVersionStdout(self):
+    """Simple gsutil_version fetch test from stdout."""
+    self.gs_mock.AddCmdResult(partial_mock.In('version'), returncode=0,
+                              output='gsutil version 3.35\n')
+    self.assertEquals('3.35', self.ctx.gsutil_version)
+
+  def testGetVersionStderr(self):
+    """Simple gsutil_version fetch test from stderr."""
+    self.gs_mock.AddCmdResult(partial_mock.In('version'), returncode=0,
+                              error='gsutil version 3.36\n')
+    self.assertEquals('3.36', self.ctx.gsutil_version)
+
+  def testGetVersionCached(self):
+    """Simple gsutil_version fetch test from cache."""
+    # pylint: disable=protected-access
+    self.ctx._gsutil_version = '3.37'
+    self.assertEquals('3.37', self.ctx.gsutil_version)
+
+  def testGetVersionNewFormat(self):
+    """Simple gsutil_version fetch test for new gsutil output format."""
+    self.gs_mock.AddCmdResult(partial_mock.In('version'), returncode=0,
+                              output='gsutil version: 4.5\n')
+    self.assertEquals('4.5', self.ctx.gsutil_version)
+
+  def testGetVersionBadOutput(self):
+    """Verify unparseable version output raises GSContextException."""
+    self.gs_mock.AddCmdResult(partial_mock.In('version'), returncode=0,
+                              output='gobblety gook\n')
+    self.assertRaises(gs.GSContextException, getattr, self.ctx,
+                      'gsutil_version')
+
+
+class GetSizeTest(AbstractGSContextTest):
+  """Tests GetSize functionality."""
+
+  GETSIZE_PATH = 'gs://abc/1'
+
+  def _GetSize(self, ctx, path, **kwargs):
+    """Invoke GetSize() on |ctx|; split out so subclasses can override."""
+    return ctx.GetSize(path, **kwargs)
+
+  def GetSize(self, ctx=None, **kwargs):
+    if ctx is None:
+      ctx = self.ctx
+    return self._GetSize(ctx, self.GETSIZE_PATH, **kwargs)
+
+  def testBasic(self):
+    """Simple test."""
+    self.gs_mock.AddCmdResult(['stat', self.GETSIZE_PATH],
+                              output=StatTest.STAT_OUTPUT)
+    # 74 is presumably the content length embedded in the shared
+    # StatTest.STAT_OUTPUT fixture (defined elsewhere in this file).
+    self.assertEqual(self.GetSize(), 74)
+
+
+class UnmockedGetSizeTest(cros_test_lib.TempDirTestCase):
+  """Tests GetSize functionality w/out mocks."""
+
+  @cros_test_lib.NetworkTest()
+  def testBasic(self):
+    """Simple test."""
+    ctx = gs.GSContext()
+
+    # Upload a 5-byte file, then verify GS reports that size back.
+    local_file = os.path.join(self.tempdir, 'foo')
+    osutils.WriteFile(local_file, '!' * 5)
+
+    with gs.TemporaryURL('chromite.getsize') as tempuri:
+      ctx.Copy(local_file, tempuri)
+      self.assertEqual(ctx.GetSize(tempuri), 5)
+
+  def testLocal(self):
+    """Test local files."""
+    ctx = gs.GSContext()
+    f = os.path.join(self.tempdir, 'f')
+
+    # Empty file first, then a 10-byte file.
+    osutils.Touch(f)
+    self.assertEqual(ctx.GetSize(f), 0)
+
+    osutils.WriteFile(f, 'f' * 10)
+    self.assertEqual(ctx.GetSize(f), 10)
+
+
+class LSTest(AbstractGSContextTest):
+  """Tests LS/List functionality."""
+
+  LS_PATH = 'gs://test/path/to/list'
+  # Plain `gsutil ls` output: one URL per line; trailing '/' marks a subdir.
+  LS_OUTPUT_LINES = [
+      '%s/foo' % LS_PATH,
+      '%s/bar bell' % LS_PATH,
+      '%s/nada/' % LS_PATH,
+  ]
+  LS_OUTPUT = '\n'.join(LS_OUTPUT_LINES)
+
+  SIZE1 = 12345
+  SIZE2 = 654321
+  DT1 = datetime.datetime(2000, 1, 2, 10, 10, 10)
+  DT2 = datetime.datetime(2010, 3, 14)
+  DT_STR1 = DT1.strftime(gs.DATETIME_FORMAT)
+  DT_STR2 = DT2.strftime(gs.DATETIME_FORMAT)
+  # `gsutil ls -l` output: size, timestamp, URL columns plus a TOTAL footer.
+  DETAILED_LS_OUTPUT_LINES = [
+      '%10d  %s  %s/foo' % (SIZE1, DT_STR1, LS_PATH),
+      '%10d  %s  %s/bar bell' % (SIZE2, DT_STR2, LS_PATH),
+      '          %s/nada/' % LS_PATH,
+      'TOTAL: 3 objects, XXXXX bytes (X.XX GB)',
+  ]
+  DETAILED_LS_OUTPUT = '\n'.join(DETAILED_LS_OUTPUT_LINES)
+
+  # Expected parse of DETAILED_LS_OUTPUT: subdirs have no size/time, and
+  # generation data is absent from ls -l output.
+  LIST_RESULT = [
+      gs.GSListResult(
+          content_length=SIZE1,
+          creation_time=DT1,
+          url='%s/foo' % LS_PATH,
+          generation=None,
+          metageneration=None),
+      gs.GSListResult(
+          content_length=SIZE2,
+          creation_time=DT2,
+          url='%s/bar bell' % LS_PATH,
+          generation=None,
+          metageneration=None),
+      gs.GSListResult(
+          content_length=None,
+          creation_time=None,
+          url='%s/nada/' % LS_PATH,
+          generation=None,
+          metageneration=None),
+  ]
+
+  def _LS(self, ctx, path, **kwargs):
+    """Invoke LS() on |ctx|; split out so subclasses can override."""
+    return ctx.LS(path, **kwargs)
+
+  def LS(self, ctx=None, **kwargs):
+    if ctx is None:
+      ctx = self.ctx
+    return self._LS(ctx, self.LS_PATH, **kwargs)
+
+  def _List(self, ctx, path, **kwargs):
+    """Invoke List() on |ctx|; split out so subclasses can override."""
+    return ctx.List(path, **kwargs)
+
+  def List(self, ctx=None, **kwargs):
+    if ctx is None:
+      ctx = self.ctx
+    return self._List(ctx, self.LS_PATH, **kwargs)
+
+  def testBasicLS(self):
+    """Simple LS test."""
+    self.gs_mock.SetDefaultCmdResult(output=self.LS_OUTPUT)
+    result = self.LS()
+    self.gs_mock.assertCommandContains(['ls', '--', self.LS_PATH])
+
+    self.assertEqual(self.LS_OUTPUT_LINES, result)
+
+  def testBasicList(self):
+    """Simple List test."""
+    self.gs_mock.SetDefaultCmdResult(output=self.DETAILED_LS_OUTPUT)
+    result = self.List(details=True)
+    self.gs_mock.assertCommandContains(['ls', '-l', '--', self.LS_PATH])
+
+    self.assertEqual(self.LIST_RESULT, result)
+
+
+class UnmockedLSTest(cros_test_lib.TempDirTestCase):
+  """Tests LS/List functionality w/out mocks."""
+
+  def testLocalPaths(self):
+    """Tests listing local paths."""
+    ctx = gs.GSContext()
+
+    # The tempdir should exist, but be empty, by default.
+    self.assertEqual([], ctx.LS(self.tempdir))
+
+    # Create a few random files, including names with spaces/punctuation.
+    files = ['a', 'b', 'c!@', 'd e f', 'k\tj']
+    for f in files:
+      osutils.Touch(os.path.join(self.tempdir, f))
+
+    # See what the code finds -- order is not guaranteed.
+    found = ctx.LS(self.tempdir)
+    files.sort()
+    found.sort()
+    self.assertEqual(files, found)
+
+  @cros_test_lib.NetworkTest()
+  def testRemotePath(self):
+    """Tests listing remote paths."""
+    ctx = gs.GSContext()
+
+    with gs.TemporaryURL('chromite.ls') as tempuri:
+      # The path shouldn't exist by default.
+      with self.assertRaises(gs.GSNoSuchKey):
+        ctx.LS(tempuri)
+
+      # Create some files with known sizes.
+      files = ['a', 'b', 'c!@', 'd e f', 'k\tj']
+      uris = []
+      for f in files:
+        filename = os.path.join(self.tempdir, f)
+        # Each file's size is 10x its name length -- checked below.
+        osutils.WriteFile(filename, f * 10)
+        uri = os.path.join(tempuri, f)
+        uris.append(uri)
+        ctx.Copy(filename, uri)
+
+      # Check the plain listing -- order is not guaranteed.
+      found = ctx.LS(tempuri)
+      uris.sort()
+      found.sort()
+      self.assertEqual(uris, found)
+
+      # Check the detailed listing.
+      found = ctx.List(tempuri, details=True)
+      self.assertEqual(files, sorted([os.path.basename(x.url) for x in found]))
+
+      # Make sure sizes line up.
+      for f in found:
+        l = len(os.path.basename(f.url)) * 10
+        self.assertEqual(f.content_length, l)
+
+
+class CopyTest(AbstractGSContextTest, cros_test_lib.TempDirTestCase):
+  """Tests GSContext.Copy() functionality."""
+
+  GIVEN_REMOTE = EXPECTED_REMOTE = 'gs://test/path/file'
+  ACL = 'public-read'
+
+  def setUp(self):
+    # An empty scratch file to act as the copy source.
+    self.local_path = os.path.join(self.tempdir, 'file')
+    osutils.WriteFile(self.local_path, '')
+
+  def _Copy(self, ctx, src, dst, **kwargs):
+    """Invoke Copy() on |ctx|; overridden by CopyIntoTest."""
+    return ctx.Copy(src, dst, **kwargs)
+
+  def Copy(self, ctx=None, **kwargs):
+    if ctx is None:
+      ctx = self.ctx
+    return self._Copy(ctx, self.local_path, self.GIVEN_REMOTE, **kwargs)
+
+  def testBasic(self):
+    """Simple copy test."""
+    self.Copy()
+    self.gs_mock.assertCommandContains(
+        ['cp', '--', self.local_path, self.EXPECTED_REMOTE])
+
+  def testWithACL(self):
+    """ACL specified during init."""
+    ctx = gs.GSContext(acl=self.ACL)
+    self.Copy(ctx=ctx)
+    self.gs_mock.assertCommandContains(['cp', '-a', self.ACL])
+
+  def testWithACL2(self):
+    """ACL specified during invocation."""
+    self.Copy(acl=self.ACL)
+    self.gs_mock.assertCommandContains(['cp', '-a', self.ACL])
+
+  def testWithACL3(self):
+    """ACL specified during invocation that overrides init."""
+    ctx = gs.GSContext(acl=self.ACL)
+    self.Copy(ctx=ctx, acl=self.ACL)
+    self.gs_mock.assertCommandContains(['cp', '-a', self.ACL])
+
+  def testRunCommandError(self):
+    """Test RunCommandError is propagated."""
+    self.gs_mock.AddCmdResult(partial_mock.In('cp'), returncode=1)
+    self.assertRaises(cros_build_lib.RunCommandError, self.Copy)
+
+  def testGSContextPreconditionFailed(self):
+    """GSContextPreconditionFailed is raised properly."""
+    self.gs_mock.AddCmdResult(
+        partial_mock.In('cp'), returncode=1,
+        error=self.gs_mock.GSResponsePreconditionFailed)
+    self.assertRaises(gs.GSContextPreconditionFailed, self.Copy)
+
+  def testNonRecursive(self):
+    """Test non-recursive copy."""
+    self.Copy(recursive=False)
+    self.gs_mock.assertCommandContains(['-r'], expected=False)
+
+  def testRecursive(self):
+    """Test recursive copy."""
+    # A file source should not get -r even when recursive is requested...
+    self.Copy(recursive=True)
+    self.gs_mock.assertCommandContains(['-r'], expected=False)
+    # ...but a directory source should.
+    self._Copy(self.ctx, self.tempdir, self.GIVEN_REMOTE, recursive=True)
+    self.gs_mock.assertCommandContains(['cp', '-r'])
+
+  def testCompress(self):
+    """Test auto_compress behavior."""
+    # The file extension is fed to gsutil's -z flag.
+    path = os.path.join(self.tempdir, 'ok.txt')
+    self._Copy(self.ctx, path, self.GIVEN_REMOTE, auto_compress=True)
+    self.gs_mock.assertCommandContains(['-z', 'txt'], expected=True)
+
+  def testCompressNoExt(self):
+    """Test auto_compress w/bad src path."""
+    # No extension means no way to pick a -z value; should raise.
+    path = os.path.join(self.tempdir, 'bad.dir/bad-file')
+    self.assertRaises(ValueError, self._Copy, self.ctx, path,
+                      self.GIVEN_REMOTE, auto_compress=True)
+
+  def testGeneration(self):
+    """Test generation return value."""
+    exp_gen = 1413571271901000
+    error = (
+        'Copying file:///dev/null [Content-Type=application/octet-stream]...\n'
+        'Uploading   %(uri)s:               0 B    \r'
+        'Uploading   %(uri)s:               0 B    \r'
+        'Created: %(uri)s#%(gen)s'
+    ) % {'uri': self.GIVEN_REMOTE, 'gen': exp_gen}
+    self.gs_mock.AddCmdResult(partial_mock.In('cp'), returncode=0, error=error)
+    gen = self.Copy()
+    self.assertEqual(gen, exp_gen)
+
+  def testGeneration404(self):
+    """Test behavior when we get weird output."""
+    error = (
+        # This is a bit verbose, but it's from real output, so should be fine.
+        'Copying file:///tmp/tmpyUUPg1 [Content-Type=application/octet-stream]'
+        '...\n'
+        'Uploading   ...recovery-R38-6158.66.0-mccloud.instructions.lock:'
+        ' 0 B/38 B    \r'
+        'Uploading   ...recovery-R38-6158.66.0-mccloud.instructions.lock:'
+        ' 38 B/38 B    \r'
+        'NotFoundException: 404 Attempt to get key for "gs://chromeos-releases'
+        '/tobesigned/50,beta-\n'
+        'channel,mccloud,6158.66.0,ChromeOS-\n'
+        'recovery-R38-6158.66.0-mccloud.instructions.lock" failed. This can '
+        'happen if the\n'
+        'URI refers to a non-existent object or if you meant to operate on a '
+        'directory\n'
+        '(e.g., leaving off -R option on gsutil cp, mv, or ls of a bucket)\n'
+    )
+    self.gs_mock.AddCmdResult(partial_mock.In('cp'), returncode=1, error=error)
+    self.assertEqual(self.Copy(), None)
+
+
+class UnmockedCopyTest(cros_test_lib.TempDirTestCase):
+  """Tests Copy functionality w/out mocks."""
+
+  @cros_test_lib.NetworkTest()
+  def testNormal(self):
+    """Test normal upload/download behavior."""
+    ctx = gs.GSContext()
+
+    content = 'foooooooooooooooo!@!'
+
+    local_src_file = os.path.join(self.tempdir, 'src.txt')
+    local_dst_file = os.path.join(self.tempdir, 'dst.txt')
+
+    osutils.WriteFile(local_src_file, content)
+
+    with gs.TemporaryURL('chromite.cp') as tempuri:
+      # Upload the file.
+      gen = ctx.Copy(local_src_file, tempuri)
+
+      # Verify the generation is sane.  All we can assume is that it's a valid
+      # whole number greater than 0.
+      self.assertNotEqual(gen, None)
+      # Python 2: large generations may come back as long rather than int.
+      self.assertIn(type(gen), (int, long))
+      self.assertGreater(gen, 0)
+
+      # Verify the size is what we expect.
+      self.assertEqual(ctx.GetSize(tempuri), os.path.getsize(local_src_file))
+
+      # Copy it back down and verify the content is unchanged.
+      ctx.Copy(tempuri, local_dst_file)
+      new_content = osutils.ReadFile(local_dst_file)
+      self.assertEqual(content, new_content)
+
+  @cros_test_lib.NetworkTest()
+  def testCompress(self):
+    """Test auto_compress behavior."""
+    ctx = gs.GSContext()
+
+    # Need a string that compresses well.
+    content = ('zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'
+               'zzzzzlkasjdf89j2;3o4kqmnioruasddfv89uxdp;foiasjdf0892qn5kln')
+
+    local_src_file = os.path.join(self.tempdir, 'src.txt')
+    local_dst_file = os.path.join(self.tempdir, 'dst.txt')
+
+    osutils.WriteFile(local_src_file, content)
+
+    with gs.TemporaryURL('chromite.cp') as tempuri:
+      # Upload & compress the file.
+      gen = ctx.Copy(local_src_file, tempuri, auto_compress=True)
+
+      # Verify the generation is sane.  All we can assume is that it's a valid
+      # whole number greater than 0.
+      self.assertNotEqual(gen, None)
+      self.assertGreater(gen, 0)
+
+      # Verify the size is smaller (because it's compressed).
+      self.assertLess(ctx.GetSize(tempuri), os.path.getsize(local_src_file))
+
+      # Copy it back down and verify the content is decompressed & unchanged.
+      ctx.Copy(tempuri, local_dst_file)
+      new_content = osutils.ReadFile(local_dst_file)
+      self.assertEqual(content, new_content)
+
+  @cros_test_lib.NetworkTest()
+  def testVersion(self):
+    """Test version (generation) behavior."""
+    ctx = gs.GSContext()
+
+    local_src_file = os.path.join(self.tempdir, 'src.txt')
+
+    with gs.TemporaryURL('chromite.cp') as tempuri:
+      # Upload the file.  version=0 means "only if it doesn't exist yet".
+      osutils.WriteFile(local_src_file, 'gen0')
+      gen = ctx.Copy(local_src_file, tempuri, version=0)
+
+      # Verify the generation is sane.  All we can assume is that it's a valid
+      # whole number greater than 0.
+      self.assertNotEqual(gen, None)
+      self.assertGreater(gen, 0)
+
+      # The file should exist, so this will die due to wrong generation.
+      osutils.WriteFile(local_src_file, 'gen-bad')
+      self.assertRaises(gs.GSContextPreconditionFailed, ctx.Copy,
+                        local_src_file, tempuri, version=0)
+
+      # Sanity check the content is unchanged.
+      self.assertEquals(ctx.Cat(tempuri), 'gen0')
+
+      # Upload the file, but with the right generation.
+      osutils.WriteFile(local_src_file, 'gen-new')
+      gen = ctx.Copy(local_src_file, tempuri, version=gen)
+      self.assertEquals(ctx.Cat(tempuri), 'gen-new')
+
+
+class CopyIntoTest(CopyTest):
+  """Test CopyInto functionality."""
+
+  # CopyInto appends |FILE| to the remote dir, so the expected remote
+  # path differs from the given one (unlike plain Copy).
+  FILE = 'ooga'
+  GIVEN_REMOTE = 'gs://test/path/file'
+  EXPECTED_REMOTE = '%s/%s' % (GIVEN_REMOTE, FILE)
+
+  def _Copy(self, ctx, *args, **kwargs):
+    """Override the hook so all inherited tests exercise CopyInto()."""
+    return ctx.CopyInto(*args, filename=self.FILE, **kwargs)
+
+
+class RemoveTest(AbstractGSContextTest):
+  """Tests GSContext.Remove() functionality."""
+
+  def testNormal(self):
+    """Test normal remove behavior."""
+    # Remove() has no useful return value; just make sure it's None.
+    self.assertEqual(self.ctx.Remove('gs://foo/bar'), None)
+
+  def testMissing(self):
+    """Test behavior w/missing files."""
+    self.gs_mock.AddCmdResult(['rm', 'gs://foo/bar'],
+                              error='CommandException: No URLs matched: '
+                                    'gs://foo/bar',
+                              returncode=1)
+    self.assertRaises(gs.GSNoSuchKey, self.ctx.Remove, 'gs://foo/bar')
+    # This one should not throw an exception.
+    self.ctx.Remove('gs://foo/bar', ignore_missing=True)
+
+  def testRecursive(self):
+    """Verify we pass down -R in recursive mode."""
+    self.ctx.Remove('gs://foo/bar', recursive=True)
+    self.gs_mock.assertCommandContains(['rm', '-R'])
+
+
+class UnmockedRemoveTest(cros_test_lib.TestCase):
+  """Tests Remove functionality w/out mocks."""
+
+  @cros_test_lib.NetworkTest()
+  def testNormal(self):
+    """Test normal remove behavior."""
+    ctx = gs.GSContext()
+    with gs.TemporaryURL('chromite.rm') as tempuri:
+      ctx.Copy('/dev/null', tempuri)
+      self.assertEqual(ctx.Remove(tempuri), None)
+
+  @cros_test_lib.NetworkTest()
+  def testMissing(self):
+    """Test behavior w/missing files."""
+    ctx = gs.GSContext()
+    with gs.TemporaryURL('chromite.rm') as tempuri:
+      self.assertRaises(gs.GSNoSuchKey, ctx.Remove, tempuri)
+      # This one should not throw an exception.
+      ctx.Remove(tempuri, ignore_missing=True)
+
+  @cros_test_lib.NetworkTest()
+  def testRecursive(self):
+    """Verify recursive mode works."""
+    # Nested layout exercises recursion beyond a single directory level.
+    files = ('a', 'b/c', 'd/e/ffff')
+    ctx = gs.GSContext()
+    with gs.TemporaryURL('chromite.rm') as tempuri:
+      for p in files:
+        ctx.Copy('/dev/null', os.path.join(tempuri, p))
+      ctx.Remove(tempuri, recursive=True)
+      for p in files:
+        self.assertFalse(ctx.Exists(os.path.join(tempuri, p)))
+
+  @cros_test_lib.NetworkTest()
+  def testGeneration(self):
+    """Test conditional remove behavior."""
+    ctx = gs.GSContext()
+    with gs.TemporaryURL('chromite.rm') as tempuri:
+      ctx.Copy('/dev/null', tempuri)
+      gen, _ = ctx.GetGeneration(tempuri)
+      # Wrong generation must fail and leave the object in place.
+      self.assertRaises(gs.GSContextPreconditionFailed, ctx.Remove,
+                        tempuri, version=gen + 1)
+      self.assertTrue(ctx.Exists(tempuri))
+      # Matching generation removes it.
+      ctx.Remove(tempuri, version=gen)
+      self.assertFalse(ctx.Exists(tempuri))
+
+
+class MoveTest(AbstractGSContextTest, cros_test_lib.TempDirTestCase):
+  """Tests GSContext.Move() functionality."""
+
+  GIVEN_REMOTE = EXPECTED_REMOTE = 'gs://test/path/file'
+
+  def setUp(self):
+    self.local_path = os.path.join(self.tempdir, 'file')
+    osutils.WriteFile(self.local_path, '')
+
+  def _Move(self, ctx, src, dst, **kwargs):
+    return ctx.Move(src, dst, **kwargs)
+
+  def Move(self, ctx=None, **kwargs):
+    if ctx is None:
+      ctx = self.ctx
+    return self._Move(ctx, self.local_path, self.GIVEN_REMOTE, **kwargs)
+
+  def testBasic(self):
+    """Simple move test."""
+    self.Move()
+    self.gs_mock.assertCommandContains(
+        ['mv', '--', self.local_path, self.EXPECTED_REMOTE])
+
+
+class GSContextInitTest(cros_test_lib.MockTempDirTestCase):
+  """Tests GSContext.__init__() functionality."""
+
+  def setUp(self):
+    os.environ.pop('BOTO_CONFIG', None)
+    self.bad_path = os.path.join(self.tempdir, 'nonexistent')
+
+    file_list = ['gsutil_bin', 'boto_file', 'acl_file']
+    cros_test_lib.CreateOnDiskHierarchy(self.tempdir, file_list)
+    for f in file_list:
+      setattr(self, f, os.path.join(self.tempdir, f))
+    self.StartPatcher(PatchGS('DEFAULT_BOTO_FILE', new=self.boto_file))
+    self.StartPatcher(PatchGS('DEFAULT_GSUTIL_BIN', new=self.gsutil_bin))
+
+  def testInitGsutilBin(self):
+    """Test we use the given gsutil binary, erroring where appropriate."""
+    self.assertEquals(gs.GSContext().gsutil_bin, self.gsutil_bin)
+    self.assertRaises(gs.GSContextException,
+                      gs.GSContext, gsutil_bin=self.bad_path)
+
+  def testBadGSUtilBin(self):
+    """Test exception thrown for bad gsutil paths."""
+    self.assertRaises(gs.GSContextException, gs.GSContext,
+                      gsutil_bin=self.bad_path)
+
+  def testInitBotoFileEnv(self):
+    """Test boto file environment is set correctly."""
+    os.environ['BOTO_CONFIG'] = self.gsutil_bin
+    self.assertTrue(gs.GSContext().boto_file, self.gsutil_bin)
+    self.assertEqual(gs.GSContext(boto_file=self.acl_file).boto_file,
+                     self.acl_file)
+    self.assertEqual(gs.GSContext(boto_file=self.bad_path).boto_file,
+                     self.bad_path)
+
+  def testInitBotoFileEnvError(self):
+    """Boto file through env var error."""
+    self.assertEquals(gs.GSContext().boto_file, self.boto_file)
+    # Check env usage next; no need to cleanup, teardown handles it,
+    # and we want the env var to persist for the next part of this test.
+    os.environ['BOTO_CONFIG'] = self.bad_path
+    self.assertEqual(gs.GSContext().boto_file, self.bad_path)
+
+  def testInitBotoFileError(self):
+    """Test bad boto file."""
+    self.assertEqual(gs.GSContext(boto_file=self.bad_path).boto_file,
+                     self.bad_path)
+
+  def testDoNotUseDefaultBotoFileIfItDoesNotExist(self):
+    """Do not set boto file if the default path does not exist."""
+    if 'BOTO_CONFIG' in os.environ:
+      del os.environ['BOTO_CONFIG']
+    gs.GSContext.DEFAULT_BOTO_FILE = 'foo/bar/doesnotexist'
+    self.assertEqual(gs.GSContext().boto_file, None)
+
+  def testInitAclFile(self):
+    """Test ACL selection logic in __init__."""
+    self.assertEqual(gs.GSContext().acl, None)
+    self.assertEqual(gs.GSContext(acl=self.acl_file).acl,
+                     self.acl_file)
+
+  def _testHTTPProxySettings(self, d):
+    flags = gs.GSContext().gsutil_flags
+    for key in d:
+      flag = 'Boto:%s=%s' % (key, d[key])
+      error_msg = '%s not in %s' % (flag, ' '.join(flags))
+      self.assertTrue(flag in flags, error_msg)
+
+  def testHTTPProxy(self):
+    """Test we set http proxy correctly."""
+    d = {'proxy': 'fooserver', 'proxy_user': 'foouser',
+         'proxy_pass': 'foopasswd', 'proxy_port': '8080'}
+    os.environ['http_proxy'] = 'http://%s:%s@%s:%s/' % (
+        d['proxy_user'], d['proxy_pass'], d['proxy'], d['proxy_port'])
+    self._testHTTPProxySettings(d)
+
+  def testHTTPProxyNoPort(self):
+    """Test we accept http proxy without port number."""
+    d = {'proxy': 'fooserver', 'proxy_user': 'foouser',
+         'proxy_pass': 'foopasswd'}
+    os.environ['http_proxy'] = 'http://%s:%s@%s/' % (
+        d['proxy_user'], d['proxy_pass'], d['proxy'])
+    self._testHTTPProxySettings(d)
+
+  def testHTTPProxyNoUserPasswd(self):
+    """Test we accept http proxy without user and password."""
+    d = {'proxy': 'fooserver', 'proxy_port': '8080'}
+    os.environ['http_proxy'] = 'http://%s:%s/' % (d['proxy'], d['proxy_port'])
+    self._testHTTPProxySettings(d)
+
+  def testHTTPProxyNoPasswd(self):
+    """Test we accept http proxy without password."""
+    d = {'proxy': 'fooserver', 'proxy_user': 'foouser',
+         'proxy_port': '8080'}
+    os.environ['http_proxy'] = 'http://%s@%s:%s/' % (
+        d['proxy_user'], d['proxy'], d['proxy_port'])
+    self._testHTTPProxySettings(d)
+
+
+class GSDoCommandTest(cros_test_lib.TestCase):
+  """Tests of gs.DoCommand behavior.
+
+  This test class inherits from cros_test_lib.TestCase instead of from
+  AbstractGSContextTest, because the latter unnecessarily mocks out
+  cros_build_lib.RunCommand, in a way that breaks _testDoCommand (changing
+  cros_build_lib.RunCommand to refer to a mock instance after the
+  GenericRetry mock has already been set up to expect a reference to the
+  original RunCommand).
+  """
+
+  def setUp(self):
+    self.ctx = gs.GSContext()
+
+  def _testDoCommand(self, ctx, headers=(), retries=None, sleep=None,
+                     version=None, recursive=False):
+    """Run a Copy() through |ctx| and assert the exact retry invocation."""
+    if retries is None:
+      retries = ctx.DEFAULT_RETRIES
+    if sleep is None:
+      sleep = ctx.DEFAULT_SLEEP_TIME
+
+    result = cros_build_lib.CommandResult(error='')
+    with mock.patch.object(retry_stats, 'RetryWithStats', autospec=True,
+                           return_value=result):
+      ctx.Copy('/blah', 'gs://foon', version=version, recursive=recursive)
+      # Reconstruct the gsutil command line we expect DoCommand to build.
+      cmd = [self.ctx.gsutil_bin] + self.ctx.gsutil_flags + list(headers)
+      cmd += ['cp', '-v']
+      if recursive:
+        cmd += ['-r', '-e']
+      cmd += ['--', '/blah', 'gs://foon']
+
+      # pylint: disable=protected-access
+      retry_stats.RetryWithStats.assert_called_once_with(
+          retry_stats.GSUTIL,
+          ctx._RetryFilter, retries,
+          cros_build_lib.RunCommand,
+          cmd, sleep=sleep,
+          redirect_stderr=True,
+          capture_output=True,
+          extra_env=mock.ANY)
+
+  def testDoCommandDefault(self):
+    """Verify the internal DoCommand function works correctly."""
+    self._testDoCommand(self.ctx)
+
+  def testDoCommandCustom(self):
+    """Test that retries and sleep parameters are honored."""
+    ctx = gs.GSContext(retries=4, sleep=1)
+    self._testDoCommand(ctx, retries=4, sleep=1)
+
+  def testVersion(self):
+    """Test that the version field expands into the header."""
+    self._testDoCommand(self.ctx, version=3,
+                        headers=['-h', 'x-goog-if-generation-match:3'])
+
+  def testDoCommandRecursiveCopy(self):
+    """Test that recursive copy command is honored."""
+    self._testDoCommand(self.ctx, recursive=True)
+
+
+class GSRetryFilterTest(cros_test_lib.TestCase):
+  """Verifies that we filter and process gsutil errors correctly."""
+
+  # pylint: disable=protected-access
+
+  LOCAL_PATH = '/tmp/file'
+  REMOTE_PATH = ('gs://chromeos-prebuilt/board/beltino/paladin-R33-4926.0.0'
+                 '-rc2/packages/chromeos-base/autotest-tests-0.0.1-r4679.tbz2')
+  GSUTIL_TRACKER_DIR = '/foo'
+  UPLOAD_TRACKER_FILE = (
+      'upload_TRACKER_9263880a80e4a582aec54eaa697bfcdd9c5621ea.9.tbz2__JSON.url'
+  )
+  DOWNLOAD_TRACKER_FILE = (
+      'download_TRACKER_5a695131f3ef6e4c903f594783412bb996a7f375._file__JSON.'
+      'etag')
+  RETURN_CODE = 3
+
+  def setUp(self):
+    self.ctx = gs.GSContext()
+    self.ctx.DEFAULT_GSUTIL_TRACKER_DIR = self.GSUTIL_TRACKER_DIR
+
+  def _getException(self, cmd, error, returncode=RETURN_CODE):
+    result = cros_build_lib.CommandResult(
+        error=error,
+        cmd=cmd,
+        returncode=returncode)
+    return cros_build_lib.RunCommandError('blah', result)
+
+  def assertNoSuchKey(self, error_msg):
+    cmd = ['gsutil', 'ls', self.REMOTE_PATH]
+    e = self._getException(cmd, error_msg)
+    self.assertRaises(gs.GSNoSuchKey, self.ctx._RetryFilter, e)
+
+  def assertPreconditionFailed(self, error_msg):
+    cmd = ['gsutil', 'ls', self.REMOTE_PATH]
+    e = self._getException(cmd, error_msg)
+    self.assertRaises(gs.GSContextPreconditionFailed,
+                      self.ctx._RetryFilter, e)
+
+  def testRetryOnlyFlakyErrors(self):
+    """Test that we retry only flaky errors."""
+    cmd = ['gsutil', 'ls', self.REMOTE_PATH]
+    e = self._getException(cmd, 'ServiceException: 503')
+    self.assertTrue(self.ctx._RetryFilter(e))
+
+    e = self._getException(cmd, 'UnknownException: 603')
+    self.assertFalse(self.ctx._RetryFilter(e))
+
+  def testRaiseGSErrors(self):
+    """Test that we raise appropriate exceptions."""
+    self.assertNoSuchKey('CommandException: No URLs matched.')
+    self.assertNoSuchKey('NotFoundException: 404')
+    self.assertPreconditionFailed(
+        'PreconditionException: 412 Precondition Failed')
+
  # NOTE: decorators apply bottom-up, so the mocks arrive in the order
  # exists_mock, readfile_mock, unlink_mock.
  @mock.patch('chromite.lib.osutils.SafeUnlink')
  @mock.patch('chromite.lib.osutils.ReadFile')
  @mock.patch('os.path.exists')
  def testRemoveUploadTrackerFile(self, exists_mock, readfile_mock,
                                  unlink_mock):
    """Test removal of tracker files for resumable upload failures."""
    cmd = ['gsutil', 'cp', self.LOCAL_PATH, self.REMOTE_PATH]
    e = self._getException(cmd, self.ctx.RESUMABLE_UPLOAD_ERROR)
    # Pretend the tracker file exists and contains a hash, so _RetryFilter
    # should compute its path and delete it.
    exists_mock.return_value = True
    readfile_mock.return_value = 'foohash'
    self.ctx._RetryFilter(e)
    tracker_file_path = os.path.join(self.GSUTIL_TRACKER_DIR,
                                     self.UPLOAD_TRACKER_FILE)
    unlink_mock.assert_called_once_with(tracker_file_path)
+
  # Same mock ordering note as the upload variant: decorators apply
  # bottom-up.
  @mock.patch('chromite.lib.osutils.SafeUnlink')
  @mock.patch('chromite.lib.osutils.ReadFile')
  @mock.patch('os.path.exists')
  def testRemoveDownloadTrackerFile(self, exists_mock, readfile_mock,
                                    unlink_mock):
    """Test removal of tracker files for resumable download failures."""
    # Note the cp direction: remote -> local, i.e. a download.
    cmd = ['gsutil', 'cp', self.REMOTE_PATH, self.LOCAL_PATH]
    e = self._getException(cmd, self.ctx.RESUMABLE_DOWNLOAD_ERROR)
    exists_mock.return_value = True
    readfile_mock.return_value = 'foohash'
    self.ctx._RetryFilter(e)
    tracker_file_path = os.path.join(self.GSUTIL_TRACKER_DIR,
                                     self.DOWNLOAD_TRACKER_FILE)
    unlink_mock.assert_called_once_with(tracker_file_path)
+
  def testRemoveTrackerFileOnlyForCP(self):
    """Test that we remove tracker files only for 'gsutil cp'."""
    # 'ls' is not a transfer command, so even a resumable-download error
    # message must not trigger tracker-file cleanup.
    cmd = ['gsutil', 'ls', self.REMOTE_PATH]
    e = self._getException(cmd, self.ctx.RESUMABLE_DOWNLOAD_ERROR)

    # The `with mock.MagicMock() as <attr>` form binds a fresh MagicMock onto
    # self.ctx.GetTrackerFilenames for the duration of the block.
    with mock.MagicMock() as self.ctx.GetTrackerFilenames:
      self.ctx._RetryFilter(e)
      self.assertFalse(self.ctx.GetTrackerFilenames.called)
+
  def testNoRemoveTrackerFileOnOtherErrors(self):
    """Test that we do not attempt to delete tracker files for other errors."""
    cmd = ['gsutil', 'cp', self.REMOTE_PATH, self.LOCAL_PATH]
    # A no-such-key error on a cp is not a resumable-transfer failure, so no
    # tracker lookup should happen and GSNoSuchKey should propagate.
    e = self._getException(cmd, 'One or more URLs matched no objects')

    with mock.MagicMock() as self.ctx.GetTrackerFilenames:
      self.assertRaises(gs.GSNoSuchKey, self.ctx._RetryFilter, e)
      self.assertFalse(self.ctx.GetTrackerFilenames.called)
+
+  def testRetryTransient(self):
+    """Verify retry behavior when hitting b/11762375"""
+    error = (
+        'Removing gs://foo/bar/monkey...\n'
+        'GSResponseError: status=403, code=InvalidAccessKeyId, '
+        'reason="Forbidden", message="The User Id you provided '
+        'does not exist in our records.", detail="GOOGBWPADTH7OV25KJXZ"'
+    )
+    e = self._getException(['gsutil', 'rm', 'gs://foo/bar/monkey'], error)
+    self.assertEqual(self.ctx._RetryFilter(e), True)
+
+
class GSContextTest(AbstractGSContextTest):
  """Tests for GSContext() against a mocked-out gsutil."""

  def testTemporaryUrl(self):
    """Just verify the url helper generates valid URLs."""
    with gs.TemporaryURL('mock') as url:
      # Temporary URLs must live under the trash bucket so cleanup jobs can
      # reap them.
      base = url[0:len(constants.TRASH_BUCKET)]
      self.assertEqual(base, constants.TRASH_BUCKET)

      valid_chars = set(string.ascii_letters + string.digits + '/-')
      used_chars = set(url[len(base) + 1:])
      self.assertEqual(used_chars - valid_chars, set())

  def testSetAclError(self):
    """Ensure SetACL blows up if the acl isn't specified."""
    self.assertRaises(gs.GSContextException, self.ctx.SetACL, 'gs://abc/3')

  def testSetDefaultAcl(self):
    """Test default ACL behavior."""
    self.ctx.SetACL('gs://abc/1', 'monkeys')
    self.gs_mock.assertCommandContains(['acl', 'set', 'monkeys', 'gs://abc/1'])

  def testSetAcl(self):
    """Base ACL setting functionality."""
    # When the context was constructed with a default acl, SetACL without an
    # explicit acl argument should fall back to it.
    ctx = gs.GSContext(acl='/my/file/acl')
    ctx.SetACL('gs://abc/1')
    self.gs_mock.assertCommandContains(['acl', 'set', '/my/file/acl',
                                        'gs://abc/1'])

  def testChangeAcl(self):
    """Test changing an ACL."""
    # Two fixture acl-args files: one plain, one with comments that must be
    # stripped before being turned into command-line arguments.
    basic_file = """
-g foo:READ

-u bar:FULL_CONTROL"""
    comment_file = """
# Give foo READ permission
-g foo:READ # Now foo can read this
  # This whole line should be removed
-u bar:FULL_CONTROL
# A comment at the end"""
    tempfile = os.path.join(self.tempdir, 'tempfile')
    ctx = gs.GSContext()

    osutils.WriteFile(tempfile, basic_file)
    ctx.ChangeACL('gs://abc/1', acl_args_file=tempfile)
    self.gs_mock.assertCommandContains([
        'acl', 'ch', '-g', 'foo:READ', '-u', 'bar:FULL_CONTROL', 'gs://abc/1'
    ])

    # Comments and blank lines must not change the resulting command.
    osutils.WriteFile(tempfile, comment_file)
    ctx.ChangeACL('gs://abc/1', acl_args_file=tempfile)
    self.gs_mock.assertCommandContains([
        'acl', 'ch', '-g', 'foo:READ', '-u', 'bar:FULL_CONTROL', 'gs://abc/1'
    ])

    # Passing the args directly must behave the same as via a file.
    ctx.ChangeACL('gs://abc/1',
                  acl_args=['-g', 'foo:READ', '-u', 'bar:FULL_CONTROL'])
    self.gs_mock.assertCommandContains([
        'acl', 'ch', '-g', 'foo:READ', '-u', 'bar:FULL_CONTROL', 'gs://abc/1'
    ])

    # Supplying both, or neither, of acl_args_file/acl_args is an error.
    with self.assertRaises(gs.GSContextException):
      ctx.ChangeACL('gs://abc/1', acl_args_file=tempfile, acl_args=['foo'])

    with self.assertRaises(gs.GSContextException):
      ctx.ChangeACL('gs://abc/1')

  def testIncrement(self):
    """Test ability to atomically increment a counter."""
    ctx = gs.GSContext()

    with mock.patch.object(ctx, 'GetGeneration', return_value=(0, 0)):
      ctx.Counter('gs://abc/1').Increment()

    # The increment is written back via a cp of the new value.
    self.gs_mock.assertCommandContains(['cp', 'gs://abc/1'])

  def testGetGeneration(self):
    """Test ability to get the generation of a file."""
    self.gs_mock.AddCmdResult(['stat', 'gs://abc/1'],
                              output=StatTest.STAT_OUTPUT)
    ctx = gs.GSContext()
    ctx.GetGeneration('gs://abc/1')
    self.gs_mock.assertCommandContains(['stat', 'gs://abc/1'])

  def testCreateCached(self):
    """Test that the function runs through."""
    gs.GSContext(cache_dir=self.tempdir)

  def testReuseCached(self):
    """Test that second fetch is a cache hit."""
    gs.GSContext(cache_dir=self.tempdir)
    # Break the download URL; if the second construction still works it must
    # have used the cache rather than re-fetching gsutil.
    gs.GSUTIL_URL = None
    gs.GSContext(cache_dir=self.tempdir)

  def testUnknownError(self):
    """Test that when gsutil fails in an unknown way, we do the right thing."""
    self.gs_mock.AddCmdResult(['cat', '/asdf'], returncode=1)

    ctx = gs.GSContext()
    self.assertRaises(gs.GSCommandError, ctx.DoCommand, ['cat', '/asdf'])

  def testWaitForGsPathsAllPresent(self):
    """Test for waiting when all paths exist already."""
    ctx = gs.GSContext()

    with mock.patch.object(ctx, 'Exists', return_value=True):
      ctx.WaitForGsPaths(['/path1', '/path2'], 20)

  def testWaitForGsPathsDelayedSuccess(self):
    """Test for waiting when paths show up only after polling a few times."""
    ctx = gs.GSContext()

    # First they both don't exist, then one does, then remaining does.
    exists = [False, False, True, False, True]
    with mock.patch.object(ctx, 'Exists', side_effect=exists):
      ctx.WaitForGsPaths(['/path1', '/path2'], 20, period=0.02)

  def testWaitForGsPathsTimeout(self):
    """Test for waiting, but not all paths exist so we timeout."""
    ctx = gs.GSContext()

    # /path2 never appears, so the wait must raise TimeoutError.
    exists = {'/path1': True, '/path2': False}
    with mock.patch.object(ctx, 'Exists', side_effect=lambda p: exists[p]):
      self.assertRaises(gs.timeout_util.TimeoutError,
                        ctx.WaitForGsPaths, ['/path1', '/path2'],
                        timeout=1, period=0.02)

  def testParallelFalse(self):
    """Tests that "-m" is not used by default."""
    ctx = gs.GSContext()
    ctx.Copy('-', 'gs://abc/1')
    self.assertFalse(any('-m' in cmd for cmd in self.gs_mock.raw_gs_cmds))

  def testParallelTrue(self):
    """Tests that "-m" is used when you pass parallel=True."""
    ctx = gs.GSContext()
    ctx.Copy('gs://abc/1', 'gs://abc/2', parallel=True)
    self.assertTrue(all('-m' in cmd for cmd in self.gs_mock.raw_gs_cmds))

  def testNoParallelOpWithStdin(self):
    """Tests that "-m" is not used when we pipe the input."""
    # Parallel mode is incompatible with streaming stdin to gsutil, so the
    # flag must be suppressed even though parallel=True was requested.
    ctx = gs.GSContext()
    ctx.Copy('gs://abc/1', 'gs://abc/2', input='foo', parallel=True)
    self.assertFalse(any('-m' in cmd for cmd in self.gs_mock.raw_gs_cmds))
+
+
class UnmockedGSContextTest(cros_test_lib.TempDirTestCase):
  """Tests for GSContext that go over the network."""

  @cros_test_lib.NetworkTest()
  def testIncrement(self):
    """Verify Counter Get/Increment against a real, temporary GS object."""
    ctx = gs.GSContext()
    with gs.TemporaryURL('testIncrement') as url:
      counter = ctx.Counter(url)
      # A counter that doesn't exist yet reads as 0.
      self.assertEqual(0, counter.Get())
      for i in xrange(1, 4):
        self.assertEqual(i, counter.Increment())
        self.assertEqual(i, counter.Get())
+
+
class StatTest(AbstractGSContextTest):
  """Tests Stat functionality."""

  # Convenient constant for mocking Stat results.
  STAT_OUTPUT = """gs://abc/1:
        Creation time:    Sat, 23 Aug 2014 06:53:20 GMT
        Content-Language: en
        Content-Length:   74
        Content-Type:   application/octet-stream
        Hash (crc32c):    BBPMPA==
        Hash (md5):   ms+qSYvgI9SjXn8tW/5UpQ==
        ETag:     CNCgocbmqMACEAE=
        Generation:   1408776800850000
        Metageneration:   1
      """

  # Stat output can vary based on how/when the file was created.
  # This older form omits the Content-Language line.
  STAT_OUTPUT_OLDER = """gs://abc/1:
        Creation time:    Sat, 23 Aug 2014 06:53:20 GMT
        Content-Length:   74
        Content-Type:   application/octet-stream
        Hash (crc32c):    BBPMPA==
        Hash (md5):   ms+qSYvgI9SjXn8tW/5UpQ==
        ETag:     CNCgocbmqMACEAE=
        Generation:   1408776800850000
        Metageneration:   1
      """

  # When stat throws an error.  It's a special snow flake.
  STAT_ERROR_OUTPUT = ('INFO 0713 05:58:12.451810 stat.py] '
                       'No URLs matched gs://abc/1')

  def testStat(self):
    """Test that Stat parses every field of the modern stat output."""
    self.gs_mock.AddCmdResult(['stat', 'gs://abc/1'],
                              output=self.STAT_OUTPUT)
    ctx = gs.GSContext()
    result = ctx.Stat('gs://abc/1')
    self.gs_mock.assertCommandContains(['stat', 'gs://abc/1'])

    self.assertEqual(result.creation_time,
                     datetime.datetime(2014, 8, 23, 6, 53, 20))
    self.assertEqual(result.content_length, 74)
    self.assertEqual(result.content_type, 'application/octet-stream')
    self.assertEqual(result.hash_crc32c, 'BBPMPA==')
    self.assertEqual(result.hash_md5, 'ms+qSYvgI9SjXn8tW/5UpQ==')
    self.assertEqual(result.etag, 'CNCgocbmqMACEAE=')
    self.assertEqual(result.generation, 1408776800850000)
    self.assertEqual(result.metageneration, 1)

  def testStatOlderOutput(self):
    """Test that Stat also parses the older stat output format."""
    self.gs_mock.AddCmdResult(['stat', 'gs://abc/1'],
                              output=self.STAT_OUTPUT_OLDER)
    ctx = gs.GSContext()
    result = ctx.Stat('gs://abc/1')
    self.gs_mock.assertCommandContains(['stat', 'gs://abc/1'])

    self.assertEqual(result.creation_time,
                     datetime.datetime(2014, 8, 23, 6, 53, 20))
    self.assertEqual(result.content_length, 74)
    self.assertEqual(result.content_type, 'application/octet-stream')
    self.assertEqual(result.hash_crc32c, 'BBPMPA==')
    self.assertEqual(result.hash_md5, 'ms+qSYvgI9SjXn8tW/5UpQ==')
    self.assertEqual(result.etag, 'CNCgocbmqMACEAE=')
    self.assertEqual(result.generation, 1408776800850000)
    self.assertEqual(result.metageneration, 1)

  def testStatNoExist(self):
    """Test that Stat raises GSNoSuchKey for a missing object."""
    self.gs_mock.AddCmdResult(['stat', 'gs://abc/1'],
                              error=self.STAT_ERROR_OUTPUT,
                              returncode=1)
    ctx = gs.GSContext()
    self.assertRaises(gs.GSNoSuchKey, ctx.Stat, 'gs://abc/1')
    self.gs_mock.assertCommandContains(['stat', 'gs://abc/1'])
+
+
class UnmockedStatTest(cros_test_lib.TempDirTestCase):
  """Tests Stat functionality w/out mocks."""

  @cros_test_lib.NetworkTest()
  def testStat(self):
    """Test Stat on a real GS object, both missing and present."""
    ctx = gs.GSContext()
    with gs.TemporaryURL('testStat') as url:

      # The URL doesn't exist. Test Stat for this case.
      self.assertRaises(gs.GSNoSuchKey, ctx.Stat, url)

      # Populate the URL.
      ctx.CreateWithContents(url, 'test file contents')

      # Stat a URL that exists.
      result = ctx.Stat(url)

    # Verify the Stat results.
    self.assertIsInstance(result.creation_time, datetime.datetime)
    # Length/hashes are fixed by the known 'test file contents' payload.
    self.assertEqual(result.content_length, 18)
    self.assertEqual(result.content_type, 'application/octet-stream')
    self.assertEqual(result.hash_crc32c, 'wUc4sQ==')
    self.assertEqual(result.hash_md5, 'iRvNNwBhmvUVG/lbg2/5sQ==')
    self.assertIsInstance(result.etag, str)
    self.assertIsInstance(result.generation, int)
    self.assertEqual(result.metageneration, 1)

  @cros_test_lib.NetworkTest()
  def testMissing(self):
    """Test exceptions when the file doesn't exist."""
    ctx = gs.GSContext()
    with gs.TemporaryURL('testStat') as url:
      self.assertRaises(gs.GSNoSuchKey, ctx.Stat, url)
      self.assertFalse(ctx.Exists(url))

  def testExists(self):
    """Test Exists behavior with local files."""
    # Exists should also work for plain filesystem paths, not just gs:// URIs.
    ctx = gs.GSContext()
    f = os.path.join(self.tempdir, 'f')

    self.assertFalse(ctx.Exists(f))

    osutils.Touch(f)
    self.assertTrue(ctx.Exists(f))
+
+
class CatTest(cros_test_lib.TempDirTestCase):
  """Tests GSContext.Cat() functionality."""

  def testLocalFile(self):
    """Tests catting a local file."""
    ctx = gs.GSContext()
    filename = os.path.join(self.tempdir, 'myfile')
    content = 'foo'
    osutils.WriteFile(filename, content)
    self.assertEqual(content, ctx.Cat(filename))

  def testLocalMissingFile(self):
    """Tests catting a missing local file."""
    # Missing local files map to the same exception as missing GS objects.
    ctx = gs.GSContext()
    with self.assertRaises(gs.GSNoSuchKey):
      ctx.Cat(os.path.join(self.tempdir, 'does/not/exist'))

  def testLocalForbiddenFile(self):
    """Tests catting a local file that we don't have access to."""
    # NOTE(review): chmod 000 does not block root; this test presumably runs
    # unprivileged.
    ctx = gs.GSContext()
    filename = os.path.join(self.tempdir, 'myfile')
    content = 'foo'
    osutils.WriteFile(filename, content)
    os.chmod(filename, 000)
    with self.assertRaises(gs.GSContextException):
      ctx.Cat(filename)

  @cros_test_lib.NetworkTest()
  def testNetworkFile(self):
    """Tests catting a GS file."""
    # Binary-ish content with embedded control chars must round-trip exactly.
    ctx = gs.GSContext()
    filename = os.path.join(self.tempdir, 'myfile')
    content = 'fOoOoOoo1\n\thi@!*!(\r\r\nend'
    osutils.WriteFile(filename, content)

    with gs.TemporaryURL('chromite.cat') as tempuri:
      ctx.Copy(filename, tempuri)
      self.assertEqual(content, ctx.Cat(tempuri))

  @cros_test_lib.NetworkTest()
  def testNetworkMissingFile(self):
    """Tests catting a missing GS file."""
    ctx = gs.GSContext()
    with gs.TemporaryURL('chromite.cat') as tempuri:
      with self.assertRaises(gs.GSNoSuchKey):
        ctx.Cat(tempuri)
+
+
class DryRunTest(cros_build_lib_unittest.RunCommandTestCase):
  """Verify dry_run works for all of GSContext."""

  def setUp(self):
    self.ctx = gs.GSContext(dry_run=True)

  def tearDown(self):
    # Verify we don't try to call gsutil at all.
    for call_args in self.rc.call_args_list:
      self.assertNotIn('gsutil', call_args[0][0])

  def testCat(self):
    """Test Cat in dry_run mode."""
    self.assertEqual(self.ctx.Cat('gs://foo/bar'), '')

  def testChangeACL(self):
    """Test ChangeACL in dry_run mode."""
    self.assertEqual(
        self.ctx.ChangeACL('gs://foo/bar', acl_args_file='/dev/null'),
        None)

  def testCopy(self):
    """Test Copy in dry_run mode."""
    self.ctx.Copy('/dev/null', 'gs://foo/bar')
    self.ctx.Copy('gs://foo/bar', '/dev/null')

  def testCreateWithContents(self):
    """Test CreateWithContents in dry_run mode."""
    self.ctx.CreateWithContents('gs://foo/bar', 'My Little Content(tm)')

  def testCopyInto(self):
    """Test CopyInto in dry_run mode."""
    self.ctx.CopyInto('/dev/null', 'gs://foo/bar')

  def testDoCommand(self):
    """Test DoCommand in dry_run mode."""
    # Even a bogus command must be a no-op under dry_run.
    self.ctx.DoCommand(['a-bad-command'])

  def testExists(self):
    """Test Exists in dry_run mode."""
    self.assertEqual(self.ctx.Exists('gs://foo/bar'), True)

  def testGetGeneration(self):
    """Test GetGeneration in dry_run mode."""
    self.assertEqual(self.ctx.GetGeneration('gs://foo/bar'), (0, 0))

  def testGetSize(self):
    """Test GetSize in dry_run mode."""
    self.assertEqual(self.ctx.GetSize('gs://foo/bar'), 0)

  def testGetTrackerFilenames(self):
    """Test GetTrackerFilenames in dry_run mode."""
    self.ctx.GetTrackerFilenames('foo')

  def testLS(self):
    """Test LS in dry_run mode."""
    self.assertEqual(self.ctx.LS('gs://foo/bar'), [])

  def testList(self):
    """Test List in dry_run mode."""
    self.assertEqual(self.ctx.List('gs://foo/bar'), [])

  def testMove(self):
    """Test Move in dry_run mode."""
    self.ctx.Move('gs://foo/bar', 'gs://foo/bar2')

  def testRemove(self):
    """Test Remove in dry_run mode."""
    self.ctx.Remove('gs://foo/bar')

  def testSetACL(self):
    """Test SetACL in dry_run mode."""
    self.assertEqual(self.ctx.SetACL('gs://foo/bar', 'bad-acl'), None)

  def testStat(self):
    """Test Stat in dry_run mode."""
    # Dry-run Stat returns a plausible-but-empty result object.
    result = self.ctx.Stat('gs://foo/bar')
    self.assertEqual(result.content_length, 0)
    self.assertNotEqual(result.creation_time, None)

  def testVersion(self):
    """Test gsutil_version in dry_run mode."""
    self.assertEqual(self.ctx.gsutil_version, gs.GSContext.GSUTIL_VERSION)
+
+
class InitBotoTest(AbstractGSContextTest):
  """Test boto file interactive initialization."""

  # pylint: disable=protected-access

  # Fixture stderr outputs for the `gsutil ls` credential probe.
  GS_LS_ERROR = """\
You are attempting to access protected data with no configured credentials.
Please see http://code.google.com/apis/storage/docs/signup.html for
details about activating the Google Cloud Storage service and then run the
"gsutil config" command to configure gsutil to use these credentials."""

  GS_LS_ERROR2 = """\
GSResponseError: status=400, code=MissingSecurityHeader, reason=Bad Request, \
detail=Authorization."""

  # This 400 only means no project id header was sent; the credentials
  # themselves are fine, so it is treated as success.
  GS_LS_BENIGN = """\
"GSResponseError: status=400, code=MissingSecurityHeader, reason=Bad Request,
detail=A nonempty x-goog-project-id header is required for this request."""

  def setUp(self):
    self.boto_file = os.path.join(self.tempdir, 'boto_file')
    self.ctx = gs.GSContext(boto_file=self.boto_file)

  def testGSLsSkippableError(self):
    """Benign GS error."""
    self.gs_mock.AddCmdResult(['ls'], returncode=1, error=self.GS_LS_BENIGN)
    self.assertTrue(self.ctx._TestGSLs())

  def testGSLsAuthorizationError1(self):
    """GS authorization error 1."""
    self.gs_mock.AddCmdResult(['ls'], returncode=1, error=self.GS_LS_ERROR)
    self.assertFalse(self.ctx._TestGSLs())

  def testGSLsError2(self):
    """GS authorization error 2."""
    self.gs_mock.AddCmdResult(['ls'], returncode=1, error=self.GS_LS_ERROR2)
    self.assertFalse(self.ctx._TestGSLs())

  def _WriteBotoFile(self, contents, *_args, **_kwargs):
    # Used as a RunCommand side effect to simulate `gsutil config` writing
    # the boto file.
    osutils.WriteFile(self.ctx.boto_file, contents)

  def testInitGSLsFailButSuccess(self):
    """Invalid GS Config, but we config properly."""
    self.gs_mock.AddCmdResult(['ls'], returncode=1, error=self.GS_LS_ERROR)
    self.ctx._InitBoto()

  def _AddLsConfigResult(self, side_effect=None):
    self.gs_mock.AddCmdResult(['ls'], returncode=1, error=self.GS_LS_ERROR)
    self.gs_mock.AddCmdResult(['config'], returncode=1, side_effect=side_effect)

  def testGSLsFailAndConfigError(self):
    """Invalid GS Config, and we fail to config."""
    self._AddLsConfigResult(
        side_effect=functools.partial(self._WriteBotoFile, 'monkeys'))
    self.assertRaises(cros_build_lib.RunCommandError, self.ctx._InitBoto)

  def testGSLsFailAndEmptyConfigFile(self):
    """Invalid GS Config, and we raise error on empty config file."""
    self._AddLsConfigResult(
        side_effect=functools.partial(self._WriteBotoFile, ''))
    self.assertRaises(gs.GSContextException, self.ctx._InitBoto)
+
+
class GSCounterTest(AbstractGSContextTest):
  """Tests GSCounter functionality."""

  COUNTER_URI = 'gs://foo/mock/counter'
  INITIAL_VALUE = 100

  def setUp(self):
    self.counter = gs.GSCounter(self.ctx, self.COUNTER_URI)
    # Reads go through Cat and GetGeneration, so mock both; the counter
    # value is whatever Cat returns.
    self.cat_mock = self.PatchObject(self.ctx, 'Cat')
    self.gen_mock = self.PatchObject(self.ctx, 'GetGeneration',
                                     return_value=(1, 1))
    self._SetCounter(self.INITIAL_VALUE)

  def _SetCounter(self, value):
    """Set the test counter to |value|."""
    self.cat_mock.return_value = str(value)

  def testGetInitial(self):
    """Test Get when the counter doesn't exist."""
    # A missing counter object reads as 0 rather than raising.
    self.cat_mock.side_effect = gs.GSNoSuchKey
    self.assertEqual(self.counter.Get(), 0)

  def testGet(self):
    """Basic Get() test."""
    self.assertEqual(self.counter.Get(), self.INITIAL_VALUE)

  def testIncrement(self):
    """Basic Increment() test."""
    self.assertEqual(self.counter.Increment(), self.INITIAL_VALUE + 1)

  def testDecrement(self):
    """Basic Decrement() test."""
    self.assertEqual(self.counter.Decrement(), self.INITIAL_VALUE - 1)

  def testReset(self):
    """Basic Reset() test."""
    self.assertEqual(self.counter.Reset(), 0)

  def testStreakIncrement(self):
    """Basic StreakIncrement() test."""
    self._SetCounter(10)
    self.assertEqual(self.counter.StreakIncrement(), 11)

  def testStreakIncrementReset(self):
    """Test StreakIncrement() when the counter is negative."""
    # A negative value means the streak was going the other way; an
    # increment restarts the streak at 1.
    self._SetCounter(-10)
    self.assertEqual(self.counter.StreakIncrement(), 1)

  def testStreakDecrement(self):
    """Basic StreakDecrement() test."""
    self._SetCounter(-10)
    self.assertEqual(self.counter.StreakDecrement(), -11)

  def testStreakDecrementReset(self):
    """Test StreakDecrement() when the counter is positive."""
    self._SetCounter(10)
    self.assertEqual(self.counter.StreakDecrement(), -1)
+
+
class UnmockedGSCounterTest(cros_test_lib.TestCase):
  """Tests GSCounter functionality w/out mocks."""

  @staticmethod
  @contextlib.contextmanager
  def _Counter():
    # Yields a counter backed by a real, temporary GS URL that is cleaned
    # up when the context exits.
    ctx = gs.GSContext()
    with gs.TemporaryURL('chromite.counter') as tempuri:
      yield gs.GSCounter(ctx, tempuri)

  @staticmethod
  def _SetCounter(counter, value):
    """Set the test counter to |value|."""
    counter.AtomicCounterOperation(value, lambda x: value)

  @cros_test_lib.NetworkTest()
  def testGetInitial(self):
    """Test Get when the counter doesn't exist."""
    with self._Counter() as counter:
      self.assertEqual(counter.Get(), 0)

  @cros_test_lib.NetworkTest()
  def testGet(self):
    """Basic Get() test."""
    with self._Counter() as counter:
      self._SetCounter(counter, 100)
      self.assertEqual(counter.Get(), 100)

  @cros_test_lib.NetworkTest()
  def testIncrement(self):
    """Basic Increment() test."""
    with self._Counter() as counter:
      self._SetCounter(counter, 100)
      self.assertEqual(counter.Increment(), 101)

  @cros_test_lib.NetworkTest()
  def testDecrement(self):
    """Basic Decrement() test."""
    with self._Counter() as counter:
      self._SetCounter(counter, 100)
      self.assertEqual(counter.Decrement(), 99)

  @cros_test_lib.NetworkTest()
  def testReset(self):
    """Basic Reset() test."""
    with self._Counter() as counter:
      self._SetCounter(counter, 100)
      self.assertEqual(counter.Reset(), 0)
      self.assertEqual(counter.Get(), 0)

  @cros_test_lib.NetworkTest()
  def testStreakIncrement(self):
    """Basic StreakIncrement() test."""
    with self._Counter() as counter:
      self._SetCounter(counter, 100)
      self.assertEqual(counter.StreakIncrement(), 101)

  @cros_test_lib.NetworkTest()
  def testStreakIncrementReset(self):
    """Test StreakIncrement() when the counter is negative."""
    with self._Counter() as counter:
      self._SetCounter(counter, -100)
      self.assertEqual(counter.StreakIncrement(), 1)

  @cros_test_lib.NetworkTest()
  def testStreakDecrement(self):
    """Basic StreakDecrement() test."""
    with self._Counter() as counter:
      self._SetCounter(counter, -100)
      self.assertEqual(counter.StreakDecrement(), -101)

  @cros_test_lib.NetworkTest()
  def testStreakDecrementReset(self):
    """Test StreakDecrement() when the counter is positive."""
    with self._Counter() as counter:
      self._SetCounter(counter, 100)
      self.assertEqual(counter.StreakDecrement(), -1)
diff --git a/lib/image_lib.py b/lib/image_lib.py
new file mode 100644
index 0000000..3be67e8
--- /dev/null
+++ b/lib/image_lib.py
@@ -0,0 +1,112 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for manipulating ChromeOS images."""
+
+from __future__ import print_function
+
+import glob
+import os
+import re
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+
+
class LoopbackError(Exception):
  """An exception raised when something went wrong setting up a loopback."""
+
+
class LoopbackPartitions(object):
  """Loopback mount a file and provide access to its partitions.

  This class can be used as a context manager with the "with" statement, or
  individual instances of it can be created which will clean themselves up
  when garbage collected or when explicitly closed, ala the tempfile module.

  In either case, the same arguments should be passed to init.

  Args:
    path: Path to the backing file.
    util_path: Directory to search for the losetup/partx utilities.  The
      default (None) uses the system PATH.
  """
  def __init__(self, path, util_path=None):
    self._util_path = util_path
    self.path = path
    self.dev = None
    self.parts = {}

    try:
      # Attach the backing file to the first free loop device; --show makes
      # losetup print the device node it allocated (e.g. /dev/loop3).
      cmd = ['losetup', '--show', '-f', self.path]
      ret = self._au_safe_sudo(cmd, print_cmd=False, capture_output=True)
      self.dev = ret.output.strip()
      # Drop any stale partition mappings before (re)creating them.
      cmd = ['partx', '-d', self.dev]
      self._au_safe_sudo(cmd, quiet=True, error_code_ok=True)
      cmd = ['partx', '-a', self.dev]
      self._au_safe_sudo(cmd, print_cmd=False)

      part_devs = glob.glob(self.dev + 'p*')
      if not part_devs:
        # Fix: the logging module exposes warning(), not Warning();
        # the old spelling raised AttributeError on this path.
        logging.warning('Didn\'t find partition devices nodes for %s.',
                        self.path)
        return

      # Map partition number -> device node (e.g. 3 -> /dev/loop0p3).
      for part in part_devs:
        number = int(re.search(r'p(\d+)$', part).group(1))
        self.parts[number] = part

    except:
      # Bare except is deliberate: we re-raise immediately, this just
      # guarantees the loop device is detached on any setup failure.
      self.close()
      raise

  def _au_safe_sudo(self, cmd, **kwargs):
    """Run |cmd| under sudo, resolving cmd[0] against self._util_path."""
    newcmd = osutils.Which(cmd[0], path=self._util_path)
    if newcmd:
      cmd = [newcmd] + cmd[1:]
    return cros_build_lib.SudoRunCommand(cmd, **kwargs)

  def close(self):
    """Detach the loop device and drop partition mappings.  Idempotent."""
    if self.dev:
      cmd = ['partx', '-d', self.dev]
      self._au_safe_sudo(cmd, quiet=True, error_code_ok=True)
      self._au_safe_sudo(['losetup', '--detach', self.dev])
      self.dev = None
      self.parts = {}

  def __enter__(self):
    return self

  def __exit__(self, exc_type, exc, tb):
    self.close()

  def __del__(self):
    self.close()
+
+
def WriteLsbRelease(sysroot, fields):
  """Writes out the /etc/lsb-release file into the given sysroot.

  Args:
    sysroot: The sysroot to write the lsb-release file to.
    fields: A dictionary of all the fields and values to write.
  """
  path = os.path.join(sysroot, constants.LSB_RELEASE_PATH.lstrip('/'))

  # Render the fields as KEY=VALUE lines with a trailing newline.
  new_lines = ['%s=%s' % (key, value) for key, value in fields.items()]
  content = '\n'.join(new_lines) + '\n'

  if os.path.exists(path):
    # The file has already been pre-populated with some fields.  Since
    # osutils.WriteFile(..) doesn't support appending with sudo, read in the
    # existing content and prepend it to the new content to write.
    # TODO(stevefung): Remove this appending, once all writing to the
    #   /etc/lsb-release file has been removed from ebuilds and consolidated
    #   to the build tools.
    content = osutils.ReadFile(path) + content

  osutils.WriteFile(path, content, mode='w', makedirs=True, sudo=True)
diff --git a/lib/image_lib_unittest b/lib/image_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/image_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/image_lib_unittest.py b/lib/image_lib_unittest.py
new file mode 100644
index 0000000..9e73244
--- /dev/null
+++ b/lib/image_lib_unittest.py
@@ -0,0 +1,141 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the image_lib module."""
+
+from __future__ import print_function
+
+import gc
+import glob
+import os
+
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import image_lib
+from chromite.lib import osutils
+from chromite.lib import partial_mock
+
+
class FakeException(Exception):
  """Fake exception used for testing exception handling."""
+
+
class LoopbackPartitions(object):
  """Mocked loopback partition class to use in unit tests.

  Args:
    path: Path to the image file.
    dev: Path for the base loopback device.
    part_count: How many partition device files to make up.  Defaults to 0
      (previously None, which crashed when iterated).
    part_overrides: A dict which is used to update self.parts.
  """
  def __init__(self, path='/dev/loop9999', dev=None,
               part_count=0, part_overrides=None):
    self.path = path
    self.dev = dev
    self.parts = {}
    # range() instead of xrange() keeps this py2/py3 compatible; iteration
    # behavior is identical.
    for i in range(part_count):
      self.parts[i + 1] = path + 'p' + str(i + 1)
    # Avoid the shared mutable-default-argument pitfall: the default is a
    # None sentinel, applied here instead.
    if part_overrides:
      self.parts.update(part_overrides)

  def close(self):
    pass

  def __enter__(self):
    return self

  def __exit__(self, exc_type, exc, tb):
    pass
+
+
# Shared fixture values for the loopback tests below.
FAKE_PATH = '/imaginary/file'
LOOP_DEV = '/dev/loop9999'
# Partition number -> device node, e.g. {1: '/dev/loop9999p1', ...}.
LOOP_PARTS_DICT = {num: '%sp%d' % (LOOP_DEV, num) for num in range(1, 13)}
LOOP_PARTS_LIST = LOOP_PARTS_DICT.values()
+
class LoopbackPartitionsTest(cros_test_lib.MockTestCase):
  """Test the loopback partitions class"""

  def setUp(self):
    self.rc_mock = cros_build_lib_unittest.RunCommandMock()
    self.StartPatcher(self.rc_mock)
    self.rc_mock.SetDefaultCmdResult()

    # Pretend the partition device nodes exist, and make Which() return its
    # input so commands resolve without touching the real filesystem.
    self.PatchObject(glob, 'glob', return_value=LOOP_PARTS_LIST)
    def fake_which(val, *_arg, **_kwargs):
      return val
    self.PatchObject(osutils, 'Which', side_effect=fake_which)

  def testContextManager(self):
    """Test using the loopback class as a context manager."""
    self.rc_mock.AddCmdResult(partial_mock.In('--show'), output=LOOP_DEV)
    with image_lib.LoopbackPartitions(FAKE_PATH) as lb:
      self.rc_mock.assertCommandContains(['losetup', '--show', '-f', FAKE_PATH])
      self.rc_mock.assertCommandContains(['partx', '-d', LOOP_DEV])
      self.rc_mock.assertCommandContains(['partx', '-a', LOOP_DEV])
      # Detach must not happen while the context is still open.
      self.rc_mock.assertCommandContains(['losetup', '--detach', LOOP_DEV],
                                         expected=False)
      self.assertEquals(lb.parts, LOOP_PARTS_DICT)
    self.rc_mock.assertCommandContains(['partx', '-d', LOOP_DEV])
    self.rc_mock.assertCommandContains(['losetup', '--detach', LOOP_DEV])

  def testManual(self):
    """Test using the loopback class closed manually."""
    self.rc_mock.AddCmdResult(partial_mock.In('--show'), output=LOOP_DEV)
    lb = image_lib.LoopbackPartitions(FAKE_PATH)
    self.rc_mock.assertCommandContains(['losetup', '--show', '-f', FAKE_PATH])
    self.rc_mock.assertCommandContains(['partx', '-d', LOOP_DEV])
    self.rc_mock.assertCommandContains(['partx', '-a', LOOP_DEV])
    self.rc_mock.assertCommandContains(['losetup', '--detach', LOOP_DEV],
                                       expected=False)
    self.assertEquals(lb.parts, LOOP_PARTS_DICT)
    lb.close()
    self.rc_mock.assertCommandContains(['partx', '-d', LOOP_DEV])
    self.rc_mock.assertCommandContains(['losetup', '--detach', LOOP_DEV])

  def gcFunc(self):
    """This function isolates a local variable so it'll be garbage collected."""
    self.rc_mock.AddCmdResult(partial_mock.In('--show'), output=LOOP_DEV)
    lb = image_lib.LoopbackPartitions(FAKE_PATH)
    self.rc_mock.assertCommandContains(['losetup', '--show', '-f', FAKE_PATH])
    self.rc_mock.assertCommandContains(['partx', '-d', LOOP_DEV])
    self.rc_mock.assertCommandContains(['partx', '-a', LOOP_DEV])
    self.rc_mock.assertCommandContains(['losetup', '--detach', LOOP_DEV],
                                       expected=False)
    self.assertEquals(lb.parts, LOOP_PARTS_DICT)

  def testGarbageCollected(self):
    """Test using the loopback class closed by garbage collection."""
    self.gcFunc()
    # Force garbage collection in case python didn't already clean up the
    # loopback object.
    gc.collect()
    self.rc_mock.assertCommandContains(['partx', '-d', LOOP_DEV])
    self.rc_mock.assertCommandContains(['losetup', '--detach', LOOP_DEV])
+
+
class LsbUtilsTest(cros_test_lib.MockTempDirTestCase):
  """Tests the various LSB utilities."""

  def setUp(self):
    # Patch os.getuid(..) to pretend running as root, so reading/writing the
    # lsb-release file doesn't require escalated privileges and the test can
    # clean itself up correctly.
    self.PatchObject(os, 'getuid', return_value=0)

  def testWriteLsbRelease(self):
    """Tests writing out the lsb_release file using WriteLsbRelease(..)."""
    fields = {'x': '1', 'y': '2', 'foo': 'bar'}
    image_lib.WriteLsbRelease(self.tempdir, fields)
    lsb_release_file = os.path.join(self.tempdir, 'etc', 'lsb-release')
    # NOTE(review): this expected string hard-codes a specific dict
    # iteration order; presumably stable under the interpreter this runs on,
    # but fragile across Python versions — verify.
    expected_content = 'y=2\nx=1\nfoo=bar\n'
    self.assertFileContents(lsb_release_file, expected_content)

    # Test that WriteLsbRelease(..) correctly handles an existing file.
    fields = {'newkey1': 'value1', 'newkey2': 'value2', 'a': '3', 'b': '4'}
    image_lib.WriteLsbRelease(self.tempdir, fields)
    expected_content = ('y=2\nx=1\nfoo=bar\nnewkey2=value2\na=3\n'
                        'newkey1=value1\nb=4\n')
    self.assertFileContents(lsb_release_file, expected_content)
diff --git a/lib/image_test_lib.py b/lib/image_test_lib.py
new file mode 100644
index 0000000..7712f22
--- /dev/null
+++ b/lib/image_test_lib.py
@@ -0,0 +1,169 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions related to image tests."""
+
+from __future__ import print_function
+
+import os
+import unittest
+
+from chromite.lib import cros_logging as logging
+from chromite.lib import perf_uploader
+
+
# File extension for file containing performance values.
PERF_EXTENSION = '.perf'
# Symlinks to mounted partitions.
ROOT_A = 'dir-ROOT-A'
STATEFUL = 'dir-STATE'


def IsPerfFile(file_name):
  """Decide whether |file_name| looks like a perf-value file.

  Args:
    file_name: File name (or path) to inspect.

  Returns:
    True if the name carries PERF_EXTENSION, False otherwise.
  """
  return file_name[-len(PERF_EXTENSION):] == PERF_EXTENSION
+
+
class _BoardAndDirectoryMixin(object):
  """A mixin to hold image test's specific info."""

  # Board name these tests target; populated via SetBoard.
  _board = None
  # Directory perf result files are written into; populated via SetResultDir.
  _result_dir = None

  def SetBoard(self, board):
    """Remember |board| as the board these tests target."""
    self._board = board

  def SetResultDir(self, result_dir):
    """Remember |result_dir| as the directory to write results into."""
    self._result_dir = result_dir
+
+
class ImageTestCase(unittest.TestCase, _BoardAndDirectoryMixin):
  """Subclass unittest.TestCase to provide utility methods for image tests.

  Tests should not directly inherit this class. They should instead inherit
  from ForgivingImageTestCase, or NonForgivingImageTestCase.

  Tests MUST use prefix "Test" (e.g.: TestLinkage, TestDiskSpace), not "test"
  prefix, in order to be picked up by the test runner.

  Tests are run inside chroot. Tests are run as root. DO NOT modify any mounted
  partitions.

  The current working directory is set up so that "ROOT_A", and "STATEFUL"
  constants refer to the mounted partitions. The partitions are mounted
  readonly.

    current working directory
      + ROOT_A
        + /
          + bin
          + etc
          + usr
          ...
      + STATEFUL
        + var_overlay
        ...
  """

  def IsForgiving(self):
    """Indicate if this test is forgiving.

    The test runner will classify tests into two buckets, forgiving and non-
    forgiving. Forgiving tests DO NOT affect the result of the test runner;
    non-forgiving tests do. In either case, test runner will still output the
    result of each individual test.
    """
    raise NotImplementedError()

  def _GeneratePerfFileName(self):
    """Build the path of this test's perf output file.

    The file name is formatted as:

      image_test.<test_class><PERF_EXTENSION>

    e.g.:

      image_test.DiskSpaceTest.perf
    """
    basename = 'image_test.%s%s' % (self.__class__.__name__, PERF_EXTENSION)
    return os.path.join(self._result_dir, basename)

  @staticmethod
  def GetTestName(file_name):
    """Return the test name from a perf |file_name|.

    Args:
      file_name: A path to the perf file as generated by _GeneratePerfFileName.

    Returns:
      The qualified test name part of the file name.
    """
    base = os.path.basename(file_name)
    return base[:base.rindex('.')]

  def OutputPerfValue(self, description, value, units,
                      higher_is_better=True, graph=None):
    """Record a perf value.

    If graph name is not provided, the test method name will be used as the
    graph name.

    Args:
      description: A string description of the value such as "partition-0". A
        special description "ref" is taken as the reference.
      value: A float value.
      units: A string describing the unit of measurement such as "KB", "meter".
      higher_is_better: A boolean indicating if higher value means better
        performance.
      graph: A string name of the graph this value will be plotted on. If not
        provided, the graph name will take the test method name.
    """
    if not self._result_dir:
      logging.warning('Result directory is not set. Ignore OutputPerfValue.')
      return
    graph_name = self._testMethodName if graph is None else graph
    perf_uploader.OutputPerfValue(self._GeneratePerfFileName(), description,
                                  value, units, higher_is_better, graph_name)
+
+
class ForgivingImageTestCase(ImageTestCase):
  """Concrete base class of forgiving tests.

  Per ImageTestCase.IsForgiving, forgiving tests do not affect the overall
  result of the test runner.
  """

  def IsForgiving(self):
    """See ImageTestCase.IsForgiving."""
    return True
+
+
class NonForgivingImageTestCase(ImageTestCase):
  """Concrete base class of non forgiving tests.

  Per ImageTestCase.IsForgiving, non-forgiving tests DO affect the overall
  result of the test runner.
  """

  def IsForgiving(self):
    """See ImageTestCase.IsForgiving."""
    return False
+
+
class ImageTestSuite(unittest.TestSuite, _BoardAndDirectoryMixin):
  """Wrap around unittest.TestSuite to pass more info to the actual tests."""

  def GetTests(self):
    """Return the tests held by this suite."""
    return self._tests

  def run(self, result, debug=False):
    """Propagate board/result-dir into each test, then run the suite.

    Args:
      result: A unittest.TestResult to record outcomes in.
      debug: If True, run without collecting failures (exceptions propagate),
        matching unittest.TestSuite.run's semantics.

    Returns:
      Whatever unittest.TestSuite.run returns (the |result| object).
    """
    for t in self._tests:
      t.SetResultDir(self._result_dir)
      t.SetBoard(self._board)
    # Bug fix: |debug| used to be accepted here but silently dropped when
    # delegating; forward it so debug runs behave as unittest intends.
    return super(ImageTestSuite, self).run(result, debug)
+
+
class ImageTestRunner(unittest.TextTestRunner, _BoardAndDirectoryMixin):
  """Wrap around unittest.TextTestRunner to pass more info down the chain."""

  def run(self, test):
    """Forward board/result-dir to |test| (an ImageTestSuite), then run it."""
    # |test| is expected to provide the _BoardAndDirectoryMixin setters so it
    # can in turn forward these values to the individual tests it holds.
    test.SetResultDir(self._result_dir)
    test.SetBoard(self._board)
    return super(ImageTestRunner, self).run(test)
diff --git a/lib/json_lib.py b/lib/json_lib.py
new file mode 100644
index 0000000..651a1e4
--- /dev/null
+++ b/lib/json_lib.py
@@ -0,0 +1,77 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helpful functions when parsing JSON blobs."""
+
+from __future__ import print_function
+
+import json
+import re
+
+from chromite.lib import osutils
+
+
def AssertIsInstance(instance, expected_type, description):
  """Raise an error if |instance| is not of |expected_type|.

  Args:
    instance: instance of a Python object.
    expected_type: expected type of |instance|.
    description: short string describing |instance| used in error reporting.

  Raises:
    ValueError: if |instance| is not an instance of |expected_type|.
  """
  if isinstance(instance, expected_type):
    return
  raise ValueError(
      'Expected %s to be a %s, but found %s' %
      (description, expected_type.__name__, instance.__class__.__name__))
+
+
def GetValueOfType(a_dict, key, value_type, value_description):
  """Raise an exception if we cannot get |key| from |a_dict| with |value_type|.

  Args:
    a_dict: a dictionary.
    key: string key that should be in the dictionary.
    value_type: expected type of the value at a_dict[key].
    value_description: string describing the value used in error reporting.

  Returns:
    The value stored under |key|.

  Raises:
    ValueError: if |key| is missing or its value has the wrong type.
  """
  try:
    found_value = a_dict[key]
  except KeyError:
    raise ValueError('Missing %s in JSON dictionary (key "%s")' %
                     (value_description, key))
  else:
    AssertIsInstance(found_value, value_type, value_description)
    return found_value
+
+
def PopValueOfType(a_dict, key, value_type, value_description):
  """Raise an exception if we cannot pop |key| from |a_dict| with |value_type|.

  Args:
    a_dict: a dictionary.
    key: string key that should be in the dictionary.
    value_type: expected type of the value at a_dict[key].
    value_description: string describing the value used in error reporting.

  Returns:
    The value removed from |key|.

  Raises:
    ValueError: if |key| is missing or its value has the wrong type.
  """
  value = GetValueOfType(a_dict, key, value_type, value_description)
  # GetValueOfType() succeeded, so |key| is guaranteed to be present.
  del a_dict[key]
  return value
+
+
def ParseJsonFileWithComments(path):
  """Parse a JSON file with bash style comments.

  Lines consisting solely of a '#' comment (optionally preceded by
  whitespace) are blanked out before the text is handed to the JSON parser.

  Args:
    path: path to JSON file.

  Returns:
    Python representation of contents of JSON file.
  """
  comment_re = re.compile(r'\s*#.*')
  raw_lines = osutils.ReadFile(path).splitlines()
  cleaned = '\n'.join('' if comment_re.match(line) else line
                      for line in raw_lines)
  return json.loads(cleaned)
diff --git a/lib/json_lib_unittest b/lib/json_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/json_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/json_lib_unittest.py b/lib/json_lib_unittest.py
new file mode 100644
index 0000000..e6ae978
--- /dev/null
+++ b/lib/json_lib_unittest.py
@@ -0,0 +1,69 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for json_lib."""
+
+from __future__ import print_function
+
+from chromite.lib import cros_test_lib
+from chromite.lib import json_lib
+from chromite.lib import osutils
+
+
class JsonHelpersTest(cros_test_lib.MockTestCase):
  """Tests for chromite.lib.json_lib."""

  def testAssertIsInstance(self):
    """Test that AssertIsInstance is correct."""
    for bad_value, wanted_type in ((tuple(), list), (1, float), (1, bool)):
      with self.assertRaises(ValueError):
        json_lib.AssertIsInstance(bad_value, wanted_type, 'a bad value')
    json_lib.AssertIsInstance([1], list, 'good value')
    json_lib.AssertIsInstance(True, bool, 'good value')
    json_lib.AssertIsInstance({'foo': 2}, dict, 'good value')

  def testGetValueOfType(self):
    """Test that GetValueOfType is correct."""
    with self.assertRaises(ValueError):
      json_lib.GetValueOfType({}, 'missing key', str, 'missing value')
    with self.assertRaises(ValueError):
      json_lib.GetValueOfType({'key': 1}, 'key', bool, 'bad type')
    with self.assertRaises(ValueError):
      json_lib.GetValueOfType({'key': [1]}, 'key', int, 'bad type')
    self.assertEqual(
        1, json_lib.GetValueOfType({'key': 1}, 'key', int, 'good value'))

  def testPopValueOfType(self):
    """Test that PopValueOfType is correct."""
    input_dict = {'key': 'value'}
    self.assertEqual(
        'value',
        json_lib.GetValueOfType(input_dict, 'key', str, 'value'))
    self.assertEqual(
        'value',
        json_lib.PopValueOfType(input_dict, 'key', str, 'value'))
    # The pop above should have removed the only entry.
    self.assertFalse(input_dict)

  def testParseJsonFileWithComments(self):
    """Test that we can parse a JSON file with comments."""
    JSON_WITH_COMMENTS = """
        {
        # I am a comment.
        "foo": []
        }
        """
    self.PatchObject(osutils, 'ReadFile', return_value=JSON_WITH_COMMENTS)
    self.assertEqual({u'foo': []},
                     json_lib.ParseJsonFileWithComments('fake path'))
    # An empty file is not valid JSON.
    self.PatchObject(osutils, 'ReadFile', return_value='')
    with self.assertRaises(ValueError):
      json_lib.ParseJsonFileWithComments('fake path')
diff --git a/lib/loas.py b/lib/loas.py
new file mode 100644
index 0000000..3d18d5c
--- /dev/null
+++ b/lib/loas.py
@@ -0,0 +1,97 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Manage Google Low Overhead Authentication Service (LOAS) tasks.
+
+This is used by scripts that run outside of the chroot and require access to
+Google production resources.
+
+If you don't know what any of this means, then you don't need this module :).
+"""
+
+from __future__ import print_function
+
+import datetime
+import re
+import socket
+
+from chromite.lib import alerts
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+
+
class LoasError(Exception):
  """Raised when a LOAS credential check or enrollment error occurs."""
+
+
class Loas(object):
  """Class for holding all the various LOAS cruft."""

  def __init__(self, user, email_notify, email_server=None):
    """Initialize.

    Args:
      user: The LOAS account to check.
      email_notify: The people to notify when the cert is going to expire.
      email_server: The e-mail server to use when notifying.
    """
    self.user = user
    self.email_notify = email_notify
    self.email_server = email_server
    # Canned re-enrollment instructions included in errors and nag e-mails.
    self.enroll_msg = 'become -t -c "prodaccess --sslenroll" %s@%s' % (
        self.user, socket.getfqdn())
    # Backdate the stamp so the first Status() call always does a real check
    # (Status throttles itself to at most one check per day).
    self.last_notification = (
        datetime.date.today() - datetime.timedelta(weeks=10))

  def Check(self):
    """Run loas_check as |self.user|.

    Raises:
      LoasError: if the credential check command fails.
    """
    logging.debug('Checking LOAS credentials for %s', self.user)
    cmd = ['runloas', '/usr/bin/loas_check']

    # Error message to print when loas credential check fails. This usually
    # is the result of production credentials expiring for accessing
    # Keystore for the unwrapping private key.
    loas_error = 'loas_check for %s failed! Did you run: %s' % (
        self.user, self.enroll_msg)
    try:
      cros_build_lib.SudoRunCommand(cmd,
                                    user=self.user,
                                    error_message=loas_error)
    except cros_build_lib.RunCommandError as e:
      raise LoasError(e.msg)

  def Status(self):
    """Check cert expiration (at most once a day) and e-mail when close."""
    # Only bother checking once a day.  Our certs are valid in the
    # range of weeks, so there's no need to constantly do this.
    if (datetime.date.today() <
        self.last_notification + datetime.timedelta(days=1)):
      return

    cmd = ['prodcertstatus', '--check_loas_cert_location', 'sslenrolled']
    result = cros_build_lib.SudoRunCommand(cmd,
                                           user=self.user,
                                           error_code_ok=True,
                                           redirect_stdout=True)

    # Figure out how many days are left.  The command should display:
    # SSL-ENROLLED CERT cert expires in about 22 days
    # NOTE(review): only stdout (result.output) is searched here; if
    # prodcertstatus writes the status line to stderr instead, parsing fails
    # and we fall through to days_left = 0 (treat as expiring) — confirm
    # which stream actually carries the message.
    m = re.search(r'cert expires in about ([0-9]+) days', result.output)
    if m:
      days_left = int(m.group(1))
    else:
      days_left = 0

    # Send out one notification a day if there's a week or less left
    # before our creds expire.
    if days_left <= 7:
      # NOTE(review): stderr was not redirected above, so result.error may be
      # None here and render as the literal 'None' in the e-mail — verify.
      alerts.SendEmail(
          'Loas certs expiring soon!',
          self.email_notify,
          server=self.email_server,
          message='Please run:\n %s\n\n%s\n%s' % (
              self.enroll_msg, result.output, result.error))
      self.last_notification = datetime.date.today()
    else:
      # We won't expire for a while, so stop the periodic polling.
      self.last_notification = (
          datetime.date.today() + datetime.timedelta(days=days_left - 8))
diff --git a/lib/loas_unittest b/lib/loas_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/loas_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/loas_unittest.py b/lib/loas_unittest.py
new file mode 100644
index 0000000..eedb717
--- /dev/null
+++ b/lib/loas_unittest.py
@@ -0,0 +1,86 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for LOAS helper functions."""
+
+from __future__ import print_function
+
+import datetime
+
+from chromite.lib import alerts
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import loas
+from chromite.lib import partial_mock
+
+
class TestLoas(cros_test_lib.MockTestCase):
  """General tests for the LOAS module"""

  def setUp(self):
    # Mock out all command execution and e-mail sending so no real
    # prodaccess/prodcertstatus binaries or SMTP servers are needed.
    self.rc_mock = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
    self.email_mock = self.PatchObject(alerts, 'SendEmail')

    self.user = 'foo'
    self.email = 'some@email.com'
    self.loas = loas.Loas(self.user, self.email)

  def testCheckSuccess(self):
    """Verify Check() behavior when loas_check passes."""
    self.rc_mock.AddCmdResult(partial_mock.In('runloas'), returncode=0)
    self.loas.Check()

  def testCheckError(self):
    """Verify Check() behavior when loas_check fails."""
    self.rc_mock.AddCmdResult(partial_mock.In('runloas'), returncode=1)
    self.assertRaises(loas.LoasError, self.loas.Check)

  def testStatusError(self):
    """Verify that errors from prodcertstatus result in an e-mail."""
    self.rc_mock.AddCmdResult(
        partial_mock.In('prodcertstatus'), returncode=1,
        error='No valid SSL-ENROLLED CERT certs')
    self.loas.Status()
    self.assertEqual(self.email_mock.call_count, 1)

  def testStatusUpToDate(self):
    """Verify that up-to-date certs delay further checks for a while."""
    # NOTE(review): the mocked message is placed on stderr (|error|), but
    # Loas.Status() parses stdout, so day parsing fails and an e-mail goes
    # out even though the cert claims 39 days — hence call_count == 1 below.
    self.rc_mock.AddCmdResult(
        partial_mock.In('prodcertstatus'), returncode=0,
        error='SSL-ENROLLED CERT cert expires in about 39 days')

    # This should invoke prodcertstatus.
    self.loas.Status()
    self.assertEqual(self.email_mock.call_count, 1)

    # While this should return quickly.
    self.loas.Status()
    self.assertEqual(self.email_mock.call_count, 1)

  def testStatusExpiresSoon(self):
    """Verify that expiring certs generate e-mails once a day."""
    self.rc_mock.AddCmdResult(
        partial_mock.In('prodcertstatus'), returncode=0,
        error='SSL-ENROLLED CERT cert expires in about 3 days')

    # This should invoke prodcertstatus & send an e-mail.
    self.loas.Status()
    self.assertEqual(self.email_mock.call_count, 1)

    # While this should do nothing but return (only one e-mail a day).
    self.loas.Status()
    self.loas.Status()
    self.loas.Status()
    self.assertEqual(self.email_mock.call_count, 1)

    # Grub around in internal state to fast forward the clock by a day :/.
    self.loas.last_notification += datetime.timedelta(days=-1)

    # This should send out one e-mail.
    self.loas.Status()
    self.assertEqual(self.email_mock.call_count, 2)
    self.loas.Status()
    self.loas.Status()
    self.loas.Status()
    self.assertEqual(self.email_mock.call_count, 2)
diff --git a/lib/locking.py b/lib/locking.py
new file mode 100644
index 0000000..b1e471c
--- /dev/null
+++ b/lib/locking.py
@@ -0,0 +1,323 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Basic locking functionality."""
+
+from __future__ import print_function
+
+import os
+import errno
+import fcntl
+import stat
+import tempfile
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import retry_util
+from chromite.lib import osutils
+
+
+LOCKF = 'lockf'
+FLOCK = 'flock'
+
+
+class LockNotAcquiredError(Exception):
+  """Signals that the lock was not acquired."""
+
+
+class LockingError(Exception):
+  """Signals miscellaneous problems in the locking process."""
+
+
class _Lock(cros_build_lib.MasterPidContextManager):
  """Base lockf based locking.  Derivatives need to override _GetFd"""

  def __init__(self, description=None, verbose=True, locktype=LOCKF,
               blocking=True):
    """Initialize this instance.

    Two types of locks are available: LOCKF and FLOCK.

    Use LOCKF (POSIX locks) if:
      - you need to lock a file between processes created by the
        parallel/multiprocess libraries

    Use FLOCK (BSD locks) if these scenarios apply:
      - you need to lock a file between shell scripts running the flock program
      - you need the lock to be bound to the fd and thus inheritable across
        execs

    Note: These two locks are completely independent; using one on a path will
          not block using the other on the same path.

    Args:
      path: On disk pathway to lock.  Can be a directory or a file.
      description: A description for this lock- what is it protecting?
      verbose: Verbose logging?
      locktype: Type of lock to use (lockf or flock).
      blocking: If True, use a blocking lock.
    """
    cros_build_lib.MasterPidContextManager.__init__(self)
    self._verbose = verbose
    self.description = description
    # Lazily opened fd guarding the lock; see the |fd| property.
    self._fd = None
    # Both functions share the (fd, operation) call signature, so either can
    # be stored and invoked interchangeably below.
    self.locking_mechanism = fcntl.flock if locktype == FLOCK else fcntl.lockf
    self.blocking = blocking

  @property
  def fd(self):
    """Return the fd backing this lock, opening it on first access."""
    if self._fd is None:
      self._fd = self._GetFd()
      # Ensure that all derivatives of this lock can't bleed the fd
      # across execs.
      fcntl.fcntl(self._fd, fcntl.F_SETFD,
                  fcntl.fcntl(self._fd, fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
    return self._fd

  def _GetFd(self):
    """Open and return the fd to lock on; subclasses must implement."""
    raise NotImplementedError(self, '_GetFd')

  def _enforce_lock(self, flags, message):
    """Acquire the lock described by |flags|, honoring self.blocking.

    Args:
      flags: fcntl.LOCK_SH or fcntl.LOCK_EX.
      message: Human-readable description logged if we have to wait.

    Raises:
      LockNotAcquiredError: in non-blocking mode when the lock is held
        elsewhere.
    """
    # Try nonblocking first, if it fails, display the context/message,
    # and then wait on the lock.
    try:
      self.locking_mechanism(self.fd, flags|fcntl.LOCK_NB)
      return
    except EnvironmentError as e:
      if e.errno == errno.EDEADLOCK:
        # Upgrading shared->exclusive can deadlock against another upgrader;
        # drop our lock and retry from scratch below.
        self.unlock()
      elif e.errno != errno.EAGAIN:
        raise
    if self.description:
      message = '%s: blocking while %s' % (self.description, message)
    if not self.blocking:
      self.close()
      raise LockNotAcquiredError(message)
    if self._verbose:
      logging.info(message)

    try:
      self.locking_mechanism(self.fd, flags)
    except EnvironmentError as e:
      if e.errno != errno.EDEADLOCK:
        raise
      # Same upgrade-deadlock workaround as above, this time while blocking.
      self.unlock()
      self.locking_mechanism(self.fd, flags)

  def lock(self, shared=False):
    """Take a lock of type |shared|.

    Any existing lock will be updated if need be.

    Args:
      shared: If True make the lock shared.

    Returns:
      self, allowing it to be used as a `with` target.

    Raises:
      IOError if the operation fails in some way.
      LockNotAcquiredError if the lock couldn't be acquired (non-blocking
        mode only).
    """
    self._enforce_lock(
        fcntl.LOCK_SH if shared else fcntl.LOCK_EX,
        'taking a %s lock' % ('shared' if shared else 'exclusive'))
    return self

  def read_lock(self, message="taking read lock"):
    """Take a read lock (shared), downgrading from write if required.

    Args:
      message: A description of what/why this lock is being taken.

    Returns:
      self, allowing it to be used as a `with` target.

    Raises:
      IOError if the operation fails in some way.
    """
    self._enforce_lock(fcntl.LOCK_SH, message)
    return self

  def write_lock(self, message="taking write lock"):
    """Take a write lock (exclusive), upgrading from read if required.

    Note that if the lock state is being upgraded from read to write,
    a deadlock potential exists- as such we *will* release the lock
    to work around it.  Any consuming code should not assume that
    transitioning from shared to exclusive means no one else has
    gotten at the critical resource in between for this reason.

    Args:
      message: A description of what/why this lock is being taken.

    Returns:
      self, allowing it to be used as a `with` target.

    Raises:
      IOError if the operation fails in some way.
    """
    self._enforce_lock(fcntl.LOCK_EX, message)
    return self

  def unlock(self):
    """Release any locks held.  Noop if no locks are held.

    Raises:
      IOError if the operation fails in some way.
    """
    if self._fd is not None:
      self.locking_mechanism(self._fd, fcntl.LOCK_UN)

  def __del__(self):
    # TODO(ferringb): Convert this to snakeoil.weakref.WeakRefFinalizer
    # if/when that rebasing occurs.
    self.close()

  def close(self):
    """Release the underlying lock and close the fd."""
    if self._fd is not None:
      self.unlock()
      os.close(self._fd)
      self._fd = None

  def _enter(self):
    # Force the fd to be opened via touching the property.
    # We do this to ensure that even if entering a context w/out a lock
    # held, we can do locking in that critical section if the code requests it.
    # pylint: disable=W0104
    self.fd
    return self

  def _exit(self, _exc_type, _exc, _traceback):
    try:
      self.unlock()
    finally:
      self.close()

  def IsLocked(self):
    """Return True if the lock is grabbed."""
    # NOTE(review): bool(self._fd) is False for fd 0; in practice _GetFd never
    # returns 0 while stdin is open, but `self._fd is not None` would be safer.
    return bool(self._fd)
+
+
class FileLock(_Lock):
  """Use a specified file as a locking mechanism."""

  def __init__(self, path, description=None, verbose=True,
               locktype=LOCKF, world_writable=False, blocking=True):
    """Initializer for FileLock.

    Args:
      path: On disk pathway to lock.  Can be a directory or a file.
      description: A description for this lock- what is it protecting?
      verbose: Verbose logging?
      locktype: Type of lock to use (lockf or flock).
      world_writable: If true, the lock file will be created as root and be made
        writable to all users.
      blocking: If True, use a blocking lock.
    """
    if description is None:
      description = "lock %s" % (path,)
    _Lock.__init__(self, description=description, verbose=verbose,
                   locktype=locktype, blocking=blocking)
    self.path = os.path.abspath(path)
    self.world_writable = world_writable

  def _GetFd(self):
    """Open (creating if necessary) self.path and return its fd."""
    if self.world_writable:
      # (Re)create the lock file as root with mode 0666 if it isn't already
      # that permissive, so non-root users can also grab the lock later.
      create = True
      try:
        create = stat.S_IMODE(os.stat(self.path).st_mode) != 0o666
      except OSError as e:
        if e.errno != errno.ENOENT:
          raise
      if create:
        osutils.SafeMakedirs(os.path.dirname(self.path), sudo=True)
        cros_build_lib.SudoRunCommand(['touch', self.path], print_cmd=False)
        cros_build_lib.SudoRunCommand(['chmod', '666', self.path],
                                      print_cmd=False)

    # If we're on py3.3+ and os.O_CLOEXEC is exposed, use it to close
    # the threading race between open and fcntl setting; this is
    # extremely paranoid code, but might as well.
    cloexec = getattr(os, 'O_CLOEXEC', 0)
    # There exist race conditions where the lock may be created by
    # root, thus denying subsequent accesses from others. To prevent
    # this, we create the lock with mode 0o666.
    try:
      value = os.umask(0o000)
      # Bug fix: the flags previously included os.W_OK, which is an
      # os.access() mode constant, not an open() flag; it only worked because
      # W_OK (2) happens to equal O_RDWR on Linux.  Use the real open flag.
      fd = os.open(self.path, os.O_RDWR|os.O_CREAT|cloexec, 0o666)
    finally:
      os.umask(value)
    return fd
+
+
class ProcessLock(_Lock):
  """Process level locking visible to parent/child only.

  This lock is basically a more robust version of what
  multiprocessing.Lock does.  That implementation uses semaphores
  internally which require cleanup/deallocation code to run to release
  the lock; a SIGKILL hitting the process holding the lock violates those
  assumptions leading to a stuck lock.

  Thus this implementation is based around locking of a deleted tempfile;
  lockf locks are guaranteed to be released once the process/fd is closed.
  """

  def _GetFd(self):
    """Return a dup'd fd to an anonymous (already unlinked) temp file."""
    with tempfile.TemporaryFile() as f:
      # We don't want to hold onto the object indefinitely; we just want
      # the fd to a temporary inode, preferably one that isn't vfs accessible.
      # Since TemporaryFile closes the fd once the object is GC'd, we just
      # dupe the fd so we retain a copy, while the original TemporaryFile
      # goes away.
      return os.dup(f.fileno())
+
+
class PortableLinkLock(object):
  """A more primitive lock that relies on the atomicity of creating hardlinks.

  Use this lock if you need to be compatible with shadow utils like groupadd
  or useradd.
  """

  def __init__(self, path, max_retry=0, sleep=1):
    """Construct an instance.

    Args:
      path: path to file to lock on.  Multiple processes attempting to lock the
        same path will compete for a system wide lock.
      max_retry: maximum number of times to attempt to acquire the lock.
      sleep: See retry_util.GenericRetry's sleep parameter.
    """
    self._path = path
    # Path of the scratch file we hardlink from; set while acquiring.
    self._target_path = None
    # These two poorly named variables are just passed straight through to
    # retry_util.RetryException.
    self._max_retry = max_retry
    self._sleep = sleep

  def __enter__(self):
    scratch_fd, self._target_path = tempfile.mkstemp(
        prefix=self._path + '.chromite.portablelock.')
    os.close(scratch_fd)
    try:
      retry_util.RetryException(OSError, self._max_retry,
                                os.link, self._target_path, self._path,
                                sleep=self._sleep)
    except OSError:
      raise LockNotAcquiredError('Timeout while trying to lock %s' % self._path)
    finally:
      # Whether os.link succeeded or definitively failed, the scratch link
      # source is no longer needed.
      osutils.SafeUnlink(self._target_path)
    return self

  def __exit__(self, exc_type, exc_val, exc_tb):
    try:
      if self._target_path:
        osutils.SafeUnlink(self._target_path)
    finally:
      osutils.SafeUnlink(self._path)
diff --git a/lib/locking_unittest b/lib/locking_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/locking_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/locking_unittest.py b/lib/locking_unittest.py
new file mode 100644
index 0000000..f3eda24
--- /dev/null
+++ b/lib/locking_unittest.py
@@ -0,0 +1,262 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the locking library."""
+
+from __future__ import print_function
+
+import itertools
+import multiprocessing
+import os
+import sys
+import time
+
+from chromite.lib import cros_test_lib
+from chromite.lib import locking
+from chromite.lib import osutils
+
+
+LOCK_ACQUIRED = 5
+LOCK_NOT_ACQUIRED = 6
+
+
+class LockingTest(cros_test_lib.TempDirTestCase):
+  """Test the Locking class."""
+
+  def setUp(self):
+    self.lock_file = os.path.join(self.tempdir, 'lockfile')
+
+  def _HelperSingleLockTest(self, blocking, shared, locktype):
+    """Helper method that runs a basic test with/without blocking/sharing."""
+    self.assertFalse(os.path.exists(self.lock_file))
+
+    lock = locking.FileLock(
+        self.lock_file, blocking=blocking, locktype=locktype)
+    self.assertFalse(lock.IsLocked())
+
+    lock.write_lock()
+    self.assertTrue(lock.IsLocked())
+    self.assertTrue(os.path.exists(self.lock_file))
+
+    # Acquiring the lock again should be safe.
+    lock.lock(shared)
+    self.assertTrue(lock.IsLocked())
+
+    lock.close()
+    self.assertFalse(lock.IsLocked())
+
+    osutils.SafeUnlink(self.lock_file)
+
+  def _HelperInsideProcess(self, blocking, shared, locktype=locking.LOCKF):
+    """Helper method that runs a basic test with/without blocking."""
+    try:
+      lock = locking.FileLock(
+          self.lock_file, blocking=blocking, locktype=locktype)
+      with lock.lock(shared):
+        pass
+      sys.exit(LOCK_ACQUIRED)
+    except locking.LockNotAcquiredError:
+      sys.exit(LOCK_NOT_ACQUIRED)
+
+  def _HelperStartProcess(self, blocking=False, shared=False):
+    """Create a process and invoke _HelperInsideProcess in it."""
+    p = multiprocessing.Process(target=self._HelperInsideProcess,
+                                args=(blocking, shared))
+    p.start()
+
+    # It's highly probably that p will have tried to grab the lock before the
+    # timer expired, but not certain.
+    time.sleep(0.1)
+
+    return p
+
+  def _HelperWithProcess(self, expected, blocking=False, shared=False,
+                         locktype=locking.LOCKF):
+    """Create a process and invoke _HelperInsideProcess in it."""
+    p = multiprocessing.Process(target=self._HelperInsideProcess,
+                                args=(blocking, shared, locktype))
+    p.start()
+    p.join()
+    self.assertEquals(p.exitcode, expected)
+
+  def testSingleLock(self):
+    """Just test getting releasing a lock with options."""
+    arg_list = [
+        [True, False], # blocking
+        [True, False], # shared
+        [locking.FLOCK, locking.LOCKF], # locking mechanism
+    ]
+    for args in itertools.product(*arg_list):
+      self._HelperSingleLockTest(*args)
+
+  def testDoubleLockWithFlock(self):
+    """Tests that double locks do block with flock."""
+    lock1 = locking.FileLock(
+        self.lock_file, blocking=False, locktype=locking.FLOCK)
+    lock2 = locking.FileLock(
+        self.lock_file, blocking=False, locktype=locking.FLOCK)
+
+    with lock1.write_lock():
+      self.assertTrue(lock1.IsLocked())
+      self.assertFalse(lock2.IsLocked())
+
+      self.assertRaises(locking.LockNotAcquiredError, lock2.write_lock)
+      self.assertTrue(lock1.IsLocked())
+      self.assertFalse(lock2.IsLocked())
+
+    self.assertFalse(lock1.IsLocked())
+    self.assertFalse(lock2.IsLocked())
+
+    lock2.unlock()
+    self.assertFalse(lock1.IsLocked())
+    self.assertFalse(lock2.IsLocked())
+
+  def testDoubleLockWithLockf(self):
+    """Tests that double locks don't block with lockf."""
+    lock1 = locking.FileLock(
+        self.lock_file, blocking=False, locktype=locking.LOCKF)
+    lock2 = locking.FileLock(
+        self.lock_file, blocking=False, locktype=locking.LOCKF)
+    with lock1.write_lock():
+      self.assertTrue(lock1.IsLocked())
+      self.assertFalse(lock2.IsLocked())
+
+      # With lockf, we can lock the same file twice in the same process.
+      with lock2.write_lock():
+        self.assertTrue(lock1.IsLocked())
+        self.assertTrue(lock2.IsLocked())
+
+    self.assertFalse(lock1.IsLocked())
+    self.assertFalse(lock2.IsLocked())
+
+  def testContextMgr(self):
+    """Make sure we behave properly with 'with'."""
+    # Create an instance, and use it in a with.
+    prelock = locking.FileLock(self.lock_file)
+    self._HelperWithProcess(expected=LOCK_ACQUIRED)
+
+    with prelock.write_lock() as lock:
+      # Assert the instance didn't change.
+      self.assertIs(prelock, lock)
+      self._HelperWithProcess(expected=LOCK_NOT_ACQUIRED)
+
+    self._HelperWithProcess(expected=LOCK_ACQUIRED)
+
+    # Construct the instance in the with expression.
+    with locking.FileLock(self.lock_file).write_lock() as lock:
+      self.assertIsInstance(lock, locking.FileLock)
+      self._HelperWithProcess(expected=LOCK_NOT_ACQUIRED)
+
+    self._HelperWithProcess(expected=LOCK_ACQUIRED)
+
+  def testAcquireBeforeWith(self):
+    """Sometimes you want to grab a lock and then return it into 'with'."""
+    lock = locking.FileLock(self.lock_file, blocking=False)
+
+    lock.write_lock()
+    self._HelperWithProcess(expected=LOCK_NOT_ACQUIRED)
+
+    with lock:
+      self._HelperWithProcess(expected=LOCK_NOT_ACQUIRED)
+
+    self._HelperWithProcess(expected=LOCK_ACQUIRED)
+
+  def testSingleProcessLock(self):
+    """Test grabbing the same lock in processes with no conflicts."""
+    arg_list = [
+        [LOCK_ACQUIRED],
+        [True, False], # blocking
+        [True, False], # shared
+        [locking.FLOCK, locking.LOCKF], # locking mechanism
+    ]
+    for args in itertools.product(*arg_list):
+      self._HelperWithProcess(*args)
+
+  def testNonBlockingConflicts(self):
+    """Test that we get a lock conflict for non-blocking locks."""
+    with locking.FileLock(self.lock_file).write_lock():
+      self._HelperWithProcess(expected=LOCK_NOT_ACQUIRED)
+
+      self._HelperWithProcess(expected=LOCK_NOT_ACQUIRED, shared=True)
+
+    # Can grab it after it's released.
+    self._HelperWithProcess(expected=LOCK_ACQUIRED)
+
+  def testSharedLocks(self):
+    """Test lock conflict for blocking locks."""
+    # Intial lock is NOT shared.
+    with locking.FileLock(self.lock_file).write_lock():
+      self._HelperWithProcess(expected=LOCK_NOT_ACQUIRED, shared=True)
+
+    # Intial lock IS shared.
+    with locking.FileLock(self.lock_file).read_lock():
+      self._HelperWithProcess(expected=LOCK_ACQUIRED, shared=True)
+      self._HelperWithProcess(expected=LOCK_NOT_ACQUIRED,
+                              shared=False)
+
+  def testBlockingConflicts(self):
+    """Test lock conflict for blocking locks."""
+    # Intial lock is blocking, exclusive.
+    with locking.FileLock(self.lock_file, blocking=True).write_lock():
+      self._HelperWithProcess(expected=LOCK_NOT_ACQUIRED, blocking=False)
+
+      p = self._HelperStartProcess(blocking=True, shared=False)
+
+    # when the with clause exits, p should unblock and get the lock, setting
+    # its exit code to sucess now.
+    p.join()
+    self.assertEquals(p.exitcode, LOCK_ACQUIRED)
+
+    # Intial lock is NON blocking.
+    with locking.FileLock(self.lock_file, blocking=False).write_lock():
+      self._HelperWithProcess(expected=LOCK_NOT_ACQUIRED)
+
+      p = self._HelperStartProcess(blocking=True, shared=False)
+
+    # when the with clause exits, p should unblock and get the lock, setting
+    # it's exit code to sucess now.
+    p.join()
+    self.assertEquals(p.exitcode, LOCK_ACQUIRED)
+
+    # Intial lock is shared, blocking lock is exclusive.
+    with locking.FileLock(self.lock_file, blocking=False).read_lock():
+      self._HelperWithProcess(expected=LOCK_NOT_ACQUIRED)
+      self._HelperWithProcess(expected=LOCK_ACQUIRED, shared=True)
+
+      p = self._HelperStartProcess(blocking=True, shared=False)
+      q = self._HelperStartProcess(blocking=True, shared=False)
+
+    # when the with clause exits, p should unblock and get the lock, setting
+    # it's exit code to sucess now.
+    p.join()
+    self.assertEquals(p.exitcode, LOCK_ACQUIRED)
+    q.join()
+    self.assertEquals(p.exitcode, LOCK_ACQUIRED)
+
+
+class PortableLinkLockTest(cros_test_lib.TempDirTestCase):
+  """Test locking.PortableLinkLock class."""
+
+  def tearDown(self):
+    """Looks for leaked files from the locking process."""
+    leaked_files = os.listdir(self.tempdir)
+    self.assertFalse(leaked_files,
+                     'Found unexpected leaked files from locking: %r' %
+                     leaked_files)
+
+  def testLockExclusivity(self):
+    """Test that when we have a lock, someone else can't grab it."""
+    lock_path = os.path.join(self.tempdir, 'locked_file')
+    with locking.PortableLinkLock(lock_path, max_retry=0):
+      with self.assertRaises(locking.LockNotAcquiredError):
+        with locking.PortableLinkLock(lock_path, max_retry=5, sleep=0.1):
+          self.fail('We acquired a lock twice?')
+
+  def testCanUnlock(self):
+    """Test that we release locks correctly."""
+    lock_path = os.path.join(self.tempdir, 'locked_file')
+    with locking.PortableLinkLock(lock_path, max_retry=0):
+      pass
+    with locking.PortableLinkLock(lock_path, max_retry=0):
+      pass
diff --git a/lib/namespaces.py b/lib/namespaces.py
new file mode 100644
index 0000000..36dc0a2
--- /dev/null
+++ b/lib/namespaces.py
@@ -0,0 +1,267 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Support for Linux namespaces"""
+
+from __future__ import print_function
+
+import ctypes
+import ctypes.util
+import errno
+import os
+import signal
+# Note: We avoid cros_build_lib here as that's a "large" module and we want
+# to keep this "light" and standalone.  The subprocess usage in here is also
+# simple by design -- if it gets more complicated, we should look at using
+# the RunCommand helper.
+import subprocess
+import sys
+
+from chromite.lib import osutils
+from chromite.lib import process_util
+from chromite.lib import proctitle
+
+
+CLONE_FS = 0x00000200
+CLONE_FILES = 0x00000400
+CLONE_NEWNS = 0x00020000
+CLONE_NEWUTS = 0x04000000
+CLONE_NEWIPC = 0x08000000
+CLONE_NEWUSER = 0x10000000
+CLONE_NEWPID = 0x20000000
+CLONE_NEWNET = 0x40000000
+
+
+def SetNS(fd, nstype):
+  """Binding to the Linux setns system call. See setns(2) for details.
+
+  Args:
+    fd: An open file descriptor or path to one.
+    nstype: Namespace to enter; one of CLONE_*.
+
+  Raises:
+    OSError: if setns failed.
+  """
+  try:
+    fp = None
+    if isinstance(fd, basestring):
+      fp = open(fd)
+      fd = fp.fileno()
+
+    libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True)
+    if libc.setns(ctypes.c_int(fd), ctypes.c_int(nstype)) != 0:
+      e = ctypes.get_errno()
+      raise OSError(e, os.strerror(e))
+  finally:
+    if fp is not None:
+      fp.close()
+
+
+def Unshare(flags):
+  """Binding to the Linux unshare system call. See unshare(2) for details.
+
+  Args:
+    flags: Namespaces to unshare; bitwise OR of CLONE_* flags.
+
+  Raises:
+    OSError: if unshare failed.
+  """
+  libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True)
+  if libc.unshare(ctypes.c_int(flags)) != 0:
+    e = ctypes.get_errno()
+    raise OSError(e, os.strerror(e))
+
+
+def _ReapChildren(pid):
+  """Reap all children that get reparented to us until we see |pid| exit.
+
+  Args:
+    pid: The main child to watch for.
+
+  Returns:
+    The wait status of the |pid| child.
+  """
+  pid_status = 0
+
+  while True:
+    try:
+      (wpid, status) = os.wait()
+      if pid == wpid:
+        # Save the status of our main child so we can exit with it below.
+        pid_status = status
+    except OSError as e:
+      if e.errno == errno.ECHILD:
+        break
+      elif e.errno != errno.EINTR:
+        raise
+
+  return pid_status
+
+
+def _SafeTcSetPgrp(fd, pgrp):
+  """Set |pgrp| as the controller of the tty |fd|."""
+  try:
+    curr_pgrp = os.tcgetpgrp(fd)
+  except OSError as e:
+    # This can come up when the fd is not connected to a terminal.
+    if e.errno == errno.ENOTTY:
+      return
+    raise
+
+  # We can change the owner only if currently own it.  Otherwise we'll get
+  # stopped by the kernel with SIGTTOU and that'll hit the whole group.
+  if curr_pgrp == os.getpgrp():
+    os.tcsetpgrp(fd, pgrp)
+
+
+def CreatePidNs():
+  """Start a new pid namespace
+
+  This will launch all the right manager processes.  The child that returns
+  will be isolated in a new pid namespace.
+
+  If functionality is not available, then it will return w/out doing anything.
+
+  Returns:
+    The last pid outside of the namespace.
+  """
+  first_pid = os.getpid()
+
+  try:
+    # First create the namespace.
+    Unshare(CLONE_NEWPID)
+  except OSError as e:
+    if e.errno == errno.EINVAL:
+      # For older kernels, or the functionality is disabled in the config,
+      # return silently.  We don't want to hard require this stuff.
+      return first_pid
+    else:
+      # For all other errors, abort.  They shouldn't happen.
+      raise
+
+  # Now that we're in the new pid namespace, fork.  The parent is the master
+  # of it in the original namespace, so it only monitors the child inside it.
+  # It is only allowed to fork once too.
+  pid = os.fork()
+  if pid:
+    # The outer process stays in the original pid namespace: it only forwards
+    # terminal control and reaps children, then exits with the child's status.
+    proctitle.settitle('pid ns', 'external init')
+
+    # Mask SIGINT with the assumption that the child will catch & process it.
+    # We'll pass that back up below.
+    signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+    # Forward the control of the terminal to the child so it can manage input.
+    _SafeTcSetPgrp(sys.stdin.fileno(), pid)
+
+    # Reap the children as the parent of the new namespace.
+    process_util.ExitAsStatus(_ReapChildren(pid))
+  else:
+    # Make sure to unshare the existing mount point if needed.  Some distros
+    # create shared mount points everywhere by default.
+    try:
+      osutils.Mount('none', '/proc', 0, osutils.MS_PRIVATE | osutils.MS_REC)
+    except OSError as e:
+      if e.errno != errno.EINVAL:
+        raise
+
+    # The child needs its own proc mount as it'll be different.
+    osutils.Mount('proc', '/proc', 'proc',
+                  osutils.MS_NOSUID | osutils.MS_NODEV | osutils.MS_NOEXEC |
+                  osutils.MS_RELATIME)
+
+    # Fork a second time: this process acts as the namespace's init, and the
+    # grandchild is the one that returns to continue the caller's work.
+    pid = os.fork()
+    if pid:
+      proctitle.settitle('pid ns', 'init')
+
+      # Mask SIGINT with the assumption that the child will catch & process it.
+      # We'll pass that back up below.
+      signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+      # Now that we're in a new pid namespace, start a new process group so that
+      # children have something valid to use.  Otherwise getpgrp/etc... will get
+      # back 0 which tends to confuse -- you can't setpgrp(0) for example.
+      os.setpgrp()
+
+      # Forward the control of the terminal to the child so it can manage input.
+      _SafeTcSetPgrp(sys.stdin.fileno(), pid)
+
+      # Watch all of the children.  We need to act as the master inside the
+      # namespace and reap old processes.
+      process_util.ExitAsStatus(_ReapChildren(pid))
+
+  # Create a process group for the grandchild so it can manage things
+  # independent of the init process.
+  os.setpgrp()
+
+  # The grandchild will return and take over the rest of the sdk steps.
+  return first_pid
+
+
+def CreateNetNs():
+  """Start a new net namespace
+
+  We will bring up the loopback interface, but that is all.
+
+  If functionality is not available, then it will return w/out doing anything.
+  """
+  # The net namespace was added in 2.6.24 and may be disabled in the kernel.
+  try:
+    Unshare(CLONE_NEWNET)
+  except OSError as e:
+    if e.errno == errno.EINVAL:
+      return
+    else:
+      # For all other errors, abort.  They shouldn't happen.
+      raise
+
+  # Since we've unshared the net namespace, we need to bring up loopback.
+  # The kernel automatically adds the various ip addresses, so skip that.
+  try:
+    subprocess.call(['ip', 'link', 'set', 'up', 'lo'])
+  except OSError as e:
+    if e.errno == errno.ENOENT:
+      print('warning: could not bring up loopback for network; '
+            'install the iproute2 package', file=sys.stderr)
+    else:
+      raise
+
+
+def SimpleUnshare(mount=True, uts=True, ipc=True, net=False, pid=False):
+  """Simpler helper for setting up namespaces quickly.
+
+  If support for any namespace type is not available, we'll silently skip it.
+
+  Args:
+    mount: Create a mount namespace.
+    uts: Create a UTS namespace.
+    ipc: Create an IPC namespace.
+    net: Create a net namespace.
+    pid: Create a pid namespace.
+  """
+  # The mount namespace is the only one really guaranteed to exist --
+  # it's been supported forever and it cannot be turned off.
+  if mount:
+    Unshare(CLONE_NEWNS)
+
+  # The UTS namespace was added 2.6.19 and may be disabled in the kernel.
+  if uts:
+    try:
+      Unshare(CLONE_NEWUTS)
+    except OSError as e:
+      if e.errno != errno.EINVAL:
+        pass
+
+  # The IPC namespace was added 2.6.19 and may be disabled in the kernel.
+  if ipc:
+    try:
+      Unshare(CLONE_NEWIPC)
+    except OSError as e:
+      if e.errno != errno.EINVAL:
+        pass
+
+  if net:
+    CreateNetNs()
+
+  if pid:
+    CreatePidNs()
diff --git a/lib/namespaces_unittest b/lib/namespaces_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/namespaces_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/namespaces_unittest.py b/lib/namespaces_unittest.py
new file mode 100644
index 0000000..2c6c290
--- /dev/null
+++ b/lib/namespaces_unittest.py
@@ -0,0 +1,47 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the namespaces.py module."""
+
+from __future__ import print_function
+
+import errno
+import os
+import unittest
+
+from chromite.lib import cros_test_lib
+from chromite.lib import namespaces
+
+
+class SetNSTests(cros_test_lib.TestCase):
+  """Tests for SetNS()"""
+
+  def testBasic(self):
+    """Simple functionality test."""
+    NS_PATH = '/proc/self/ns/mnt'
+    if not os.path.exists(NS_PATH):
+      raise unittest.SkipTest('kernel too old (missing %s)' % NS_PATH)
+
+    with open(NS_PATH) as f:
+      try:
+        namespaces.SetNS(f.fileno(), 0)
+      except OSError as e:
+        if e.errno != errno.EPERM:
+          # Running as non-root will fail, so ignore it.  We ran most
+          # of the code in the process which is all we really wanted.
+          raise
+
+
+class UnshareTests(cros_test_lib.TestCase):
+  """Tests for Unshare()"""
+
+  def testBasic(self):
+    """Simple functionality test."""
+    try:
+      namespaces.Unshare(namespaces.CLONE_NEWNS)
+    except OSError as e:
+      if e.errno != errno.EPERM:
+        # Running as non-root will fail, so ignore it.  We ran most
+        # of the code in the process which is all we really wanted.
+        raise
diff --git a/lib/operation.py b/lib/operation.py
new file mode 100644
index 0000000..785278c
--- /dev/null
+++ b/lib/operation.py
@@ -0,0 +1,701 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Operation, including output and progress display
+
+This module implements the concept of an operation, which has regular progress
+updates, verbose text display and perhaps some errors.
+"""
+
+from __future__ import print_function
+
+import collections
+import contextlib
+import fcntl
+import multiprocessing
+import os
+import pty
+try:
+  import Queue
+except ImportError:
+  # Python-3 renamed to "queue".  We still use Queue to avoid collisions
+  # with naming variables as "queue".  Maybe we'll transition at some point.
+  # pylint: disable=import-error
+  import queue as Queue
+import re
+import shutil
+import struct
+import sys
+import termios
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import workspace_lib
+from chromite.lib.terminal import Color
+
+# Define filenames for captured stdout and stderr.
+STDOUT_FILE = 'stdout'
+STDERR_FILE = 'stderr'
+
+_TerminalSize = collections.namedtuple('_TerminalSize', ('lines', 'columns'))
+
+
+class _BackgroundTaskComplete(object):
+  """Sentinel object to indicate that the background task is complete."""
+
+
+class ProgressBarOperation(object):
+  """Wrapper around long running functions to show progress.
+
+  This class is intended to capture the output of a long running function,
+  parse the output, and display a progress bar.
+
+  To display a progress bar for a function foo with argument foo_args, this is
+  the usage case:
+    1) Create a class that inherits from ProgressBarOperation (e.g.
+    FooTypeOperation).  In this class, override the ParseOutput method to
+    parse the output of foo.
+    2) op = operation.FooTypeOperation()
+       op.Run(foo, foo_args)
+  """
+
+  # Subtract 10 characters from the width of the terminal because these are used
+  # to display the percentage as well as other spaces.
+  _PROGRESS_BAR_BORDER_SIZE = 10
+
+  # By default, update the progress bar every 100 ms.
+  _PROGRESS_BAR_UPDATE_INTERVAL = 0.1
+
+  def __init__(self):
+    # Queue the background task uses to signal completion.
+    self._queue = multiprocessing.Queue()
+    # Open file objects on the capture files; set by OpenStdoutStderr().
+    self._stderr = None
+    self._stdout = None
+    # Paths of the capture files; set by Run().
+    self._stdout_path = None
+    self._stderr_path = None
+    self._progress_bar_displayed = False
+    self._workspace_path = workspace_lib.WorkspacePath()
+    # The bar is only drawn when stdout is a real terminal.
+    self._isatty = os.isatty(sys.stdout.fileno())
+
+  def _GetTerminalSize(self, fd=pty.STDOUT_FILENO):
+    """Return a terminal size object for |fd|.
+
+    Note: Replace with os.terminal_size() in python3.3.
+    """
+    winsize = struct.pack('HHHH', 0, 0, 0, 0)
+    data = fcntl.ioctl(fd, termios.TIOCGWINSZ, winsize)
+    winsize = struct.unpack('HHHH', data)
+    return _TerminalSize(int(winsize[0]), int(winsize[1]))
+
+  def ProgressBar(self, progress):
+    """This method creates and displays a progress bar.
+
+    If not in a terminal, we do not display a progress bar.
+
+    Args:
+      progress: a float between 0 and 1 that represents the fraction of the
+        current progress.
+    """
+    if not self._isatty:
+      return
+    self._progress_bar_displayed = True
+    # Clamp so a bad estimate can't draw outside the bar.
+    progress = max(0.0, min(1.0, progress))
+    width = max(1, self._GetTerminalSize().columns -
+                self._PROGRESS_BAR_BORDER_SIZE)
+    block = int(width * progress)
+    shaded = '#' * block
+    unshaded = '-' * (width - block)
+    text = '\r [%s%s] %d%%' % (shaded, unshaded, progress * 100)
+    sys.stdout.write(text)
+    sys.stdout.flush()
+
+  def OpenStdoutStderr(self):
+    """Open the stdout and stderr streams."""
+    if self._stdout is None and self._stderr is None:
+      self._stdout = open(self._stdout_path, 'r')
+      self._stderr = open(self._stderr_path, 'r')
+
+  def Cleanup(self):
+    """Method to cleanup progress bar.
+
+    If progress bar has been printed, then we make sure it displays 100% before
+    exiting.
+    """
+    if self._progress_bar_displayed:
+      self.ProgressBar(1)
+      sys.stdout.write('\n')
+      sys.stdout.flush()
+
+  def ParseOutput(self, output=None):
+    """Method to parse output and update progress bar.
+
+    This method should be overridden to read and parse the lines in _stdout and
+    _stderr.
+
+    One example use of this method could be to detect 'foo' in stdout and
+    increment the progress bar every time foo is seen.
+
+    def ParseOutput(self):
+      stdout = self._stdout.read()
+      if 'foo' in stdout:
+        # Increment progress bar.
+
+    Args:
+      output: Pass in output to parse instead of reading from self._stdout and
+        self._stderr.
+    """
+    raise NotImplementedError('Subclass must override this method.')
+
+  # TODO(ralphnathan): Deprecate this function and use parallel._BackgroundTask
+  # instead (brbug.com/863)
+  def WaitUntilComplete(self, update_period):
+    """Return True if running background task has completed.
+
+    Returns:
+      True once the _BackgroundTaskComplete sentinel has been received;
+      False if nothing arrived within |update_period| seconds.
+    """
+    try:
+      x = self._queue.get(timeout=update_period)
+      if isinstance(x, _BackgroundTaskComplete):
+        return True
+    except Queue.Empty:
+      return False
+    # NOTE(review): if any non-sentinel object is ever queued, this falls
+    # through and returns None (falsy) -- confirm that is intended.
+
+  def CaptureOutputInBackground(self, func, *args, **kwargs):
+    """Launch func in background and capture its output.
+
+    Args:
+      func: Function to execute in the background and whose output is to be
+        captured.
+      log_level: Logging level to run the func at. By default, it runs at log
+        level info.  (Consumed from kwargs; everything else is passed to func.)
+    """
+    log_level = kwargs.pop('log_level', logging.INFO)
+    restore_log_level = logging.getLogger().getEffectiveLevel()
+    logging.getLogger().setLevel(log_level)
+    try:
+      with cros_build_lib.OutputCapturer(
+          stdout_path=self._stdout_path, stderr_path=self._stderr_path,
+          quiet_fail=self._workspace_path is not None):
+        func(*args, **kwargs)
+    finally:
+      # Always wake the foreground loop, even if func raised.
+      self._queue.put(_BackgroundTaskComplete())
+      logging.getLogger().setLevel(restore_log_level)
+
+  def MoveStdoutStderrFiles(self):
+    """On failure, move stdout/stderr files to workspace/WORKSPACE_LOGS_DIR."""
+    path = os.path.join(self._workspace_path, workspace_lib.WORKSPACE_LOGS_DIR)
+    # TODO(ralphnathan): Not sure if we need this because it should be done when
+    # we store the log file for brillo commands.
+    osutils.SafeMakedirs(path)
+    osutils.SafeUnlink(os.path.join(path, STDOUT_FILE))
+    shutil.move(self._stdout_path, path)
+    osutils.SafeUnlink(os.path.join(path, STDERR_FILE))
+    shutil.move(self._stderr_path, path)
+    logging.warning('Please look at %s for more information.', path)
+
+  # TODO (ralphnathan): Store PID of spawned process.
+  def Run(self, func, *args, **kwargs):
+    """Run func, parse its output, and update the progress bar.
+
+    Args:
+      func: Function to execute in the background and whose output is to be
+        captured.
+      update_period: Optional argument to specify the period that output should
+        be read.
+    """
+    update_period = kwargs.pop('update_period',
+                               self._PROGRESS_BAR_UPDATE_INTERVAL)
+
+    # If we are not running in a terminal device, do not display the progress
+    # bar.
+    if not self._isatty:
+      func(*args, **kwargs)
+      return
+
+    with osutils.TempDir() as tempdir:
+      self._stdout_path = os.path.join(tempdir, STDOUT_FILE)
+      self._stderr_path = os.path.join(tempdir, STDERR_FILE)
+      osutils.Touch(self._stdout_path)
+      osutils.Touch(self._stderr_path)
+      try:
+        with parallel.BackgroundTaskRunner(
+            self.CaptureOutputInBackground, func, *args, **kwargs) as queue:
+          # Queue a single run of the background task.
+          queue.put([])
+          self.OpenStdoutStderr()
+          while True:
+            self.ParseOutput()
+            if self.WaitUntilComplete(update_period):
+              break
+        # Before we exit, parse the output again to update progress bar.
+        self.ParseOutput()
+        # Final sanity check to update the progress bar to 100% if it was used
+        # by ParseOutput
+        self.Cleanup()
+      except:
+        # Note: bare except on purpose -- we re-raise below, so even
+        # KeyboardInterrupt gets the reporting treatment first.
+        # Add a blank line before the logging message so the message isn't
+        # touching the progress bar.
+        sys.stdout.write('\n')
+        logging.error('Oops. Something went wrong.')
+        # Move the stdout/stderr files to a location that the user can access.
+        if self._workspace_path is not None:
+          self.MoveStdoutStderrFiles()
+        # Raise the exception so it can be caught again.
+        raise
+
+
+class ParallelEmergeOperation(ProgressBarOperation):
+  """ProgressBarOperation specific for scripts/parallel_emerge.py."""
+
+  def __init__(self):
+    super(ParallelEmergeOperation, self).__init__()
+    # Total number of expected events (packages * len(self._events)), or
+    # None until the 'Total: N packages' line has been seen.
+    self._total = None
+    self._completed = 0
+    # Whether we already told the user there is nothing to build.
+    self._printed_no_packages = False
+    # Each occurrence of one of these markers in the output counts as one
+    # unit of progress.
+    self._events = ['Fetched ', 'Completed ']
+    self._msg = None
+
+  def _GetTotal(self, output):
+    """Get total packages by looking for Total: digits packages."""
+    match = re.search(r'Total: (\d+) packages', output)
+    return int(match.group(1)) if match else None
+
+  def SetProgressBarMessage(self, msg):
+    """Message to be shown before the progress bar is displayed with 0%.
+
+    The message is not displayed if the progress bar is not going to be
+    displayed.
+    """
+    self._msg = msg
+
+  def ParseOutput(self, output=None):
+    """Parse the output of emerge to determine how to update progress bar.
+
+    1) Figure out how many packages exist. If the total number of packages to be
+    built is zero, then we do not display the progress bar.
+    2) Whenever a package is downloaded or built, 'Fetched' and 'Completed' are
+    printed respectively. By counting 'Fetched's and 'Completed's, we
+    can determine how much to update the progress bar by.
+
+    Args:
+      output: Pass in output to parse instead of reading from self._stdout and
+        self._stderr.
+
+    Returns:
+      A fraction between 0 and 1 indicating the level of the progress bar. If
+      the progress bar isn't displayed, then the return value is -1.
+    """
+    if output is None:
+      stdout = self._stdout.read()
+      stderr = self._stderr.read()
+      output = stdout + stderr
+
+    if self._total is None:
+      temp = self._GetTotal(output)
+      if temp is not None:
+        # Each package produces one event per entry in self._events.
+        self._total = temp * len(self._events)
+        if self._msg is not None:
+          logging.notice(self._msg)
+
+    for event in self._events:
+      self._completed += output.count(event)
+
+    if not self._printed_no_packages and self._total == 0:
+      logging.notice('No packages to build.')
+      self._printed_no_packages = True
+
+    if self._total:
+      progress = float(self._completed) / self._total
+      self.ProgressBar(progress)
+      return progress
+    else:
+      return -1
+
+
+# TODO(sjg): When !isatty(), keep stdout and stderr separate so they can be
+# redirected separately
+# TODO(sjg): Add proper docs to this file
+# TODO(sjg): Handle stdin wait in quiet mode, rather than silently stalling
+
class Operation(object):
  """Class which controls stdio and progress of an operation in progress.

  This class is created to handle stdio for a running subprocess. It filters
  it looking for errors and progress information. Optionally it can output the
  stderr and stdout to the terminal, but it is normally suppressed.

  Progress information is garnered from the subprocess output based on
  knowledge of the legacy scripts, but at some point will move over to using
  real progress information reported through new python methods which will
  replace the scripts.

  Each operation has a name, and this class handles displaying this name
  as it reports progress.

  Operation Objects
  =================

  verbose: True / False
    In verbose mode all output from subprocesses is displayed, otherwise
    this output is normally suppressed, unless we think it indicates an error.

  progress: True / False
    The output from subprocesses can be analysed in a very basic manner to
    try to present progress information to the user.

  explicit_verbose: True / False
    False if we are not just using default verbosity. In that case we allow
    verbosity to be enabled on request, since the user has not explicitly
    disabled it. This is used by commands that the user issues with the
    expectation that output would ordinarily be visible.
  """

  def __init__(self, name, color=None):
    """Create a new operation.

    Args:
      name: Operation name in a form to be displayed for the user.
      color: Determines policy for sending color to stdout; see terminal.Color
        for details on interpretation on the value.
    """
    self._name = name   # Operation name.
    self.verbose = False   # True to echo subprocess output.
    self.progress = True   # True to report progress of the operation.
    self._column = 0    # Current output column (always 0 unless verbose).
    self._update_len = 0    # Length of last progress update message.
    self._line = ''   # Text of the current line, so far.
    self.explicit_verbose = False

    self._color = Color(enabled=color)

    # -1 = no newline pending
    #  n = newline pending, and line length of last line was n
    self._pending_nl = -1

    # The type of the last stream to emit data on the current line.
    # Can be sys.stdout, sys.stderr (both from the subprocess), or None
    # for our own messages.
    self._cur_stream = None

    self._error_count = 0   # Number of error lines we have reported.

  def __del__(self):
    """Object is about to be destroyed, so finish out output cleanly."""
    self.FinishOutput()

  def FinishOutput(self):
    """Finish off any pending output.

    This finishes any output line currently in progress and resets the color
    back to normal.
    """
    self._FinishLine(self.verbose, final=True)
    if self._column and self.verbose:
      print(self._color.Stop())
      self._column = 0

  def WereErrorsDetected(self):
    """Returns whether any errors have been detected.

    Returns:
      True if any errors have been detected in subprocess output so far.
      False otherwise
    """
    return self._error_count > 0

  def SetName(self, name):
    """Set the name of the operation as displayed to the user.

    Args:
      name: Operation name.
    """
    self._name = name

  def _FilterOutputForErrors(self, line, print_error):
    """Filter a line of output to look for and display errors.

    This uses a few regular expression searches to spot common error reports
    from subprocesses. A count of these is kept so we know how many occurred.
    Optionally they are displayed in red on the terminal.

    Args:
      line: the output line to filter, as a string.
      print_error: True to print the error, False to just record it.
    """
    # NOTE: these are used as (case-insensitive) regex patterns; all of the
    # current entries happen to match literally.
    bad_things = ['Cannot GET', 'ERROR', '!!!', 'FAILED']
    for bad_thing in bad_things:
      if re.search(bad_thing, line, flags=re.IGNORECASE):
        self._error_count += 1
        if print_error:
          print(self._color.Color(self._color.RED, line))
          break

  def _FilterOutputForProgress(self, line):
    """Filter a line of output to look for and display progress information.

    This uses a simple regular expression search to spot progress information
    coming from subprocesses. This is sent to the _Progress() method.

    Args:
      line: the output line to filter, as a string.
    """
    match = re.match(r'Pending (\d+).*Total (\d+)', line)
    if match:
      pending = int(match.group(1))
      total = int(match.group(2))
      self._Progress(total - pending, total)

  def _Progress(self, upto, total):
    """Record and optionally display progress information.

    Args:
      upto: which step we are up to in the operation (integer, from 0).
      total: total number of steps in operation.
    """
    if total > 0:
      update_str = '%s...%d%% (%d of %d)' % (self._name,
                                             upto * 100 // total, upto, total)
      if self.progress:
        # Finish the current line, print progress, and remember its length.
        self._FinishLine(self.verbose)

        # Sometimes the progress string shrinks and in this case we need to
        # blank out the characters at the end of the line that will not be
        # overwritten by the new line
        pad = max(self._update_len - len(update_str), 0)
        sys.stdout.write(update_str + (' ' * pad) + '\r')
        self._update_len = len(update_str)

  def _FinishLine(self, display, final=False):
    """Finish off the current line and prepare to start a new one.

    If a new line is pending from the previous line, then this will be output,
    along with a color reset if needed.

    We also handle removing progress messages from the output. This is done
    using a carriage return character, followed by spaces.

    Args:
      display: True to display output, False to suppress it
      final: True if this is the final output before we exit, in which case
          we must clean up any remaining progress message by overwriting
          it with spaces, then carriage return
    """
    if display:
      if self._pending_nl != -1:
        # If our last output line was shorter than the progress info
        # add spaces.
        if self._pending_nl < self._update_len:
          print(' ' * (self._update_len - self._pending_nl), end='')

        # Output the newline, and reset our counter.
        sys.stdout.write(self._color.Stop())
        print()

    # If this is the last thing that this operation will print, we need to
    # close things off. So if there is some text on the current line but not
    # enough to overwrite all the progress information we have sent, add some
    # more spaces.
    if final and self._update_len:
      print(' ' * self._update_len, '\r', end='')

    self._pending_nl = -1

  def _CheckStreamAndColor(self, stream, display):
    """Check that we're writing to the same stream as last call.  No?  New line.

    If starting a new line, set the color correctly:
      stdout  Magenta
      stderr  Red
      other   White / no colors

    Args:
      stream: The stream we're going to write to.
      display: True to display it on terms, False to suppress it.
    """
    if self._column > 0 and stream != self._cur_stream:
      self._FinishLine(display)
      if display:
        print(self._color.Stop())

      self._column = 0
      self._line = ''

    # Use colors for child output.
    if self._column == 0:
      self._FinishLine(display)
      if display:
        color = None
        if stream == sys.stdout:
          color = self._color.MAGENTA
        elif stream == sys.stderr:
          color = self._color.RED
        if color:
          sys.stdout.write(self._color.Start(color))

      self._cur_stream = stream

  def _Out(self, stream, text, display, newline=False, do_output_filter=True):
    """Output some text received from a child, or generated internally.

    This method is the guts of the Operation class since it understands how to
    convert a series of output requests on different streams into something
    coherent for the user.

    If the stream has changed, then a new line is started even if we were
    still halfway through the previous line. This prevents stdout and stderr
    becoming mixed up quite so badly.

    We use color to indicate lines which are stdout and stderr. If the output
    received from the child has color codes in it already, we pass these
    through, so our colors can be overridden. If output is redirected then we
    do not add color by default. Note that nothing stops the child from adding
    it, but since we present ourselves as a terminal to the child, one might
    hope that the child will not generate color.

    If display is False, then we will not actually send this text to the
    terminal. This is used when verbose is required to be False.

    Args:
      stream: stream on which the text was received:
        sys.stdout    - received on stdout
        sys.stderr    - received on stderr
        None          - generated by us / internally
      text: text to output
      display: True to display it on terms, False to suppress it
      newline: True to start a new line after this text, False to put the next
        lot of output immediately after this.
      do_output_filter: True to look through output for errors and progress.
    """
    self._CheckStreamAndColor(stream, display)

    # Output what we have, and remember what column we are up to.
    if display:
      sys.stdout.write(text)
      self._column += len(text)
      # If a newline is required, remember to output it later.
      if newline:
        self._pending_nl = self._column
        self._column = 0

    self._line += text

    # If we now have a whole line, check it for errors and progress.
    if newline:
      if do_output_filter:
        self._FilterOutputForErrors(self._line, print_error=not display)
        self._FilterOutputForProgress(self._line)
      self._line = ''

  def Output(self, stream, data):
    r"""Handle the output of a block of text from the subprocess.

    All subprocess output should be sent through this method. It is split into
    lines which are processed separately using the _Out() method.

    Args:
      stream: Which file the output come in on:
        sys.stdout: stdout
        sys.stderr: stderr
        None: Our own internal output
      data: Output data as a big string, potentially containing many lines of
        text. Each line should end with \r\n. There is no requirement to send
        whole lines - this method happily handles fragments and tries to
        present them to the user as early as possible

    #TODO(sjg): Just use a list as the input parameter to avoid the split.
    """
    # We cannot use splitlines() here as we need this exact behavior
    lines = data.split('\r\n')

    # Output each full line, with a \n after it.
    for line in lines[:-1]:
      self._Out(stream, line, display=self.verbose, newline=True)

    # If we have a partial line at the end, output what we have.
    # We will continue it later.
    if lines[-1]:
      self._Out(stream, lines[-1], display=self.verbose)

    # Flush so that the terminal will receive partial line output (now!)
    sys.stdout.flush()

  def Outline(self, line):
    r"""Output a line of text to the display.

    This outputs text generated internally, such as a warning message or error
    summary. It ensures that our message plays nicely with child output if
    any.

    Args:
      line: text to output (without \n on the end)
    """
    self._Out(None, line, display=True, newline=True)
    self._FinishLine(display=True)

  def Info(self, line):
    r"""Output a line of information text to the display in verbose mode.

    Args:
      line: text to output (without \n on the end)
    """
    self._Out(None, self._color.Color(self._color.BLUE, line),
              display=self.verbose, newline=True, do_output_filter=False)
    self._FinishLine(display=True)

  def Notice(self, line):
    r"""Output a line of notification text to the display.

    Args:
      line: text to output (without \n on the end)
    """
    self._Out(None, self._color.Color(self._color.GREEN, line),
              display=True, newline=True, do_output_filter=False)
    self._FinishLine(display=True)

  def Warning(self, line):
    r"""Output a line of warning text to the display.

    Args:
      line: text to output (without \n on the end)
    """
    self._Out(None, self._color.Color(self._color.YELLOW, line),
              display=True, newline=True, do_output_filter=False)
    self._FinishLine(display=True)

  def Error(self, line):
    r"""Output a line of error text to the display.

    Args:
      line: text to output (without \n on the end)
    """
    self._Out(None, self._color.Color(self._color.RED, line),
              display=True, newline=True, do_output_filter=False)
    self._FinishLine(display=True)

  def Die(self, line):
    r"""Output a line of error text to the display and die.

    Args:
      line: text to output (without \n on the end)
    """
    self.Error(line)
    sys.exit(1)

  @contextlib.contextmanager
  def RequestVerbose(self, request):
    """Perform something in verbose mode if the user hasn't disallowed it

    This is intended to be used with something like:

      with oper.RequestVerbose(True):
        ... do some things that generate output

    Args:
      request: True to request verbose mode if available, False to do nothing.
    """
    old_verbose = self.verbose
    if request and not self.explicit_verbose:
      self.verbose = True
    try:
      yield
    finally:
      self.verbose = old_verbose
diff --git a/lib/operation_unittest b/lib/operation_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/operation_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/operation_unittest.py b/lib/operation_unittest.py
new file mode 100644
index 0000000..ea9e184
--- /dev/null
+++ b/lib/operation_unittest.py
@@ -0,0 +1,276 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittest for operation"""
+
+from __future__ import print_function
+
+import multiprocessing
+import os
+import sys
+
+from chromite.lib import cros_logging as logging
+from chromite.lib import cros_test_lib
+from chromite.lib import operation
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import workspace_lib
+
+
class TestWrapperProgressBarOperation(operation.ProgressBarOperation):
  """ProgressBarOperation subclass whose ParseOutput announces itself.

  Tests assert on the announcement line to prove ParseOutput was invoked,
  and on the echoed capture to prove stdout was plumbed through.
  """

  def ParseOutput(self, output=None):
    captured = self._stdout.read()
    print("Calling ParseOutput")
    print(captured)
+
+
class FakeParallelEmergeOperation(operation.ParallelEmergeOperation):
  """ParallelEmergeOperation test double that paces a producer via a queue.

  After each real ParseOutput pass it posts an 'advance' token, letting the
  function under test emit its next chunk of output in lockstep.
  """

  def __init__(self, queue):
    super(FakeParallelEmergeOperation, self).__init__()
    self._queue = queue

  def ParseOutput(self, output=None):
    # Run the real parser first, then unblock the producer.
    super(FakeParallelEmergeOperation, self).ParseOutput()
    self._queue.put('advance')
+
+
class FakeException(Exception):
  """Stub exception raised from test functions to drive error-path tests."""
+
+
class ProgressBarOperationTest(cros_test_lib.WorkspaceTestCase,
                               cros_test_lib.OutputTestCase,
                               cros_test_lib.LoggingTestCase):
  """Test the Progress Bar Operation class."""
  # pylint: disable=protected-access

  def setUp(self):
    # Fake a 20-column tty so ProgressBar output is deterministic.
    terminal_width = 20
    self._terminal = self.PatchObject(
        operation.ProgressBarOperation, '_GetTerminalSize',
        return_value=operation._TerminalSize(100, terminal_width))
    self.PatchObject(os, 'isatty', return_value=True)

  def _GetStdoutPath(self):
    """Return path to the file where stdout is captured."""
    return os.path.join(self.workspace_path, workspace_lib.WORKSPACE_LOGS_DIR,
                        operation.STDOUT_FILE)

  def _GetStderrPath(self):
    """Return path to the file where stderr is captured."""
    return os.path.join(self.workspace_path, workspace_lib.WORKSPACE_LOGS_DIR,
                        operation.STDERR_FILE)

  def _VerifyProgressBar(self, width, percent, expected_shaded,
                         expected_unshaded):
    """Helper to test progress bar with different percentages and lengths."""
    terminal_width = width + (
        operation.ProgressBarOperation._PROGRESS_BAR_BORDER_SIZE)
    self._terminal.return_value = operation._TerminalSize(100, terminal_width)
    op = operation.ProgressBarOperation()
    with self.OutputCapturer() as output:
      op.ProgressBar(percent)
    stdout = output.GetStdout()

    # Check that the shaded and unshaded regions are the expected size.
    self.assertEqual(stdout.count('#'), expected_shaded)
    self.assertEqual(stdout.count('-'), expected_unshaded)

  def testProgressBar(self):
    """Test progress bar at different percentages."""
    self._VerifyProgressBar(10, 0.7, 7, 3)
    self._VerifyProgressBar(10, 0, 0, 10)
    self._VerifyProgressBar(10, 1, 10, 0)
    self._VerifyProgressBar(1, 0.9, 0, 1)
    # If width of progress bar is less than _PROGRESS_BAR_BORDER_SIZE, the width
    # defaults to 1.
    self._VerifyProgressBar(-5, 0, 0, 1)
    self._VerifyProgressBar(-5, 1, 1, 0)

  def testWaitUntilComplete(self):
    """Test WaitUntilComplete returns False if background task isn't complete.

    As the background task is not started in this test, we expect it not to
    complete.
    """
    op = operation.ProgressBarOperation()
    self.assertFalse(op.WaitUntilComplete(0))

  def testCaptureOutputInBackground(self):
    """Test CaptureOutputInBackground puts finished in reasonable time."""
    def func():
      print('hi')

    op = operation.ProgressBarOperation()
    op.CaptureOutputInBackground(func)

    # This function should really finish in < 1 sec. However, we wait for a
    # longer time so the test does not fail on highly loaded builders.
    self.assertTrue(op.WaitUntilComplete(10))

  def testRun(self):
    """Test that ParseOutput is called and foo is run in background."""
    expected_output = 'hi'
    def func():
      print(expected_output)

    op = TestWrapperProgressBarOperation()
    with self.OutputCapturer():
      op.Run(func, update_period=0.05)

    # Check that foo is executed and its output is captured.
    self.AssertOutputContainsLine(expected_output)
    # Check that ParseOutput is executed at least once. It can be called twice:
    #   Once in the while loop.
    #   Once after the while loop.
    #   However, it is possible for func to execute and finish before the while
    #   statement is executed even once in which case ParseOutput would only be
    #   called once.
    self.AssertOutputContainsLine('Calling ParseOutput')

  def testExceptionHandlingNotInWorkspace(self):
    """Test exception handling if not in a workspace."""
    def func():
      print('foo')
      print('bar', file=sys.stderr)
      raise FakeException()

    op = TestWrapperProgressBarOperation()
    with self.OutputCapturer():
      try:
        with cros_test_lib.LoggingCapturer() as logs:
          op.Run(func)
      except parallel.BackgroundFailure:
        pass

    # Check that the output was dumped correctly.
    self.AssertLogsContain(logs, 'Something went wrong.')
    self.AssertOutputContainsLine('Captured stdout was')
    self.AssertOutputContainsLine('Captured stderr was')
    self.AssertOutputContainsLine('foo')
    self.AssertOutputContainsLine('bar', check_stderr=True)

  def testExceptionHandlingInWorkspace(self):
    """Test that stdout/stderr files are moved correctly if in a workspace."""
    def func():
      print('foo')
      print('bar', file=sys.stderr)
      raise FakeException()

    self.CreateWorkspace()
    op = TestWrapperProgressBarOperation()
    stdout_file = self._GetStdoutPath()
    stderr_file = self._GetStderrPath()

    # Check that the files don't exist before the operation is called.
    self.assertNotExists(stdout_file)
    self.assertNotExists(stderr_file)

    try:
      with cros_test_lib.LoggingCapturer() as logs:
        op.Run(func)
    except parallel.BackgroundFailure as e:
      if not e.HasFailureType(FakeException):
        raise e

    # Check that the files have been moved to the right location.
    self.assertExists(stdout_file)
    self.assertExists(stderr_file)

    # Check that the log message contains the path.
    self.AssertLogsContain(logs, self.workspace_path)

  def testExceptionHandlingInWorkspaceFilesAlreadyExist(self):
    """Test that old stdout/stderr files are removed from log directory."""
    def func():
      print('foo')
      print('bar', file=sys.stderr)
      raise FakeException()

    self.CreateWorkspace()
    op = TestWrapperProgressBarOperation()
    stdout_file = self._GetStdoutPath()
    stderr_file = self._GetStderrPath()
    osutils.Touch(stdout_file, makedirs=True)
    osutils.Touch(stderr_file, makedirs=True)

    # Assert that the files are empty.
    self.assertEqual(osutils.ReadFile(stdout_file), '')
    self.assertEqual(osutils.ReadFile(stderr_file), '')

    try:
      op.Run(func)
    except parallel.BackgroundFailure as e:
      if not e.HasFailureType(FakeException):
        raise e

    # Check that the files contain the right information.
    self.assertIn('foo', osutils.ReadFile(stdout_file))
    self.assertIn('bar', osutils.ReadFile(stderr_file))

  def testLogLevel(self):
    """Test that the log level of the function running is set correctly."""
    func_log_level = logging.DEBUG
    test_log_level = logging.NOTICE
    expected_output = 'hi'
    def func():
      if logging.getLogger().getEffectiveLevel() == func_log_level:
        print(expected_output)

    logging.getLogger().setLevel(test_log_level)
    op = TestWrapperProgressBarOperation()
    with self.OutputCapturer():
      op.Run(func, update_period=0.05, log_level=func_log_level)

    # Check that OutputCapturer contains the expected output. This means that
    # the log level was changed.
    self.AssertOutputContainsLine(expected_output)
    # Check that the log level was restored after the function executed.
    self.assertEqual(logging.getLogger().getEffectiveLevel(), test_log_level)

  def testParallelEmergeOperationParseOutputTotalNotFound(self):
    """Test that ParallelEmergeOperation.ParseOutput if total is not set."""
    def func():
      print('hi')

    op = operation.ParallelEmergeOperation()
    with self.OutputCapturer():
      op.Run(func)

    # Check that the output is empty.
    self.AssertOutputContainsLine('hi', check_stderr=True, invert=True)

  def testParallelEmergeOperationParseOutputTotalIsZero(self):
    """Test that ParallelEmergeOperation.ParseOutput if total is zero."""
    def func():
      print('Total: 0 packages.')

    op = operation.ParallelEmergeOperation()
    with self.OutputCapturer():
      with cros_test_lib.LoggingCapturer() as logs:
        op.Run(func)

    # Check that no progress bar is printed.
    self.AssertOutputContainsLine('%', check_stderr=True, invert=True)
    # Check logs contain message.
    self.AssertLogsContain(logs, 'No packages to build.')

  def testParallelEmergeOperationParseOutputTotalNonZero(self):
    """Test that ParallelEmergeOperation.ParseOutput's progress bar updates."""
    def func(queue):
      print('Total: 2 packages.')
      # NOTE(review): xrange implies this codebase targets Python 2 — confirm
      # before porting.
      for _ in xrange(2):
        queue.get()
        print('Completed ')

    queue = multiprocessing.Queue()
    op = FakeParallelEmergeOperation(queue)
    with self.OutputCapturer():
      op.Run(func, queue, update_period=0.005)

    # Check that progress bar prints correctly at 0%, 50%, and 100%.
    self.AssertOutputContainsLine('0%')
    self.AssertOutputContainsLine('50%')
    self.AssertOutputContainsLine('100%')
diff --git a/lib/osutils.py b/lib/osutils.py
new file mode 100644
index 0000000..7352e45
--- /dev/null
+++ b/lib/osutils.py
@@ -0,0 +1,1251 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common file and os related utilities, including tempdir manipulation."""
+
+from __future__ import print_function
+
+import collections
+import contextlib
+import cStringIO
+import ctypes
+import ctypes.util
+import datetime
+import errno
+import glob
+import operator
+import os
+import pwd
+import re
+import shutil
+import tempfile
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import retry_util
+
+
# Env vars that a tempdir can be obtained from; minimally, this needs to
# match python's tempfile module and normal unix standards.
_TEMPDIR_ENV_VARS = ('TMPDIR', 'TEMP', 'TMP')
+
+
def GetNonRootUser():
  """Returns a non-root username, defaulting to the current user.

  When running as root, fall back to the user who invoked emerge
  (PORTAGE_USERNAME) or, failing that, the user who invoked sudo
  (SUDO_USER). Returns None when no non-root user can be determined.
  """
  if os.getuid() != 0:
    user = pwd.getpwuid(os.getuid()).pw_name
  else:
    # Running as root: look for the original invoking user in the env.
    user = os.environ.get('PORTAGE_USERNAME', os.environ.get('SUDO_USER'))

  return None if user == 'root' else user
+
+
def IsChildProcess(pid, name=None):
  """Return True if |pid| appears in the current process's pstree.

  Args:
    pid: Child pid to search for in current process's pstree.
    name: Name of the child process.

  Note:
    This function is not foolproof: a process tree containing weird
    names could produce a spurious match.
  """
  pstree = cros_build_lib.RunCommand(
      ['pstree', '-Ap', str(os.getpid())],
      capture_output=True, print_cmd=False).output
  if name is None:
    needle = '(%d)' % pid
  else:
    needle = '-%s(%d)' % (name, pid)
  return needle in pstree
+
+
def ExpandPath(path):
  """Canonicalize |path|: expand '~' then resolve symlinks and '..'."""
  expanded = os.path.expanduser(path)
  return os.path.realpath(expanded)
+
+
def WriteFile(path, content, mode='w', atomic=False, makedirs=False,
              sudo=False):
  """Write the given content to disk.

  Args:
    path: Pathway to write the content to.
    content: Content to write.  May be either an iterable, or a string.
    mode: Optional; if binary mode is necessary, pass 'wb'.  If appending is
          desired, 'w+', etc.
    atomic: If the updating of the file should be done atomically.  Note this
            option is incompatible w/ append mode.
    makedirs: If True, create missing leading directories in the path.
    sudo: If True, write the file as root.

  Raises:
    ValueError: If |sudo| is combined with an append mode.
    cros_build_lib.RunCommandError: If one of the sudo helper commands fails.
    EnvironmentError: If the plain write or the atomic rename fails.
  """
  if sudo and ('a' in mode or '+' in mode):
    raise ValueError('append mode does not work in sudo mode')

  if makedirs:
    SafeMakedirs(os.path.dirname(path), sudo=sudo)

  # If the file needs to be written as root and we are not root, write to a temp
  # file, move it and change the permission.
  if sudo and os.getuid() != 0:
    with tempfile.NamedTemporaryFile(mode=mode, delete=False) as temp:
      write_path = temp.name
      temp.writelines(cros_build_lib.iflatten_instance(content))
    os.chmod(write_path, 0o644)

    try:
      # In atomic mode, first move next to the target under a temp name,
      # fix ownership, then rename into place as the last step.
      mv_target = path if not atomic else path + '.tmp'
      cros_build_lib.SudoRunCommand(['mv', write_path, mv_target],
                                    print_cmd=False, redirect_stderr=True)
      cros_build_lib.SudoRunCommand(['chown', 'root:root', mv_target],
                                    print_cmd=False, redirect_stderr=True)
      if atomic:
        cros_build_lib.SudoRunCommand(['mv', mv_target, path],
                                      print_cmd=False, redirect_stderr=True)

    except cros_build_lib.RunCommandError:
      # Clean up both the temp file and any partially-moved target.
      SafeUnlink(write_path)
      SafeUnlink(mv_target)
      raise

  else:
    # We have the right permissions, simply write the file in python.
    write_path = path
    if atomic:
      write_path = path + '.tmp'
    with open(write_path, mode) as f:
      f.writelines(cros_build_lib.iflatten_instance(content))

    if not atomic:
      return

    try:
      os.rename(write_path, path)
    except EnvironmentError:
      SafeUnlink(write_path)
      raise
+
+
def Touch(path, makedirs=False, mode=None):
  """Simulate unix touch. Create if doesn't exist and update its timestamp.

  Args:
    path: a string, file name of the file to touch (creating if not present).
    makedirs: If True, create missing leading directories in the path.
    mode: The access permissions to set.  In the style of chmod.  Defaults to
          using the umask.
  """
  if makedirs:
    SafeMakedirs(os.path.dirname(path))

  # Opening in append mode creates the file if it is missing without
  # truncating an existing one.
  with open(path, 'a'):
    pass
  if mode is not None:
    os.chmod(path, mode)
  # Bump the mtime/atime to the current time.
  os.utime(path, None)
+
+
def ReadFile(path, mode='r'):
  """Return the full contents of |path|.  Handy for one-off small files."""
  with open(path, mode) as handle:
    data = handle.read()
  return data
+
+
def SafeSymlink(source, dest, sudo=False):
  """Create a symlink at |dest| pointing to |source|.

  An existing |dest| symlink is replaced. This operation is not atomic.

  Args:
    source: source path.
    dest: destination path.
    sudo: If True, create the link as root.
  """
  need_sudo = sudo and os.getuid() != 0
  if need_sudo:
    # 'ln -sfT' forces replacement and never dereferences an existing
    # directory symlink at dest.
    cros_build_lib.SudoRunCommand(['ln', '-sfT', source, dest],
                                  print_cmd=False, redirect_stderr=True)
  else:
    SafeUnlink(dest)
    os.symlink(source, dest)
+
+
def SafeUnlink(path, sudo=False):
  """Unlink a file from disk, ignoring if it doesn't exist.

  Args:
    path: file to remove.
    sudo: If True, remove it as root.

  Returns:
    True if the file existed and was removed, False if it didn't exist.
  """
  if sudo:
    try:
      cros_build_lib.SudoRunCommand(
          ['rm', '--', path], print_cmd=False, redirect_stderr=True)
      return True
    except cros_build_lib.RunCommandError:
      if os.path.exists(path):
        # Technically racey, but oh well; very hard to actually hit...
        raise
      return False

  try:
    os.unlink(path)
  except EnvironmentError as e:
    if e.errno == errno.ENOENT:
      return False
    raise
  return True
+
+
def SafeMakedirs(path, mode=0o775, sudo=False, user='root'):
  """Make parent directories if needed.  Ignore if existing.

  Args:
    path: The path to create.  Intermediate directories will be created as
          needed.
    mode: The access permissions in the style of chmod.
    sudo: If True, create it via sudo, thus root owned.
    user: If |sudo| is True, run sudo as |user|.

  Returns:
    True if the directory had to be created, False if otherwise.

  Raises:
    EnvironmentError: If the makedir failed.
    RunCommandError: If using RunCommand and the command failed for any reason.
  """
  # Only shell out to sudo when we would actually change identity.
  need_sudo = sudo and not (os.getuid() == 0 and user == 'root')
  if need_sudo:
    if os.path.isdir(path):
      return False
    cros_build_lib.SudoRunCommand(
        ['mkdir', '-p', '--mode', oct(mode), path], user=user, print_cmd=False,
        redirect_stderr=True, redirect_stdout=True)
    return True

  try:
    os.makedirs(path, mode)
  except EnvironmentError as e:
    # An existing directory is fine; anything else is a real failure.
    if e.errno == errno.EEXIST and os.path.isdir(path):
      return False
    raise
  return True
+
+
class MakingDirsAsRoot(Exception):
  """Raised when a directory would be created with root ownership."""
+
+
def SafeMakedirsNonRoot(path, mode=0o775, user=None):
  """Create directories and make sure they are not owned by root.

  See SafeMakedirs for the arguments and returns.

  Raises:
    MakingDirsAsRoot: If no non-root user could be determined.
  """
  if user is None:
    user = GetNonRootUser()

  if user is None or user == 'root':
    raise MakingDirsAsRoot('Refusing to create %s as root!' % path)

  created = SafeMakedirs(path, mode=mode, user=user)
  if not created and os.stat(path).st_uid == 0:
    # Temporary fix: the directory already existed and is owned by root,
    # so chown it to the requested user to repair old root-owned dirs.
    cros_build_lib.SudoRunCommand(['chown', user, path],
                                  print_cmd=False,
                                  redirect_stderr=True,
                                  redirect_stdout=True)
  return created
+
+
def RmDir(path, ignore_missing=False, sudo=False):
  """Recursively remove a directory.

  Args:
    path: Path of directory to remove.
    ignore_missing: Do not error when path does not exist.
    sudo: Remove directories as root.

  Raises:
    cros_build_lib.RunCommandError: sudo removal failed (and the failure is
        not an ignorable missing path).
    EnvironmentError: non-sudo removal failed (and not ignorable ENOENT).
  """
  if sudo:
    try:
      cros_build_lib.SudoRunCommand(
          ['rm', '-r%s' % ('f' if ignore_missing else '',), '--', path],
          debug_level=logging.DEBUG,
          redirect_stdout=True, redirect_stderr=True)
    # Fix: the exception was previously bound as 'e' but never used,
    # shadowing the identically-named binding in the non-sudo branch.
    except cros_build_lib.RunCommandError:
      if not ignore_missing or os.path.exists(path):
        # If we're not ignoring the rm ENOENT equivalent, throw it;
        # if the pathway still exists, something failed, thus throw it.
        raise
  else:
    try:
      shutil.rmtree(path)
    except EnvironmentError as e:
      # ENOENT is the only error we may swallow, and only on request.
      if not ignore_missing or e.errno != errno.ENOENT:
        raise
+
+
def Which(binary, path=None, mode=os.X_OK):
  """Return the absolute path to the specified binary.

  Args:
    binary: The binary to look for.
    path: Search path. Defaults to os.environ['PATH'].
    mode: File mode to check on the binary.

  Returns:
    The full path to |binary| if found (with the right mode). Otherwise, None.
  """
  search_path = path if path is not None else os.environ.get('PATH', '')
  for directory in search_path.split(os.pathsep):
    candidate = os.path.join(directory, binary)
    # Must be a regular file and satisfy the requested access mode.
    if os.path.isfile(candidate) and os.access(candidate, mode):
      return candidate
  return None
+
+
def FindMissingBinaries(needed_tools):
  """Verifies that the required tools are present on the system.

  This is especially important for scripts that are intended to run
  outside the chroot.

  Args:
    needed_tools: an array of string specified binaries to look for.

  Returns:
    If all tools are found, returns the empty list. Otherwise, returns the
    list of missing tools.
  """
  missing = []
  for tool in needed_tools:
    if Which(tool) is None:
      missing.append(tool)
  return missing
+
+
def DirectoryIterator(base_path):
  """Iterates through the files and subdirs of a directory.

  Subdirectories are yielded with a trailing os.sep so callers can tell
  them apart from regular files.
  """
  for root, dirs, files in os.walk(base_path):
    for directory in dirs:
      yield os.path.join(root, directory) + os.sep
    for filename in files:
      yield os.path.join(root, filename)
+
+
def IteratePaths(end_path):
  """Generator that iterates down to |end_path| from root /.

  Args:
    end_path: The destination. If this is a relative path, it will be resolved
        to absolute path. In all cases, it will be normalized.

  Yields:
    All the paths gradually constructed from / to |end_path|. For example:
    IteratePaths("/this/path") yields "/", "/this", and "/this/path".
  """
  path = os.path.abspath(end_path)
  # os.path.abspath('//') can return '//'; renormalize to '/'.
  if path == '//':
    path = '/'
  ancestors = [path]
  while path.strip('/'):
    path = os.path.dirname(path)
    ancestors.append(path)
  # Collected deepest-first; hand back root-first.
  return reversed(ancestors)
+
+
def IteratePathParents(start_path):
  """Generator that iterates through a directory's parents.

  Args:
    start_path: The path to start from.

  Yields:
    The passed-in path, along with its parents.  i.e.,
    IteratePathParents('/usr/local') would yield '/usr/local', '/usr', and '/'.
  """
  current = os.path.abspath(start_path)
  # There's a bug that abspath('//') returns '//'. We need to renormalize it.
  if current == '//':
    current = '/'
  while True:
    yield current
    # Once only slashes remain we have reached the root; stop.
    if not current.strip('/'):
      break
    current = os.path.dirname(current)
+
+
def FindInPathParents(path_to_find, start_path, test_func=None, end_path=None):
  """Look for a relative path, ascending through parent directories.

  Ascend through parent directories of current path looking for a relative
  path.  I.e., given a directory structure like:
  -/
   |
   --usr
     |
     --bin
     |
     --local
       |
       --google

  the call FindInPathParents('bin', '/usr/local') would return '/usr/bin', and
  the call FindInPathParents('google', '/usr/local') would return
  '/usr/local/google'.

  Args:
    path_to_find: The relative path to look for.
    start_path: The path to start the search from.  If |start_path| is a
      directory, it will be included in the directories that are searched.
    test_func: The function to use to verify the relative path.  Defaults to
      os.path.exists.  The function will be passed one argument - the target
      path to test.  A True return value will cause AscendingLookup to return
      the target.
    end_path: The path to stop searching.

  Returns:
    The first matching target path, or None if nothing matched before the
    search reached |end_path| or the filesystem root.
  """
  if end_path is not None:
    end_path = os.path.abspath(end_path)
  if test_func is None:
    test_func = os.path.exists

  current = os.path.abspath(start_path)
  # abspath('//') can return '//'; renormalize so the root check works.
  if current == '//':
    current = '/'
  while True:
    # Stop (without testing) once the sentinel directory is reached.
    if current == end_path:
      return None
    candidate = os.path.join(current, path_to_find)
    if test_func(candidate):
      return candidate
    if not current.strip('/'):
      return None
    current = os.path.dirname(current)
+
+
def SetGlobalTempDir(tempdir_value, tempdir_env=None):
  """Set the global temp directory to the specified |tempdir_value|

  Args:
    tempdir_value: The new location for the global temp directory.
    tempdir_env: Optional. A list of key/value pairs to set in the
      environment. If not provided, set all global tempdir environment
      variables to point at |tempdir_value|.

  Returns:
    Returns (old_tempdir_value, old_tempdir_env).

    old_tempdir_value: The old value of the global temp directory.
    old_tempdir_env: A list of the key/value pairs that control the tempdir
      environment and were set prior to this function. If the environment
      variable was not set, it is recorded as None.
  """
  # Hold tempfile's private init lock so our read-modify-write of the cached
  # tempdir cannot race with tempfile's own lazy initialization.
  # pylint: disable=protected-access
  with tempfile._once_lock:
    # Snapshot the current state first so the caller can restore it later
    # (typically via a second SetGlobalTempDir call).
    old_tempdir_value = GetGlobalTempDir()
    old_tempdir_env = tuple((x, os.environ.get(x)) for x in _TEMPDIR_ENV_VARS)

    # Now update TMPDIR/TEMP/TMP, and poke the python
    # internals to ensure all subprocess/raw tempfile
    # access goes into this location.
    if tempdir_env is None:
      os.environ.update((x, tempdir_value) for x in _TEMPDIR_ENV_VARS)
    else:
      # Restoring a snapshot: a None value means "variable was unset".
      for key, value in tempdir_env:
        if value is None:
          os.environ.pop(key, None)
        else:
          os.environ[key] = value

    # Finally, adjust python's cached value (we know it's cached by here
    # since we invoked _get_default_tempdir from above).  Note this
    # is necessary since we want *all* output from that point
    # forward to go to this location.
    tempfile.tempdir = tempdir_value

  return (old_tempdir_value, old_tempdir_env)
+
+
def GetGlobalTempDir():
  """Get the path to the current global tempdir.

  The global tempdir path can be modified through calls to SetGlobalTempDir.
  """
  # tempfile computes and caches its default directory lazily; reading it
  # through the private helper picks up any override previously installed
  # by SetGlobalTempDir.
  # pylint: disable=protected-access
  default_dir = tempfile._get_default_tempdir()
  return default_dir
+
+
def _TempDirSetup(self, prefix='tmp', set_global=False, base_dir=None):
  """Generate a tempdir, modifying the object, and env to use it.

  Specifically, if set_global is True, then from this invocation forward,
  python and all subprocesses will use this location for their tempdir.

  The matching _TempDirTearDown restores the env to what it was.
  """
  self.tempdir = tempfile.mkdtemp(prefix=prefix, dir=base_dir)
  # Restrict the new directory to the owner only.
  os.chmod(self.tempdir, 0o700)

  if set_global:
    # Stash the old tempdir state so _TempDirTearDown can switch it back.
    self._orig_tempdir_value, self._orig_tempdir_env = (
        SetGlobalTempDir(self.tempdir))
+
+
def _TempDirTearDown(self, force_sudo, delete=True):
  """Remove the tempdir created by _TempDirSetup and restore the env."""
  # Note that _TempDirSetup may have failed, resulting in these attributes
  # not being set; this is why we use getattr here (and must).
  tempdir = getattr(self, 'tempdir', None)
  if tempdir is not None and delete:
    try:
      RmDir(tempdir, ignore_missing=True, sudo=force_sudo)
    except EnvironmentError as e:
      # Suppress ENOENT since we may be invoked
      # in a context where parallel wipes of the tempdir
      # may be occuring; primarily during hard shutdowns.
      if e.errno != errno.ENOENT:
        raise

  # Restore environment modification if necessary.
  orig_tempdir_value = getattr(self, '_orig_tempdir_value', None)
  if orig_tempdir_value is not None:
    # pylint: disable=protected-access
    SetGlobalTempDir(orig_tempdir_value, self._orig_tempdir_env)
+
+
class TempDir(object):
  """Object that creates a temporary directory.

  This object can either be used as a context manager or just as a simple
  object. The temporary directory is stored as self.tempdir in the object, and
  is returned as a string by a 'with' statement.
  """

  def __init__(self, **kwargs):
    """Constructor. Creates the temporary directory.

    Args:
      prefix: See tempfile.mkdtemp documentation.
      base_dir: The directory to place the temporary directory.
      set_global: Set this directory as the global temporary directory.
      storage: The object that will have its 'tempdir' attribute set.
      delete: Whether the temporary dir should be deleted as part of cleanup.
          (default: True)
      sudo_rm: Whether the temporary dir will need root privileges to remove.
          (default: False)
    """
    # Keep a pristine copy of every incoming kwarg; the pops below mutate
    # |kwargs| but not this copy.
    self.kwargs = kwargs.copy()
    self.delete = kwargs.pop('delete', True)
    self.sudo_rm = kwargs.pop('sudo_rm', False)
    self.tempdir = None
    # Forward the remaining kwargs (prefix/set_global/base_dir) on.
    _TempDirSetup(self, **kwargs)

  def Cleanup(self):
    """Clean up the temporary directory."""
    if self.tempdir is not None:
      try:
        _TempDirTearDown(self, self.sudo_rm, delete=self.delete)
      finally:
        # Null out tempdir even on failure so Cleanup is not retried by
        # __exit__/__del__ against a half-torn-down directory.
        self.tempdir = None

  def __enter__(self):
    """Return the temporary directory."""
    return self.tempdir

  def __exit__(self, exc_type, exc_value, exc_traceback):
    try:
      self.Cleanup()
    except Exception:
      if exc_type:
        # If an exception from inside the context was already in progress,
        # log our cleanup exception, then allow the original to resume.
        logging.error('While exiting %s:', self, exc_info=True)

        # NOTE(review): Cleanup() nulls self.tempdir in its finally clause,
        # so this branch looks unreachable after a Cleanup failure — verify.
        if self.tempdir:
          # Log all files in tempdir at the time of the failure.
          try:
            logging.error('Directory contents were:')
            for name in os.listdir(self.tempdir):
              logging.error('  %s', name)
          except OSError:
            logging.error('  Directory did not exist.')

          # Log all mounts at the time of the failure, since that's the most
          # common cause.
          mount_results = cros_build_lib.RunCommand(
              ['mount'], redirect_stdout=True, combine_stdout_stderr=True,
              error_code_ok=True)
          logging.error('Mounts were:')
          logging.error('  %s', mount_results.output)

      else:
        # If there was not an exception from the context, raise ours.
        raise

  def __del__(self):
    # Best-effort cleanup when the caller never used `with` or Cleanup().
    self.Cleanup()
+
+
def TempDirDecorator(func):
  """Populates self.tempdir with path to a temporary writeable directory."""
  def wrapper(self, *args, **kwargs):
    with TempDir() as tempdir:
      self.tempdir = tempdir
      return func(self, *args, **kwargs)

  # Mirror the wrapped function's metadata so introspection still works.
  wrapper.__name__ = func.__name__
  wrapper.__doc__ = func.__doc__
  wrapper.__module__ = func.__module__
  return wrapper
+
+
def TempFileDecorator(func):
  """Populates self.tempfile with path to a temporary writeable file"""
  def wrapper(self, *args, **kwargs):
    # Create (then immediately close) a file inside self.tempdir; only the
    # path is kept, the file itself persists for the wrapped function.
    with tempfile.NamedTemporaryFile(dir=self.tempdir, delete=False) as fp:
      self.tempfile = fp.name
    return func(self, *args, **kwargs)

  wrapper.__name__ = func.__name__
  wrapper.__doc__ = func.__doc__
  wrapper.__module__ = func.__module__
  # Chain through TempDirDecorator so self.tempdir exists first.
  return TempDirDecorator(wrapper)
+
+
# Flags synced from sys/mount.h.  See mount(2) for details.
# These are bitmask values intended to be OR'd together and passed as the
# |flags| argument to Mount() below.
MS_RDONLY = 1
MS_NOSUID = 2
MS_NODEV = 4
MS_NOEXEC = 8
MS_SYNCHRONOUS = 16
MS_REMOUNT = 32
MS_MANDLOCK = 64
MS_DIRSYNC = 128
MS_NOATIME = 1024
MS_NODIRATIME = 2048
MS_BIND = 4096
MS_MOVE = 8192
MS_REC = 16384
MS_SILENT = 32768
MS_POSIXACL = 1 << 16
MS_UNBINDABLE = 1 << 17
MS_PRIVATE = 1 << 18
MS_SLAVE = 1 << 19
MS_SHARED = 1 << 20
MS_RELATIME = 1 << 21
MS_KERNMOUNT = 1 << 22
MS_I_VERSION = 1 << 23
MS_STRICTATIME = 1 << 24
MS_ACTIVE = 1 << 30
MS_NOUSER = 1 << 31
+
+
def Mount(source, target, fstype, flags, data=""):
  """Call the mount(2) func; see the man page for details.

  Args:
    source: Device or path to mount (passed through to mount(2)).
    target: The mount point.
    flags: Bitwise OR of the MS_* constants defined above.
    fstype: Filesystem type string, passed through to the syscall.
    data: Filesystem-specific option string.

  Raises:
    OSError: If the underlying mount(2) call fails; errno is preserved.
  """
  # use_errno=True is required so ctypes.get_errno() below reflects the
  # failure cause of the libc call.
  libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True)
  # NOTE(review): string args are handed to libc as-is; on Python 3 these
  # would need to be bytes — this code appears to target Python 2. Confirm.
  if libc.mount(source, target, fstype, ctypes.c_int(flags), data) != 0:
    e = ctypes.get_errno()
    raise OSError(e, os.strerror(e))
+
+
def MountDir(src_path, dst_path, fs_type=None, sudo=True, makedirs=True,
             mount_opts=('nodev', 'noexec', 'nosuid'), skip_mtab=False,
             **kwargs):
  """Mount |src_path| at |dst_path|

  Args:
    src_path: Source of the new mount.
    dst_path: Where to mount things.
    fs_type: Specify the filesystem type to use.  Defaults to autodetect.
    sudo: Run through sudo.
    makedirs: Create |dst_path| if it doesn't exist.
    mount_opts: List of options to pass to `mount`.
    skip_mtab: Whether to write new entries to /etc/mtab.
    kwargs: Pass all other args to RunCommand.
  """
  runcmd = cros_build_lib.SudoRunCommand if sudo else cros_build_lib.RunCommand

  if makedirs:
    SafeMakedirs(dst_path, sudo=sudo)

  cmd = ['mount', src_path, dst_path]
  if skip_mtab:
    cmd.append('-n')
  if fs_type:
    cmd.extend(['-t', fs_type])
  cmd.extend(['-o', ','.join(mount_opts)])
  runcmd(cmd, **kwargs)
+
+
def MountTmpfsDir(path, name='osutils.tmpfs', size='5G',
                  mount_opts=('nodev', 'noexec', 'nosuid'), **kwargs):
  """Mount a tmpfs at |path|

  Args:
    path: Directory to mount the tmpfs.
    name: Friendly name to include in mount output.
    size: Size of the temp fs.
    mount_opts: List of options to pass to `mount`.
    kwargs: Pass all other args to MountDir.
  """
  opts = list(mount_opts)
  opts.append('size=%s' % size)
  MountDir(name, path, fs_type='tmpfs', mount_opts=opts, **kwargs)
+
+
def UmountDir(path, lazy=True, sudo=True, cleanup=True):
  """Unmount a previously mounted temp fs mount.

  Args:
    path: Directory to unmount.
    lazy: Whether to do a lazy unmount.
    sudo: Run through sudo.
    cleanup: Whether to delete the |path| after unmounting.
             Note: Does not work when |lazy| is set.
  """
  runcmd = cros_build_lib.SudoRunCommand if sudo else cros_build_lib.RunCommand

  cmd = ['umount', '-d', path]
  if lazy:
    cmd.append('-l')
  runcmd(cmd)

  if cleanup:
    # We will randomly get EBUSY here even when the umount worked.  Suspect
    # this is due to the host distro doing stupid crap on us like autoscanning
    # directories when they get mounted.
    def _ShouldRetry(error):
      if isinstance(error, cros_build_lib.RunCommandError):
        # When we're using `rm` (which is required for sudo), we can't cleanly
        # detect the aforementioned failure.  This is because `rm` will see
        # the errno, handle itself, and then do exit(1).  Which means all we
        # see is that rm failed.  Assume it's this issue as -rf will ignore
        # most things.
        return True
      if isinstance(error, OSError):
        # When we aren't using sudo, we do the unlink ourselves, so the exact
        # errno is bubbled up to us and we can detect it specifically without
        # potentially ignoring all other possible failures.
        return error.errno == errno.EBUSY
      # Something else, we don't know so do not retry.
      return False

    retry_util.GenericRetry(_ShouldRetry, 60, RmDir, path, sudo=sudo, sleep=1)
+
+
def SetEnvironment(env):
  """Restore the environment variables to that of passed in dictionary.

  Args:
    env: A dict whose key/value pairs become the entire os.environ.
  """
  # Wipe everything first so keys absent from |env| do not linger.
  os.environ.clear()
  os.environ.update(env)
+
+
def SourceEnvironment(script, whitelist, ifs=',', env=None, multiline=False):
  """Returns the environment exported by a shell script.

  Note that the script is actually executed (sourced), so do not use this on
  files that have side effects (such as modify the file system).  Stdout will
  be sent to /dev/null, so just echoing is OK.

  Args:
    script: The shell script to 'source'.
    whitelist: An iterable of environment variables to retrieve values for.
    ifs: When showing arrays, what separator to use.
    env: A dict of the initial env to pass down.  You can also pass it None
         (to clear the env) or True (to preserve the current env).
    multiline: Allow a variable to span multiple lines.

  Returns:
    A dictionary containing the values of the whitelisted environment
    variables that are set.
  """
  # For every whitelisted var that the script set, emit a var="value" line.
  # Note: If we want to get more exact results out of bash, we should switch
  # to using `declare -p "${var}"`.  It would require writing a custom parser
  # here, but it would be more robust.
  echo_template = (
      '[[ "${%(var)s+set}" == "set" ]] && echo "%(var)s=\\"${%(var)s[*]}\\""')
  dump_script = ['source "%s" >/dev/null' % script, 'IFS="%s"' % ifs]
  dump_script += [echo_template % {'var': var} for var in whitelist]
  dump_script.append('exit 0')

  # Map our env contract onto RunCommand's: None -> empty env dict,
  # True -> inherit the current env (RunCommand's env=None).
  if env is None:
    env = {}
  elif env is True:
    env = None
  result = cros_build_lib.RunCommand(['bash'], env=env, redirect_stdout=True,
                                     redirect_stderr=True, print_cmd=False,
                                     input='\n'.join(dump_script))
  return cros_build_lib.LoadKeyValueFile(cStringIO.StringIO(result.output),
                                         multiline=multiline)
+
+
def ListBlockDevices(device_path=None, in_bytes=False):
  """Lists all block devices.

  Args:
    device_path: device path (e.g. /dev/sdc).
    in_bytes: whether to display size in bytes.

  Returns:
    A list of BlockDevice items with attributes 'NAME', 'RM', 'TYPE',
    'SIZE' (RM stands for removable).
  """
  keys = ['NAME', 'RM', 'TYPE', 'SIZE']
  BlockDevice = collections.namedtuple('BlockDevice', keys)

  cmd = ['lsblk', '--pairs']
  if in_bytes:
    cmd.append('--bytes')
  if device_path:
    cmd.append(device_path)
  cmd += ['--output', ','.join(keys)]

  output = cros_build_lib.RunCommand(
      cmd, debug_level=logging.DEBUG, capture_output=True).output.strip()

  # Each line is of the form: NAME="sda" RM="0" TYPE="disk" SIZE="..."
  return [BlockDevice(**dict(re.findall(r'(\S+?)=\"(.+?)\"', line)))
          for line in output.splitlines()]
+
+
def GetDeviceInfo(device, keyword='model'):
  """Get information of |device| by searching through device path.

    Looks for the file named |keyword| in the path upwards from
    /sys/block/|device|/device. This path is a symlink and will be fully
    expanded when searching.

  Args:
    device: Device name (e.g. 'sdc').
    keyword: The filename to look for (e.g. product, model).

  Returns:
    The content of the |keyword| file, or None if no such file was found
    while ascending the device path.

  Raises:
    ValueError: If /sys/block/|device| is not a directory.
  """
  device_path = os.path.join('/sys', 'block', device)
  if not os.path.isdir(device_path):
    raise ValueError('%s is not a valid device path.' % device_path)

  # Fully resolve the 'device' symlink, then walk upwards one component at a
  # time looking for the |keyword| file at each level.
  components = ExpandPath(os.path.join(device_path, 'device')).split(
      os.path.sep)
  while len(components) > 2:
    candidate = os.path.join(os.path.sep.join(components), keyword)
    if os.path.isfile(candidate):
      return ReadFile(candidate).strip()
    components.pop()
+
+
def GetDeviceSize(device_path, in_bytes=False):
  """Returns the size of |device|.

  Args:
    device_path: Device path (e.g. '/dev/sdc').
    in_bytes: If set True, returns the size in bytes.

  Returns:
    Size of the device in human readable format unless |in_bytes| is set.

  Raises:
    ValueError: If no 'disk'-typed entry is reported for |device_path|.
  """
  for device in ListBlockDevices(device_path=device_path, in_bytes=in_bytes):
    if device.TYPE == 'disk':
      return int(device.SIZE) if in_bytes else device.SIZE

  raise ValueError('No size info of %s is found.' % device_path)
+
+
# Metadata for a single filesystem entry, as produced by
# StatFilesInDirectory: path, owner username, size in bytes, and
# access/modification times as datetime objects.
FileInfo = collections.namedtuple(
    'FileInfo', ['path', 'owner', 'size', 'atime', 'mtime'])
+
+
def StatFilesInDirectory(path, recursive=False, to_string=False):
  """Stat files in the directory |path|.

  Args:
    path: Path to the target directory.
    recursive: Whether to recurisvely list all files in |path|.
    to_string: Whether to return a string containing the metadata of the
      files.

  Returns:
    If |to_string| is False, returns a list of FileInfo objects. Otherwise,
    returns a string of metadata of the files.
  """
  path = ExpandPath(path)

  def _ToFileInfo(file_path, stat_result):
    """Build one FileInfo from an lstat result."""
    return FileInfo(file_path,
                    pwd.getpwuid(stat_result.st_uid)[0],
                    stat_result.st_size,
                    datetime.datetime.fromtimestamp(stat_result.st_atime),
                    datetime.datetime.fromtimestamp(stat_result.st_mtime))

  file_infos = []
  for root, dirs, files in os.walk(path, topdown=True):
    for name in dirs + files:
      entry = os.path.join(root, name)
      # lstat: report symlinks themselves rather than their targets.
      file_infos.append(_ToFileInfo(entry, os.lstat(entry)))
    if not recursive:
      # Process only the top-most directory.
      break

  if not to_string:
    return file_infos

  template = ('Path: {x.path}, Owner: {x.owner}, Size: {x.size} bytes, '
              'Accessed: {x.atime}, Modified: {x.mtime}')
  header = 'Listing the content of %s' % path
  return '%s\n%s' % (header,
                     '\n'.join([template.format(x=x) for x in file_infos]))
+
+
def MountImagePartition(image_file, part_number, destination, gpt_table=None,
                        sudo=True, makedirs=True, mount_opts=('ro', ),
                        skip_mtab=False):
  """Mount a |partition| from |image_file| to |destination|.

  If there is a GPT table (GetImageDiskPartitionInfo), it will be used for
  start offset and size of the selected partition. Otherwise, the GPT will
  be read again from |image_file|. The GPT table MUST have unit of "B".

  The mount option will be:

    -o offset=XXX,sizelimit=YYY,(*mount_opts)

  Args:
    image_file: A path to the image file (chromiumos_base_image.bin).
    part_number: A partition number.
    destination: A path to the mount point.
    gpt_table: A dictionary of PartitionInfo objects. See
      cros_build_lib.GetImageDiskPartitionInfo.
    sudo: Same as MountDir.
    makedirs: Same as MountDir.
    mount_opts: Same as MountDir.
    skip_mtab: Same as MountDir.

  Raises:
    ValueError: If |part_number| does not exist in the GPT.
  """
  if gpt_table is None:
    gpt_table = cros_build_lib.GetImageDiskPartitionInfo(image_file, 'B',
                                                         key_selector='number')

  # Locate the requested partition entry.
  part = None
  for entry in gpt_table.values():
    if entry.number == part_number:
      part = entry
      break
  if part is None:
    raise ValueError('Partition number %d not found in the GPT %r.' %
                     (part_number, gpt_table))

  # Mount via a loop device positioned at the partition's byte range.
  loop_opts = ['loop', 'offset=%d' % part.start, 'sizelimit=%d' % part.size]
  MountDir(image_file, destination, sudo=sudo, makedirs=makedirs,
           mount_opts=loop_opts + list(mount_opts), skip_mtab=skip_mtab)
+
+
@contextlib.contextmanager
def ChdirContext(target_dir):
  """A context manager to chdir() into |target_dir| and back out on exit.

  Args:
    target_dir: A target directory to chdir into.

  Yields:
    Nothing; the original working directory is restored on exit, even when
    the body raises.
  """
  original_cwd = os.getcwd()
  os.chdir(target_dir)
  try:
    yield
  finally:
    os.chdir(original_cwd)
+
+
class MountImageContext(object):
  """A context manager to mount an image."""

  def __init__(self, image_file, destination, part_selects=(1, 3)):
    """Construct a context manager object to actually do the job.

    Specified partitions will be mounted under |destination| according to the
    pattern:

      partition ---mount--> dir-<partition number>

    Symlinks with labels "dir-<label>" will also be created in |destination| to
    point to the mounted partitions. If there is a conflict in symlinks, the
    first one wins.

    The image is unmounted when this context manager exits.

      with MountImageContext('build/images/wolf/latest', 'root_mount_point'):
        # "dir-1", and "dir-3" will be mounted in root_mount_point
        ...

    Args:
      image_file: A path to the image file.
      destination: A directory in which all mount points and symlinks will be
        created. This parameter is relative to the CWD at the time __init__ is
        called.
      part_selects: A list of partition numbers or labels to be mounted. If an
        element is an integer, it is matched as partition number, otherwise
        a partition label.
    """
    self._image_file = image_file
    # Read the GPT once; byte unit ('B') is required by MountImagePartition.
    self._gpt_table = cros_build_lib.GetImageDiskPartitionInfo(
        self._image_file, 'B', key_selector='number'
    )
    # Target dir is absolute path so that we do not have to worry about
    # CWD being changed later.
    self._target_dir = ExpandPath(destination)
    self._part_selects = part_selects
    # Bookkeeping so __exit__ only tears down what we actually created.
    self._mounted = set()
    self._linked_labels = set()

  def _GetMountPointAndSymlink(self, part):
    """Given a PartitionInfo, return a tuple of mount point and symlink.

    Args:
      part: A PartitionInfo object.

    Returns:
      A tuple (mount_point, symlink).
    """
    dest_number = os.path.join(self._target_dir, 'dir-%d' % part.number)
    dest_label = os.path.join(self._target_dir, 'dir-%s' % part.name)
    return dest_number, dest_label

  def _Mount(self, part):
    """Mount the partition and create a symlink to the mount point.

    The partition is mounted as "dir-partNumber", and the symlink "dir-label".
    If "dir-label" already exists, no symlink is created.

    Args:
      part: A PartitionInfo object.

    Raises:
      ValueError if mount point already exists.
    """
    # Idempotent: mounting the same partition twice is a no-op.
    if part in self._mounted:
      return

    dest_number, dest_label = self._GetMountPointAndSymlink(part)
    if os.path.exists(dest_number):
      raise ValueError('Mount point %s already exists.' % dest_number)

    MountImagePartition(self._image_file, part.number,
                        dest_number, self._gpt_table)
    self._mounted.add(part)

    # First label wins; later partitions with the same label get no symlink.
    if not os.path.exists(dest_label):
      os.symlink(os.path.basename(dest_number), dest_label)
      self._linked_labels.add(dest_label)

  def _Unmount(self, part):
    """Unmount a partition that was mounted by _Mount."""
    dest_number, dest_label = self._GetMountPointAndSymlink(part)
    # Due to crosbug/358933, the RmDir call might fail. So we skip the cleanup.
    UmountDir(dest_number, cleanup=False)
    self._mounted.remove(part)

    # Only remove symlinks we created ourselves.
    if dest_label in self._linked_labels:
      SafeUnlink(dest_label)
      self._linked_labels.remove(dest_label)

  def _CleanUp(self):
    """Unmount all mounted partitions."""
    to_be_rmdir = []
    for part in list(self._mounted):
      self._Unmount(part)
      dest_number, _ = self._GetMountPointAndSymlink(part)
      to_be_rmdir.append(dest_number)
    # Because _Unmount did not RmDir the mount points, we do that here.
    for path in to_be_rmdir:
      # Retry: removal can transiently fail right after unmounting.
      retry_util.RetryException(cros_build_lib.RunCommandError, 60,
                                RmDir, path, sudo=True, sleep=1)

  def __enter__(self):
    for selector in self._part_selects:
      # Integers select by partition number, everything else by label.
      matcher = operator.attrgetter('number')
      if not isinstance(selector, int):
        matcher = operator.attrgetter('name')
      for _, part in self._gpt_table.items():
        if matcher(part) == selector:
          try:
            self._Mount(part)
          except:
            # Undo any partitions already mounted before re-raising.
            self._CleanUp()
            raise
          break
      else:
        # Selector matched nothing: roll back and report.
        self._CleanUp()
        raise ValueError('Partition %r not found in the GPT %r.' %
                         (selector, self._gpt_table))

    return self

  def __exit__(self, exc_type, exc_value, traceback):
    self._CleanUp()
+
+
def _SameFileSystem(path1, path2):
  """Determine whether two paths are on the same filesystem.

  Be resilient to nonsense paths. Return False instead of blowing up.
  """
  try:
    dev1 = os.stat(path1).st_dev
    dev2 = os.stat(path2).st_dev
  except OSError:
    # Missing/inaccessible path: report "different" rather than raise.
    return False
  return dev1 == dev2
+
+
class MountOverlayContext(object):
  """A context manager for mounting an OverlayFS directory.

  An overlay filesystem will be mounted at |mount_dir|, and will be unmounted
  when the context exits.

  Args:
    lower_dir: The lower directory (read-only).
    upper_dir: The upper directory (read-write).
    mount_dir: The mount point for the merged overlay.
    cleanup: Whether to remove the mount point after unmounting. This uses an
        internal retry logic for cases where unmount is successful but the
        directory still appears busy, and is generally more resilient than
        removing it independently.
  """

  # `mount` exit codes that indicate the overlay API is unsupported (and the
  # legacy overlayfs API should be tried instead).
  OVERLAY_FS_MOUNT_ERRORS = (32,)

  def __init__(self, lower_dir, upper_dir, mount_dir, cleanup=False):
    self._lower_dir = lower_dir
    self._upper_dir = upper_dir
    self._mount_dir = mount_dir
    self._cleanup = cleanup
    # Populated by _TempDirSetup when a workdir is needed (modern API only).
    self.tempdir = None

  def __enter__(self):
    # Upstream Kernel 3.18 and the ubuntu backport of overlayfs have different
    # APIs. We must support both.
    try_legacy = False
    stashed_e_overlay_str = None

    # We must ensure that upperdir and workdir are on the same filesystem.
    if _SameFileSystem(self._upper_dir, GetGlobalTempDir()):
      _TempDirSetup(self)
    elif _SameFileSystem(self._upper_dir, os.path.dirname(self._upper_dir)):
      _TempDirSetup(self, base_dir=os.path.dirname(self._upper_dir))
    else:
      # Fixed garbled message ("Could create find a workdir").
      logging.debug('Could not find a workdir on the same filesystem as %s. '
                    'Trying legacy API instead.',
                    self._upper_dir)
      try_legacy = True

    if not try_legacy:
      # Modern API: requires a workdir alongside upperdir.
      try:
        MountDir('overlay', self._mount_dir, fs_type='overlay', makedirs=False,
                 mount_opts=('lowerdir=%s' % self._lower_dir,
                             'upperdir=%s' % self._upper_dir,
                             'workdir=%s' % self.tempdir),
                 quiet=True)
      except cros_build_lib.RunCommandError as e_overlay:
        if e_overlay.result.returncode not in self.OVERLAY_FS_MOUNT_ERRORS:
          raise
        # Keep the error text so we can report both failures if the legacy
        # path also fails.
        logging.debug('Failed to mount overlay filesystem. Trying legacy API.')
        stashed_e_overlay_str = str(e_overlay)
        try_legacy = True

    if try_legacy:
      # Legacy (ubuntu backport) API: no workdir option.
      try:
        MountDir('overlayfs', self._mount_dir, fs_type='overlayfs',
                 makedirs=False,
                 mount_opts=('lowerdir=%s' % self._lower_dir,
                             'upperdir=%s' % self._upper_dir),
                 quiet=True)
      except cros_build_lib.RunCommandError as e_overlayfs:
        logging.error('All attempts at mounting overlay filesystem failed.')
        if stashed_e_overlay_str is not None:
          logging.error('overlay: %s', stashed_e_overlay_str)
        logging.error('overlayfs: %s', str(e_overlayfs))
        raise

    return self

  def __exit__(self, exc_type, exc_value, traceback):
    UmountDir(self._mount_dir, cleanup=self._cleanup)
    # The workdir (if one was created) may contain root-owned files.
    _TempDirTearDown(self, force_sudo=True)
+
+
# One entry of /proc/mounts, as yielded by IterateMountPoints: the mount
# source, its destination (mount point), the filesystem type, and the raw
# options string.
MountInfo = collections.namedtuple(
    'MountInfo',
    'source destination filesystem options')
+
+
def IterateMountPoints(proc_file='/proc/mounts'):
  """Iterate over all mounts as reported by "/proc/mounts".

  Args:
    proc_file: A path to a file whose content is similar to /proc/mounts.
      Default to "/proc/mounts" itself.

  Returns:
    A generator that yields MountInfo objects.
  """
  with open(proc_file) as f:
    for line in f:
      # /proc/mounts escapes special characters as \ooo octal sequences;
      # decode each field back to its literal character.
      decoded = [
          re.sub(r'\\([0-7]{3})', lambda m: chr(int(m.group(1), 8)), field)
          for field in line.split()
      ]
      # The trailing dump/pass fields are discarded.
      source, destination, filesystem, options, _, _ = decoded
      yield MountInfo(source, destination, filesystem, options)
+
+
def ResolveSymlink(file_name, root='/'):
  """Resolve a symlink |file_name| relative to |root|.

  For example:

    ROOT-A/absolute_symlink --> /an/abs/path
    ROOT-A/relative_symlink --> a/relative/path

    absolute_symlink will be resolved to ROOT-A/an/abs/path
    relative_symlink will be resolved to ROOT-A/a/relative/path

  Args:
    file_name: A path to the file.
    root: A path to the root directory.

  Returns:
    |file_name| if |file_name| is not a symlink. Otherwise, the ultimate path
    that |file_name| points to, with links resolved relative to |root|.

  Raises:
    ValueError: If more than 128 levels of links are traversed (cycle guard).
  """
  depth = 0
  current = file_name
  while os.path.islink(current):
    depth += 1
    if depth > 128:
      raise ValueError('Too many link levels for %s.' % current)
    link = os.readlink(current)
    if link.startswith('/'):
      # Absolute link: re-root it under |root| instead of the live system.
      current = os.path.join(root, link[1:])
    else:
      current = os.path.join(os.path.dirname(current), link)
  return current
+
+
def IsInsideVm():
  """Return True if we are running inside a virtual machine.

  The detection is based on the model of the hard drive.
  """
  for blk_model in glob.glob('/sys/block/*/device/model'):
    if not os.path.isfile(blk_model):
      continue
    model = ReadFile(blk_model)
    # VirtualBox and VMware report distinctive disk model strings.
    if model.startswith(('VBOX', 'VMware')):
      return True

  return False
diff --git a/lib/osutils_unittest b/lib/osutils_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/osutils_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/osutils_unittest.py b/lib/osutils_unittest.py
new file mode 100644
index 0000000..3f31069
--- /dev/null
+++ b/lib/osutils_unittest.py
@@ -0,0 +1,782 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the osutils.py module (imagine that!)."""
+
+from __future__ import print_function
+
+import collections
+import glob
+import mock
+import os
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import partial_mock
+
+
+class TestOsutils(cros_test_lib.TempDirTestCase):
+  """General unittests for the osutils module."""
+
+  def testReadWriteFile(self):
+    """Verify we can write data to a file, and then read it back."""
+    filename = os.path.join(self.tempdir, 'foo')
+    data = 'alsdkfjasldkfjaskdlfjasdf'
+    self.assertEqual(osutils.WriteFile(filename, data), None)
+    self.assertEqual(osutils.ReadFile(filename), data)
+
+  def testSudoWrite(self):
+    """Verify that we can write a file as sudo."""
+    with osutils.TempDir(sudo_rm=True) as tempdir:
+      root_owned_dir = os.path.join(tempdir, 'foo')
+      self.assertTrue(osutils.SafeMakedirs(root_owned_dir, sudo=True))
+      for atomic in (True, False):
+        filename = os.path.join(root_owned_dir,
+                                'bar.atomic' if atomic else 'bar')
+        self.assertRaises(IOError, osutils.WriteFile, filename, 'data')
+
+        osutils.WriteFile(filename, 'test', atomic=atomic, sudo=True)
+        self.assertEqual('test', osutils.ReadFile(filename))
+        self.assertEqual(0, os.stat(filename).st_uid)
+
+      # Appending to a file is not supported with sudo.
+      self.assertRaises(ValueError, osutils.WriteFile,
+                        os.path.join(root_owned_dir, 'nope'), 'data',
+                        sudo=True, mode='a')
+
+  def testSafeSymlink(self):
+    """Test that we can create symlinks."""
+    with osutils.TempDir(sudo_rm=True) as tempdir:
+      file_a = os.path.join(tempdir, 'a')
+      osutils.WriteFile(file_a, 'a')
+
+      file_b = os.path.join(tempdir, 'b')
+      osutils.WriteFile(file_b, 'b')
+
+      user_dir = os.path.join(tempdir, 'bar')
+      user_link = os.path.join(user_dir, 'link')
+      osutils.SafeMakedirs(user_dir)
+
+      root_dir = os.path.join(tempdir, 'foo')
+      root_link = os.path.join(root_dir, 'link')
+      osutils.SafeMakedirs(root_dir, sudo=True)
+
+      # We can create and override links owned by a non-root user.
+      osutils.SafeSymlink(file_a, user_link)
+      self.assertEqual('a', osutils.ReadFile(user_link))
+
+      osutils.SafeSymlink(file_b, user_link)
+      self.assertEqual('b', osutils.ReadFile(user_link))
+
+      # We can create and override links owned by root.
+      osutils.SafeSymlink(file_a, root_link, sudo=True)
+      self.assertEqual('a', osutils.ReadFile(root_link))
+
+      osutils.SafeSymlink(file_b, root_link, sudo=True)
+      self.assertEqual('b', osutils.ReadFile(root_link))
+
+  def testSafeUnlink(self):
+    """Test unlinking files work (existing or not)."""
+    def f(dirname, sudo=False):
+      dirname = os.path.join(self.tempdir, dirname)
+      path = os.path.join(dirname, 'foon')
+      os.makedirs(dirname)
+      open(path, 'w').close()
+      self.assertTrue(os.path.exists(path))
+      if sudo:
+        cros_build_lib.SudoRunCommand(
+            ['chown', 'root:root', '-R', '--', dirname], print_cmd=False)
+        self.assertRaises(EnvironmentError, os.unlink, path)
+      self.assertTrue(osutils.SafeUnlink(path, sudo=sudo))
+      self.assertFalse(os.path.exists(path))
+      self.assertFalse(osutils.SafeUnlink(path))
+      self.assertFalse(os.path.exists(path))
+
+    f("nonsudo", False)
+    f("sudo", True)
+
+  def testSafeMakedirs(self):
+    """Test creating directory trees work (existing or not)."""
+    path = os.path.join(self.tempdir, 'a', 'b', 'c', 'd', 'e')
+    self.assertTrue(osutils.SafeMakedirs(path))
+    self.assertTrue(os.path.exists(path))
+    self.assertFalse(osutils.SafeMakedirs(path))
+    self.assertTrue(os.path.exists(path))
+
+  def testSafeMakedirs_error(self):
+    """Check error paths."""
+    self.assertRaises(OSError, osutils.SafeMakedirs, '/foo/bar/cow/moo/wee')
+    self.assertRaises(OSError, osutils.SafeMakedirs, '')
+
+  def testSafeMakedirsSudo(self):
+    """Test creating directory trees work as root (existing or not)."""
+    path = os.path.join(self.tempdir, 'a', 'b', 'c', 'd', 'e')
+    self.assertTrue(osutils.SafeMakedirs(path, sudo=True))
+    self.assertTrue(os.path.exists(path))
+    self.assertFalse(osutils.SafeMakedirs(path, sudo=True))
+    self.assertTrue(os.path.exists(path))
+    self.assertEqual(os.stat(path).st_uid, 0)
+    # Have to manually clean up as a non-root `rm -rf` will fail.
+    cros_build_lib.SudoRunCommand(['rm', '-rf', self.tempdir], print_cmd=False)
+
+  def testRmDir(self):
+    """Test that removing dirs work."""
+    path = os.path.join(self.tempdir, 'a', 'b', 'c', 'd', 'e')
+
+    self.assertRaises(EnvironmentError, osutils.RmDir, path)
+    osutils.SafeMakedirs(path)
+    osutils.RmDir(path)
+    osutils.RmDir(path, ignore_missing=True)
+    self.assertRaises(EnvironmentError, osutils.RmDir, path)
+
+    osutils.SafeMakedirs(path)
+    osutils.RmDir(path)
+    self.assertFalse(os.path.exists(path))
+
+  def testRmDirSudo(self):
+    """Test that removing dirs via sudo works."""
+    subpath = os.path.join(self.tempdir, 'a')
+    path = os.path.join(subpath, 'b', 'c', 'd', 'e')
+    self.assertTrue(osutils.SafeMakedirs(path, sudo=True))
+    self.assertRaises(OSError, osutils.RmDir, path)
+    osutils.RmDir(subpath, sudo=True)
+    self.assertRaises(cros_build_lib.RunCommandError,
+                      osutils.RmDir, subpath, sudo=True)
+
+  def testTouchFile(self):
+    """Test that we can touch files."""
+    path = os.path.join(self.tempdir, 'touchit')
+    self.assertFalse(os.path.exists(path))
+    osutils.Touch(path)
+    self.assertTrue(os.path.exists(path))
+    self.assertEqual(os.path.getsize(path), 0)
+
+  def testTouchFileSubDir(self):
+    """Test that we can touch files in non-existent subdirs."""
+    path = os.path.join(self.tempdir, 'a', 'b', 'c', 'touchit')
+    self.assertFalse(os.path.exists(os.path.dirname(path)))
+    osutils.Touch(path, makedirs=True)
+    self.assertTrue(os.path.exists(path))
+    self.assertEqual(os.path.getsize(path), 0)
+
+
+class TestProcess(cros_build_lib_unittest.RunCommandTestCase):
+  """Tests for osutils.IsChildProcess."""
+
+  def testIsChildProcess(self):
+    """Test IsChildProcess with no name."""
+    mock_pstree_output = 'a(1)-+-b(2)\n\t|-c(3)\n\t|-foo(4)-bar(5)'
+    self.rc.AddCmdResult(partial_mock.Ignore(), output=mock_pstree_output)
+    self.assertTrue(osutils.IsChildProcess(4))
+    self.assertTrue(osutils.IsChildProcess(4, name='foo'))
+    self.assertFalse(osutils.IsChildProcess(5, name='foo'))
+
+
+class TempDirTests(cros_test_lib.TestCase):
+  """Unittests of osutils.TempDir.
+
+  Unlike other test classes in this file, TempDirTestCase isn't used as a base
+  class, because that is the functionality under test.
+  """
+  PREFIX = 'chromite.test.osutils.TempDirTests'
+
+  class HelperException(Exception):
+    """Exception for tests to raise to test exception handling."""
+
+  class HelperExceptionInner(Exception):
+    """Exception for tests to raise to test exception handling."""
+
+  def testBasicSuccessEmpty(self):
+    """Test we create and cleanup an empty tempdir."""
+    with osutils.TempDir(prefix=self.PREFIX) as td:
+      tempdir = td
+      # Show the temp directory exists and is empty.
+      self.assertTrue(os.path.isdir(tempdir))
+      self.assertEquals(os.listdir(tempdir), [])
+
+    # Show the temp directory no longer exists.
+    self.assertNotExists(tempdir)
+
+  def testBasicSuccessNotEmpty(self):
+    """Test we cleanup tempdir with stuff in it."""
+    with osutils.TempDir(prefix=self.PREFIX) as td:
+      tempdir = td
+      # Show the temp directory exists and is empty.
+      self.assertTrue(os.path.isdir(tempdir))
+      self.assertEquals(os.listdir(tempdir), [])
+
+      # Create an empty file.
+      osutils.Touch(os.path.join(tempdir, 'foo.txt'))
+
+      # Create nested sub directories.
+      subdir = os.path.join(tempdir, 'foo', 'bar', 'taco')
+      os.makedirs(subdir)
+      osutils.Touch(os.path.join(subdir, 'sauce.txt'))
+
+    # Show the temp directory no longer exists.
+    self.assertNotExists(tempdir)
+
+  def testErrorCleanup(self):
+    """Test we cleanup, even if an exception is raised."""
+    try:
+      with osutils.TempDir(prefix=self.PREFIX) as td:
+        tempdir = td
+        raise TempDirTests.HelperException()
+    except TempDirTests.HelperException:
+      pass
+
+    # Show the temp directory no longer exists.
+    self.assertNotExists(tempdir)
+
+  def testCleanupExceptionContextException(self):
+    """Test an exception during cleanup if the context DID raise."""
+    was_raised = False
+    tempdir_obj = osutils.TempDir(prefix=self.PREFIX)
+
+    with mock.patch.object(osutils, '_TempDirTearDown',
+                           side_effect=TempDirTests.HelperException):
+      try:
+        with tempdir_obj as td:
+          tempdir = td
+          raise TempDirTests.HelperExceptionInner()
+      except TempDirTests.HelperExceptionInner:
+        was_raised = True
+
+    # Show that the exception exited the context.
+    self.assertTrue(was_raised)
+
+    # Verify the tempdir object no longer contains a reference to the tempdir.
+    self.assertIsNone(tempdir_obj.tempdir)
+
+    # Cleanup the dir leaked by our mock exception.
+    os.rmdir(tempdir)
+
+  def testCleanupExceptionNoContextException(self):
+    """Test an exception during cleanup if the context did NOT raise."""
+    was_raised = False
+    tempdir_obj = osutils.TempDir(prefix=self.PREFIX)
+
+    with mock.patch.object(osutils, '_TempDirTearDown',
+                           side_effect=TempDirTests.HelperException):
+      try:
+        with tempdir_obj as td:
+          tempdir = td
+      except TempDirTests.HelperException:
+        was_raised = True
+
+    # Show that the exception exited the context.
+    self.assertTrue(was_raised)
+
+    # Verify the tempdir object no longer contains a reference to the tempdir.
+    self.assertIsNone(tempdir_obj.tempdir)
+
+    # Cleanup the dir leaked by our mock exception.
+    os.rmdir(tempdir)
+
+  def testSkipCleanup(self):
+    """Test that we leave behind tempdirs when requested."""
+    tempdir_obj = osutils.TempDir(prefix=self.PREFIX, delete=False)
+    tempdir = tempdir_obj.tempdir
+    tempdir_obj.Cleanup()
+    # Ensure we cleaned up ...
+    self.assertIsNone(tempdir_obj.tempdir)
+    # ... but leaked the directory.
+    self.assertExists(tempdir)
+    # Now really cleanup the directory leaked by the test.
+    os.rmdir(tempdir)
+
+  def testSkipCleanupGlobal(self):
+    """Test that we reset global tempdir as expected even with skip."""
+    with osutils.TempDir(prefix=self.PREFIX, set_global=True) as tempdir:
+      tempdir_before = osutils.GetGlobalTempDir()
+      tempdir_obj = osutils.TempDir(prefix=self.PREFIX, set_global=True,
+                                    delete=False)
+      tempdir_inside = osutils.GetGlobalTempDir()
+      tempdir_obj.Cleanup()
+      tempdir_after = osutils.GetGlobalTempDir()
+
+    # We shouldn't leak the outer directory.
+    self.assertNotExists(tempdir)
+    self.assertEqual(tempdir_before, tempdir_after)
+    # This is a strict substring check.
+    self.assertLess(tempdir_before, tempdir_inside)
+
+
+class MountTests(cros_test_lib.TestCase):
+  """Unittests for osutils mounting and umounting helpers."""
+
+  def testMountTmpfsDir(self):
+    """Verify mounting a tmpfs works"""
+    cleaned = False
+    with osutils.TempDir(prefix='chromite.test.osutils') as tempdir:
+      st_before = os.stat(tempdir)
+      try:
+        # Mount the dir and verify it worked.
+        osutils.MountTmpfsDir(tempdir)
+        st_after = os.stat(tempdir)
+        self.assertNotEqual(st_before.st_dev, st_after.st_dev)
+
+        # Unmount the dir and verify it worked.
+        osutils.UmountDir(tempdir)
+        cleaned = True
+
+        # Finally make sure it's cleaned up.
+        self.assertFalse(os.path.exists(tempdir))
+      finally:
+        if not cleaned:
+          cros_build_lib.SudoRunCommand(['umount', '-lf', tempdir],
+                                        error_code_ok=True)
+
+
+class IteratePathsTest(cros_test_lib.TestCase):
+  """Test iterating through all segments of a path."""
+
+  def testType(self):
+    """Check that return value is an iterator."""
+    self.assertTrue(isinstance(osutils.IteratePaths('/'), collections.Iterator))
+
+  def testRoot(self):
+    """Test iterating from root directory."""
+    inp = '/'
+    exp = ['/']
+    self.assertEquals(list(osutils.IteratePaths(inp)), exp)
+
+  def testOneDir(self):
+    """Test iterating from a directory in a root directory."""
+    inp = '/abc'
+    exp = ['/', '/abc']
+    self.assertEquals(list(osutils.IteratePaths(inp)), exp)
+
+  def testTwoDirs(self):
+    """Test iterating two dirs down."""
+    inp = '/abc/def'
+    exp = ['/', '/abc', '/abc/def']
+    self.assertEquals(list(osutils.IteratePaths(inp)), exp)
+
+  def testNormalize(self):
+    """Test argument being normalized."""
+    cases = [
+        ('//', ['/']),
+        ('///', ['/']),
+        ('/abc/', ['/', '/abc']),
+        ('/abc//def', ['/', '/abc', '/abc/def']),
+    ]
+    for inp, exp in cases:
+      self.assertEquals(list(osutils.IteratePaths(inp)), exp)
+
+
+class IteratePathParentsTest(cros_test_lib.TestCase):
+  """Test parent directory iteration functionality."""
+
+  def _RunForPath(self, path, expected):
+    result_components = []
+    for p in osutils.IteratePathParents(path):
+      result_components.append(os.path.basename(p))
+
+    result_components.reverse()
+    if expected is not None:
+      self.assertEquals(expected, result_components)
+
+  def testIt(self):
+    """Run the test vectors."""
+    vectors = {
+        '/': [''],
+        '/path/to/nowhere': ['', 'path', 'to', 'nowhere'],
+        '/path/./to': ['', 'path', 'to'],
+        '//path/to': ['', 'path', 'to'],
+        'path/to': None,
+        '': None,
+    }
+    for p, e in vectors.iteritems():
+      self._RunForPath(p, e)
+
+
+class FindInPathParentsTest(cros_test_lib.TempDirTestCase):
+  """Test FindInPathParents functionality."""
+
+  D = cros_test_lib.Directory
+
+  DIR_STRUCT = [
+      D('a', [
+          D('.repo', []),
+          D('b', [
+              D('c', [])
+          ])
+      ])
+  ]
+
+  START_PATH = os.path.join('a', 'b', 'c')
+
+  def setUp(self):
+    cros_test_lib.CreateOnDiskHierarchy(self.tempdir, self.DIR_STRUCT)
+
+  def testFound(self):
+    """Target is found."""
+    found = osutils.FindInPathParents(
+        '.repo', os.path.join(self.tempdir, self.START_PATH))
+    self.assertEquals(found, os.path.join(self.tempdir, 'a', '.repo'))
+
+  def testNotFound(self):
+    """Target is not found."""
+    found = osutils.FindInPathParents(
+        'does.not/exist', os.path.join(self.tempdir, self.START_PATH))
+    self.assertEquals(found, None)
+
+
+class SourceEnvironmentTest(cros_test_lib.TempDirTestCase):
+  """Test osutil's environmental variable related methods."""
+
+  ENV_WHITELIST = {
+      'ENV1': 'monkeys like bananas',
+      'ENV3': 'merci',
+      'ENV6': '',
+  }
+
+  ENV_OTHER = {
+      'ENV2': 'bananas are yellow',
+      'ENV4': 'de rien',
+  }
+
+  ENV = """
+declare -x ENV1="monkeys like bananas"
+declare -x ENV2="bananas are yellow"
+declare -x ENV3="merci"
+declare -x ENV4="de rien"
+declare -x ENV6=''
+declare -x ENVA=('a b c' 'd' 'e 1234 %')
+"""
+
+  ENV_MULTILINE = """
+declare -x ENVM="gentil
+mechant"
+"""
+
+  def setUp(self):
+    self.env_file = os.path.join(self.tempdir, 'environment')
+    self.env_file_multiline = os.path.join(self.tempdir, 'multiline')
+    osutils.WriteFile(self.env_file, self.ENV)
+    osutils.WriteFile(self.env_file_multiline, self.ENV_MULTILINE)
+
+  def testWhiteList(self):
+    env_dict = osutils.SourceEnvironment(
+        self.env_file, ('ENV1', 'ENV3', 'ENV5', 'ENV6'))
+    self.assertEquals(env_dict, self.ENV_WHITELIST)
+
+  def testArrays(self):
+    env_dict = osutils.SourceEnvironment(self.env_file, ('ENVA',))
+    self.assertEquals(env_dict, {'ENVA': 'a b c,d,e 1234 %'})
+
+    env_dict = osutils.SourceEnvironment(self.env_file, ('ENVA',), ifs=' ')
+    self.assertEquals(env_dict, {'ENVA': 'a b c d e 1234 %'})
+
+    env_dict = osutils.SourceEnvironment(self.env_file_multiline, ('ENVM',),
+                                         multiline=True)
+    self.assertEquals(env_dict, {'ENVM': 'gentil\nmechant'})
+
+
+class DeviceInfoTests(cros_build_lib_unittest.RunCommandTestCase):
+  """Tests methods retrieving information about devices."""
+
+  FULL_OUTPUT = """
+NAME="sda" RM="0" TYPE="disk" SIZE="128G"
+NAME="sda1" RM="1" TYPE="part" SIZE="100G"
+NAME="sda2" RM="1" TYPE="part" SIZE="28G"
+NAME="sdc" RM="1" TYPE="disk" SIZE="7.4G"
+NAME="sdc1" RM="1" TYPE="part" SIZE="1G"
+NAME="sdc2" RM="1" TYPE="part" SIZE="6.4G"
+"""
+
+  PARTIAL_OUTPUT = """
+NAME="sdc" RM="1" TYPE="disk" SIZE="7.4G"
+NAME="sdc1" RM="1" TYPE="part" SIZE="1G"
+NAME="sdc2" RM="1" TYPE="part" SIZE="6.4G"
+"""
+
+  def testListBlockDevices(self):
+    """Tests that we can list all block devices correctly."""
+    self.rc.AddCmdResult(partial_mock.Ignore(), output=self.FULL_OUTPUT)
+    devices = osutils.ListBlockDevices()
+    self.assertEqual(devices[0].NAME, 'sda')
+    self.assertEqual(devices[0].RM, '0')
+    self.assertEqual(devices[0].TYPE, 'disk')
+    self.assertEqual(devices[0].SIZE, '128G')
+    self.assertEqual(devices[3].NAME, 'sdc')
+    self.assertEqual(devices[3].RM, '1')
+    self.assertEqual(devices[3].TYPE, 'disk')
+    self.assertEqual(devices[3].SIZE, '7.4G')
+
+  def testGetDeviceSize(self):
+    """Tests that we can get the size of a device."""
+    self.rc.AddCmdResult(partial_mock.Ignore(), output=self.PARTIAL_OUTPUT)
+    self.assertEqual(osutils.GetDeviceSize('/dev/sdc'), '7.4G')
+
+
+class MountImagePartitionTests(cros_test_lib.MockTestCase):
+  """Tests for MountImagePartition."""
+
+  def setUp(self):
+    self._gpt_table = {
+        3: cros_build_lib.PartitionInfo(3, 1, 3, 2, 'fs', 'Label', 'flag')
+    }
+
+  def testWithCacheOkay(self):
+    mount_dir = self.PatchObject(osutils, 'MountDir')
+    osutils.MountImagePartition('image_file', 3, 'destination',
+                                self._gpt_table)
+    opts = ['loop', 'offset=1', 'sizelimit=2', 'ro']
+    mount_dir.assert_called_with('image_file', 'destination', makedirs=True,
+                                 skip_mtab=False, sudo=True, mount_opts=opts)
+
+  def testWithCacheFail(self):
+    self.assertRaises(ValueError, osutils.MountImagePartition,
+                      'image_file', 404, 'destination', self._gpt_table)
+
+  def testWithoutCache(self):
+    self.PatchObject(cros_build_lib, 'GetImageDiskPartitionInfo',
+                     return_value=self._gpt_table)
+    mount_dir = self.PatchObject(osutils, 'MountDir')
+    osutils.MountImagePartition('image_file', 3, 'destination')
+    opts = ['loop', 'offset=1', 'sizelimit=2', 'ro']
+    mount_dir.assert_called_with(
+        'image_file', 'destination', makedirs=True, skip_mtab=False,
+        sudo=True, mount_opts=opts
+    )
+
+
+class ChdirTests(cros_test_lib.MockTempDirTestCase):
+  """Tests for ChdirContext."""
+
+  def testChdir(self):
+    current_dir = os.getcwd()
+    self.assertNotEqual(self.tempdir, os.getcwd())
+    with osutils.ChdirContext(self.tempdir):
+      self.assertEqual(self.tempdir, os.getcwd())
+    self.assertEqual(current_dir, os.getcwd())
+
+
+class MountImageTests(cros_test_lib.MockTempDirTestCase):
+  """Tests for MountImageContext."""
+
+  def _testWithParts(self, parts, selectors, check_links=True):
+    self.PatchObject(cros_build_lib, 'GetImageDiskPartitionInfo',
+                     return_value=parts)
+    mount_dir = self.PatchObject(osutils, 'MountDir')
+    unmount_dir = self.PatchObject(osutils, 'UmountDir')
+    rmdir = self.PatchObject(osutils, 'RmDir')
+    with osutils.MountImageContext('_ignored', self.tempdir, selectors):
+      for _, part in parts.items():
+        mount_point = os.path.join(self.tempdir, 'dir-%d' % part.number)
+        mount_dir.assert_any_call(
+            '_ignored', mount_point, makedirs=True, skip_mtab=False,
+            sudo=True,
+            mount_opts=['loop', 'offset=0', 'sizelimit=0', 'ro']
+        )
+        if check_links:
+          link = os.path.join(self.tempdir, 'dir-%s' % part.name)
+          self.assertTrue(os.path.islink(link))
+          self.assertEqual(os.path.basename(mount_point),
+                           os.readlink(link))
+    for _, part in parts.items():
+      mount_point = os.path.join(self.tempdir, 'dir-%d' % part.number)
+      unmount_dir.assert_any_call(mount_point, cleanup=False)
+      rmdir.assert_any_call(mount_point, sudo=True)
+      if check_links:
+        link = os.path.join(self.tempdir, 'dir-%s' % part.name)
+        self.assertFalse(os.path.lexists(link))
+
+  def testWithPartitionNumber(self):
+    parts = {
+        1: cros_build_lib.PartitionInfo(1, 0, 0, 0, '', 'my-stateful', ''),
+        3: cros_build_lib.PartitionInfo(3, 0, 0, 0, '', 'my-root-a', ''),
+    }
+    self._testWithParts(parts, [1, 3])
+
+  def testWithPartitionLabel(self):
+    parts = {
+        42: cros_build_lib.PartitionInfo(42, 0, 0, 0, '', 'label', ''),
+    }
+    self._testWithParts(parts, ['label'])
+
+  def testInvalidPartSelector(self):
+    parts = {
+        42: cros_build_lib.PartitionInfo(42, 0, 0, 0, '', 'label', ''),
+    }
+    self.assertRaises(ValueError, self._testWithParts, parts, ['label404'])
+    self.assertRaises(ValueError, self._testWithParts, parts, [404])
+
+  def testFailOnExistingMount(self):
+    parts = {
+        42: cros_build_lib.PartitionInfo(42, 0, 0, 0, '', 'label', ''),
+    }
+    os.makedirs(os.path.join(self.tempdir, 'dir-42'))
+    self.assertRaises(ValueError, self._testWithParts, parts, [42])
+
+  def testExistingLinkNotCleanedUp(self):
+    parts = {
+        42: cros_build_lib.PartitionInfo(42, 0, 0, 0, '', 'label', ''),
+    }
+    symlink = os.path.join(self.tempdir, 'dir-label')
+    os.symlink('/tmp', symlink)
+    self.assertEqual('/tmp', os.readlink(symlink))
+    self._testWithParts(parts, [42], check_links=False)
+    self.assertEqual('/tmp', os.readlink(symlink))
+
+
+class MountOverlayTest(cros_test_lib.MockTempDirTestCase):
+  """Tests MountOverlayContext."""
+
+  def setUp(self):
+    self.upperdir = os.path.join(self.tempdir, 'first_level', 'upperdir')
+    self.lowerdir = os.path.join(self.tempdir, 'lowerdir')
+    self.mergeddir = os.path.join(self.tempdir, 'mergeddir')
+
+    for path in [self.upperdir, self.lowerdir, self.mergeddir]:
+      osutils.Touch(path, makedirs=True)
+
+  def testMountWriteUnmountRead(self):
+    mount_call = self.PatchObject(osutils, 'MountDir')
+    umount_call = self.PatchObject(osutils, 'UmountDir')
+    for cleanup in (True, False):
+      with osutils.MountOverlayContext(self.lowerdir, self.upperdir,
+                                       self.mergeddir, cleanup=cleanup):
+        mount_call.assert_any_call(
+            'overlay', self.mergeddir, fs_type='overlay', makedirs=False,
+            mount_opts=('lowerdir=%s' % self.lowerdir,
+                        'upperdir=%s' % self.upperdir,
+                        mock.ANY),
+            quiet=mock.ANY)
+      umount_call.assert_any_call(self.mergeddir, cleanup=cleanup)
+
+  def testMountFailFallback(self):
+    """Test that mount failure with overlay fs_type fallsback to overlayfs."""
+    def _FailOverlay(*_args, **kwargs):
+      if kwargs['fs_type'] == 'overlay':
+        raise cros_build_lib.RunCommandError(
+            'Phony failure',
+            cros_build_lib.CommandResult(cmd='MounDir', returncode=32))
+
+    mount_call = self.PatchObject(osutils, 'MountDir')
+    mount_call.side_effect = _FailOverlay
+    umount_call = self.PatchObject(osutils, 'UmountDir')
+    for cleanup in (True, False):
+      with osutils.MountOverlayContext(self.lowerdir, self.upperdir,
+                                       self.mergeddir, cleanup=cleanup):
+        mount_call.assert_any_call(
+            'overlay', self.mergeddir, fs_type='overlay', makedirs=False,
+            mount_opts=('lowerdir=%s' % self.lowerdir,
+                        'upperdir=%s' % self.upperdir,
+                        mock.ANY),
+            quiet=mock.ANY)
+        mount_call.assert_any_call(
+            'overlayfs', self.mergeddir, fs_type='overlayfs', makedirs=False,
+            mount_opts=('lowerdir=%s' % self.lowerdir,
+                        'upperdir=%s' % self.upperdir),
+            quiet=mock.ANY)
+      umount_call.assert_any_call(self.mergeddir, cleanup=cleanup)
+
+  def testNoValidWorkdirFallback(self):
+    """Test that we fallback to overlayfs when no valid workdir is found.."""
+    def _FailFileSystemCheck(_path1, _path2):
+      return False
+
+    check_filesystem = self.PatchObject(osutils, '_SameFileSystem')
+    check_filesystem.side_effect = _FailFileSystemCheck
+    mount_call = self.PatchObject(osutils, 'MountDir')
+    umount_call = self.PatchObject(osutils, 'UmountDir')
+
+    for cleanup in (True, False):
+      with osutils.MountOverlayContext(self.lowerdir, self.upperdir,
+                                       self.mergeddir, cleanup=cleanup):
+        mount_call.assert_any_call(
+            'overlayfs', self.mergeddir, fs_type='overlayfs', makedirs=False,
+            mount_opts=('lowerdir=%s' % self.lowerdir,
+                        'upperdir=%s' % self.upperdir),
+            quiet=mock.ANY)
+      umount_call.assert_any_call(self.mergeddir, cleanup=cleanup)
+
+
+class IterateMountPointsTests(cros_test_lib.TempDirTestCase):
+  """Test for IterateMountPoints function."""
+
+  def setUp(self):
+    self.proc_mount = os.path.join(self.tempdir, 'mounts')
+    osutils.WriteFile(
+        self.proc_mount,
+        r'''/dev/loop0 /mnt/dir_8 ext4 rw,relatime,data=ordered 0 0
+/dev/loop2 /mnt/dir_1 ext4 rw,relatime,data=ordered 0 0
+/dev/loop1 /mnt/dir_12 vfat rw 0 0
+/dev/loop4 /mnt/dir_3 ext4 ro,relatime 0 0
+weird\040system /mnt/weirdo unknown ro 0 0
+tmpfs /mnt/spaced\040dir tmpfs ro 0 0
+tmpfs /mnt/\134 tmpfs ro 0 0
+'''
+    )
+
+  def testOkay(self):
+    r = list(osutils.IterateMountPoints(self.proc_mount))
+    self.assertEqual(len(r), 7)
+    self.assertEqual(r[0].source, '/dev/loop0')
+    self.assertEqual(r[1].destination, '/mnt/dir_1')
+    self.assertEqual(r[2].filesystem, 'vfat')
+    self.assertEqual(r[3].options, 'ro,relatime')
+
+  def testEscape(self):
+    r = list(osutils.IterateMountPoints(self.proc_mount))
+    self.assertEqual(r[4].source, 'weird system')
+    self.assertEqual(r[5].destination, '/mnt/spaced dir')
+    self.assertEqual(r[6].destination, '/mnt/\\')
+
+
+class ResolveSymlinkTest(cros_test_lib.TestCase):
+  """Tests for ResolveSymlink."""
+
+  def testRelativeLink(self):
+    os.symlink('target', 'link')
+    self.assertEqual(osutils.ResolveSymlink('link'), 'target')
+    os.unlink('link')
+
+  def testAbsoluteLink(self):
+    os.symlink('/target', 'link')
+    self.assertEqual(osutils.ResolveSymlink('link'), '/target')
+    self.assertEqual(osutils.ResolveSymlink('link', '/root'), '/root/target')
+    os.unlink('link')
+
+  def testRecursion(self):
+    os.symlink('target', 'link1')
+    os.symlink('link1', 'link2')
+    self.assertEqual(osutils.ResolveSymlink('link2'), 'target')
+    os.unlink('link2')
+    os.unlink('link1')
+
+  def testRecursionWithAbsoluteLink(self):
+    os.symlink('target', 'link1')
+    os.symlink('/link1', 'link2')
+    self.assertEqual(osutils.ResolveSymlink('link2', '.'), './target')
+    os.unlink('link2')
+    os.unlink('link1')
+
+
+class IsInsideVmTest(cros_test_lib.MockTempDirTestCase):
+  """Test osutils.IsInsideVmTest function."""
+
+  def setUp(self):
+    self.model_file = os.path.join(self.tempdir, 'sda', 'device', 'model')
+    osutils.SafeMakedirs(os.path.dirname(self.model_file))
+    self.mock_glob = self.PatchObject(
+        glob, 'glob', return_value=[self.model_file])
+
+  def testIsInsideVm(self):
+    osutils.WriteFile(self.model_file, "VBOX")
+    self.assertTrue(osutils.IsInsideVm())
+    self.assertEqual(self.mock_glob.call_args[0][0],
+                     "/sys/block/*/device/model")
+
+    osutils.WriteFile(self.model_file, "VMware")
+    self.assertTrue(osutils.IsInsideVm())
+
+  def testIsNotInsideVm(self):
+    osutils.WriteFile(self.model_file, "ST1000DM000-1CH1")
+    self.assertFalse(osutils.IsInsideVm())
diff --git a/lib/parallel.py b/lib/parallel.py
new file mode 100644
index 0000000..def0be4
--- /dev/null
+++ b/lib/parallel.py
@@ -0,0 +1,810 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module for running cbuildbot stages in the background."""
+
+from __future__ import print_function
+
+import collections
+import contextlib
+import errno
+import functools
+import multiprocessing
+import os
+try:
+  import Queue
+except ImportError:
+  # Python-3 renamed to "queue".  We still use Queue to avoid collisions
+  # with naming variables as "queue".  Maybe we'll transition at some point.
+  # pylint: disable=F0401
+  import queue as Queue
+import signal
+import sys
+import tempfile
+import time
+import traceback
+
+from multiprocessing.managers import SyncManager
+
+from chromite.cbuildbot import failures_lib
+from chromite.cbuildbot import results_lib
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import signals
+from chromite.lib import timeout_util
+
+
+_BUFSIZE = 1024
+
+
+class HackTimeoutSyncManager(SyncManager):
+  """Increase the process join timeout in SyncManager.
+
+  The timeout for the manager process to join in the core library is
+  too low. The process is often killed before shutting down properly,
+  resulting in temporary directories (pymp-xxx) not being cleaned
+  up. This class increases the default timeout.
+  """
+
+  @staticmethod
+  def _finalize_manager(process, *args, **kwargs):
+    """Shutdown the manager process."""
+
+    def _join(functor, *args, **kwargs):
+      timeout = kwargs.get('timeout')
+      if not timeout is None and timeout < 1:
+        kwargs['timeout'] = 1
+
+      functor(*args, **kwargs)
+
+    process.join = functools.partial(_join, process.join)
+    SyncManager._finalize_manager(process, *args, **kwargs)
+
+
+def IgnoreSigint():
+  """Ignores any future SIGINTs."""
+  signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+
+def Manager():
+  """Create a background process for managing interprocess communication.
+
+  This manager wraps multiprocessing.Manager() and ensures that any sockets
+  created during initialization are created in '/tmp' rather than in a custom
+  temp directory. This is needed because TMPDIR might be really long, and
+  named sockets are limited to 108 characters.
+
+  Usage:
+    with Manager() as manager:
+      queue = manager.Queue()
+      ...
+
+  Returns:
+    The return value of multiprocessing.Manager()
+  """
+  old_tempdir_value, old_tempdir_env = osutils.SetGlobalTempDir('/tmp')
+  try:
+    m = HackTimeoutSyncManager()
+    # SyncManager doesn't handle KeyboardInterrupt exceptions well; pipes get
+    # broken and E_NOENT or E_PIPE errors are thrown from various places. We
+    # can just ignore SIGINT in the SyncManager and things will close properly
+    # when the enclosing with-statement exits.
+    m.start(IgnoreSigint)
+    return m
+  finally:
+    osutils.SetGlobalTempDir(old_tempdir_value, old_tempdir_env)
+
+
+class BackgroundFailure(failures_lib.CompoundFailure):
+  """Exception to show a step failed while running in a background process."""
+
+
+class ProcessExitTimeout(Exception):
+  """Raised if a process cannot exit within the timeout."""
+
+
+class ProcessUnexpectedExit(Exception):
+  """Raised if a process exits unexpectedly."""
+
+
+class ProcessSilentTimeout(Exception):
+  """Raised when there is no output for a prolonged period of time."""
+
+
+class UnexpectedException(Exception):
+  """Raised when exception occurs at an unexpected place."""
+
+
+class _BackgroundTask(multiprocessing.Process):
+  """Run a task in the background.
+
+  This task may be the 'Run' function from a buildbot stage or just a plain
+  function. It will be run in the background. Output from this task is saved
+  to a temporary file and is printed when the 'Wait' function is called.
+  """
+
+  # The time we give Python to startup and exit.
+  STARTUP_TIMEOUT = 60 * 5
+  EXIT_TIMEOUT = 60 * 10
+
+  # The time we allow processes to be silent. This is in place so that we
+  # eventually catch hanging processes, and print the remainder of our output.
+  # Do not increase this. Instead, adjust your program to print regular progress
+  # updates, so that cbuildbot (and buildbot) can know that it has not hung.
+  SILENT_TIMEOUT = 60 * 145
+
+  # The amount by which we reduce the SILENT_TIMEOUT every time we launch
+  # a subprocess. This helps ensure that children get a chance to enforce the
+  # SILENT_TIMEOUT prior to the parents enforcing it.
+  SILENT_TIMEOUT_STEP = 30
+  MINIMUM_SILENT_TIMEOUT = 60 * 135
+
+  # The time before terminating or killing a task.
+  SIGTERM_TIMEOUT = 30
+  SIGKILL_TIMEOUT = 60
+
+  # How long we allow debug commands to run (so we don't hang while trying to
+  # recover from a hang).
+  DEBUG_CMD_TIMEOUT = 60
+
+  # Interval we check for updates from print statements.
+  PRINT_INTERVAL = 1
+
+  def __init__(self, task, queue, semaphore=None, task_args=None,
+               task_kwargs=None):
+    """Create a new _BackgroundTask object.
+
+    If semaphore is supplied, it will be acquired for the duration of the
+    steps that are run in the background. This can be used to limit the
+    number of simultaneous parallel tasks.
+
+    Args:
+      task: The task (a functor) to run in the background.
+      queue: A queue to be used for managing communication between the parent
+        and child process. This queue must be valid for the length of the
+        life of the child process, until the parent has collected its status.
+      semaphore: The lock to hold while |task| runs.
+      task_args: A list of args to pass to the |task|.
+      task_kwargs: A dict of optional args to pass to the |task|.
+    """
+    multiprocessing.Process.__init__(self)
+    self._task = task
+    self._queue = queue
+    self._semaphore = semaphore
+    self._started = multiprocessing.Event()
+    self._killing = multiprocessing.Event()
+    self._output = None
+    self._parent_pid = None
+    self._task_args = task_args if task_args else ()
+    self._task_kwargs = task_kwargs if task_kwargs else {}
+
+  def _WaitForStartup(self):
+    # TODO(davidjames): Use python-2.7 syntax to simplify this.
+    self._started.wait(self.STARTUP_TIMEOUT)
+    msg = 'Process failed to start in %d seconds' % self.STARTUP_TIMEOUT
+    assert self._started.is_set(), msg
+
+  @classmethod
+  def _DebugRunCommand(cls, cmd, **kwargs):
+    """Swallow any exception RunCommand raises.
+
+    Since these commands are for purely informational purposes, we don't
+    want random issues causing the bot to die.
+
+    Returns:
+      Stdout on success
+    """
+    log_level = kwargs['debug_level']
+    try:
+      with timeout_util.Timeout(cls.DEBUG_CMD_TIMEOUT):
+        return cros_build_lib.RunCommand(cmd, **kwargs).output
+    except (cros_build_lib.RunCommandError, timeout_util.TimeoutError) as e:
+      logging.log(log_level, 'Running %s failed: %s', cmd[0], str(e))
+      return ''
+
+  # Debug commands to run in gdb.  A class member so tests can stub it out.
+  GDB_COMMANDS = (
+      'info proc all',
+      'info threads',
+      'thread apply all py-list',
+      'thread apply all py-bt',
+      'thread apply all bt',
+      'detach',
+  )
+
+  @classmethod
+  def _DumpDebugPid(cls, log_level, pid):
+    """Dump debug info about the hanging |pid|."""
+    pid = str(pid)
+    commands = (
+        ('pstree', '-Apals', pid),
+        ('lsof', '-p', pid),
+    )
+    for cmd in commands:
+      cls._DebugRunCommand(cmd, debug_level=log_level, error_code_ok=True,
+                           log_output=True)
+
+    stdin = '\n'.join(['echo \\n>>> %s\\n\n%s' % (x, x)
+                       for x in cls.GDB_COMMANDS])
+    cmd = ('gdb', '--nx', '-q', '-p', pid, '-ex', 'set prompt',)
+    cls._DebugRunCommand(cmd, debug_level=log_level, error_code_ok=True,
+                         log_output=True, input=stdin)
+
+  def Kill(self, sig, log_level, first=False):
+    """Kill process with signal, ignoring if the process is dead.
+
+    Args:
+      sig: Signal to send.
+      log_level: The log level of log messages.
+      first: Whether this is the first signal we've sent.
+    """
+    self._killing.set()
+    self._WaitForStartup()
+    if logging.getLogger().isEnabledFor(log_level):
+      # Dump debug information about the hanging process.
+      logging.log(log_level, 'Killing %r (sig=%r %s)', self.pid, sig,
+                  signals.StrSignal(sig))
+
+      if first:
+        ppid = str(self.pid)
+        output = self._DebugRunCommand(
+            ('pgrep', '-P', ppid), debug_level=log_level, print_cmd=False,
+            error_code_ok=True, capture_output=True)
+        for pid in [ppid] + output.splitlines():
+          self._DumpDebugPid(log_level, pid)
+
+    try:
+      os.kill(self.pid, sig)
+    except OSError as ex:
+      if ex.errno != errno.ESRCH:
+        raise
+
+  def Cleanup(self, silent=False):
+    """Wait for a process to exit."""
+    if os.getpid() != self._parent_pid or self._output is None:
+      return
+    try:
+      # Print output from subprocess.
+      if not silent and logging.getLogger().isEnabledFor(logging.DEBUG):
+        with open(self._output.name, 'r') as f:
+          for line in f:
+            logging.debug(line.rstrip('\n'))
+    finally:
+      # Clean up our temporary file.
+      osutils.SafeUnlink(self._output.name)
+      self._output.close()
+      self._output = None
+
+  def Wait(self):
+    """Wait for the task to complete.
+
+    Output from the task is printed as it runs.
+
+    If an exception occurs, return a string containing the traceback.
+    """
+    try:
+      # Flush stdout and stderr to be sure no output is interleaved.
+      sys.stdout.flush()
+      sys.stderr.flush()
+
+      # File position pointers are shared across processes, so we must open
+      # our own file descriptor to ensure output is not lost.
+      self._WaitForStartup()
+      silent_death_time = time.time() + self.SILENT_TIMEOUT
+      results = []
+      with open(self._output.name, 'r') as output:
+        pos = 0
+        running, exited_cleanly, task_errors, all_errors = (True, False, [], [])
+        while running:
+          # Check whether the process is still alive.
+          running = self.is_alive()
+
+          try:
+            errors, results = \
+                self._queue.get(True, self.PRINT_INTERVAL)
+            if errors:
+              task_errors.extend(errors)
+              all_errors.extend(errors)
+
+            running = False
+            exited_cleanly = True
+          except Queue.Empty:
+            pass
+
+          if not running:
+            # Wait for the process to actually exit. If the child doesn't exit
+            # in a timely fashion, kill it.
+            self.join(self.EXIT_TIMEOUT)
+            if self.exitcode is None:
+              msg = '%r hung for %r seconds' % (self, self.EXIT_TIMEOUT)
+              all_errors.extend(
+                  failures_lib.CreateExceptInfo(ProcessExitTimeout(msg), ''))
+              self._KillChildren([self])
+            elif not exited_cleanly:
+              msg = ('%r exited unexpectedly with code %s'
+                     % (self, self.exitcode))
+              all_errors.extend(
+                  failures_lib.CreateExceptInfo(ProcessUnexpectedExit(msg), ''))
+
+          # Read output from process.
+          output.seek(pos)
+          buf = output.read(_BUFSIZE)
+
+          if len(buf) > 0:
+            silent_death_time = time.time() + self.SILENT_TIMEOUT
+          elif running and time.time() > silent_death_time:
+            msg = ('No output from %r for %r seconds' %
+                   (self, self.SILENT_TIMEOUT))
+            all_errors.extend(
+                failures_lib.CreateExceptInfo(ProcessSilentTimeout(msg), ''))
+            self._KillChildren([self])
+
+            # Read remaining output from the process.
+            output.seek(pos)
+            buf = output.read(_BUFSIZE)
+            running = False
+
+          # Print output so far.
+          while len(buf) > 0:
+            sys.stdout.write(buf)
+            pos += len(buf)
+            if len(buf) < _BUFSIZE:
+              break
+            buf = output.read(_BUFSIZE)
+
+          # Print error messages if anything exceptional occurred.
+          if len(all_errors) > len(task_errors):
+            logging.PrintBuildbotStepFailure()
+            msg = '\n'.join(x.str for x in all_errors if x)
+            logging.warning(msg)
+            traceback.print_stack()
+
+          sys.stdout.flush()
+          sys.stderr.flush()
+
+      # Propagate any results.
+      for result in results:
+        results_lib.Results.Record(*result)
+
+    finally:
+      self.Cleanup(silent=True)
+
+    # If an error occurred, return it.
+    return all_errors
+
+  def start(self):
+    """Invoke multiprocessing.Process.start after flushing output/err."""
+    if self.SILENT_TIMEOUT < self.MINIMUM_SILENT_TIMEOUT:
+      raise AssertionError('Maximum recursion depth exceeded in %r' % self)
+
+    sys.stdout.flush()
+    sys.stderr.flush()
+    self._output = tempfile.NamedTemporaryFile(delete=False, bufsize=0,
+                                               prefix='chromite-parallel-')
+    self._parent_pid = os.getpid()
+    return multiprocessing.Process.start(self)
+
+  def run(self):
+    """Run the list of steps."""
+    if self._semaphore is not None:
+      self._semaphore.acquire()
+
+    errors = failures_lib.CreateExceptInfo(
+        UnexpectedException('Unexpected exception in %r' % self), '')
+    pid = os.getpid()
+    try:
+      errors = self._Run()
+    finally:
+      if not self._killing.is_set() and os.getpid() == pid:
+        results = results_lib.Results.Get()
+        self._queue.put((errors, results))
+        if self._semaphore is not None:
+          self._semaphore.release()
+
+  def _Run(self):
+    """Internal method for running the list of steps."""
+    # Register a handler for a signal that is rarely used.
+    def trigger_bt(_sig_num, frame):
+      logging.error('pre-kill notification (SIGXCPU); traceback:\n%s',
+                    ''.join(traceback.format_stack(frame)))
+    signal.signal(signal.SIGXCPU, trigger_bt)
+
+    sys.stdout.flush()
+    sys.stderr.flush()
+    errors = []
+    # Send all output to a named temporary file.
+    with open(self._output.name, 'w', 0) as output:
+      # Back up sys.std{err,out}. These aren't used, but we keep a copy so
+      # that they aren't garbage collected. We intentionally don't restore
+      # the old stdout and stderr at the end, because we want shutdown errors
+      # to also be sent to the same log file.
+      _orig_stdout, _orig_stderr = sys.stdout, sys.stderr
+
+      # Replace std{out,err} with unbuffered file objects.
+      os.dup2(output.fileno(), sys.__stdout__.fileno())
+      os.dup2(output.fileno(), sys.__stderr__.fileno())
+      sys.stdout = os.fdopen(sys.__stdout__.fileno(), 'w', 0)
+      sys.stderr = os.fdopen(sys.__stderr__.fileno(), 'w', 0)
+
+      try:
+        self._started.set()
+        results_lib.Results.Clear()
+
+        # Reduce the silent timeout by the prescribed amount.
+        cls = self.__class__
+        cls.SILENT_TIMEOUT -= cls.SILENT_TIMEOUT_STEP
+
+        # Actually launch the task.
+        self._task(*self._task_args, **self._task_kwargs)
+      except failures_lib.StepFailure as ex:
+        errors.extend(failures_lib.CreateExceptInfo(
+            ex, traceback.format_exc()))
+      except BaseException as ex:
+        errors.extend(failures_lib.CreateExceptInfo(
+            ex, traceback.format_exc()))
+        if self._killing.is_set():
+          traceback.print_exc()
+      finally:
+        sys.stdout.flush()
+        sys.stderr.flush()
+
+    return errors
+
+  @classmethod
+  def _KillChildren(cls, bg_tasks, log_level=logging.WARNING):
+    """Kill a deque of background tasks.
+
+    This is needed to prevent hangs in the case where child processes refuse
+    to exit.
+
+    Args:
+      bg_tasks: A list filled with _BackgroundTask objects.
+      log_level: The log level of log messages.
+    """
+    logging.log(log_level, 'Killing tasks: %r', bg_tasks)
+    siglist = (
+        (signal.SIGXCPU, cls.SIGTERM_TIMEOUT),
+        (signal.SIGTERM, cls.SIGKILL_TIMEOUT),
+        (signal.SIGKILL, None),
+    )
+    first = True
+    for sig, timeout in siglist:
+      # Send signal to all tasks.
+      for task in bg_tasks:
+        task.Kill(sig, log_level, first)
+      first = False
+
+      # Wait for all tasks to exit, if requested.
+      if timeout is None:
+        for task in bg_tasks:
+          task.join()
+          task.Cleanup()
+        break
+
+      # Wait until timeout expires.
+      end_time = time.time() + timeout
+      while bg_tasks:
+        time_left = end_time - time.time()
+        if time_left <= 0:
+          break
+        task = bg_tasks[-1]
+        task.join(time_left)
+        if task.exitcode is not None:
+          task.Cleanup()
+          bg_tasks.pop()
+
+  @classmethod
+  @contextlib.contextmanager
+  def ParallelTasks(cls, steps, max_parallel=None, halt_on_error=False):
+    """Run a list of functions in parallel.
+
+    This function launches the provided functions in the background, yields,
+    and then waits for the functions to exit.
+
+    The output from the functions is saved to a temporary file and printed as if
+    they were run in sequence.
+
+    If exceptions occur in the steps, we join together the tracebacks and print
+    them after all parallel tasks have finished running. Further, a
+    BackgroundFailure is raised with full stack traces of all exceptions.
+
+    Args:
+      steps: A list of functions to run.
+      max_parallel: The maximum number of simultaneous tasks to run in parallel.
+        By default, run all tasks in parallel.
+      halt_on_error: After the first exception occurs, halt any running steps,
+        and squelch any further output, including any exceptions that might
+        occur.
+    """
+
+    semaphore = None
+    if max_parallel is not None:
+      semaphore = multiprocessing.Semaphore(max_parallel)
+
+    # First, start all the steps.
+    with Manager() as manager:
+      bg_tasks = collections.deque()
+      for step in steps:
+        task = cls(step, queue=manager.Queue(), semaphore=semaphore)
+        task.start()
+        bg_tasks.append(task)
+
+      foreground_except = None
+      try:
+        yield
+      except BaseException:
+        foreground_except = sys.exc_info()
+      finally:
+        errors = []
+        skip_bg_wait = halt_on_error and foreground_except is not None
+        # Wait for each step to complete.
+        while not skip_bg_wait and bg_tasks:
+          task = bg_tasks.popleft()
+          task_errors = task.Wait()
+          if task_errors:
+            errors.extend(task_errors)
+            if halt_on_error:
+              break
+
+        # If there are still tasks left, kill them.
+        if bg_tasks:
+          cls._KillChildren(bg_tasks, log_level=logging.DEBUG)
+
+        # Propagate any exceptions; foreground exceptions take precedence.
+        if foreground_except is not None:
+          # contextlib ignores caught exceptions unless explicitly re-raised.
+          raise foreground_except[0], foreground_except[1], foreground_except[2]
+        if errors:
+          raise BackgroundFailure(exc_infos=errors)
+
+  @staticmethod
+  def TaskRunner(queue, task, onexit=None, task_args=None, task_kwargs=None):
+    """Run task(*input) for each input in the queue.
+
+    Returns when it encounters an _AllTasksComplete object on the queue.
+    If exceptions occur, save them off and re-raise them as a
+    BackgroundFailure once we've finished processing the items in the queue.
+
+    Args:
+      queue: A queue of tasks to run. Add tasks to this queue, and they will
+        be run.
+      task: Function to run on each queued input.
+      onexit: Function to run after all inputs are processed.
+      task_args: A list of args to pass to the |task|.
+      task_kwargs: A dict of optional args to pass to the |task|.
+    """
+    if task_args is None:
+      task_args = []
+    elif not isinstance(task_args, list):
+      task_args = list(task_args)
+    if task_kwargs is None:
+      task_kwargs = {}
+
+    errors = []
+    while True:
+      # Wait for a new item to show up on the queue. This is a blocking wait,
+      # so if there's nothing to do, we just sit here.
+      x = queue.get()
+      if isinstance(x, _AllTasksComplete):
+        # All tasks are complete, so we should exit.
+        break
+      elif not isinstance(x, list):
+        x = task_args + list(x)
+      else:
+        x = task_args + x
+
+      # If no tasks failed yet, process the remaining tasks.
+      if not errors:
+        try:
+          task(*x, **task_kwargs)
+        except BaseException as ex:
+          errors.extend(
+              failures_lib.CreateExceptInfo(ex, traceback.format_exc()))
+
+    # Run exit handlers.
+    if onexit:
+      onexit()
+
+    # Propagate any exceptions.
+    if errors:
+      raise BackgroundFailure(exc_infos=errors)
+
+
+def RunParallelSteps(steps, max_parallel=None, halt_on_error=False,
+                     return_values=False):
+  """Run a list of functions in parallel.
+
+  This function blocks until all steps are completed.
+
+  The output from the functions is saved to a temporary file and printed as if
+  they were run in sequence.
+
+  If exceptions occur in the steps, we join together the tracebacks and print
+  them after all parallel tasks have finished running. Further, a
+  BackgroundFailure is raised with full stack traces of all exceptions.
+
+  Args:
+    steps: A list of functions to run.
+    max_parallel: The maximum number of simultaneous tasks to run in parallel.
+      By default, run all tasks in parallel.
+    halt_on_error: After the first exception occurs, halt any running steps,
+      and squelch any further output, including any exceptions that might occur.
+    return_values: If set to True, RunParallelSteps returns a list containing
+      the return values of the steps.  Defaults to False.
+
+  Returns:
+    If |return_values| is True, the function will return a list containing the
+    return values of the steps.
+
+  Example:
+    # This snippet will execute in parallel:
+    #   somefunc()
+    #   anotherfunc()
+    #   funcfunc()
+    steps = [somefunc, anotherfunc, funcfunc]
+    RunParallelSteps(steps)
+    # Blocks until all calls have completed.
+  """
+  def ReturnWrapper(queue, fn):
+    """Put the return value of |fn| into |queue|."""
+    queue.put(fn())
+
+  full_steps = []
+  queues = []
+  with cros_build_lib.ContextManagerStack() as stack:
+    if return_values:
+      # We use a managed queue here, because the child process will wait for the
+      # queue(pipe) to be flushed (i.e., when items are read from the queue)
+      # before exiting, and with a regular queue this may result in hangs for
+      # large return values.  But with a managed queue, the manager process will
+      # read the items and hold on to them until the managed queue goes out of
+      # scope and is cleaned up.
+      manager = stack.Add(Manager)
+      for step in steps:
+        queue = manager.Queue()
+        queues.append(queue)
+        full_steps.append(functools.partial(ReturnWrapper, queue, step))
+    else:
+      full_steps = steps
+
+    with _BackgroundTask.ParallelTasks(full_steps, max_parallel=max_parallel,
+                                       halt_on_error=halt_on_error):
+      pass
+
+    if return_values:
+      return [queue.get_nowait() for queue in queues]
+
+
+class _AllTasksComplete(object):
+  """Sentinel object to indicate that all tasks are complete."""
+
+
+@contextlib.contextmanager
+def BackgroundTaskRunner(task, *args, **kwargs):
+  """Run the specified task on each queued input in a pool of processes.
+
+  This context manager starts a set of workers in the background, who each
+  wait for input on the specified queue. For each input on the queue, these
+  workers run task(*args + *input, **kwargs). Note that certain kwargs will
+  not pass through to the task (see Args below for the list).
+
+  The output from these tasks is saved to a temporary file. When control
+  returns to the context manager, the background output is printed in order,
+  as if the tasks were run in sequence.
+
+  If exceptions occur in the steps, we join together the tracebacks and print
+  them after all parallel tasks have finished running. Further, a
+  BackgroundFailure is raised with full stack traces of all exceptions.
+
+  Example:
+    # This will run somefunc(1, 'small', 'cow', foo='bar') in the background
+    # as soon as data is added to the queue (i.e. queue.put() is called).
+
+    def somefunc(arg1, arg2, arg3, foo=None):
+      ...
+
+    with BackgroundTaskRunner(somefunc, 1, foo='bar') as queue:
+      ... do random stuff ...
+      queue.put(['small', 'cow'])
+      ... do more random stuff while somefunc() runs ...
+    # Exiting the with statement will block until all calls have completed.
+
+  Args:
+    task: Function to run on each queued input.
+    queue: A queue of tasks to run. Add tasks to this queue, and they will
+      be run in the background.  If None, one will be created on the fly.
+    processes: Number of processes to launch.
+    onexit: Function to run in each background process after all inputs are
+      processed.
+    halt_on_error: After the first exception occurs, halt any running steps, and
+      squelch any further output, including any exceptions that might occur.
+      Halts on exceptions in any of the background processes, or in the
+      foreground process using the BackgroundTaskRunner.
+  """
+
+  queue = kwargs.pop('queue', None)
+  processes = kwargs.pop('processes', None)
+  onexit = kwargs.pop('onexit', None)
+  halt_on_error = kwargs.pop('halt_on_error', False)
+
+  with cros_build_lib.ContextManagerStack() as stack:
+    if queue is None:
+      manager = stack.Add(Manager)
+      queue = manager.Queue()
+
+    if not processes:
+      processes = multiprocessing.cpu_count()
+
+    child = functools.partial(_BackgroundTask.TaskRunner, queue, task,
+                              onexit=onexit, task_args=args,
+                              task_kwargs=kwargs)
+    steps = [child] * processes
+    with _BackgroundTask.ParallelTasks(steps, halt_on_error=halt_on_error):
+      try:
+        yield queue
+      finally:
+        for _ in xrange(processes):
+          queue.put(_AllTasksComplete())
+
+
+def RunTasksInProcessPool(task, inputs, processes=None, onexit=None):
+  """Run the specified function with each supplied input in a pool of processes.
+
+  This function runs task(*x) for x in inputs in a pool of processes. This
+  function blocks until all tasks are completed.
+
+  The output from these tasks is saved to a temporary file. When control
+  returns to the context manager, the background output is printed in order,
+  as if the tasks were run in sequence.
+
+  If exceptions occur in the steps, we join together the tracebacks and print
+  them after all parallel tasks have finished running. Further, a
+  BackgroundFailure is raised with full stack traces of all exceptions.
+
+  Example:
+    # This snippet will execute in parallel:
+    #   somefunc('hi', 'fat', 'code')
+    #   somefunc('foo', 'bar', 'cow')
+
+    def somefunc(arg1, arg2, arg3):
+      ...
+    ...
+    inputs = [
+      ['hi', 'fat', 'code'],
+      ['foo', 'bar', 'cow'],
+    ]
+    RunTasksInProcessPool(somefunc, inputs)
+    # Blocks until all calls have completed.
+
+  Args:
+    task: Function to run on each input.
+    inputs: List of inputs.
+    processes: Number of processes, at most, to launch.
+    onexit: Function to run in each background process after all inputs are
+      processed.
+
+  Returns:
+    Returns a list containing the return values of the task for each input.
+  """
+  if not processes:
+    # - Use >=16 processes by default, in case it's a network-bound operation.
+    # - Try to use all of the CPUs, in case it's a CPU-bound operation.
+    processes = min(max(16, multiprocessing.cpu_count()), len(inputs))
+
+  with Manager() as manager:
+    # Set up output queue.
+    out_queue = manager.Queue()
+    fn = lambda idx, task_args: out_queue.put((idx, task(*task_args)))
+
+    # Micro-optimization: Setup the queue so that BackgroundTaskRunner
+    # doesn't have to set up another Manager process.
+    queue = manager.Queue()
+
+    with BackgroundTaskRunner(fn, queue=queue, processes=processes,
+                              onexit=onexit) as queue:
+      for idx, input_args in enumerate(inputs):
+        queue.put((idx, input_args))
+
+    return [x[1] for x in sorted(out_queue.get() for _ in range(len(inputs)))]
diff --git a/lib/parallel_unittest b/lib/parallel_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/parallel_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/parallel_unittest.py b/lib/parallel_unittest.py
new file mode 100644
index 0000000..9227dbf
--- /dev/null
+++ b/lib/parallel_unittest.py
@@ -0,0 +1,542 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for parallel library."""
+
+from __future__ import print_function
+
+import contextlib
+import cPickle
+import mock
+import multiprocessing
+import os
+import signal
+import sys
+import tempfile
+import time
+import unittest
+try:
+  import Queue
+except ImportError:
+  # Python-3 renamed to "queue".  We still use Queue to avoid collisions
+  # with naming variables as "queue".  Maybe we'll transition at some point.
+  # pylint: disable=F0401
+  import queue as Queue
+
+from chromite.lib import cros_logging as logging
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import partial_mock
+from chromite.lib import timeout_util
+
+
+# pylint: disable=protected-access
+
+
+_BUFSIZE = 10 ** 4
+_EXIT_TIMEOUT = 30
+_NUM_WRITES = 100
+_NUM_THREADS = 50
+_TOTAL_BYTES = _NUM_THREADS * _NUM_WRITES * _BUFSIZE
+_GREETING = 'hello world'
+_SKIP_FLAKY_TESTS = True
+
+
class FakeMultiprocessManager(object):
  """A fake implementation of the multiprocess manager.

  This is only intended for use with ParallelMock.
  """

  def __enter__(self, *args, **kwargs):
    # Context-manager protocol: there is no manager process to start, so
    # simply hand back the fake itself.
    return self

  def __exit__(self, *args, **kwargs):
    # Nothing to shut down; returning None never suppresses exceptions.
    return None

  def Queue(self):
    """Returns a plain multiprocessing queue (no proxy involved)."""
    return multiprocessing.Queue()

  def RLock(self):
    """Returns a plain multiprocessing re-entrant lock (no proxy involved)."""
    return multiprocessing.RLock()

  def dict(self, *args, **kwargs):
    """Returns an ordinary dict in place of a managed dict proxy."""
    return dict(*args, **kwargs)

  def list(self, *args, **kwargs):
    """Returns an ordinary list in place of a managed list proxy."""
    return list(*args, **kwargs)
+
+
class ParallelMock(partial_mock.PartialMock):
  """Run parallel steps in sequence for testing purposes.

  This class updates chromite.lib.parallel to just run processes in
  sequence instead of running them in parallel. This is useful for
  testing.
  """

  TARGET = 'chromite.lib.parallel._BackgroundTask'
  ATTRS = ('ParallelTasks', 'TaskRunner')

  def PreStart(self):
    # Swap in the in-process fake manager before PartialMock installs the
    # ParallelTasks/TaskRunner patches below.
    self.PatchObject(parallel, 'Manager', side_effect=FakeMultiprocessManager)
    partial_mock.PartialMock.PreStart(self)

  @contextlib.contextmanager
  def ParallelTasks(self, steps, max_parallel=None, halt_on_error=False):
    """Mock of _BackgroundTask.ParallelTasks: run |steps| serially.

    The steps are deferred until the caller's "with" body finishes, which
    mirrors when the real background tasks would be joined.
    """
    assert max_parallel is None or isinstance(max_parallel, (int, long))
    assert isinstance(halt_on_error, bool)
    try:
      yield
    finally:
      for step in steps:
        step()

  def TaskRunner(self, queue, task, onexit=None, task_args=None,
                 task_kwargs=None):
    """Mock of _BackgroundTask.TaskRunner: drain |queue| synchronously."""
    # Setup of these matches the original code.
    if task_args is None:
      task_args = []
    elif not isinstance(task_args, list):
      task_args = list(task_args)
    if task_kwargs is None:
      task_kwargs = {}

    try:
      while True:
        # Wait for a new item to show up on the queue. This is a blocking wait,
        # so if there's nothing to do, we just sit here.
        x = queue.get()
        if isinstance(x, parallel._AllTasksComplete):
          # All tasks are complete, so we should exit.
          break
        # Queued items supply the trailing positional args for each call.
        x = task_args + list(x)
        task(*x, **task_kwargs)
    finally:
      if onexit:
        onexit()
+
+
class BackgroundTaskVerifier(partial_mock.PartialMock):
  """Verify that queues are empty after BackgroundTaskRunner runs.

  BackgroundTaskRunner should always empty its input queues, even if an
  exception occurs. This is important for preventing a deadlock in the case
  where a thread fails partway through (e.g. user presses Ctrl-C before all
  input can be processed).
  """

  TARGET = 'chromite.lib.parallel'
  ATTRS = ('BackgroundTaskRunner',)

  @contextlib.contextmanager
  def BackgroundTaskRunner(self, task, *args, **kwargs):
    # Inject a queue if the caller did not provide one, so it can be
    # inspected once the real runner has finished.
    queue = kwargs.setdefault('queue', multiprocessing.Queue())
    args = [task] + list(args)
    try:
      # self.backup holds the original (unmocked) BackgroundTaskRunner.
      with self.backup['BackgroundTaskRunner'](*args, **kwargs):
        yield queue
    finally:
      # A leftover item means the runner failed to drain its input queue,
      # which is the deadlock-prone behavior this mock exists to catch.
      try:
        queue.get(False)
      except Queue.Empty:
        pass
      else:
        raise AssertionError('Expected empty queue after BackgroundTaskRunner')
+
+
class TestManager(cros_test_lib.TestCase):
  """Test parallel.Manager()."""

  def testSigint(self):
    """Tests that parallel.Manager() ignores SIGINT."""
    with parallel.Manager() as manager:
      queue = manager.Queue()
      # Deliver SIGINT straight to the manager process. If the manager did
      # not ignore it, the process would die and the proxy call below would
      # fail with a connection error rather than the expected Queue.Empty.
      os.kill(manager._process.pid, signal.SIGINT)
      with self.assertRaises(Queue.Empty):
        queue.get(block=False)
+
+
class TestBackgroundWrapper(cros_test_lib.TestCase):
  """Unittests for background wrapper."""

  def setUp(self):
    self.tempfile = None

  def tearDown(self):
    # Wait for children to exit.
    try:
      timeout_util.WaitForReturnValue([[]], multiprocessing.active_children,
                                      timeout=_EXIT_TIMEOUT)
    except timeout_util.TimeoutError:
      pass

    # Complain if there are any children left over.
    active_children = multiprocessing.active_children()
    for child in active_children:
      if hasattr(child, 'Kill'):
        # Forcibly reap leaked chromite background tasks so later tests are
        # not affected, then fail the test below.
        child.Kill(signal.SIGKILL, log_level=logging.WARNING)
        child.join()
    self.assertEqual(multiprocessing.active_children(), [])
    self.assertEqual(active_children, [])

  def wrapOutputTest(self, func):
    """Runs |func| with stdout redirected and returns the captured text."""
    # Set _PRINT_INTERVAL to a smaller number to make it easier to
    # reproduce bugs.
    with mock.patch.multiple(parallel._BackgroundTask, PRINT_INTERVAL=0.01):
      with tempfile.NamedTemporaryFile(bufsize=0) as output:
        with mock.patch.multiple(sys, stdout=output):
          func()
        # Re-open unbuffered to read back everything the children wrote.
        with open(output.name, 'r', 0) as tmp:
          tmp.seek(0)
          return tmp.read()
+
+
class TestHelloWorld(TestBackgroundWrapper):
  """Test HelloWorld output in various background environments."""

  def setUp(self):
    # Set by the background task once its first write has happened.
    self.printed_hello = multiprocessing.Event()

  def _HelloWorld(self):
    """Write 'hello world' to stdout."""
    sys.stdout.write('hello')
    sys.stdout.flush()
    sys.stdout.seek(0)
    self.printed_hello.set()

    # Wait for the parent process to read the output. Once the output
    # has been read, try writing 'hello world' again, to be sure that
    # rewritten output is not read twice.
    time.sleep(parallel._BackgroundTask.PRINT_INTERVAL * 10)
    sys.stdout.write(_GREETING)
    sys.stdout.flush()

  def _ParallelHelloWorld(self):
    """Write 'hello world' to stdout using multiple processes."""
    with parallel.Manager() as manager:
      queue = manager.Queue()
      with parallel.BackgroundTaskRunner(self._HelloWorld, queue=queue):
        queue.put([])
        self.printed_hello.wait()

  def VerifyDefaultQueue(self):
    """Verify that BackgroundTaskRunner will create a queue on its own."""
    with parallel.BackgroundTaskRunner(self._HelloWorld) as queue:
      queue.put([])
      self.printed_hello.wait()

  def testParallelHelloWorld(self):
    """Test that output is not written multiple times when seeking."""
    out = self.wrapOutputTest(self._ParallelHelloWorld)
    self.assertEquals(out, _GREETING)

  def testMultipleHelloWorlds(self):
    """Test that multiple threads can be created."""
    parallel.RunParallelSteps([self.testParallelHelloWorld] * 2)

  def testLongTempDirectory(self):
    """Test that we can handle a long temporary directory."""
    with osutils.TempDir() as tempdir:
      # 'xxx/' * 100 yields a ~400-character path beneath the temp dir.
      new_tempdir = os.path.join(tempdir, 'xxx/' * 100)
      osutils.SafeMakedirs(new_tempdir)
      old_tempdir, old_tempdir_env = osutils.SetGlobalTempDir(new_tempdir)
      try:
        self.testParallelHelloWorld()
      finally:
        # Always restore the global temp dir, even if the test failed.
        osutils.SetGlobalTempDir(old_tempdir, old_tempdir_env)
+
+
+def _BackgroundTaskRunnerArgs(results, arg1, arg2, kwarg1=None, kwarg2=None):
+  """Helper for TestBackgroundTaskRunnerArgs
+
+  We specifically want a module function to test against and not a class member.
+  """
+  results.put((arg1, arg2, kwarg1, kwarg2))
+
+
class TestBackgroundTaskRunnerArgs(TestBackgroundWrapper):
  """Unittests for BackgroundTaskRunner argument handling."""

  def testArgs(self):
    """Test that we can pass args down to the task."""
    with parallel.Manager() as manager:
      results = manager.Queue()
      arg2s = set((1, 2, 3))
      with parallel.BackgroundTaskRunner(_BackgroundTaskRunnerArgs, results,
                                         'arg1', kwarg1='kwarg1') as queue:
        # Each queued item supplies only arg2; results/arg1/kwarg1 are bound
        # once by the runner and shared across all calls.
        for arg2 in arg2s:
          queue.put((arg2,))

      # Since the queue is unordered, need to handle arg2 specially.
      result_arg2s = set()
      for _ in xrange(3):
        result = results.get()
        self.assertEquals(result[0], 'arg1')
        result_arg2s.add(result[1])
        self.assertEquals(result[2], 'kwarg1')
        # kwarg2 was never passed, so its default (None) must come through.
        self.assertEquals(result[3], None)
      self.assertEquals(arg2s, result_arg2s)
      self.assertEquals(results.empty(), True)
+
+
class TestFastPrinting(TestBackgroundWrapper):
  """Stress tests for background sys.stdout handling."""

  def _FastPrinter(self):
    # Emitting a large burst of output quickly is an effective way to
    # trigger race conditions in this module's output handling.
    chunk = 'x' * _BUFSIZE
    for _ in range(_NUM_WRITES - 1):
      sys.stdout.write(chunk)
    # Final write is one byte short so the trailing newline keeps the
    # per-process total at exactly _NUM_WRITES * _BUFSIZE bytes.
    sys.stdout.write(chunk[:-1] + '\n')

  def _ParallelPrinter(self):
    parallel.RunParallelSteps([self._FastPrinter] * _NUM_THREADS)

  def _NestedParallelPrinter(self):
    parallel.RunParallelSteps([self._ParallelPrinter])

  def testSimpleParallelPrinter(self):
    """Verify the full byte count survives parallel printing."""
    captured = self.wrapOutputTest(self._ParallelPrinter)
    self.assertEquals(len(captured), _TOTAL_BYTES)

  def testNestedParallelPrinter(self):
    """Verify that no output is lost when lots of output is written."""
    captured = self.wrapOutputTest(self._NestedParallelPrinter)
    self.assertEquals(len(captured), _TOTAL_BYTES)
+
+
class TestRunParallelSteps(cros_test_lib.TestCase):
  """Tests for RunParallelSteps."""

  def testReturnValues(self):
    """Test that we pass return values through when requested."""
    def f1():
      return 1
    def f2():
      return 2
    def f3():
      # No explicit return: the step's value should surface as None.
      pass

    return_values = parallel.RunParallelSteps([f1, f2, f3], return_values=True)
    self.assertEquals(return_values, [1, 2, None])

  def testLargeReturnValues(self):
    """Test that the managed queue prevents hanging on large return values."""
    def f1():
      return ret_value

    # Build the large payload in one shot; the previous += loop over 10000
    # iterations was accidentally quadratic in CPython.
    ret_value = 'This will be repeated many times.\n' * 10000

    return_values = parallel.RunParallelSteps([f1], return_values=True)
    self.assertEquals(return_values, [ret_value])
+
+
class TestParallelMock(TestBackgroundWrapper):
  """Test the ParallelMock class."""

  def setUp(self):
    self._calls = 0

  def _Callback(self):
    # Counts invocations; the return value doubles as a call serial number.
    self._calls += 1
    return self._calls

  def testRunParallelSteps(self):
    """Make sure RunParallelSteps is mocked out."""
    with ParallelMock():
      parallel.RunParallelSteps([self._Callback])
      self.assertEqual(1, self._calls)

  def testBackgroundTaskRunner(self):
    """Make sure BackgroundTaskRunner is mocked out."""
    with ParallelMock():
      # With no inputs, the task is never invoked.
      parallel.RunTasksInProcessPool(self._Callback, [])
      self.assertEqual(0, self._calls)
      result = parallel.RunTasksInProcessPool(self._Callback, [[]])
      self.assertEqual(1, self._calls)
      self.assertEqual([1], result)
      # No inputs again, but onexit fires once per requested process,
      # accounting for the 9 additional calls asserted below.
      result = parallel.RunTasksInProcessPool(self._Callback, [], processes=9,
                                              onexit=self._Callback)
      self.assertEqual(10, self._calls)
      self.assertEqual([], result)
      # Ten inputs -> ten more serial numbers, returned in input order.
      result = parallel.RunTasksInProcessPool(self._Callback, [[]] * 10)
      self.assertEqual(range(11, 21), result)
+
+
class TestExceptions(cros_test_lib.MockOutputTestCase):
  """Test cases where child processes raise exceptions."""

  def _SystemExit(self):
    # Write output before exiting so the test can verify it is forwarded.
    sys.stdout.write(_GREETING)
    sys.exit(1)

  def _KeyboardInterrupt(self):
    sys.stdout.write(_GREETING)
    raise KeyboardInterrupt()

  def _BadPickler(self):
    # Returns a bound method, which cannot be pickled, forcing a failure
    # when the child ships its return value back to the parent.
    return self._BadPickler

  class _TestException(Exception):
    """Custom exception for testing."""

  def _VerifyExceptionRaised(self, fn, exc_type):
    """A helper function to verify the correct |exc_type| is raised."""
    # Exercise both entry points that spawn background tasks.
    for task in (lambda: parallel.RunTasksInProcessPool(fn, [[]]),
                 lambda: parallel.RunParallelSteps([fn])):
      output_str = ex_str = ex = None
      with self.OutputCapturer() as capture:
        with self.assertRaises(parallel.BackgroundFailure) as ex:
          task()
        output_str = capture.GetStdout()
        ex_str = str(ex.exception)

      # The failure must record the original exception type, the child's
      # stdout must have been forwarded, and str() of the failure must
      # mention the type.
      self.assertTrue(exc_type in [x.type for x in ex.exception.exc_infos])
      self.assertEqual(output_str, _GREETING)
      self.assertTrue(str(exc_type) in ex_str)

  def testExceptionRaising(self):
    """Tests the exceptions are raised correctly."""
    self.StartPatcher(BackgroundTaskVerifier())
    self._VerifyExceptionRaised(self._KeyboardInterrupt, KeyboardInterrupt)
    self._VerifyExceptionRaised(self._SystemExit, SystemExit)

  def testExceptionPriority(self):
    """Tests that foreground exceptions take priority over background."""
    self.StartPatcher(BackgroundTaskVerifier())
    # The background KeyboardInterrupt must not mask the foreground error.
    with self.assertRaises(self._TestException):
      with parallel.BackgroundTaskRunner(self._KeyboardInterrupt,
                                         processes=1) as queue:
        queue.put([])
        raise self._TestException()

  def testFailedPickle(self):
    """PicklingError should be thrown when an argument fails to pickle."""
    with self.assertRaises(cPickle.PicklingError):
      parallel.RunTasksInProcessPool(self._SystemExit, [[self._SystemExit]])

  def testFailedPickleOnReturn(self):
    """PicklingError should be thrown when a return value fails to pickle."""
    with self.assertRaises(parallel.BackgroundFailure):
      parallel.RunParallelSteps([self._BadPickler], return_values=True)
+
+
class _TestForegroundException(Exception):
  """Exception raised by the foreground (test) process in TestHalting."""
+
+
class TestHalting(cros_test_lib.MockOutputTestCase, TestBackgroundWrapper):
  """Test that child processes are halted when exceptions occur."""

  def setUp(self):
    # Events used to observe how far each step progressed before halting.
    self.failed = multiprocessing.Event()
    self.passed = multiprocessing.Event()

  def _GetKillChildrenTimeout(self):
    """Return a timeout that is long enough for _BackgroundTask._KillChildren.

    This unittest is not meant to restrict which signal succeeds in killing the
    background process, so use a long enough timeout whenever asserting that the
    background process is killed, keeping buffer for slow builders.
    """
    return (parallel._BackgroundTask.SIGTERM_TIMEOUT +
            parallel._BackgroundTask.SIGKILL_TIMEOUT) + 30

  def _Pass(self):
    self.passed.set()
    sys.stdout.write(_GREETING)

  def _Exit(self):
    sys.stdout.write(_GREETING)
    # Wait for _Pass to run first, so the test can assert both outcomes.
    self.passed.wait()
    sys.exit(1)

  def _Fail(self):
    # Expected to be killed before the timeout; if it survives, |failed| is
    # set and the assertFalse in testExceptionRaising catches it.
    self.failed.wait(self._GetKillChildrenTimeout())
    self.failed.set()

  def _PassEventually(self):
    # Same pattern as _Fail, but signalling via |passed|.
    self.passed.wait(self._GetKillChildrenTimeout())
    self.passed.set()

  @unittest.skipIf(_SKIP_FLAKY_TESTS, 'Occasionally fails.')
  def testExceptionRaising(self):
    """Test that exceptions halt all running steps."""
    steps = [self._Exit, self._Fail, self._Pass, self._Fail]
    output_str, ex_str = None, None
    with self.OutputCapturer() as capture:
      try:
        parallel.RunParallelSteps(steps, halt_on_error=True)
      except parallel.BackgroundFailure as ex:
        output_str = capture.GetStdout()
        ex_str = str(ex)
        logging.debug(ex_str)
    self.assertTrue('Traceback' in ex_str)
    self.assertTrue(self.passed.is_set())
    self.assertEqual(output_str, _GREETING)
    # The _Fail steps must have been killed before their waits timed out.
    self.assertFalse(self.failed.is_set())

  def testForegroundExceptionRaising(self):
    """Test that BackgroundTaskRunner halts tasks on a foreground exception."""
    with self.assertRaises(_TestForegroundException):
      with parallel.BackgroundTaskRunner(self._PassEventually,
                                         processes=1,
                                         halt_on_error=True) as queue:
        queue.put([])
        raise _TestForegroundException()
    # The background task must have been killed before it could set |passed|.
    self.assertFalse(self.passed.is_set())

  @unittest.skipIf(_SKIP_FLAKY_TESTS, 'Occasionally fails.')
  def testTempFileCleanup(self):
    """Test that all temp files are cleaned up."""
    with osutils.TempDir() as tempdir:
      self.assertEqual(os.listdir(tempdir), [])
      self.testExceptionRaising()
      self.assertEqual(os.listdir(tempdir), [])

  def testKillQuiet(self, steps=None, **kwargs):
    """Test that processes do get killed if they're silent for too long."""
    if steps is None:
      steps = [self._Fail] * 2
    # Shrink all the timeouts so the silent-kill path triggers quickly.
    kwargs.setdefault('SILENT_TIMEOUT', 0.1)
    kwargs.setdefault('MINIMUM_SILENT_TIMEOUT', 0.01)
    kwargs.setdefault('SILENT_TIMEOUT_STEP', 0)
    kwargs.setdefault('SIGTERM_TIMEOUT', 0.1)
    kwargs.setdefault('PRINT_INTERVAL', 0.01)
    # NOTE(review): presumably 'detach' makes the debug gdb session exit
    # promptly instead of staying attached -- confirm against parallel.py.
    kwargs.setdefault('GDB_COMMANDS', ('detach',))

    ex_str = None
    with mock.patch.multiple(parallel._BackgroundTask, **kwargs):
      with self.OutputCapturer() as capture:
        try:
          with cros_test_lib.LoggingCapturer():
            parallel.RunParallelSteps(steps)
        except parallel.BackgroundFailure as ex:
          ex_str = str(ex)
          error_str = capture.GetStderr()
    # The kill dump must reference this file, and a failure must be raised.
    self.assertTrue('parallel_unittest.py' in error_str)
    self.assertTrue(ex_str)
+
+
class TestConstants(cros_test_lib.TestCase):
  """Test values of constants."""

  def testSilentTimeout(self):
    """Verify the silent timeout is small enough."""
    # Enforce that the default timeout is less than 9000, the default timeout
    # set in build/scripts/master/factory/chromeos_factory.py:ChromiteFactory
    # in the Chrome buildbot source code.
    # (Fixed duplicated word "will will" in the failure message below.)
    self.assertLess(
        parallel._BackgroundTask.SILENT_TIMEOUT, 9000,
        'Do not increase this timeout. Instead, print regular progress '
        'updates, so that buildbot (and cbuildbot) will know that your '
        'program has not hung.')
+
+
def main(_argv):
  # Entry point used by the wrapper symlink; run this module's tests at
  # INFO verbosity. The parsed argv is unused.
  cros_test_lib.main(level='info', module=__name__)
diff --git a/lib/parseelf.py b/lib/parseelf.py
new file mode 100644
index 0000000..fa0d92c
--- /dev/null
+++ b/lib/parseelf.py
@@ -0,0 +1,157 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""ELF parsing related helper functions/classes."""
+
+from __future__ import print_function
+
+import cStringIO
+import os
+
+from chromite.scripts import lddtree
+
+from elftools.elf import elffile
+from elftools.elf import enums
+from elftools.common import utils
+
+
# Reverse dict() from numeric values to strings, used to map a numeric
# st_shndx back to its symbolic section-type name in ParseELFSymbols.
SH_TYPE_VALUES = dict((value, name)
                      for name, value in enums.ENUM_SH_TYPE.iteritems())
+
+
def ParseELFSymbols(elf):
  """Parses list of symbols in an ELF file.

  Args:
    elf: An elffile.ELFFile instance.

  Returns:
    A 2-tuple of (imported, exported) symbols. |imported| is a set of strings
    of undefined symbols. |exported| is a dict where the keys are defined
    symbols and the values are 3-tuples (st_info_bind, st_size, st_shndx) with
    the details of the corresponding exported symbol. Note that for imported
    symbols this information is always ('STB_GLOBAL', 0, 'SHN_UNDEF') and thus
    not included in the result.
  """
  imp = set()
  exp = dict()

  # Only shared objects and executables carry dynamic symbols of interest.
  if elf.header.e_type not in ('ET_DYN', 'ET_EXEC'):
    return imp, exp

  for segment in elf.iter_segments():
    if segment.header.p_type != 'PT_DYNAMIC':
      continue

    # Find strtab and symtab virtual addresses.
    strtab_ptr = None
    symtab_ptr = None
    symbol_size = elf.structs.Elf_Sym.sizeof()
    for tag in segment.iter_tags():
      if tag.entry.d_tag == 'DT_SYMTAB':
        symtab_ptr = tag.entry.d_ptr
      if tag.entry.d_tag == 'DT_STRTAB':
        strtab_ptr = tag.entry.d_ptr
      if tag.entry.d_tag == 'DT_SYMENT':
        assert symbol_size == tag.entry.d_val

    if symtab_ptr is None or strtab_ptr is None:
      # A PT_DYNAMIC segment without a dynamic symbol/string table: skip it
      # instead of crashing on the None arithmetic below.
      continue

    stringtable = segment._get_stringtable()  # pylint: disable=W0212

    symtab_offset = next(elf.address_offsets(symtab_ptr))
    # Assume that symtab ends right before strtab.
    # This is the same assumption that glibc makes in dl-addr.c.
    # The first symbol is always local undefined, unnamed so we ignore it.
    for i in range(1, (strtab_ptr - symtab_ptr) / symbol_size):
      symbol_offset = symtab_offset + (i * symbol_size)
      symbol = utils.struct_parse(elf.structs.Elf_Sym, elf.stream,
                                  symbol_offset)
      if symbol['st_info']['bind'] == 'STB_LOCAL':
        # Ignore local symbols.
        continue
      symbol_name = stringtable.get_string(symbol.st_name)
      if symbol['st_shndx'] == 'SHN_UNDEF':
        if symbol['st_info']['bind'] == 'STB_GLOBAL':
          # Global undefined --> required symbols.
          # We ignore weak undefined symbols.
          imp.add(symbol_name)
      elif symbol['st_other']['visibility'] == 'STV_DEFAULT':
        # Exported symbols must have default visibility.
        st_shndx = SH_TYPE_VALUES.get(symbol['st_shndx'], symbol['st_shndx'])
        exp[symbol_name] = (symbol['st_info']['bind'], symbol['st_size'],
                            st_shndx)
  return imp, exp
+
+
def ParseELF(root, rel_path, ldpaths=None, parse_symbols=True):
  """Parse the ELF file.

  Loads and parses the passed elf file.

  Args:
    root: Path to the directory where the rootfs is mounted.
    rel_path: The path to the parsing file relative to root.
    ldpaths: The dict() with the ld path information. See lddtree.LoadLdpaths()
        for details.
    parse_symbols: Whether the result includes the dynamic symbols 'imp_sym' and
        'exp_sym' sections. Disabling it reduces the time for large files with
        many symbols.

  Returns:
    If the passed file isn't a supported ELF file, returns None. Otherwise,
    returns a dict() with information about the parsed ELF.
  """
  # Ensure root has a trailing / so removing the root prefix also removes any
  # / from the beginning of the path.
  root = root.rstrip('/') + '/'

  with open(os.path.join(root, rel_path), 'rb') as f:
    if f.read(4) != '\x7fELF':
      # Ignore non-ELF files. This check is done to speedup the process.
      return
    f.seek(0)
    # Continue reading and cache the whole file to speedup seeks.
    stream = cStringIO.StringIO(f.read())

  try:
    elf = elffile.ELFFile(stream)
  except elffile.ELFError:
    # Ignore unsupported ELF files.
    return
  if elf.header.e_type == 'ET_REL':
    # Don't parse relocatable ELF files (mostly kernel modules).
    return {
        'type': elf.header.e_type,
        'realpath': rel_path,
    }

  if ldpaths is None:
    ldpaths = lddtree.LoadLdpaths(root)

  result = lddtree.ParseELF(os.path.join(root, rel_path), root=root,
                            ldpaths=ldpaths)
  # Convert files to relative paths.
  # (Idiom fix: use "is not None" per PEP 8 rather than "not ... is None".)
  for libdef in result['libs'].values():
    for path in ('realpath', 'path'):
      if libdef[path] is not None and libdef[path].startswith(root):
        libdef[path] = libdef[path][len(root):]

  for path in ('interp', 'realpath'):
    if result[path] is not None and result[path].startswith(root):
      result[path] = result[path][len(root):]

  result['type'] = elf.header.e_type
  result['sections'] = dict((str(sec.name), sec['sh_size'])
                            for sec in elf.iter_sections())
  result['segments'] = set(seg['p_type'] for seg in elf.iter_segments())

  # Some libraries (notably, the libc, which you can execute as a normal
  # binary) have the interp set. We use the file extension in those cases
  # because exec files shouldn't have a .so extension.
  result['is_lib'] = ((result['interp'] is None or rel_path[-3:] == '.so') and
                      elf.header.e_type == 'ET_DYN')

  if parse_symbols:
    result['imp_sym'], result['exp_sym'] = ParseELFSymbols(elf)
  return result
diff --git a/lib/parseelf_unittest b/lib/parseelf_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/parseelf_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/parseelf_unittest.py b/lib/parseelf_unittest.py
new file mode 100644
index 0000000..daf33bd
--- /dev/null
+++ b/lib/parseelf_unittest.py
@@ -0,0 +1,117 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the parseelf.py module."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import parseelf
+from chromite.lib import unittest_lib
+
+
class ELFParsingTest(cros_test_lib.TempDirTestCase):
  """Test the ELF parsing functions."""

  # Empty ld search-path config; all test binaries live in the temp dir, so
  # no interp/env/conf paths are needed.
  _ldpaths = {'interp': [], 'env': [], 'conf': []}

  def testIsLib(self):
    """Tests the 'is_lib' attribute is inferred correctly for libs."""
    unittest_lib.BuildELF(os.path.join(self.tempdir, 'liba.so'), ['func_a'])
    elf = parseelf.ParseELF(self.tempdir, 'liba.so', self._ldpaths)
    self.assertTrue('is_lib' in elf)
    self.assertTrue(elf['is_lib'])

  def testNotIsLib(self):
    """Tests the 'is_lib' attribute is inferred correctly for executables."""
    unittest_lib.BuildELF(os.path.join(self.tempdir, 'abc_main'),
                          executable=True)
    elf = parseelf.ParseELF(self.tempdir, 'abc_main', self._ldpaths)
    self.assertTrue('is_lib' in elf)
    self.assertFalse(elf['is_lib'])

  def testUnsupportedFiles(self):
    """Tests unsupported files are ignored."""
    # Not an ELF at all.
    osutils.WriteFile(os.path.join(self.tempdir, 'foo.so'), 'foo')
    self.assertEquals(None,
                      parseelf.ParseELF(self.tempdir, 'foo.so', self._ldpaths))

    # Has the ELF magic but a corrupt header.
    osutils.WriteFile(os.path.join(self.tempdir, 'foo.so'), '\x7fELF-foo')
    self.assertEquals(None,
                      parseelf.ParseELF(self.tempdir, 'foo.so', self._ldpaths))

  def testParsedSymbols(self):
    """Tests the list of imported/exported symbols."""
    unittest_lib.BuildELF(os.path.join(self.tempdir, 'libabc.so'),
                          defined_symbols=['fa', 'fb', 'fc'])
    unittest_lib.BuildELF(os.path.join(self.tempdir, 'libxyz.so'),
                          defined_symbols=['fx', 'fy', 'fz'],
                          undefined_symbols=['fa', 'fb', 'fc'],
                          used_libs=['abc'])

    # Test without symbols.
    elf = parseelf.ParseELF(self.tempdir, 'libxyz.so', self._ldpaths,
                            parse_symbols=False)
    self.assertFalse('imp_sym' in elf)
    self.assertFalse('exp_sym' in elf)

    # Test with symbols by default.
    elf = parseelf.ParseELF(self.tempdir, 'libxyz.so', self._ldpaths)
    self.assertTrue('imp_sym' in elf)
    self.assertTrue('exp_sym' in elf)
    self.assertEquals(elf['imp_sym'], set(['fa', 'fb', 'fc']))
    self.assertEquals(set(k for k, (_, _, st_shndx)
                          in elf['exp_sym'].iteritems()
                          if st_shndx == 'SHT_DYNSYM'),
                      set(['fx', 'fy', 'fz']))
    for sym in ['fx', 'fy', 'fz']:
      self.assertEquals('STB_GLOBAL', elf['exp_sym'][sym][0])

  def testLibDependencies(self):
    """Tests the list of direct dependencies."""
    # Dependencies:
    #   u -> abc
    #   v -> abc
    #   prog -> u,v
    unittest_lib.BuildELF(os.path.join(self.tempdir, 'libabc.so'),
                          defined_symbols=['fa', 'fb', 'fc'])
    unittest_lib.BuildELF(os.path.join(self.tempdir, 'libu.so'),
                          defined_symbols=['fu'],
                          undefined_symbols=['fa'],
                          used_libs=['abc'])
    unittest_lib.BuildELF(os.path.join(self.tempdir, 'libv.so'),
                          defined_symbols=['fv'],
                          undefined_symbols=['fb'],
                          used_libs=['abc'])
    unittest_lib.BuildELF(os.path.join(self.tempdir, 'prog'),
                          undefined_symbols=['fu', 'fv'],
                          used_libs=['u', 'v'],
                          executable=True)

    elf_prog = parseelf.ParseELF(self.tempdir, 'prog', self._ldpaths)
    # Check the direct dependencies; libabc.so is only a transitive one.
    self.assertTrue('libu.so' in elf_prog['needed'])
    self.assertTrue('libv.so' in elf_prog['needed'])
    self.assertFalse('libabc.so' in elf_prog['needed'])

  def testRelativeLibPaths(self):
    """Test that the paths reported by ParseELF are relative to root."""
    unittest_lib.BuildELF(os.path.join(self.tempdir, 'liba.so'), ['fa'])
    unittest_lib.BuildELF(os.path.join(self.tempdir, 'prog'),
                          undefined_symbols=['fa'], used_libs=['a'],
                          executable=True)
    elf = parseelf.ParseELF(self.tempdir, 'prog', self._ldpaths)
    for lib in elf['libs'].values():
      for path in ('realpath', 'path'):
        if lib[path] is None:
          continue
        self.assertFalse(lib[path].startswith('/'))
        self.assertFalse(lib[path].startswith(self.tempdir))
        # Each linked lib path must either resolve under the root or be the
        # dynamic loader (interp) itself.
        self.assertTrue(lib[path] == elf['interp'] or
                        os.path.exists(os.path.join(self.tempdir, lib[path])))
diff --git a/lib/partial_mock.py b/lib/partial_mock.py
new file mode 100644
index 0000000..e7adeea
--- /dev/null
+++ b/lib/partial_mock.py
@@ -0,0 +1,584 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Contains functionality used to implement a partial mock."""
+
+from __future__ import print_function
+
+import collections
+import mock
+import os
+import re
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+
+
class Comparator(object):
  """Base class for all comparators.

  Subclasses implement Match() to decide whether a call argument satisfies
  the comparator.  Equality between comparators is structural: same concrete
  type and same instance attributes.
  """

  def Match(self, arg):
    """Match the comparator against an argument."""
    raise NotImplementedError('method must be implemented by a subclass.')

  def Equals(self, rhs):
    """Returns whether rhs compares the same thing."""
    if type(self) != type(rhs):
      return False
    return self.__dict__ == rhs.__dict__

  def __eq__(self, rhs):
    return self.Equals(rhs)

  def __ne__(self, rhs):
    return not self.__eq__(rhs)
+
+
class In(Comparator):
  """Checks whether an item (or key) is in a list (or dict) parameter."""

  def __init__(self, key):
    """Initialize.

    Args:
      key: Any thing that could be in a list or a key in a dict
    """
    Comparator.__init__(self)
    self._key = key

  def Match(self, arg):
    # Non-container arguments (e.g. ints) raise TypeError on `in`; treat
    # those as a failed match rather than an error.
    try:
      result = self._key in arg
    except TypeError:
      result = False
    return result

  def __repr__(self):
    return '<sequence or map containing %r>' % str(self._key)
+
+
class Regex(Comparator):
  """Checks if a string matches a regular expression."""

  def __init__(self, pattern, flags=0):
    """Initialize.

    Args:
      pattern: is the regular expression to search for
      flags: passed to re.compile function as the second argument
    """
    Comparator.__init__(self)
    self.pattern = pattern
    self.flags = flags
    self.regex = re.compile(pattern, flags=flags)

  def Match(self, arg):
    # Non-string arguments raise TypeError inside re; count those as
    # a failed match.
    try:
      found = self.regex.search(arg)
    except TypeError:
      return False
    return found is not None

  def __repr__(self):
    pieces = ['<regular expression %r' % self.regex.pattern]
    if self.regex.flags:
      pieces.append(', flags=%d' % self.regex.flags)
    pieces.append('>')
    return ''.join(pieces)
+
+
class ListRegex(Regex):
  """Checks if an iterable of strings matches a regular expression."""

  @staticmethod
  def _ProcessArg(arg):
    """Flatten a non-string iterable into one space-separated string."""
    # basestring only exists on Python 2; fall back to str so the
    # isinstance check does not raise NameError on Python 3.
    try:
      string_types = basestring
    except NameError:
      string_types = str
    if not isinstance(arg, string_types):
      return ' '.join(arg)
    return arg

  def Match(self, arg):
    # Unjoinable arguments (None, ints, lists of non-strings) raise
    # TypeError; treat those as a failed match.
    try:
      return self.regex.search(self._ProcessArg(arg)) is not None
    except TypeError:
      return False
+
+
class Ignore(Comparator):
  """Used when we don't care about an argument of a method call."""

  def Match(self, _arg):
    # Every argument matches, by definition.
    return True

  def __repr__(self):
    return '<IgnoreArg>'
+
+
def _RecursiveCompare(lhs, rhs):
  """Compare parameter specs recursively.

  Args:
    lhs: Left Hand Side parameter spec to compare.  May contain Comparator
      instances, which are matched against the corresponding part of |rhs|
      instead of being compared for equality.
    rhs: Right Hand Side parameter spec to compare.

  Returns:
    True if |rhs| satisfies |lhs|, False otherwise.
  """
  if isinstance(lhs, Comparator):
    return lhs.Match(rhs)
  elif isinstance(lhs, (tuple, list)):
    # Sequences must agree on concrete type, length, and every element.
    return (type(lhs) == type(rhs) and
            len(lhs) == len(rhs) and
            all(_RecursiveCompare(i, j) for i, j in zip(lhs, rhs)))
  elif isinstance(lhs, dict):
    # items() rather than the Python-2-only iteritems() so this also runs
    # on Python 3; behavior is identical.
    return _RecursiveCompare(sorted(lhs.items()), sorted(rhs.items()))
  else:
    return lhs == rhs
+
+
def ListContains(small, big, strict=False):
  """Looks for a sublist within a bigger list.

  Args:
    small: The sublist to search for.
    big: The list to search in.
    strict: If True, all items in list must be adjacent.

  Returns:
    True if |small| is contained in |big| under the requested mode.
  """
  # range() rather than the Python-2-only xrange(); equivalent for
  # iteration on both Python versions.
  if strict:
    # Slide a window of len(small) across |big| looking for an exact match.
    for i in range(len(big) - len(small) + 1):
      if _RecursiveCompare(small, big[i:i + len(small)]):
        return True
    return False
  else:
    # Every element of |small| must appear in |big| in order, though not
    # necessarily adjacently; |j| never rewinds past a prior match.
    j = 0
    for i in range(len(small)):
      for j in range(j, len(big)):
        if _RecursiveCompare(small[i], big[j]):
          j += 1
          break
      else:
        return False
    return True
+
+
def DictContains(small, big):
  """Looks for a subset within a dictionary.

  Args:
    small: The sub-dict to search for.
    big: The dict to search in.

  Returns:
    True if every key of |small| is present in |big| with a matching value.
  """
  # items() rather than the Python-2-only iteritems() so this also runs on
  # Python 3; behavior is identical.
  for k, v in small.items():
    if k not in big or not _RecursiveCompare(v, big[k]):
      return False
  return True
+
+
class MockedCallResults(object):
  """Implements internal result specification for partial mocks.

  Used with the PartialMock class.

  Internal results are different from external results (return values,
  side effects, exceptions, etc.) for functions.  Internal results are
  *used* by the partial mock to generate external results.  Often internal
  results represent the external results of the dependencies of the function
  being partially mocked.  Of course, the partial mock can just pass through
  the internal results to become external results.
  """

  # Bundles the positional and keyword argument spec a mock is recorded for.
  Params = collections.namedtuple('Params', ['args', 'kwargs'])
  # One recording: params to match, kwargs-matching strictness, the result
  # to return, and an optional side-effect hook.
  MockedCall = collections.namedtuple(
      'MockedCall', ['params', 'strict', 'result', 'side_effect'])

  def __init__(self, name):
    """Initialize.

    Args:
      name: The name given to the mock.  Will be used in debug output.
    """
    self.name = name
    # List of MockedCall tuples; at most one per distinct Params.
    self.mocked_calls = []
    # Fallback used by LookupResult when no recording matches.
    self.default_result, self.default_side_effect = None, None

  @staticmethod
  def AssertArgs(args, kwargs):
    """Verify arguments are of expected type."""
    # NOTE(review): only a tuple is accepted for |args| even though the
    # docstrings below say 'list' — confirm which is intended.
    assert isinstance(args, (tuple))
    if kwargs:
      assert isinstance(kwargs, dict)

  def AddResultForParams(self, args, result, kwargs=None, side_effect=None,
                         strict=True):
    """Record the internal results of a given partial mock call.

    Args:
      args: A list containing the positional args an invocation must have for
        it to match the internal result.  The list can contain instances of
        meta-args (such as IgnoreArg, Regex, In, etc.).  Positional argument
        matching is always *strict*, meaning extra positional arguments in
        the invocation are not allowed.
      result: The internal result that will be matched for the command
        invocation specified.
      kwargs: A dictionary containing the keyword args an invocation must have
        for it to match the internal result.  The dictionary can contain
        instances of meta-args (such as IgnoreArg, Regex, In, etc.).  Keyword
        argument matching is by default *strict*, but can be modified by the
        |strict| argument.
      side_effect: A functor that gets called every time a partially mocked
        function is invoked.  The arguments the partial mock is invoked with are
        passed to the functor.  This is similar to how side effects work for
        mocks.
      strict: Specifies whether keyword are matched strictly.  With strict
        matching turned on, any keyword args a partial mock is invoked with that
        are not specified in |kwargs| will cause the match to fail.
    """
    self.AssertArgs(args, kwargs)
    if kwargs is None:
      kwargs = {}

    params = self.Params(args=args, kwargs=kwargs)
    # Pull out any prior recording with identical params so the new one
    # replaces it instead of creating an ambiguous duplicate.
    dup, filtered = cros_build_lib.PredicateSplit(
        lambda mc: mc.params == params, self.mocked_calls)

    new = self.MockedCall(params=params, strict=strict, result=result,
                          side_effect=side_effect)
    filtered.append(new)
    self.mocked_calls = filtered

    if dup:
      logging.debug('%s: replacing mock for arguments %r:\n%r -> %r',
                    self.name, params, dup, new)

  def SetDefaultResult(self, result, side_effect=None):
    """Set the default result for an unmatched partial mock call.

    Args:
      result: See AddResultsForParams.
      side_effect: See AddResultsForParams.
    """
    self.default_result, self.default_side_effect = result, side_effect

  def LookupResult(self, args, kwargs=None, hook_args=None, hook_kwargs=None):
    """For a given mocked function call lookup the recorded internal results.

    Args:
      args: A list containing positional args the function was called with.
      kwargs: A dict containing keyword args the function was called with.
      hook_args: A list of positional args to call the hook with.
      hook_kwargs: A dict of key/value args to call the hook with.

    Returns:
      The recorded result for the invocation.

    Raises:
      AssertionError when the call is not mocked, or when there is more
      than one mock that matches.
    """
    def filter_fn(mc):
      # Strict recordings must match args and kwargs exactly; non-strict
      # ones tolerate extra keyword args in the invocation.
      if mc.strict:
        return _RecursiveCompare(mc.params, params)

      return (DictContains(mc.params.kwargs, kwargs) and
              _RecursiveCompare(mc.params.args, args))

    self.AssertArgs(args, kwargs)
    if kwargs is None:
      kwargs = {}

    params = self.Params(args, kwargs)
    matched, _ = cros_build_lib.PredicateSplit(filter_fn, self.mocked_calls)
    if len(matched) > 1:
      raise AssertionError(
          '%s: args %r matches more than one mock:\n%s'
          % (self.name, params, '\n'.join([repr(c) for c in matched])))
    elif matched:
      side_effect, result = matched[0].side_effect, matched[0].result
    elif (self.default_result, self.default_side_effect) != (None, None):
      side_effect, result = self.default_side_effect, self.default_result
    else:
      raise AssertionError('%s: %r not mocked!' % (self.name, params))

    if side_effect:
      # Hook args are mandatory whenever a side effect fires; a non-None
      # hook return value overrides the recorded result.
      assert hook_args is not None
      assert hook_kwargs is not None
      hook_result = side_effect(*hook_args, **hook_kwargs)
      if hook_result is not None:
        return hook_result
    return result
+
+
class PartialMock(object):
  """Provides functionality for partially mocking out a function or method.

  Partial mocking is useful in cases where the side effects of a function or
  method are complex, and so re-using the logic of the function with
  *dependencies* mocked out is preferred over mocking out the entire function
  and re-implementing the side effect (return value, state modification) logic
  in the test.  It is also useful for creating re-usable mocks.
  """

  # Subclasses set TARGET to the dotted path of the class to patch and ATTRS
  # to the attribute names on it to mock.  Either both or neither must be set.
  TARGET = None
  ATTRS = None

  def __init__(self, create_tempdir=False):
    """Initialize.

    Args:
      create_tempdir: If set to True, the partial mock will create its own
        temporary directory when start() is called, and will set self.tempdir to
        the path of the directory.  The directory is deleted when stop() is
        called.
    """
    self.backup = {}
    self.patchers = {}
    self.patched = {}
    self.external_patchers = []
    self.create_tempdir = create_tempdir

    # Set when start() is called.
    self._tempdir_obj = None
    self.tempdir = None
    self.__saved_env__ = None
    self.started = False

    self._results = {}

    if not all([self.TARGET, self.ATTRS]) and any([self.TARGET, self.ATTRS]):
      raise AssertionError('TARGET=%r but ATTRS=%r!'
                           % (self.TARGET, self.ATTRS))

    if self.ATTRS is not None:
      for attr in self.ATTRS:
        self._results[attr] = MockedCallResults(attr)

  def __enter__(self):
    return self.start()

  def __exit__(self, exc_type, exc_value, traceback):
    self.stop()

  def PreStart(self):
    """Called at the beginning of start(). Child classes can override this.

    If __init__ was called with |create_tempdir| set, then self.tempdir will
    point to an existing temporary directory when this function is called.
    """

  def PreStop(self):
    """Called at the beginning of stop().  Child classes can override this.

    If __init__ was called with |create_tempdir| set, then self.tempdir will
    not be deleted until after this function returns.
    """

  def StartPatcher(self, patcher):
    """PartialMock will stop the patcher when stop() is called."""
    self.external_patchers.append(patcher)
    return patcher.start()

  def PatchObject(self, *args, **kwargs):
    """Create and start a mock.patch.object().

    stop() will be called automatically during tearDown.
    """
    return self.StartPatcher(mock.patch.object(*args, **kwargs))

  def _start(self):
    """Patch each attribute in ATTRS on the TARGET class.

    Callable attributes are patched with autospec'd mocks whose side effect
    is the correspondingly-named method on this instance; non-callables are
    replaced directly with this instance's attribute.
    """
    if not all([self.TARGET, self.ATTRS]):
      return

    chunks = self.TARGET.rsplit('.', 1)
    module = cros_build_lib.load_module(chunks[0])

    cls = getattr(module, chunks[1])
    for attr in self.ATTRS:
      self.backup[attr] = getattr(cls, attr)
      # Dunder attributes would be name-mangled, so the replacement method
      # lives on this class under a '_target' prefix instead.
      src_attr = '_target%s' % attr if attr.startswith('__') else attr
      if hasattr(self.backup[attr], 'reset_mock'):
        raise AssertionError(
            'You are trying to nest mock contexts - this is currently '
            'unsupported by PartialMock.')
      if callable(self.backup[attr]):
        patcher = mock.patch.object(cls, attr, autospec=True,
                                    side_effect=getattr(self, src_attr))
      else:
        patcher = mock.patch.object(cls, attr, getattr(self, src_attr))
      self.patched[attr] = patcher.start()
      self.patchers[attr] = patcher

    return self

  def start(self):
    """Activates the mock context."""
    try:
      # Snapshot the environment so stop() can restore it.
      self.__saved_env__ = os.environ.copy()
      self.tempdir = None
      if self.create_tempdir:
        self._tempdir_obj = osutils.TempDir(set_global=True)
        self.tempdir = self._tempdir_obj.tempdir

      self.started = True
      self.PreStart()
      return self._start()
    except:
      # Undo any partial setup before re-raising.
      self.stop()
      raise

  def stop(self):
    """Restores namespace to the unmocked state."""
    try:
      if self.__saved_env__ is not None:
        osutils.SetEnvironment(self.__saved_env__)

      # values() rather than the Python-2-only itervalues() so this also
      # runs on Python 3; behavior is identical.
      tasks = ([self.PreStop] + [p.stop for p in self.patchers.values()] +
               [p.stop for p in self.external_patchers])
      if self._tempdir_obj is not None:
        tasks += [self._tempdir_obj.Cleanup]
      # SafeRun attempts every task even if an earlier one fails.
      cros_build_lib.SafeRun(tasks)
    finally:
      self.started = False
      self.tempdir, self._tempdir_obj = None, None

  def UnMockAttr(self, attr):
    """Unsetting the mock of an attribute/function."""
    self.patchers.pop(attr).stop()
+
+
def CheckAttr(f):
  """Automatically set mock_attr based on class default.

  This function decorator automatically sets the mock_attr keyword argument
  based on the class default. The mock_attr specifies which mocked attribute
  a given function is referring to.

  Raises an AssertionError if mock_attr is left unspecified.
  """

  def wrapper(self, *args, **kwargs):
    # Fall back to the class-level default when the caller did not name a
    # mocked attribute explicitly.
    attr = kwargs.pop('mock_attr', None)
    if attr is None:
      attr = self.DEFAULT_ATTR
      if attr is None:
        raise AssertionError(
            'mock_attr not specified, and no default configured.')
    kwargs['mock_attr'] = attr
    return f(self, *args, **kwargs)
  return wrapper
+
+
class PartialCmdMock(PartialMock):
  """Base class for mocking functions that wrap command line functionality.

  Implements mocking for functions that shell out.  The internal results are
  'returncode', 'output', 'error'.
  """

  # Shape of a recorded command result.
  CmdResult = collections.namedtuple(
      'MockResult', ['returncode', 'output', 'error'])

  # Subclasses set this to the ATTRS entry used when mock_attr is omitted.
  DEFAULT_ATTR = None

  @CheckAttr
  def SetDefaultCmdResult(self, returncode=0, output='', error='',
                          side_effect=None, mock_attr=None):
    """Specify the default command result if no command is matched.

    Args:
      returncode: See AddCmdResult.
      output: See AddCmdResult.
      error: See AddCmdResult.
      side_effect: See MockedCallResults.AddResultForParams
      mock_attr: Which attribute's mock is being referenced.
    """
    result = self.CmdResult(returncode, output, error)
    self._results[mock_attr].SetDefaultResult(result, side_effect)

  @CheckAttr
  def AddCmdResult(self, cmd, returncode=0, output='', error='',
                   kwargs=None, strict=False, side_effect=None, mock_attr=None):
    """Specify the result to simulate for a given command.

    Args:
      cmd: The command string or list to record a result for.
      returncode: The returncode of the command (on the command line).
      output: The stdout output of the command.
      error: The stderr output of the command.
      kwargs: Keyword arguments that the function needs to be invoked with.
      strict: Defaults to False.  See MockedCallResults.AddResultForParams.
      side_effect: See MockedCallResults.AddResultForParams
      mock_attr: Which attribute's mock is being referenced.
    """
    result = self.CmdResult(returncode, output, error)
    self._results[mock_attr].AddResultForParams(
        (cmd,), result, kwargs=kwargs, side_effect=side_effect, strict=strict)

  @CheckAttr
  def CommandContains(self, args, cmd_arg_index=-1, mock_attr=None, **kwargs):
    """Verify that at least one command contains the specified args.

    Args:
      args: Set of expected command-line arguments.
      cmd_arg_index: The index of the command list in the positional call_args.
        Defaults to the last positional argument.
      kwargs: Set of expected keyword arguments.
      mock_attr: Which attribute's mock is being referenced.
    """
    for call_args, call_kwargs in self.patched[mock_attr].call_args_list:
      # A call matches if its command list contains |args| (in order) and
      # its keyword args are a superset of |kwargs|.
      if (ListContains(args, call_args[cmd_arg_index]) and
          DictContains(kwargs, call_kwargs)):
        return True
    return False

  @CheckAttr
  def assertCommandContains(self, args=(), expected=True, mock_attr=None,
                            **kwargs):
    """Assert that RunCommand was called with the specified args.

    This verifies that at least one of the RunCommand calls contains the
    specified arguments on the command line.

    Args:
      args: Set of expected command-line arguments.
      expected: If False, instead verify that none of the RunCommand calls
          contained the specified arguments.
      **kwargs: Set of expected keyword arguments.
      mock_attr: Which attribute's mock is being referenced.
    """
    if bool(expected) != self.CommandContains(args, **kwargs):
      if expected:
        msg = 'Expected to find %r in any of:\n%s'
      else:
        msg = 'Expected to not find %r in any of:\n%s'
      patched = self.patched[mock_attr]
      cmds = '\n'.join(repr(x) for x in patched.call_args_list)
      raise AssertionError(msg % (mock.call(args, **kwargs), cmds))

  @CheckAttr
  def assertCommandCalled(self, args=(), mock_attr=None, **kwargs):
    """Assert that RunCommand was called with the specified args.

    This verifies that at least one of the RunCommand calls exactly
    matches the specified command line and misc-arguments.

    Args:
      args: Set of expected command-line arguments.
      mock_attr: Which attribute's mock is being referenced.
      **kwargs: Set of expected keyword arguments.
    """
    call = mock.call(args, **kwargs)
    patched = self.patched[mock_attr]

    for icall in patched.call_args_list:
      if call == icall:
        return

    cmds = '\n'.join(repr(x) for x in patched.call_args_list)
    raise AssertionError('Expected to find %r in any of:\n%s' % (call, cmds))

  @property
  @CheckAttr
  def call_count(self, mock_attr=None):
    """Return the number of times we've been called."""
    return self.patched[mock_attr].call_count

  @property
  @CheckAttr
  def call_args_list(self, mock_attr=None):
    """Return the list of args we've been called with."""
    return self.patched[mock_attr].call_args_list
diff --git a/lib/partial_mock_unittest b/lib/partial_mock_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/partial_mock_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/partial_mock_unittest.py b/lib/partial_mock_unittest.py
new file mode 100644
index 0000000..f7dc2ad
--- /dev/null
+++ b/lib/partial_mock_unittest.py
@@ -0,0 +1,209 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the partial_mock test helper code."""
+
+from __future__ import print_function
+
+from chromite.lib import cros_test_lib
+from chromite.lib import partial_mock
+
+
+# pylint: disable=W0212
+
+
class ComparatorTest(cros_test_lib.TestCase):
  """Test Comparator functionality."""

  TEST_KEY1 = 'monkey'
  TEST_KEY2 = 'foon'

  def testEquals(self):
    """__eq__, __ne__ functionality of Comparator classes."""
    for cls_name in ['In', 'Regex', 'ListRegex']:
      cls = getattr(partial_mock, cls_name)
      obj1 = cls(self.TEST_KEY1)
      obj2 = cls(self.TEST_KEY1)
      obj3 = cls(self.TEST_KEY2)
      # assertEqual/assertNotEqual instead of the deprecated
      # assertEquals/assertNotEquals aliases (removed in Python 3.12).
      self.assertEqual(obj1, obj2)
      self.assertFalse(obj1 == obj3)
      self.assertNotEqual(obj1, obj3)

  def testIgnoreEquals(self):
    """Verify __eq__ functionality for Ignore."""
    obj1 = partial_mock.Ignore()
    obj2 = partial_mock.Ignore()
    self.assertEqual(obj1, obj2)
    self.assertFalse(obj1 != obj2)

  def testListRegex(self):
    """Verify ListRegex match functionality."""
    obj = partial_mock.ListRegex('.*monkey.*')
    self.assertTrue(obj.Match(['the', 'small monkeys', 'jumped']))
    self.assertFalse(obj.Match(['the', 'jumped']))
    self.assertFalse(obj.Match(None))
    self.assertFalse(obj.Match(1))
+
+
class RecursiveCompareTest(cros_test_lib.TestCase):
  """Test recursive compare functionality."""

  # Same key/value pairs in different literal order; must compare equal.
  LHS_DICT = {3: 1, 1: 2}
  RHS_DICT = {1: 2, 3: 1}
  LIST = [1, 2, 3, 4]
  TUPLE = (1, 2, 3, 4)

  def TrueHelper(self, lhs, rhs):
    """Assert that |lhs| recursively compares equal to |rhs|."""
    self.assertTrue(partial_mock._RecursiveCompare(lhs, rhs))

  def FalseHelper(self, lhs, rhs):
    """Assert that |lhs| does not recursively compare equal to |rhs|."""
    self.assertFalse(partial_mock._RecursiveCompare(lhs, rhs))

  def testIt(self):
    """Test basic equality cases."""
    self.TrueHelper(self.LHS_DICT, self.RHS_DICT)
    self.TrueHelper({3: self.LIST, 1: self.LHS_DICT},
                    {1: self.LHS_DICT, 3: self.LIST})
    self.FalseHelper({1: self.LHS_DICT, 3: self.LIST},
                     {1: self.LHS_DICT, 3: self.LIST + [5]})
    # A list never compares equal to a tuple, even with equal elements.
    self.FalseHelper(self.LIST, self.TUPLE)

  def testUnicode(self):
    """Test recursively comparing unicode and non-unicode strings."""
    self.assertTrue(partial_mock._RecursiveCompare(['foo'], [u'foo']))
+
+
class ListContainsTest(cros_test_lib.TestCase):
  """Unittests for ListContains method."""

  # Wrap range() in list() so the fixtures stay real lists on Python 3,
  # where range() returns a lazy sequence: list + range concatenation
  # raises TypeError, and a range object never compares equal to a list.
  L = list(range(10)) + list(range(10)) + [9]
  STRICTLY_TRUE_LISTS = [list(range(10)), list(range(9, 10)),
                         list(range(3, 6)), list(range(1)), [], [9, 9]]
  LOOSELY_TRUE_LISTS = [list(range(0, 10, 2)), list(range(3, 6, 2)), [1, 1]]
  FALSE_LISTS = [[1.5], [-1], [1, 1, 1], [10], [22], list(range(6, 11)),
                 list(range(-1, 5))]

  def testStrictContains(self):
    """Test ListContains with strict=True."""
    for x in self.STRICTLY_TRUE_LISTS:
      self.assertTrue(partial_mock.ListContains(x, self.L, strict=True))
    for x in self.LOOSELY_TRUE_LISTS + self.FALSE_LISTS:
      self.assertFalse(partial_mock.ListContains(x, self.L, strict=True))

  def testLooseContains(self):
    """Test ListContains with strict=False."""
    for x in self.STRICTLY_TRUE_LISTS + self.LOOSELY_TRUE_LISTS:
      self.assertTrue(partial_mock.ListContains(x, self.L))
    for x in self.FALSE_LISTS:
      self.assertFalse(partial_mock.ListContains(x, self.L))

  def testUnicode(self):
    """Test ListContains with unicode and non-unicode strings."""
    self.assertTrue(partial_mock.ListContains(['foo'], [u'foo']))
+
+
class MockedCallResultsTest(cros_test_lib.TestCase):
  """Test MockedCallResults functionality."""

  ARGS = ('abc',)
  LIST_ARGS = ([1, 2, 3, 4],)
  KWARGS = {'test': 'ing'}
  NEW_ENTRY = {'new': 'entry'}

  def KwargsHelper(self, result, kwargs, strict=True):
    """Record |result| for self.ARGS with the given kwargs spec."""
    self.mr.AddResultForParams(self.ARGS, result, kwargs=kwargs,
                               strict=strict)

  def setUp(self):
    self.mr = partial_mock.MockedCallResults('SomeFunction')

  def testNoMock(self):
    """The call is not mocked."""
    self.assertRaises(AssertionError, self.mr.LookupResult, self.ARGS)

  def testArgReplacement(self):
    """Replacing mocks for args-only calls."""
    self.mr.AddResultForParams(self.ARGS, 1)
    self.mr.AddResultForParams(self.ARGS, 2)
    self.assertEqual(2, self.mr.LookupResult(self.ARGS))

  def testKwargsStrictReplacement(self):
    """Replacing strict kwargs mock with another strict mock."""
    self.KwargsHelper(1, self.KWARGS)
    self.KwargsHelper(2, self.KWARGS)
    self.assertEqual(2, self.mr.LookupResult(self.ARGS, kwargs=self.KWARGS))

  def testKwargsNonStrictReplacement(self):
    """Replacing strict kwargs mock with nonstrict mock."""
    self.KwargsHelper(1, self.KWARGS)
    self.KwargsHelper(2, self.KWARGS, strict=False)
    self.assertEqual(2, self.mr.LookupResult(self.ARGS, kwargs=self.KWARGS))

  def testListArgLookup(self):
    """Matching of arguments containing lists."""
    self.mr.AddResultForParams(self.LIST_ARGS, 1)
    self.mr.AddResultForParams(self.ARGS, 1)
    self.assertEqual(1, self.mr.LookupResult(self.LIST_ARGS))

  def testKwargsStrictLookup(self):
    """Strict lookup fails due to extra kwarg."""
    self.KwargsHelper(1, self.KWARGS)
    # Copy NEW_ENTRY before updating it; the original code mutated the
    # shared class attribute, leaking state into the other tests.
    kwargs = dict(self.NEW_ENTRY)
    kwargs.update(self.KWARGS)
    self.assertRaises(AssertionError, self.mr.LookupResult, self.ARGS,
                      kwargs=kwargs)

  def testKwargsNonStrictLookup(self):
    """Nonstrict lookup passes with extra kwarg."""
    self.KwargsHelper(1, self.KWARGS, strict=False)
    kwargs = dict(self.NEW_ENTRY)
    kwargs.update(self.KWARGS)
    self.assertEqual(1, self.mr.LookupResult(self.ARGS, kwargs=kwargs))

  def testIgnoreMatching(self):
    """Deep matching of Ignore objects."""
    ignore = partial_mock.Ignore()
    self.mr.AddResultForParams((ignore, ignore), 1, kwargs={'test': ignore})
    self.assertEqual(
        1, self.mr.LookupResult(('some', 'values'), {'test': 'bla'}))

  def testRegexMatching(self):
    """Regex matching."""
    self.mr.AddResultForParams((partial_mock.Regex('pre.ix'),), 1)
    self.mr.AddResultForParams((partial_mock.Regex('suffi.'),), 2)
    self.assertEqual(1, self.mr.LookupResult(('prefix',)))
    self.assertEqual(2, self.mr.LookupResult(('suffix',)))

  def testMultipleMatches(self):
    """Lookup matches multiple results."""
    self.mr.AddResultForParams((partial_mock.Ignore(),), 1)
    self.mr.AddResultForParams((partial_mock.In('test'),), 2)
    self.assertRaises(AssertionError, self.mr.LookupResult, ('test',))

  def testDefaultResult(self):
    """Test default result matching."""
    self.mr.SetDefaultResult(1)
    self.mr.AddResultForParams((partial_mock.In('test'),), 2)
    self.assertEqual(1, self.mr.LookupResult(self.ARGS))
    self.assertEqual(2, self.mr.LookupResult(('test',)))

  def _ExampleHook(self, *args, **kwargs):
    """Example hook for testing."""
    self.assertEqual(args, self.LIST_ARGS)
    self.assertEqual(kwargs, self.KWARGS)
    return 2

  def testHook(self):
    """Return value of hook is used as the final result."""
    self.mr.AddResultForParams(self.ARGS, 1, side_effect=self._ExampleHook)
    self.assertEqual(
        2, self.mr.LookupResult(self.ARGS, hook_args=self.LIST_ARGS,
                                hook_kwargs=self.KWARGS))

  def testDefaultHook(self):
    """Verify default hooks are used."""
    self.mr.SetDefaultResult(1, self._ExampleHook)
    self.mr.AddResultForParams((partial_mock.In('test'),), 3)
    self.assertEqual(
        2, self.mr.LookupResult(self.ARGS, hook_args=self.LIST_ARGS,
                                hook_kwargs=self.KWARGS))
    self.assertEqual(3, self.mr.LookupResult(('test',)))
diff --git a/lib/patch.py b/lib/patch.py
new file mode 100644
index 0000000..8271df6
--- /dev/null
+++ b/lib/patch.py
@@ -0,0 +1,2058 @@
+# Copyright (c) 2011-2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module that handles the processing of patches to the source tree."""
+
+from __future__ import print_function
+
+import calendar
+import collections
+import os
+import random
+import re
+import time
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import gob_util
+
+
# Loaded once at import time; used throughout this module for site settings.
site_config = config_lib.GetConfig()


# We import mock so that we can identify mock.MagicMock instances in tests
# that use mock.
try:
  import mock
except ImportError:
  mock = None


# Limits and prefixes used when validating Gerrit identifiers.
_MAXIMUM_GERRIT_NUMBER_LENGTH = 7
_GERRIT_CHANGE_ID_PREFIX = 'I'
_GERRIT_CHANGE_ID_LENGTH = 40
_GERRIT_CHANGE_ID_TOTAL_LENGTH = (_GERRIT_CHANGE_ID_LENGTH +
                                  len(_GERRIT_CHANGE_ID_PREFIX))
# Valid repo project names and (optionally refs/heads/-prefixed) branches.
REPO_NAME_RE = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_\-]*(/[a-zA-Z0-9_-]+)*$')
BRANCH_NAME_RE = re.compile(r'^(refs/heads/)?[a-zA-Z0-9_][a-zA-Z0-9_\-]*$')

# Constants for attributes names.
ATTR_REMOTE = 'remote'
ATTR_GERRIT_NUMBER = 'gerrit_number'
ATTR_PROJECT = 'project'
ATTR_BRANCH = 'branch'
ATTR_PROJECT_URL = 'project_url'
ATTR_REF = 'ref'
ATTR_CHANGE_ID = 'change_id'
ATTR_COMMIT = 'commit'
ATTR_PATCH_NUMBER = 'patch_number'
ATTR_OWNER_EMAIL = 'owner_email'
ATTR_FAIL_COUNT = 'fail_count'
ATTR_PASS_COUNT = 'pass_count'
ATTR_TOTAL_FAIL_COUNT = 'total_fail_count'

# Every known attribute name, in one canonical order.
ALL_ATTRS = (
    ATTR_REMOTE,
    ATTR_GERRIT_NUMBER,
    ATTR_PROJECT,
    ATTR_BRANCH,
    ATTR_PROJECT_URL,
    ATTR_REF,
    ATTR_CHANGE_ID,
    ATTR_COMMIT,
    ATTR_PATCH_NUMBER,
    ATTR_OWNER_EMAIL,
    ATTR_FAIL_COUNT,
    ATTR_PASS_COUNT,
    ATTR_TOTAL_FAIL_COUNT,
)
+
def ParseSHA1(text, error_ok=True):
  """Checks if |text| conforms to the SHA1 format and parses it.

  Args:
    text: The string to check.
    error_ok: If set, do not raise an exception if |text| is not a
      valid SHA1.

  Returns:
    If |text| is a valid SHA1, returns |text|.  Otherwise,
    returns None when |error_ok| is set and raises an exception when
    |error_ok| is False.

  Raises:
    ValueError: If |text| is invalid and |error_ok| is False.
  """
  valid = git.IsSHA1(text)
  if not error_ok and not valid:
    # Format the message with % directly; passing |text| as a second
    # constructor argument (as the original code did) leaves the %s
    # placeholder unformatted in the exception message.
    raise ValueError('%s is not a valid SHA1' % text)

  return text if valid else None
+
+
def ParseGerritNumber(text, error_ok=True):
  """Checks if |text| conforms to the Gerrit number format and parses it.

  A Gerrit number is a string of at most 7 decimal digits.

  Args:
    text: The string to check.
    error_ok: If set, do not raise an exception if |text| is not a
      valid Gerrit number.

  Returns:
    If |text| is a valid Gerrit number, returns |text|.  Otherwise,
    returns None when |error_ok| is set and raises an exception when
    |error_ok| is False.

  Raises:
    ValueError: If |text| is invalid and |error_ok| is False.
  """
  valid = text.isdigit() and len(text) <= _MAXIMUM_GERRIT_NUMBER_LENGTH
  if not error_ok and not valid:
    # Format the message with % directly; passing |text| as a second
    # constructor argument leaves the %s placeholder unformatted.
    raise ValueError('%s is not a valid Gerrit number' % text)

  return text if valid else None
+
+
def ParseChangeID(text, error_ok=True):
  """Checks if |text| conforms to the change-ID format and parses it.

  Change-ID is a string that starts with I/i. E.g.
    I47ea30385af60ae4cc2acc5d1a283a46423bc6e1

  Args:
    text: The string to check.
    error_ok: If set, do not raise an exception if |text| is not a
      valid change-ID.

  Returns:
    If |text| is a valid change-ID, returns |text|.  Otherwise,
    returns None when |error_ok| is set and raises an exception when
    |error_ok| is False.

  Raises:
    ValueError: If |text| is invalid and |error_ok| is False.
  """
  # A change-ID is the 'I' prefix followed by a 40-char SHA1; the hash part
  # may be upper-case hex, so lower it before validating.
  valid = (text.startswith(_GERRIT_CHANGE_ID_PREFIX) and
           len(text) == _GERRIT_CHANGE_ID_TOTAL_LENGTH and
           git.IsSHA1(text[len(_GERRIT_CHANGE_ID_PREFIX):].lower()))

  if not error_ok and not valid:
    # Format the message with % directly; passing |text| as a second
    # constructor argument leaves the %s placeholder unformatted.
    raise ValueError('%s is not a valid change-ID' % text)

  return text if valid else None
+
+
# Parsed representation of a Gerrit "full" change-ID
# (project~branch~change-id); see ParseFullChangeID below.
FullChangeId = collections.namedtuple(
    'FullChangeId', ['project', 'branch', 'change_id'])
+
+
def ParseFullChangeID(text, error_ok=True):
  """Checks if |text| conforms to the full change-ID format and parses it.

  Full change-ID format: project~branch~change-id. E.g.
    chromiumos/chromite~master~I47ea30385af60ae4cc2acc5d1a283a46423bc6e1

  Args:
    text: The string to check.
    error_ok: If set, do not raise an exception if |text| is not a
      valid full change-ID.

  Returns:
    If |text| is a valid full change-ID, returns a FullChangeId
    namedtuple of (project, branch, change_id).  Otherwise, returns
    None when |error_ok| is set and raises ValueError when
    |error_ok| is False.
  """
  fields = text.split('~')
  if len(fields) != 3:
    if not error_ok:
      # Interpolate with %; passing |text| as a second positional
      # argument would leave the '%s' placeholder unformatted.
      raise ValueError('%s is not a valid full change-ID' % (text,))

    return None

  project, branch, change_id = fields
  if (not REPO_NAME_RE.match(project) or
      not BRANCH_NAME_RE.match(branch) or
      not ParseChangeID(change_id)):
    if not error_ok:
      raise ValueError('%s is not a valid full change-ID' % (text,))

    return None

  return FullChangeId(project, branch, change_id)
+
+
class PatchException(Exception):
  """Base class from which all patch exceptions derive."""

  # Unless an instance overrides it, exceptions default to ToT.
  inflight = False

  def __init__(self, patch, message=None):
    is_mock = mock is not None and isinstance(patch, mock.MagicMock)
    if not (isinstance(patch, GitRepoPatch) or is_mock):
      raise TypeError(
          'Patch must be a GitRepoPatch derivative; got type %s: %r'
          % (type(patch), patch))
    Exception.__init__(self)
    self.patch = patch
    self.message = message
    # Keep args populated so the exception serializes cleanly.
    args = [patch]
    if message is not None:
      args.append(message)
    self.args = tuple(args)

  def ShortExplanation(self):
    """Return a short explanation of why the patch failed.

    The text is written to read naturally after a CL number, which
    makes it easy to compose friendly dependency-error messages.
    """
    return 'failed: %s' % (self.message,)

  def __str__(self):
    return '%s %s' % (self.patch.PatchLink(), self.ShortExplanation())
+
+
class ApplyPatchException(PatchException):
  """Raised when a patch fails to apply."""

  def __init__(self, patch, message=None, inflight=False, trivial=False,
               files=()):
    PatchException.__init__(self, patch, message=message)
    self.inflight = inflight
    self.trivial = trivial
    self.files = files = tuple(files)
    # Rebuild args so serialization keeps working.
    self.args = (patch, message, inflight, trivial, files)

  def _StringifyInflight(self):
    if self.inflight:
      return 'the current patch series'
    return 'ToT'

  def _StringifyFilenames(self):
    """Format our filenames for presentation in Gerrit."""
    # The leading hyphen makes Gerrit render each name as an
    # unordered-list item.
    return '\n\n'.join('- %s' % name for name in self.files)

  def ShortExplanation(self):
    explanation = 'conflicted with %s' % (self._StringifyInflight(),)
    if self.trivial:
      explanation += (' because file content merging is disabled for this '
                      'project.')
    else:
      explanation += '.'
    if self.files:
      explanation += ('\n\nThe conflicting files are amongst:\n\n'
                      '%s' % (self._StringifyFilenames(),))
    if self.message:
      explanation += '\n\n%s' % (self.message,)
    return explanation
+
+
class EbuildConflict(ApplyPatchException):
  """Raised when CLs delete the same ebuild."""

  def __init__(self, patch, inflight, ebuilds):
    ApplyPatchException.__init__(self, patch, inflight=inflight, files=ebuilds)
    # Rebuild args so serialization keeps working.
    self.args = (patch, inflight, ebuilds)

  def ShortExplanation(self):
    explanation = (
        'deletes an ebuild that is not present anymore. For this reason, '
        'we refuse to merge your change.\n\n'
        'When you rebase your change, please take into account that the '
        'following ebuilds have been uprevved or deleted:\n\n'
        '%s' % (self._StringifyFilenames()))
    return explanation
+
+
class PatchIsEmpty(ApplyPatchException):
  """Raised when attempting to apply a patch with no changes."""

  def ShortExplanation(self):
    rebase_target = self._StringifyInflight()
    return 'had no changes after rebasing to %s.' % (rebase_target,)
+
+
class DependencyError(PatchException):
  """Raised when a change cannot apply because a dependency failed."""

  def __init__(self, patch, error):
    """Initialize the error object.

    Args:
      patch: The GitRepoPatch instance that this exception concerns.
      error: A PatchException object that can be stringified to describe
        the error.
    """
    PatchException.__init__(self, patch)
    # Propagate inflight-ness from the underlying dependency failure.
    self.inflight = error.inflight
    self.error = error
    self.args = (patch, error)

  def ShortExplanation(self):
    dep_link = self.error.patch.PatchLink()
    return 'depends on %s, which %s' % (dep_link,
                                        self.error.ShortExplanation())
+
+
class BrokenCQDepends(PatchException):
  """Raised when a patch has a malformed CQ-DEPEND line."""

  def __init__(self, patch, text, msg=None):
    PatchException.__init__(self, patch)
    self.text = text
    self.msg = msg
    # Rebuild args so serialization keeps working.
    self.args = (patch, text, msg)

  def ShortExplanation(self):
    explanation = 'has a malformed CQ-DEPEND target: %s' % (self.text,)
    if self.msg is not None:
      explanation += '; %s' % (self.msg,)
    return explanation
+
+
class BrokenChangeID(PatchException):
  """Raised when a patch has an invalid or missing Change-Id."""

  def __init__(self, patch, message, missing=False):
    PatchException.__init__(self, patch)
    self.message = message
    # Whether the Change-Id was absent (as opposed to malformed).
    self.missing = missing
    # Rebuild args so serialization keeps working.
    self.args = (patch, message, missing)

  def ShortExplanation(self):
    return 'has a broken ChangeId: %s' % (self.message,)
+
+
class ChangeMatchesMultipleCheckouts(PatchException):
  """Raised when a change matches more than one checkout."""

  def ShortExplanation(self):
    explanation = (
        'matches multiple checkouts. Does the manifest check out the '
        'same project and branch to different locations?')
    return explanation
+
+
class ChangeNotInManifest(PatchException):
  """Raised when applying a change that the repo manifest lacks."""

  def ShortExplanation(self):
    return 'could not be found in the repo manifest.'
+
+
class PatchNotMergeable(PatchException):
  """Raised when a patch cannot be merged."""

  def __init__(self, patch, reason):
    PatchException.__init__(self, patch)
    # Coerce to str so ShortExplanation always returns a string.
    self.reason = str(reason)
    self.args = (patch, reason)

  def ShortExplanation(self):
    return self.reason
+
+
def MakeChangeId(unusable=False):
  """Generate a random Change-Id.

  Args:
    unusable: If set to True, return a Change-Id like string that gerrit
      will explicitly fail on.  This is primarily used for internal ids,
      as a fallback when a Change-Id could not be parsed.
  """
  # 160 random bits rendered as hex, zero-padded to the full width.
  digits = '%x' % (random.randint(0, 2 ** 160),)
  digits = digits.rjust(_GERRIT_CHANGE_ID_LENGTH, '0')
  if unusable:
    return 'Fake-ID %s' % digits
  return '%s%s' % (_GERRIT_CHANGE_ID_PREFIX, digits)
+
+
class PatchCache(object):
  """Dict-like container used for tracking a group of patches.

  Usable both for existence checks against given string deps and for
  change lookups by alias.
  """

  def __init__(self, initial=()):
    self._dict = {}
    self.Inject(*initial)

  def Inject(self, *args):
    """Add each given change to this cache under all of its aliases."""
    for change in args:
      self.InjectCustomKeys(change.LookupAliases(), change)

  def InjectCustomKeys(self, keys, change):
    """Add |change| under an explicit list of keys.

    Generally you want Inject instead.

    Args:
      keys: A list of keys to update.
      change: The change to update the keys to.
    """
    for key in keys:
      self._dict[str(key)] = change

  def _GetAliases(self, value):
    if hasattr(value, 'LookupAliases'):
      return value.LookupAliases()
    if not isinstance(value, basestring):
      # Not needed in production code, but very useful for flushing
      # out bugs in test code.
      raise ValueError("Value %r isn't a string" % (value,))
    return [value]

  def Remove(self, *args):
    """Drop each given change from this cache."""
    for change in args:
      for alias in self._GetAliases(change):
        self._dict.pop(alias, None)

  def __iter__(self):
    # De-dup: a change is registered once per alias.
    return iter(set(self._dict.itervalues()))

  def __getitem__(self, key):
    """Return the cached change matching |key|, or None."""
    for alias in self._GetAliases(key):
      match = self._dict.get(alias)
      if match is not None:
        return match
    return None

  def __contains__(self, key):
    return self[key] is not None

  def copy(self):
    """Return a shallow copy of this cache."""
    return self.__class__(list(self))
+
+
def StripPrefix(text):
  """Strip the leading internal-change prefix ('*') from |text|.

  Args:
    text: text to examine.

  Returns:
    A tuple of (remote, stripped text).  The remote is the internal
    remote when the prefix was present, the external remote otherwise.
  """
  remote = site_config.params.EXTERNAL_REMOTE
  prefix = site_config.params.INTERNAL_CHANGE_PREFIX
  if text.startswith(prefix):
    remote = site_config.params.INTERNAL_REMOTE
    text = text[len(prefix):]

  return remote, text
+
+
def AddPrefix(patch, text):
  """Prefix |text| with the change marker for |patch|'s remote.

  Examines patch.remote and adds the leading '*' to text if applicable.

  Args:
    patch: A PatchQuery object to examine.
    text: The text to add prefix to.

  Returns:
    |text| with an added prefix for internal patches; otherwise, |text|
    unchanged.
  """
  prefix = site_config.params.CHANGE_PREFIX[patch.remote]
  return '%s%s' % (prefix, text)
+
+
def ParsePatchDep(text, no_change_id=False, no_sha1=False,
                  no_full_change_id=False, no_gerrit_number=False):
  """Parse a patch dependency and convert it to a PatchQuery object.

  Parses a user-given dependency (e.g. from the CQ-DEPEND line in the
  commit message) and returns a PatchQuery object holding the relevant
  information about the dependency.

  Args:
    text: The text to parse.
    no_change_id: Do not allow change-ID.
    no_sha1: Do not allow SHA1.
    no_full_change_id: Do not allow full change-ID.
    no_gerrit_number: Do not allow gerrit_number.

  Returns:
    A PatchQuery object.
  """
  original_text = text
  if not text:
    raise ValueError('ParsePatchDep invoked with an empty value: %r'
                     % (text,))
  # Deal w/ CL: targets.
  if text.upper().startswith('CL:'):
    if not text.startswith('CL:'):
      raise ValueError(
          "ParsePatchDep: 'CL:' must be upper case: %r"
          % (original_text,))
    text = text[3:]

  # Strip the prefix to determine the remote.
  remote, text = StripPrefix(text)

  # Try each accepted format in turn, from most to least specific.
  full_change_id = ParseFullChangeID(text)
  if full_change_id:
    if no_full_change_id:
      raise ValueError(
          'ParsePatchDep: Full Change-ID is not allowed: %r.' % original_text)

    return PatchQuery(remote, project=full_change_id.project,
                      tracking_branch=full_change_id.branch,
                      change_id=full_change_id.change_id)

  change_id = ParseChangeID(text)
  if change_id:
    if no_change_id:
      raise ValueError(
          'ParsePatchDep: Change-ID is not allowed: %r.' % original_text)

    return PatchQuery(remote, change_id=change_id)

  gerrit_number = ParseGerritNumber(text)
  if gerrit_number:
    if no_gerrit_number:
      raise ValueError(
          'ParsePatchDep: Gerrit number is not allowed: %r.' % original_text)

    return PatchQuery(remote, gerrit_number=gerrit_number)

  sha1 = ParseSHA1(text)
  if sha1:
    if no_sha1:
      raise ValueError(
          'ParsePatchDep: SHA1 is not allowed: %r.' % original_text)

    return PatchQuery(remote, sha1=sha1)

  raise ValueError('Cannot parse the dependency: %s' % original_text)
+
+
def GetOptionLinesFromCommitMessage(commit_message, option_re):
  """Find lines in |commit_message| that start with |option_re|.

  Args:
    commit_message: (str) Text of the commit message.
    option_re: (str) regular expression matching the key that identifies
               this option. Any whitespace surrounding the option is
               ignored.

  Returns:
    A list of the line values that matched the option (with the option
    itself stripped out) if at least 1 line matched the option (even if
    it provided no values). None if no lines of the message matched.
  """
  values = []
  found = False
  # The first two lines are never scanned for options.
  for line in commit_message.splitlines()[2:]:
    line = line.strip()
    if not re.match(option_re, line):
      continue
    found = True
    value = re.sub(option_re, '', line, count=1).strip()
    if value:
      values.append(value)

  return values if found else None
+
+
+# TODO(akeshet): Refactor CQ-DEPEND parsing logic to use general purpose
+# GetOptionFromCommitMessage.
def GetPaladinDeps(commit_message):
  """Return the paladin (CQ-DEPEND) dependencies of |commit_message|.

  Returns:
    A list of unique PatchQuery objects, one per dependency.

  Raises:
    ValueError: If a CQ-DEPEND line is not spelled exactly
      'CQ-DEPEND=' (wrong case, punctuation, or leading whitespace),
      or if a dependency cannot be parsed.
  """
  PALADIN_DEPENDENCY_RE = re.compile(r'^([ \t]*CQ.?DEPEND.)(.*)$',
                                     re.MULTILINE | re.IGNORECASE)
  PATCH_RE = re.compile('[^, ]+')
  EXPECTED_PREFIX = 'CQ-DEPEND='
  dependencies = []
  for prefix, payload in PALADIN_DEPENDENCY_RE.findall(commit_message):
    # The regex matches loosely (any case, stray whitespace) so that we
    # can reject near-misses with a helpful error instead of silently
    # ignoring them.
    if prefix != EXPECTED_PREFIX:
      raise ValueError('Expected %r, but got %r' % (EXPECTED_PREFIX, prefix))
    for chunk in PATCH_RE.findall(payload):
      dep = ParsePatchDep(chunk, no_sha1=True)
      if dep not in dependencies:
        dependencies.append(dep)
  return dependencies
+
+
class PatchQuery(object):
  """Store information about a patch.

  This stores information about a patch used to query Gerrit and/or
  our internal PatchCache. It is mostly used to describe a patch
  dependency.

  It is intended to match a single patch. If a user specified a
  non-full change id then it might match multiple patches. If a user
  specified an invalid change id then it might not match any patches.
  """

  def __init__(self, remote, project=None, tracking_branch=None, change_id=None,
               sha1=None, gerrit_number=None):
    """Initializes a PatchQuery instance.

    Args:
      remote: The remote git instance path, defined in constants.CROS_REMOTES.
      project: The name of the project that the patch applies to.
      tracking_branch: The remote branch of the project the patch applies to.
      change_id: The Gerrit Change-ID representing this patch.
      sha1: The sha1 of the commit. This *must* be accurate
      gerrit_number: The Gerrit number of the patch.
    """
    self.remote = remote
    # Only the branch's basename matters; any refs/... qualification is
    # stripped off.
    self.tracking_branch = None
    if tracking_branch:
      self.tracking_branch = os.path.basename(tracking_branch)
    self.project = project
    # Each identifier is validated on the way in; with the parsers'
    # default error_ok=True, an invalid value is normalized to None.
    self.sha1 = None if sha1 is None else ParseSHA1(sha1)
    self.tree_hash = None
    self.change_id = None if change_id is None else ParseChangeID(change_id)
    self.gerrit_number = (None if gerrit_number is None else
                          ParseGerritNumber(gerrit_number))
    self.id = self.full_change_id = None
    self._SetFullChangeID()
    # self.id is the only attribute with the internal prefix (*) if
    # applicable. All other attributes are strictly external format.
    self._SetID()

  def _SetFullChangeID(self):
    """Set the unique full Change-ID (project~branch~change-id) if possible.

    Requires project, tracking_branch, and change_id to all be known;
    otherwise full_change_id is left untouched.
    """
    if (self.project is not None and
        self.tracking_branch is not None and
        self.change_id is not None):
      self.full_change_id = '%s~%s~%s' % (
          self.project, self.tracking_branch, self.change_id)

  def _SetID(self, override_value=None):
    """Set the unique ID to be used internally, if possible.

    Args:
      override_value: If not None, use this value as the ID verbatim.
    """
    if override_value is not None:
      self.id = override_value
      return

    if not self.full_change_id:
      self._SetFullChangeID()

    if self.full_change_id:
      # AddPrefix attaches the internal '*' marker when applicable.
      self.id = AddPrefix(self, self.full_change_id)

    elif self.sha1:
      # We assume sha1 is unique, but in rare cases (e.g. two branches with
      # the same history) it is not. We don't handle that.
      self.id = '%s%s' % (site_config.params.CHANGE_PREFIX[self.remote],
                          self.sha1)

  def LookupAliases(self):
    """Returns the list of lookup keys to query a PatchCache.

    Each key has to be unique for the patch. If no unique key can be
    generated yet (because of incomplete patch information), we'd
    rather return None to avoid retrieving incorrect patch from the
    cache.
    """
    l = []
    if self.gerrit_number:
      l.append(self.gerrit_number)

    # Note that change-ID alone is not unique. Use full change-id here.
    if self.full_change_id:
      l.append(self.full_change_id)

    # Note that in rare cases (two branches with the same history),
    # the commit hash may not be unique. We don't handle that.
    if self.sha1:
      l.append(self.sha1)

    # Every alias carries the remote's change prefix so internal and
    # external changes can't collide in the cache.
    return ['%s%s' % (site_config.params.CHANGE_PREFIX[self.remote], x)
            for x in l if x is not None]

  def ToGerritQueryText(self):
    """Generate a text used to query Gerrit.

    This text may not be unique because the lack of information from
    user-specified dependencies (crbug.com/354734). In which cases,
    the Gerrit query would fail.
    """
    # Try to return a unique ID if possible.
    if self.gerrit_number:
      return self.gerrit_number
    elif self.full_change_id:
      return self.full_change_id
    elif self.sha1:
      # SHA1 may not not be unique, but we don't handle that here.
      return self.sha1
    elif self.change_id:
      # Fall back to use Change-Id, which is not unique.
      return self.change_id
    else:
      # We cannot query without at least one of the three fields. A
      # special case is UploadedLocalPatch which has none of the
      # above, but also is not used for query.
      raise ValueError(
          'We do not have enough information to generate a Gerrit query. '
          'At least one of the following fields needs to be set: Change-Id, '
          'Gerrit number, or sha1')

  def __hash__(self):
    """Returns a hash to be used in a set or a list."""
    if self.id:
      return hash(self.id)
    else:
      # No unique id yet; fall back to hashing the full identity tuple.
      return hash((self.remote, self.project, self.tracking_branch,
                   self.gerrit_number, self.change_id, self.sha1))

  def __eq__(self, other):
    """Defines when two PatchQuery objects are considered equal."""
    # We allow comparing against a string to make testing easier.
    if isinstance(other, basestring):
      return self.id == other

    if self.id is not None:
      return self.id == other.id

    return ((self.remote, self.project, self.tracking_branch,
             self.gerrit_number, self.change_id, self.sha1) ==
            (other.remote, other.project, other.tracking_branch,
             other.gerrit_number, other.change_id, other.sha1))
+
+
+class GitRepoPatch(PatchQuery):
+  """Representing a patch from a branch of a local or remote git repository."""
+
+  # Note the selective case insensitivity; gerrit allows only this.
+  # TOOD(ferringb): back VALID_CHANGE_ID_RE down to {8,40}, requires
+  # ensuring CQ's internals can do the translation (almost can now,
+  # but will fail in the case of a CQ-DEPEND on a change w/in the
+  # same pool).
+  pattern = (r'^' + re.escape(_GERRIT_CHANGE_ID_PREFIX) + r'[0-9a-fA-F]{' +
+             re.escape(str(_GERRIT_CHANGE_ID_LENGTH)) + r'}$')
+  _STRICT_VALID_CHANGE_ID_RE = re.compile(pattern)
+  _GIT_CHANGE_ID_RE = re.compile(r'^Change-Id:[\t ]*(\w+)\s*$',
+                                 re.I | re.MULTILINE)
+
  def __init__(self, project_url, project, ref, tracking_branch, remote,
               sha1=None, change_id=None):
    """Initialization of abstract Patch class.

    Args:
      project_url: The url of the git repo (can be local or remote) to pull the
                   patch from.
      project: See PatchQuery for documentation.
      ref: The refspec to pull from the git repo.
      tracking_branch: See PatchQuery for documentation.
      remote: See PatchQuery for documentation.
      sha1: The sha1 of the commit, if known. This *must* be accurate.  Can
        be None if not yet known- in which case Fetch will update it.
      change_id: See PatchQuery for documentation.
    """
    super(GitRepoPatch, self).__init__(remote, project=project,
                                       tracking_branch=tracking_branch,
                                       change_id=change_id,
                                       sha1=sha1, gerrit_number=None)

    # git_remote_url is the url of the remote git repo that this patch
    # belongs to. Differs from project_url as that may point to a local
    # repo or a gerrit review repo.
    self.git_remote_url = '%s/%s' % (
        site_config.params.GIT_REMOTES.get(remote), project)
    self.project_url = project_url
    # NOTE: this goes through the commit_message property setter defined
    # later in the class body (the class is fully built before instances
    # exist); a falsy value is stored unchanged by the setter.
    self.commit_message = None
    self._subject_line = None
    self.ref = ref
    # Paths of repositories this patch has already been fetched into.
    self._is_fetched = set()
    # Committer identity; populated by UpdateMetadataFromRepo.
    self._committer_email = None
    self._committer_name = None
    # Backing field for the commit_message property.
    # NOTE(review): appears redundant with the property assignment above,
    # which already left this as None -- confirm before removing.
    self._commit_message = None
+
  @property
  def commit_message(self):
    # Backing field is _commit_message; see the setter below.
    return self._commit_message

  @commit_message.setter
  def commit_message(self, value):
    # Normalize on assignment: non-empty messages get the required
    # Gerrit footers appended; falsy values are stored unchanged.
    self._commit_message = self._AddFooters(value) if value else value
+
  @property
  def internal(self):
    """Whether this patch is to an internal cros project (internal remote)."""
    return self.remote == site_config.params.INTERNAL_REMOTE
+
+  def _GetFooters(self, msg):
+    """Get the Git footers of the specified commit message.
+
+    Args:
+      msg: A commit message
+
+    Returns:
+      The parsed footers from the commit message.  Footers are
+      lines of the form 'key: value' and are at the end of the commit
+      message in a separate paragraph.  We return a list of pairs like
+      ('key', 'value').
+    """
+    footers = []
+    data = re.split(r'\n{2,}', msg.rstrip('\n'))[-1]
+    for line in data.splitlines():
+      m = re.match(r'([A-Za-z0-9-]+): *(.*)', line.rstrip('\n'))
+      if m:
+        footers.append(m.groups())
+    return footers
+
+  def _AddFooters(self, msg):
+    """Ensure that commit messages have a change ID.
+
+    Args:
+      msg: The commit message.
+
+    Returns:
+      The modified commit message with necessary Gerrit footers.
+    """
+    if not msg:
+      msg = '<no commit message provided>'
+
+    if msg[-1] != '\n':
+      msg += '\n'
+
+    # This function is adapted from the version in Gerrit:
+    # goto/createCherryPickCommitMessage
+    old_footers = self._GetFooters(msg)
+
+    if not old_footers:
+      # Doesn't end in a "Signed-off-by: ..." style line? Add another line
+      # break to start a new paragraph for the reviewed-by tag lines.
+      msg += '\n'
+
+    # This replicates the behavior of
+    # goto/createCherryPickCommitMessage, but can result in multiple
+    # Change-Id footers.  We should consider changing this behavior.
+    if ('Change-Id', self.change_id) not in old_footers and self.change_id:
+      msg += 'Change-Id: %s\n' % self.change_id
+
+    return msg
+
  def _PullData(self, rev, git_repo):
    """Returns info about a commit object in the local repository.

    Args:
      rev: The commit to find information about
      git_repo: The path of the local git repository.

    Returns:
      On success, a 6-element sequence (a list) of strings: sha1, tree
      hash, commit subject, commit message, committer email, committer
      name -- each decoded as ASCII with non-ASCII bytes dropped.  On
      failure, a 6-tuple of Nones.
    """
    # %H=sha1, %T=tree hash, %s=subject, %B=raw body, %ce=committer
    # email, %cn=committer name; NUL-separated so fields cannot collide.
    f = '%H%x00%T%x00%s%x00%B%x00%ce%x00%cn'
    cmd = ['log', '--pretty=format:%s' % f, '-n1', rev]
    ret = git.RunGit(git_repo, cmd, error_code_ok=True)
    # TODO(phobbs): this should probably use a namedtuple...
    if ret.returncode != 0:
      # |rev| is unknown (or git failed); callers test fields for None.
      return None, None, None, None, None, None
    output = ret.output.split('\0')
    if len(output) != 6:
      return None, None, None, None, None, None
    return [unicode(x.strip(), 'ascii', 'ignore') for x in output]
+
  def UpdateMetadataFromRepo(self, git_repo, sha1):
    """Update this object's metadata given a sha1.

    This updates various internal fields such as the committer name and email,
    the commit message, tree hash, etc.

    Args:
      git_repo: The path to the git repository that this commit exists in.
      sha1: The sha1 of the commit.  If None, assumes it was just fetched and
        uses "FETCH_HEAD".

    Returns:
      The sha1 of the commit.

    Raises:
      PatchException: If the sha1 read back from |git_repo| differs from
        a previously-known self.sha1.
      ValueError: If no valid sha1 could be read back (error_ok=False).
    """
    sha1 = sha1 or 'FETCH_HEAD'
    sha1, tree_hash, subject, msg, email, name = self._PullData(sha1, git_repo)
    # error_ok=False: a missing/garbled sha1 here is an internal error.
    sha1 = ParseSHA1(sha1, error_ok=False)

    if self.sha1 is not None and sha1 != self.sha1:
      # Even if we know the sha1, still do a sanity check to ensure we
      # actually just fetched it.
      raise PatchException(self,
                           'Patch %s specifies sha1 %s, yet in fetching from '
                           '%s we could not find that sha1.  Internal error '
                           'most likely.' % (self, self.sha1, self.ref))

    self._committer_email = email
    self._committer_name = name
    self.sha1 = sha1
    self.tree_hash = tree_hash
    # Assigning via the commit_message property appends Gerrit footers.
    self.commit_message = msg
    self._EnsureId(self.commit_message)
    self._subject_line = subject
    # Remember we have this commit locally in |git_repo|.
    self._is_fetched.add(git_repo)
    return self.sha1
+
+  def HasBeenFetched(self, git_repo):
+    """Whether this patch has already exists locally in `git_repo`
+
+    Args:
+      git_repo: The git repository to fetch this patch into.
+
+    Returns:
+      If it exists, the sha1 of this patch in `git_repo`.
+    """
+    git_repo = os.path.normpath(git_repo)
+    if git_repo in self._is_fetched:
+      return self.sha1
+
+    # See if we've already got the object.
+    if self.sha1 is not None:
+      return self._PullData(self.sha1, git_repo)[0]
+
+  def Fetch(self, git_repo):
+    """Fetch this patch into the given git repository.
+
+    FETCH_HEAD is implicitly reset by this operation.  Additionally,
+    if the sha1 of the patch was not yet known, it is pulled and stored
+    on this object and the git_repo is updated w/ the requested git
+    object.
+
+    While doing so, we'll load the commit message and Change-Id if not
+    already known.
+
+    Finally, if the sha1 is known and it's already available in the target
+    repository, this will skip the actual fetch operation (it's unneeded).
+
+    Args:
+      git_repo: The git repository to fetch this patch into.
+
+    Returns:
+      The sha1 of the patch.
+    """
+    sha1 = self.HasBeenFetched(git_repo)
+
+    if sha1 is None:
+      git.RunGit(git_repo, ['fetch', '-f', self.project_url, self.ref],
+                 print_cmd=True)
+
+    return self.UpdateMetadataFromRepo(git_repo, sha1=sha1 or self.sha1)
+
+  def GetDiffStatus(self, git_repo):
+    """Isolate the paths and modifications this patch induces.
+
+    Note that detection of file renaming is explicitly turned off.
+    This is intentional since the level of rename detection can vary
+    by user configuration, and trying to have our code specify the
+    minimum level is fairly messy from an API perspective.
+
+    Args:
+      git_repo: Git repository to operate upon.
+
+    Returns:
+      A dictionary of path -> modification_type tuples.  See
+      `git log --help`, specifically the --diff-filter section for details.
+    """
+
+    self.Fetch(git_repo)
+
+    try:
+      lines = git.RunGit(git_repo, ['diff', '--no-renames', '--name-status',
+                                    '%s^..%s' % (self.sha1, self.sha1)])
+    except cros_build_lib.RunCommandError as e:
+      # If we get a 128, that means git couldn't find the the parent of our
+      # sha1- meaning we're the first commit in the repository (there is no
+      # parent).
+      if e.result.returncode != 128:
+        raise
+      return {}
+    lines = lines.output.splitlines()
+    return dict(line.split('\t', 1)[::-1] for line in lines)
+
+  def _AmendCommitMessage(self, git_repo):
+    """"Amend the commit and update our sha1 with the new commit."""
+    git.RunGit(git_repo, ['commit', '--amend', '-m', self.commit_message])
+    self.sha1 = ParseSHA1(self._PullData('HEAD', git_repo)[0], error_ok=False)
+
  def CherryPick(self, git_repo, trivial=False, inflight=False,
                 leave_dirty=False):
    """Attempts to cherry-pick the given rev into branch.

    On failure, unless |leave_dirty| is set, the working tree is reset
    hard so the repository is left pristine.

    Args:
      git_repo: The git repository to operate upon.
      trivial: Only allow trivial merges when applying change.
      inflight: If true, changes are already applied in this branch.
      leave_dirty: If True, if a CherryPick fails leaves partial commit behind.

    Raises:
      ApplyPatchException: if the request couldn't be handled.
      PatchIsEmpty: if the change is already present in the branch.
    """
    # Note the --ff; we do *not* want the sha1 to change unless it
    # has to.
    cmd = ['cherry-pick', '--strategy', 'resolve', '--ff']
    if trivial:
      cmd += ['-X', 'trivial']
    cmd.append(self.sha1)

    # |reset_target| is what the finally clause resets --hard to;
    # None acts as a sentinel meaning "no cleanup needed" (success, or
    # the tree is already pristine).
    reset_target = None if leave_dirty else 'HEAD'
    try:
      git.RunGit(git_repo, cmd, capture_output=False)
      self._AmendCommitMessage(git_repo)
      reset_target = None
      return
    except cros_build_lib.RunCommandError as error:
      ret = error.result.returncode
      if ret not in (1, 2):
        logging.error('Unknown cherry-pick exit code %s; %s', ret, error)
        raise ApplyPatchException(
            self, inflight=inflight,
            message=('Unknown exit code %s returned from cherry-pick '
                     'command: %s' % (ret, error)))
      elif ret == 1:
        # This means merge resolution was fine, but there was content conflicts.
        # If there are no conflicts, then this is caused by the change already
        # being merged.
        result = git.RunGit(git_repo,
                            ['diff', '--name-only', '--diff-filter=U'])

        # Output is one line per filename.
        conflicts = result.output.splitlines()
        if not conflicts:
          # No conflicts means the git repo is in a pristine state.
          reset_target = None
          raise PatchIsEmpty(self, inflight=inflight)

        # Making it here means that it wasn't trivial, nor was it already
        # applied.
        assert not trivial
        raise ApplyPatchException(self, inflight=inflight, files=conflicts)

      # ret=2 handling, this deals w/ trivial conflicts; including figuring
      # out if it was trivial induced or not.
      if not trivial:
        logging.error('The git tree may be corrupted.')
        logging.error('If the git error is "unable to read tree", '
                      'please clean up this repo.')
        raise

      # Here's the kicker; trivial conflicts can mask content conflicts.
      # We would rather state if it's a content conflict since in solving the
      # content conflict, the trivial conflict is solved.  Thus this
      # second run, where we let the exception fly through if one occurs.
      # Note that a trivial conflict means the tree is unmodified; thus
      # no need for cleanup prior to this invocation.
      reset_target = None
      self.CherryPick(git_repo, trivial=False, inflight=inflight)
      # Since it succeeded, we need to rewind.
      reset_target = 'HEAD^'

      raise ApplyPatchException(self, trivial=True, inflight=inflight)
    finally:
      if reset_target is not None:
        git.RunGit(git_repo, ['reset', '--hard', reset_target],
                   error_code_ok=True)
+
  def Apply(self, git_repo, upstream, revision=None, trivial=False):
    """Apply patch into a standalone git repo.

    The git repo does not need to be part of a repo checkout.

    Args:
      git_repo: The git repository to operate upon.
      upstream: The branch to base the patch on.
      revision: Revision to attach the tracking branch to.
      trivial: Only allow trivial merges when applying change.

    Raises:
      ApplyPatchException: If the patch could not be cherry-picked cleanly.
      EbuildConflict: If the patch deletes ebuilds that no longer exist.
    """

    self.Fetch(git_repo)

    logging.info('Attempting to cherry-pick change %s', self)

    # If the patch branch exists use it, otherwise create it and switch to it.
    if git.DoesCommitExistInRepo(git_repo, constants.PATCH_BRANCH):
      git.RunGit(git_repo, ['checkout', '-f', constants.PATCH_BRANCH])
    else:
      git.RunGit(git_repo,
                 ['checkout', '-b', constants.PATCH_BRANCH, '-t', upstream])
      if revision:
        git.RunGit(git_repo, ['reset', '--hard', revision])

    # Figure out if we're inflight.  At this point, we assume that the branch
    # is checked out and rebased onto upstream.  If HEAD differs from upstream,
    # then there are already other patches that have been applied.
    upstream, head = [
        git.RunGit(git_repo, ['rev-list', '-n1', x]).output.strip()
        for x in (upstream, 'HEAD')]
    inflight = (head != upstream)

    # Raises EbuildConflict when this patch deletes ebuilds that are already
    # gone (git would otherwise silently drop the deletion).
    self._FindEbuildConflicts(git_repo, upstream, inflight=inflight)

    do_checkout = True
    try:
      self.CherryPick(git_repo, trivial=trivial, inflight=inflight)
      do_checkout = False
      return
    except ApplyPatchException:
      if not inflight:
        raise
      # The apply failed with other patches already applied; retry against
      # pristine upstream to learn whether the failure is inherent to this
      # patch or caused by the inflight ones.
      git.RunGit(git_repo, ['checkout', '-f', '--detach', upstream])

      self.CherryPick(git_repo, trivial=trivial, inflight=False)
      # Making it here means that it was an inflight issue; throw the original.
      raise
    finally:
      # Ensure we're on the correct branch on the way out.
      if do_checkout:
        git.RunGit(git_repo, ['checkout', '-f', constants.PATCH_BRANCH],
                   error_code_ok=True)
+
+  def ApplyAgainstManifest(self, manifest, trivial=False):
+    """Applies the patch against the specified manifest.
+
+      manifest: A ManifestCheckout object which is used to discern which
+        git repo to patch, what the upstream branch should be, etc.
+      trivial:  Only allow trivial merges when applying change.
+
+    Raises:
+      ApplyPatchException: If the patch failed to apply.
+    """
+    checkout = self.GetCheckout(manifest)
+    revision = checkout.get('revision')
+    # revision might be a branch which is written as it would appear on the
+    # remote. If so, rewrite it as a local reference to the remote branch.
+    # For example, refs/heads/master might become refs/remotes/cros/master.
+    if revision and not git.IsSHA1(revision):
+      revision = 'refs/remotes/%s/%s' % \
+          (checkout['remote'], git.StripRefs(revision))
+    upstream = checkout['tracking_branch']
+    self.Apply(checkout.GetPath(absolute=True), upstream, revision=revision,
+               trivial=trivial)
+
+  def GerritDependencies(self):
+    """Returns a list of Gerrit change numbers that this patch depends on.
+
+    Ordinary patches have no Gerrit-style dependencies since they're not
+    from Gerrit at all. See GerritPatch.GerritDependencies instead.
+    """
+    return []
+
  def _EnsureId(self, commit_message):
    """Ensure we have a usable Change-Id.

    This will parse the Change-Id out of the given commit message;
    if it cannot find one, it logs a warning and creates a fake ID.

    By its nature, that fake ID is useless- it's created to simplify
    API usage for patch consumers. If CQ were to see and try operating
    on one of these, it would fail for example.

    Args:
      commit_message: Full commit message text to parse the Change-Id from.

    Returns:
      self.id when an id was already set; otherwise None (the id is set as
      a side effect via _SetID).
    """
    if self.id is not None:
      return self.id

    try:
      self.change_id = self._ParseChangeId(commit_message)
    except BrokenChangeID:
      logging.warning(
          'Change %s, sha1 %s lacks a change-id in its commit '
          'message.  CQ-DEPEND against this rev may not work, nor '
          'will any gerrit querying.  Please add the appropriate '
          'Change-Id into the commit message to resolve this.',
          self, self.sha1)
      # Fall back to the sha1 itself as the (fake, unqueryable) id.
      self._SetID(self.sha1)
    else:
      self._SetID()
+
  def _ParseChangeId(self, data):
    """Parse a Change-Id out of a block of text.

    Note that the returned content is *not* run through FormatChangeId;
    this is left up to the invoker.

    Args:
      data: Text (typically a commit message) to search for a Change-Id.

    Raises:
      BrokenChangeID: If no Change-Id is present in the last paragraph, or
        if the one found is not in gerrit's strict format.
    """
    # Grab just the last paragraph.
    git_metadata = re.split(r'\n{2,}', data.rstrip())[-1]
    change_id_match = self._GIT_CHANGE_ID_RE.findall(git_metadata)
    if not change_id_match:
      raise BrokenChangeID(self, 'Missing Change-Id in %s' % (data,),
                           missing=True)

    # Now, validate it.  This has no real effect on actual gerrit patches,
    # but for local patches the validation is useful for general sanity
    # enforcement.
    change_id_match = change_id_match[-1]
    # Note that since we're parsing it from basically a commit message,
    # the gerrit standard format is required- no internal markings.
    if not self._STRICT_VALID_CHANGE_ID_RE.match(change_id_match):
      raise BrokenChangeID(self, change_id_match)

    return ParseChangeID(change_id_match)
+
+  def PaladinDependencies(self, git_repo):
+    """Returns an ordered list of dependencies based on the Commit Message.
+
+    Parses the Commit message for this change looking for lines that follow
+    the format:
+
+    CQ-DEPEND=change_num+ e.g.
+
+    A commit which depends on a couple others.
+
+    BUG=blah
+    TEST=blah
+    CQ-DEPEND=10001,10002
+    """
+    dependencies = []
+    logging.debug('Checking for CQ-DEPEND dependencies for change %s', self)
+
+    # Only fetch the commit message if needed.
+    if self.commit_message is None:
+      self.Fetch(git_repo)
+
+    try:
+      dependencies = GetPaladinDeps(self.commit_message)
+    except ValueError as e:
+      raise BrokenCQDepends(self, str(e))
+
+    if dependencies:
+      logging.debug('Found %s Paladin dependencies for change %s',
+                    dependencies, self)
+    return dependencies
+
+  def _FindEbuildConflicts(self, git_repo, upstream, inflight=False):
+    """Verify that there are no ebuild conflicts in the given |git_repo|.
+
+    When an ebuild is uprevved, git treats the uprev as a "delete" and an "add".
+    If a developer writes a CL to delete an ebuild, and the CQ uprevs the ebuild
+    in the mean time, the ebuild deletion is silently lost, because git does
+    not flag the double-delete as a conflict. Instead the CQ attempts to test
+    the CL and it ends up breaking the CQ.
+
+    Args:
+      git_repo: The directory to examine.
+      upstream: The upstream git revision.
+      inflight: Whether we currently have patches applied to this repository.
+    """
+    ebuilds = [path for (path, mtype) in
+               self.GetDiffStatus(git_repo).iteritems()
+               if mtype == 'D' and path.endswith('.ebuild')]
+
+    conflicts = self._FindMissingFiles(git_repo, 'HEAD', ebuilds)
+    if not conflicts:
+      return
+
+    if inflight:
+      # If we're inflight, test against ToT for an accurate error message.
+      tot_conflicts = self._FindMissingFiles(git_repo, upstream, ebuilds)
+      if tot_conflicts:
+        inflight = False
+        conflicts = tot_conflicts
+
+    raise EbuildConflict(self, inflight=inflight, ebuilds=conflicts)
+
+  def _FindMissingFiles(self, git_repo, tree_revision, files):
+    """Return a list of the |files| that are missing in |tree_revision|.
+
+    Args:
+      git_repo: Git repository to work in.
+      tree_revision: Revision of the tree to use.
+      files: Files to look for.
+
+    Returns:
+      A list of the |files| that are missing in |tree_revision|.
+    """
+    if not files:
+      return []
+
+    cmd = ['ls-tree', '--full-name', '--name-only', '-z', tree_revision, '--']
+    output = git.RunGit(git_repo, cmd + files, error_code_ok=True).output
+    existing_filenames = output.split('\0')[:-1]
+    return [x for x in files if x not in existing_filenames]
+
+  def GetCheckout(self, manifest, strict=True):
+    """Get the ProjectCheckout associated with this patch.
+
+    Args:
+      manifest: A ManifestCheckout object.
+      strict: If the change refers to a project/branch that is not in the
+        manifest, raise a ChangeNotInManifest error.
+
+    Raises:
+      ChangeMatchesMultipleCheckouts if there are multiple checkouts that
+      match this change.
+    """
+    checkouts = manifest.FindCheckouts(self.project, self.tracking_branch,
+                                       only_patchable=True)
+    if len(checkouts) != 1:
+      if len(checkouts) > 1:
+        raise ChangeMatchesMultipleCheckouts(self)
+      elif strict:
+        raise ChangeNotInManifest(self)
+      return None
+
+    return checkouts[0]
+
+  def PatchLink(self):
+    """Return a CL link for this patch."""
+    # GitRepoPatch instances don't have a CL link, so just return the string
+    # representation.
+    return str(self)
+
+  def __str__(self):
+    """Returns custom string to identify this patch."""
+    s = '%s:%s' % (self.project, self.ref)
+    if self.sha1 is not None:
+      s = '%s:%s%s' % (s, site_config.params.CHANGE_PREFIX[self.remote],
+                       self.sha1[:8])
+    # TODO(ferringb,build): This gets a bit long in output; should likely
+    # do some form of truncation to it.
+    if self._subject_line:
+      s += ' "%s"' % (self._subject_line,)
+    return s
+
+  def GetLocalSHA1(self, git_repo, revision):
+    """Get the local SHA1 for this patch in the given |manifest|.
+
+    Args:
+      git_repo: The path to the repo.
+      revision: The tracking branch.
+
+    Returns:
+      The local SHA1 for this patch, if it is present in the given |manifest|.
+      If this patch is not present, returns None.
+    """
+    query = 'Change-Id: %s' % self.change_id
+    cmd = ['log', '-F', '--all-match', '--grep', query,
+           '--format=%H', '%s..HEAD' % revision]
+    output = git.RunGit(git_repo, cmd).output.split()
+    if len(output) == 1:
+      return output[0]
+    elif len(output) > 1:
+      raise BrokenChangeID(self, 'Duplicate change ID')
+
+
class LocalPatch(GitRepoPatch):
  """Represents patch coming from an on-disk git repo."""

  def __init__(self, project_url, project, ref, tracking_branch, remote,
               sha1):
    GitRepoPatch.__init__(self, project_url, project, ref, tracking_branch,
                          remote, sha1=sha1)
    # Initialize our commit message/ChangeId now, since we know we have
    # access to the data right now.
    self.Fetch(project_url)

  def _GetCarbonCopy(self):
    """Returns a copy of this commit object, with a different sha1.

    This is used to work around a Gerrit bug, where a commit object cannot be
    uploaded for review if an existing branch (in refs/tryjobs/*) points to
    that same sha1.  So instead we create a copy of the commit object and upload
    that to refs/tryjobs/*.

    Returns:
      The sha1 of the new commit object.
    """
    # Pairs of (name, `git log` format code): the hashes needed to rebuild
    # the commit, plus the identity fields that must carry over to the copy.
    hash_fields = [('tree_hash', '%T'), ('parent_hash', '%P')]
    # NOTE(review): 'GIT_COMMITER_DATE' (single T) is not a real git env
    # var; it is only an internal key used below to derive the correctly
    # spelled GIT_COMMITTER_DATE.  %ct is a unix timestamp, which permits
    # the integer arithmetic below -- confirm before "fixing" the spelling.
    transfer_fields = [('GIT_AUTHOR_NAME', '%an'),
                       ('GIT_AUTHOR_EMAIL', '%ae'),
                       ('GIT_AUTHOR_DATE', '%ad'),
                       ('GIT_COMMITTER_NAME', '%cn'),
                       ('GIT_COMMITTER_EMAIL', '%ce'),
                       ('GIT_COMMITER_DATE', '%ct')]
    fields = hash_fields + transfer_fields

    # One field per line, followed by the raw commit body (%B).
    format_string = '%n'.join([code for _, code in fields] + ['%B'])
    result = git.RunGit(self.project_url,
                        ['log', '--format=%s' % format_string, '-n1',
                         self.sha1])
    lines = result.output.splitlines()
    field_value = dict(zip([name for name, _ in fields],
                           [line.strip() for line in lines]))
    commit_body = '\n'.join(lines[len(fields):])

    # %P lists all parents; more than one means a merge commit, which we
    # refuse to copy.
    if len(field_value['parent_hash'].split()) != 1:
      raise PatchException(self,
                           'Branch %s:%s contains merge result %s!'
                           % (self.project, self.ref, self.sha1))

    extra_env = dict([(field, field_value[field]) for field, _ in
                      transfer_fields])

    # Reset the commit date to a value that can't conflict; if we
    # leave this to git, it's possible for a fast moving set of commit/uploads
    # to all occur within the same second (thus the same commit date),
    # resulting in the same sha1.
    extra_env['GIT_COMMITTER_DATE'] = str(
        int(extra_env['GIT_COMMITER_DATE']) - 1)

    # commit-tree builds a new commit object from the same tree and parent,
    # but with the adjusted committer date, yielding a distinct sha1.
    result = git.RunGit(
        self.project_url,
        ['commit-tree', field_value['tree_hash'], '-p',
         field_value['parent_hash']],
        extra_env=extra_env, input=commit_body)

    new_sha1 = result.output.strip()
    if new_sha1 == self.sha1:
      raise PatchException(
          self,
          'Internal error!  Carbon copy of %s is the same as original!'
          % self.sha1)

    return new_sha1

  def Upload(self, push_url, remote_ref, carbon_copy=True, dryrun=False,
             reviewers=(), cc=()):
    """Upload the patch to a remote git branch.

    Args:
      push_url: Which url to push to.
      remote_ref: The ref on the remote host to push to.
      carbon_copy: Use a carbon_copy of the local commit.
      dryrun: Do the git push with --dry-run
      reviewers: Iterable of reviewers to add.
      cc: Iterable of people to add to cc.

    Returns:
      A list of gerrit URLs found in the output
    """
    if carbon_copy:
      ref_to_upload = self._GetCarbonCopy()
    else:
      ref_to_upload = self.sha1

    cmd = ['push']

    # This matches repo's project.py:Project.UploadForReview logic.
    if reviewers or cc:
      if push_url.startswith('ssh://'):
        # Over ssh, reviewers/cc are passed to gerrit's receive-pack.
        rp = (['gerrit receive-pack'] +
              ['--reviewer=%s' % x for x in reviewers] +
              ['--cc=%s' % x for x in cc])
        cmd.append('--receive-pack=%s' % ' '.join(rp))
      else:
        # Otherwise they are encoded into the pushed ref itself.
        rp = ['r=%s' % x for x in reviewers] + ['cc=%s' % x for x in cc]
        remote_ref += '%' + ','.join(rp)

    cmd += [push_url, '%s:%s' % (ref_to_upload, remote_ref)]
    if dryrun:
      cmd.append('--dry-run')

    # Depending on git/gerrit/weather, the URL might be written to stdout or
    # stderr.  Just combine them so we don't have to worry about it.
    result = git.RunGit(self.project_url, cmd, capture_output=True,
                        combine_stdout_stderr=True)
    lines = result.output.splitlines()
    urls = []
    for num, line in enumerate(lines):
      # Look for output like:
      # remote: New Changes:
      # remote:   https://chromium-review.googlesource.com/36756 Enforce a ...
      if 'New Changes:' in line:
        urls = []
        for line in lines[num + 1:]:
          line = line.split()
          if len(line) < 2 or not line[1].startswith('http'):
            break
          urls.append(line[1])
        break
    return urls
+
+
class UploadedLocalPatch(GitRepoPatch):
  """Represents an uploaded local patch passed in using --remote-patch."""

  def __init__(self, project_url, project, ref, tracking_branch,
               original_branch, original_sha1, remote, carbon_copy_sha1=None):
    """Initializes an UploadedLocalPatch instance.

    Args:
      project_url: See GitRepoPatch for documentation.
      project: See GitRepoPatch for documentation.
      ref: See GitRepoPatch for documentation.
      tracking_branch: See GitRepoPatch for documentation.
      original_branch: The tracking branch of the local patch.
      original_sha1: The sha1 of the local commit.
      remote: See GitRepoPatch for documentation.
      carbon_copy_sha1: The alternative commit hash to use.
    """
    GitRepoPatch.__init__(self, project_url, project, ref, tracking_branch,
                          remote, sha1=carbon_copy_sha1)
    self.original_branch = original_branch
    # ParseSHA1 may yield None (e.g. for an invalid sha1); track that so
    # alias/id derivation below can be skipped.
    self.original_sha1 = ParseSHA1(original_sha1)
    # Idiomatic boolean test (was: `False if ... is None else True`).
    self._original_sha1_valid = self.original_sha1 is not None
    if self._original_sha1_valid and not self.id:
      self.id = AddPrefix(self, self.original_sha1)

  def LookupAliases(self):
    """Return the list of lookup keys this change is known by."""
    aliases = GitRepoPatch.LookupAliases(self)
    if self._original_sha1_valid:
      aliases.append(AddPrefix(self, self.original_sha1))

    return aliases

  def __str__(self):
    """Returns custom string to identify this patch."""
    s = '%s:%s:%s' % (self.project, self.original_branch,
                      self.original_sha1[:8])
    # TODO(ferringb,build): This gets a bit long in output; should likely
    # do some form of truncation to it.
    if self._subject_line:
      s += ':"%s"' % (self._subject_line,)
    return s
+
+
class GerritFetchOnlyPatch(GitRepoPatch):
  """Object that contains information to cherry-pick a Gerrit CL."""

  def __init__(self, project_url, project, ref, tracking_branch, remote,
               sha1, change_id, gerrit_number, patch_number, owner_email=None,
               fail_count=0, pass_count=0, total_fail_count=0):
    """Initializes a GerritFetchOnlyPatch object."""
    super(GerritFetchOnlyPatch, self).__init__(
        project_url, project, ref, tracking_branch, remote,
        change_id=change_id, sha1=sha1)
    self.gerrit_number = gerrit_number
    self.patch_number = patch_number
    # TODO: Do we need three variables for the commit hash?
    self.revision = self.commit = self.sha1

    # Variables below are required to print the CL link.
    self.owner_email = owner_email
    self.owner = None
    if self.owner_email:
      # Use the local part of the email address as the owner name.
      self.owner = self.owner_email.split('@', 1)[0]

    self.url = gob_util.GetChangePageUrl(
        site_config.params.GERRIT_HOSTS[self.remote], int(self.gerrit_number))
    self.fail_count = fail_count
    self.pass_count = pass_count
    self.total_fail_count = total_fail_count

  @classmethod
  def FromAttrDict(cls, attr_dict):
    """Get a GerritFetchOnlyPatch instance from a dict.

    Args:
      attr_dict: A dictionary with the keys given in ALL_ATTRS.
    """
    # NOTE(review): this deliberately names GerritFetchOnlyPatch rather than
    # using |cls|; the GerritPatch subclass has an incompatible __init__, so
    # switching to cls() would break subclass callers -- confirm before
    # changing.
    return GerritFetchOnlyPatch(attr_dict[ATTR_PROJECT_URL],
                                attr_dict[ATTR_PROJECT],
                                attr_dict[ATTR_REF],
                                attr_dict[ATTR_BRANCH],
                                attr_dict[ATTR_REMOTE],
                                attr_dict[ATTR_COMMIT],
                                attr_dict[ATTR_CHANGE_ID],
                                attr_dict[ATTR_GERRIT_NUMBER],
                                attr_dict[ATTR_PATCH_NUMBER],
                                owner_email=attr_dict[ATTR_OWNER_EMAIL],
                                fail_count=int(attr_dict[ATTR_FAIL_COUNT]),
                                pass_count=int(attr_dict[ATTR_PASS_COUNT]),
                                total_fail_count=int(
                                    attr_dict[ATTR_TOTAL_FAIL_COUNT]))

  def _EnsureId(self, commit_message):
    """Ensure we have a usable Change-Id.

    Validate what we received from gerrit against what the commit message
    states.

    Args:
      commit_message: Full commit message text to parse the Change-Id from.
    """
    # GerritPatch instances get their Change-Id from gerrit
    # directly; for this to fail, there is an internal bug.
    assert self.id is not None

    # For GerritPatches, we still parse the ID- this is
    # primarily so we can throw an appropriate warning,
    # and also validate our parsing against gerrit's in
    # the process.
    try:
      parsed_id = self._ParseChangeId(commit_message)
      if parsed_id != self.change_id:
        raise AssertionError(
            'For Change-Id %s, sha %s, our parsing of the Change-Id did not '
            'match what gerrit told us.  This is an internal bug: either our '
            "parsing no longer matches gerrit's, or somehow this instance's "
            'stored change_id was invalidly modified.  Our parsing of the '
            'Change-Id yielded: %s'
            % (self.change_id, self.sha1, parsed_id))

    except BrokenChangeID:
      # Non-fatal: warn but accept gerrit's id as authoritative.
      logging.warning(
          'Change %s, Change-Id %s, sha1 %s lacks a change-id in its commit '
          'message.  This can break the ability for any children to depend on '
          'this Change as a parent.  Please add the appropriate '
          'Change-Id into the commit message to resolve this.',
          self, self.change_id, self.sha1)

  def GetAttributeDict(self):
    """Get a dictionary of attribute used for manifest.

    Returns:
      A dictionary with the keys given in ALL_ATTRS.
    """
    # Counts are serialized as strings so the dict round-trips through
    # manifest XML attributes; FromAttrDict converts them back to int.
    attr_dict = {
        ATTR_REMOTE: self.remote,
        ATTR_GERRIT_NUMBER: self.gerrit_number,
        ATTR_PROJECT: self.project,
        ATTR_PROJECT_URL: self.project_url,
        ATTR_REF: self.ref,
        ATTR_BRANCH: self.tracking_branch,
        ATTR_CHANGE_ID: self.change_id,
        ATTR_COMMIT: self.commit,
        ATTR_PATCH_NUMBER: self.patch_number,
        ATTR_OWNER_EMAIL: self.owner_email,
        ATTR_FAIL_COUNT: str(self.fail_count),
        ATTR_PASS_COUNT: str(self.pass_count),
        ATTR_TOTAL_FAIL_COUNT: str(self.total_fail_count),
    }

    return attr_dict
+
+class GerritPatch(GerritFetchOnlyPatch):
+  """Object that represents a Gerrit CL."""
+
+  def __init__(self, patch_dict, remote, url_prefix):
+    """Construct a GerritPatch object from Gerrit query results.
+
+    Gerrit query JSON fields are documented at:
+    http://gerrit-documentation.googlecode.com/svn/Documentation/2.2.1/json.html
+
+    Args:
+      patch_dict: A dictionary containing the parsed JSON gerrit query results.
+      remote: The manifest remote the patched project uses.
+      url_prefix: The project name will be appended to this to get the full
+                  repository URL.
+    """
+    self.patch_dict = patch_dict
+    self.url_prefix = url_prefix
+    current_patch_set = patch_dict.get('currentPatchSet', {})
+    # id - The CL's ChangeId
+    # revision - The CL's SHA1 hash.
+    # number - The CL's gerrit number.
+    super(GerritPatch, self).__init__(
+        os.path.join(url_prefix, patch_dict['project']),
+        patch_dict['project'],
+        current_patch_set.get('ref'),
+        patch_dict['branch'],
+        remote,
+        current_patch_set.get('revision'),
+        patch_dict['id'],
+        ParseGerritNumber(str(patch_dict['number'])),
+        current_patch_set.get('number'),
+        owner_email=patch_dict['owner']['email'])
+
+    prefix_str = site_config.params.CHANGE_PREFIX[self.remote]
+    self.gerrit_number_str = '%s%s' % (prefix_str, self.gerrit_number)
+    self.url = patch_dict['url']
+    # status - Current state of this change.  Can be one of
+    # ['NEW', 'SUBMITTED', 'MERGED', 'ABANDONED'].
+    self.status = patch_dict['status']
+    self._approvals = []
+    if 'currentPatchSet' in self.patch_dict:
+      self._approvals = self.patch_dict['currentPatchSet'].get('approvals', [])
+    self.commit_timestamp = current_patch_set.get('date', 0)
+    self.approval_timestamp = max(
+        self.commit_timestamp,
+        max(x['grantedOn'] for x in self._approvals) if self._approvals else 0)
+    self._commit_message = None
+    self.commit_message = patch_dict.get('commitMessage')
+
+  @staticmethod
+  def ConvertQueryResults(change, host):
+    """Converts HTTP query results to the old SQL format.
+
+    The HTTP interface to gerrit uses a different json schema from the old SQL
+    interface.  This method converts data from the new schema to the old one,
+    typically before passing it to the GerritPatch constructor.
+
+    Old interface:
+      http://gerrit-documentation.googlecode.com/svn/Documentation/2.6/json.html
+
+    New interface:
+      https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#json-entities
+    """
+    try:
+      _convert_tm = lambda tm: calendar.timegm(
+          time.strptime(tm.partition('.')[0], '%Y-%m-%d %H:%M:%S'))
+      _convert_user = lambda u: {
+          'name': u.get('name'),
+          'email': u.get('email'),
+          'username': u.get('name'),
+      }
+      change_id = change['change_id'].split('~')[-1]
+      patch_dict = {
+          'project': change['project'],
+          'branch': change['branch'],
+          'createdOn': _convert_tm(change['created']),
+          'lastUpdated': _convert_tm(change['updated']),
+          'id': change_id,
+          'owner': _convert_user(change['owner']),
+          'number': str(change['_number']),
+          'url': gob_util.GetChangePageUrl(host, change['_number']),
+          'status': change['status'],
+          'subject': change.get('subject'),
+      }
+      current_revision = change.get('current_revision', '')
+      current_revision_info = change.get('revisions', {}).get(current_revision)
+      if current_revision_info:
+        approvals = []
+        for label, label_data in change['labels'].iteritems():
+          # Skip unknown labels.
+          if label not in constants.GERRIT_ON_BORG_LABELS:
+            continue
+          for review_data in label_data.get('all', []):
+            granted_on = review_data.get('date', change['created'])
+            approvals.append({
+                'type': constants.GERRIT_ON_BORG_LABELS[label],
+                'description': label,
+                'value': str(review_data.get('value', '0')),
+                'grantedOn': _convert_tm(granted_on),
+                'by': _convert_user(review_data),
+            })
+
+        date = current_revision_info['commit']['committer']['date']
+        patch_dict['currentPatchSet'] = {
+            'approvals': approvals,
+            'ref': current_revision_info['fetch']['http']['ref'],
+            'revision': current_revision,
+            'number': str(current_revision_info['_number']),
+            'date': _convert_tm(date),
+            'draft': current_revision_info.get('draft', False),
+        }
+
+        current_commit = current_revision_info.get('commit')
+        if current_commit:
+          patch_dict['commitMessage'] = current_commit['message']
+          parents = current_commit.get('parents', [])
+          patch_dict['dependsOn'] = [{'revision': p['commit']} for p in parents]
+
+      return patch_dict
+    except:
+      logging.error('Error while converting:\n%s', change, exc_info=True)
+      raise
+
+  def __reduce__(self):
+    """Used for pickling to re-create patch object."""
+    return self.__class__, (self.patch_dict.copy(), self.remote,
+                            self.url_prefix)
+
+  def GerritDependencies(self):
+    """Returns the list of PatchQuery objects that this patch depends on."""
+    results = []
+    for d in self.patch_dict.get('dependsOn', []):
+      gerrit_number = d.get('number')
+      if gerrit_number is not None:
+        gerrit_number = ParseGerritNumber(gerrit_number, error_ok=False)
+
+      change_id = d.get('id')
+      if change_id is not None:
+        change_id = ParseChangeID(change_id, error_ok=False)
+
+      sha1 = d.get('revision')
+      if sha1 is not None:
+        sha1 = ParseSHA1(sha1, error_ok=False)
+
+      if not gerrit_number and not change_id and not sha1:
+        raise AssertionError(
+            'While processing the dependencies of change %s, no "number", "id",'
+            ' or "revision" key found in: %r' % (self.gerrit_number, d))
+
+      results.append(
+          PatchQuery(self.remote, project=self.project,
+                     tracking_branch=self.tracking_branch,
+                     gerrit_number=gerrit_number,
+                     change_id=change_id, sha1=sha1))
+    return results
+
+  def IsAlreadyMerged(self):
+    """Returns whether the patch has already been merged in Gerrit."""
+    return self.status == 'MERGED'
+
+  def IsBeingMerged(self):
+    """Whether the patch is merged or in the progress of being merged."""
+    return self.status in ('SUBMITTED', 'MERGED')
+
+  def IsDraft(self):
+    """Return true if the latest patchset is a draft."""
+    return self.patch_dict['currentPatchSet']['draft']
+
+  def HasApproval(self, field, value):
+    """Return whether the current patchset has the specified approval.
+
+    Args:
+      field: Which field to check.
+        'VRIF': Whether patch was verified.
+        'CRVW': Whether patch was approved.
+        'COMR': Whether patch was marked commit ready.
+        'TRY':  Whether patch was marked ready for trybot.
+      value: The expected value of the specified field (as string, or as list
+             of accepted strings).
+    """
+    # All approvals default to '0', so use that if there's no matches.
+    type_approvals = [x['value'] for x in self._approvals if x['type'] == field]
+    type_approvals = type_approvals or ['0']
+    if isinstance(value, (tuple, list)):
+      return bool(set(value) & set(type_approvals))
+    else:
+      return value in type_approvals
+
+  def HasApprovals(self, flags):
+    """Return whether the current patchset has the specified approval.
+
+    Args:
+      flags: A dictionary of flag -> value mappings in
+        GerritPatch.HasApproval format.
+        ex: { 'CRVW': '2', 'VRIF': '1', 'COMR': ('1', '2') }
+
+    returns boolean telling if all flag requirements are met.
+    """
+    return all(self.HasApproval(field, value)
+               for field, value in flags.iteritems())
+
+  def WasVetoed(self):
+    """Return whether this CL was vetoed with VRIF=-1 or CRVW=-2."""
+    return self.HasApproval('VRIF', '-1') or self.HasApproval('CRVW', '-2')
+
+  def IsMergeable(self):
+    """Return true if all Gerrit approvals required for submission are set."""
+    return not self.GetMergeException()
+
+  def HasReadyFlag(self):
+    """Return true if the trybot-ready or commit-ready flag is set."""
+    return self.HasApproval('COMR', ('1', '2')) or self.HasApproval('TRY', '1')
+
+  def GetMergeException(self):
+    """Return the reason why this change is not mergeable.
+
+    If the change is in fact mergeable, return None.
+    """
+    if self.IsDraft():
+      return PatchNotMergeable(self, 'is a draft.')
+
+    if self.status != 'NEW':
+      statuses = {
+          'MERGED': 'is already merged.',
+          'SUBMITTED': 'is being merged.',
+          'ABANDONED': 'is abandoned.',
+      }
+      message = statuses.get(self.status, 'has status %s.' % self.status)
+      return PatchNotMergeable(self, message)
+
+    if self.HasApproval('VRIF', '-1'):
+      return PatchNotMergeable(self, 'is marked as Verified=-1.')
+    elif self.HasApproval('CRVW', '-2'):
+      return PatchNotMergeable(self, 'is marked as Code-Review=-2.')
+    elif not self.HasApproval('CRVW', '2'):
+      return PatchNotMergeable(self, 'is not marked Code-Review=+2.')
+    elif not self.HasApproval('VRIF', '1'):
+      return PatchNotMergeable(self, 'is not marked Verified=+1.')
+    elif not self.HasApproval('COMR', ('1', '2')):
+      return PatchNotMergeable(self, 'is not marked Commit-Queue>=+1.')
+
+  def GetLatestApproval(self, field):
+    """Return most recent value of specific field on the current patchset.
+
+    Args:
+      field: Which field to check ('VRIF', 'CRVW', ...).
+
+    Returns:
+      Most recent field value (as str) or '0' if no such field.
+    """
+    # All approvals default to '0', so use that if there's no matches.
+    type_approvals = [x['value'] for x in self._approvals if x['type'] == field]
+    return type_approvals[-1] if type_approvals else '0'
+
+  def PatchLink(self):
+    """Return a CL link for this patch."""
+    return 'CL:%s' % (self.gerrit_number_str,)
+
+  def _AddFooters(self, msg):
+    """Ensure that commit messages have necessary Gerrit footers on the end.
+
+    Args:
+      msg: The commit message.
+
+    Returns:
+      The modified commit message with necessary Gerrit footers.
+    """
+    # Let the base class append its own footers first.
+    msg = super(GerritPatch, self)._AddFooters(msg)
+
+    # This function is adapted from the version in Gerrit:
+    # goto/createCherryPickCommitMessage
+    old_footers = self._GetFooters(msg)
+
+    gerrit_host = site_config.params.GERRIT_HOSTS[self.remote]
+    reviewed_on = 'https://%s/%s' % (gerrit_host, self.gerrit_number)
+    # Only record the review URL if it isn't already present.
+    if ('Reviewed-on', reviewed_on) not in old_footers:
+      msg += 'Reviewed-on: %s\n' % reviewed_on
+
+    # Translate each gerrit approval into a (tag, ident) footer, skipping
+    # any the message already carries.
+    for approval in self._approvals:
+      footer = FooterForApproval(approval, old_footers)
+      if footer and footer not in old_footers:
+        msg += '%s: %s\n' % footer
+
+    return msg
+
+  def __str__(self):
+    """Returns custom string to identify this patch."""
+    s = '%s:%s' % (self.owner, self.gerrit_number_str)
+    if self.sha1 is not None:
+      s = '%s:%s%s' % (s, site_config.params.CHANGE_PREFIX[self.remote],
+                       self.sha1[:8])
+    if self._subject_line:
+      s += ':"%s"' % (self._subject_line,)
+    return s
+
+
+# Maps a Gerrit approval category to the commit-message footer tag used
+# when recording that approval (see FooterForApproval).  A value of None
+# means the category gets no footer at all.
+FOOTER_TAGS_BY_APPROVAL_TYPE = {
+    'CRVW': 'Reviewed-by',
+    'VRIF': 'Tested-by',
+    'COMR': 'Commit-Ready',
+    'TRY': None,
+    'SUBM': 'Submitted-by',
+}
+
+
+def FooterForApproval(approval, footers):
+  """Return a commit-message footer for a given approver.
+
+  Args:
+    approval: A dict containing the information about an approver
+    footers: A sequence of existing footers in the commit message.
+
+  Returns:
+    A 'footer', which is a tuple (tag, id).
+  """
+  if int(approval.get('value', 0)) <= 0:
+    # Negative votes aren't counted.
+    return
+
+  name = approval.get('by', {}).get('name')
+  email = approval.get('by', {}).get('email')
+  ident = ' '.join(x for x in [name, email and '<%s>' % email] if x)
+
+  # Nothing reasonable to describe them by? Ignore them.
+  if not ident:
+    return
+
+  # Don't bother adding additional footers if the CL has already been
+  # signed off.
+  if ('Signed-off-by', ident) in footers:
+    return
+
+  # If the tag is unknown, don't return anything at all.
+  if approval['type'] not in FOOTER_TAGS_BY_APPROVAL_TYPE:
+    logging.warning('unknown gerrit type %s (%r)', approval['type'], approval)
+    return
+
+  # We don't care about certain gerrit flags as they aren't approval related.
+  tag = FOOTER_TAGS_BY_APPROVAL_TYPE[approval['type']]
+  if not tag:
+    return
+
+  return tag, ident
+
+
+def GeneratePatchesFromRepo(git_repo, project, tracking_branch, branch, remote,
+                            allow_empty=False):
+  """Create a list of LocalPatch objects from a repo on disk.
+
+  Args:
+    git_repo: The path to the repo.
+    project: The name of the associated project.
+    tracking_branch: The remote tracking branch we want to test against.
+    branch: The name of our local branch, where we will look for patches.
+    remote: The name of the remote to use. E.g. 'cros'
+    allow_empty: Whether to allow the case where no patches were specified.
+  """
+
+  result = git.RunGit(
+      git_repo,
+      ['rev-list', '--reverse', '%s..%s' % (tracking_branch, branch)])
+
+  sha1s = result.output.splitlines()
+  if not sha1s:
+    if not allow_empty:
+      cros_build_lib.Die('No changes found in %s:%s' % (project, branch))
+    return
+
+  for sha1 in sha1s:
+    yield LocalPatch(os.path.join(git_repo, '.git'),
+                     project, branch, tracking_branch,
+                     remote, sha1)
+
+
+def PrepareLocalPatches(manifest, patches):
+  """Finish validation of parameters, and save patches to a temp folder.
+
+  Args:
+    manifest: The manifest object for the checkout in question.
+    patches: A list of user-specified patches, in project:branch form.
+      cbuildbot pre-processes the patch names before sending them to us,
+      so we can expect that branch names will always be present.
+  """
+  patch_info = []
+  for patch in patches:
+    project, branch = patch.split(':')
+    project_patch_info = []
+    for checkout in manifest.FindCheckouts(project, only_patchable=True):
+      tracking_branch = checkout['tracking_branch']
+      project_dir = checkout.GetPath(absolute=True)
+      remote = checkout['remote']
+      project_patch_info.extend(GeneratePatchesFromRepo(
+          project_dir, project, tracking_branch, branch, remote))
+
+    if not project_patch_info:
+      cros_build_lib.Die('No changes found in %s:%s' % (project, branch))
+    patch_info.extend(project_patch_info)
+
+  return patch_info
+
+
+def PrepareRemotePatches(patches):
+  """Generate patch objects from list of --remote-patch parameters.
+
+  Args:
+    patches: A list of --remote-patches strings that the user specified on
+             the commandline.  Patch strings are colon-delimited.  Patches come
+             in the format
+             <project>:<original_branch>:<ref>:<tracking_branch>:<tag>.
+             A description of each element:
+             project: The manifest project name that the patch is for.
+             original_branch: The name of the development branch that the local
+                              patch came from.
+             ref: The remote ref that points to the patch.
+             tracking_branch: The upstream branch that the original_branch was
+                              tracking.  Should be a manifest branch.
+             tag: Denotes whether the project is an internal or external
+                  project.
+  """
+  patch_info = []
+  for patch in patches:
+    try:
+      project, original_branch, ref, tracking_branch, tag = patch.split(':')
+    except ValueError as e:
+      raise ValueError(
+          'Unexpected tryjob format.  You may be running an '
+          "older version of chromite.  Run 'repo sync "
+          "chromiumos/chromite'.  Error was %s" % e)
+
+    if tag not in constants.PATCH_TAGS:
+      raise ValueError('Bad remote patch format.  Unknown tag %s' % tag)
+
+    remote = site_config.params.EXTERNAL_REMOTE
+    if tag == constants.INTERNAL_PATCH_TAG:
+      remote = site_config.params.INTERNAL_REMOTE
+
+    push_url = site_config.params.GIT_REMOTES[remote]
+    patch_info.append(UploadedLocalPatch(os.path.join(push_url, project),
+                                         project, ref, tracking_branch,
+                                         original_branch,
+                                         os.path.basename(ref), remote))
+
+  return patch_info
+
+
+def GetChangesAsString(changes, prefix='CL:', delimiter=' '):
+  """Gets a human readable string listing |changes| in CL:1234 form.
+
+  Args:
+    changes: A list of GerritPatch objects.
+    prefix: Prefix to use. Defaults to 'CL:'
+    delimiter: Delimiter to use. Defaults to a space.
+  """
+  formatted_changes = ['%s%s' % (prefix, AddPrefix(x, x.gerrit_number))
+                       for x in changes]
+  return delimiter.join(sorted(formatted_changes))
diff --git a/lib/patch_unittest b/lib/patch_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/patch_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/patch_unittest.py b/lib/patch_unittest.py
new file mode 100644
index 0000000..a3f2c4c
--- /dev/null
+++ b/lib/patch_unittest.py
@@ -0,0 +1,1062 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for chromite.lib.patch."""
+
+from __future__ import print_function
+
+import copy
+import contextlib
+import itertools
+import mock
+import os
+import shutil
+import time
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import gerrit
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import patch as cros_patch
+
+
+site_config = config_lib.GetConfig()
+
+
+# Monotonically increasing counter used to generate unique file content
+# in tests (Python 2 iterator .next bound as a callable).
+_GetNumber = iter(itertools.count()).next
+
+# Canned, already-decoded gerrit query result used to fabricate
+# GerritPatch objects without contacting a real gerrit server.
+FAKE_PATCH_JSON = {
+    'project': 'tacos/chromite',
+    'branch': 'master',
+    'id': 'Iee5c89d929f1850d7d4e1a4ff5f21adda800025f',
+    'currentPatchSet': {
+        'number': '2',
+        'ref': gerrit.GetChangeRef(1112, 2),
+        'revision': 'ff10979dd360e75ff21f5cf53b7f8647578785ef',
+    },
+    'number': '1112',
+    'subject': 'chromite commit',
+    'owner': {
+        'name': 'Chromite Master',
+        'email': 'chromite@chromium.org',
+    },
+    'url': 'https://chromium-review.googlesource.com/1112',
+    'lastUpdated': 1311024529,
+    'sortKey': '00166e8700001052',
+    'open': True,
+    'status': 'NEW',
+}
+
+# Change-IDs of known changes in public gerrit, one per interesting
+# state (open / merged / abandoned).
+GERRIT_OPEN_CHANGEID = '8366'
+GERRIT_MERGED_CHANGEID = '3'
+GERRIT_ABANDONED_CHANGEID = '2'
+
+
+class GitRepoPatchTestCase(cros_test_lib.TempDirTestCase):
+  """Helper TestCase class for writing test cases."""
+
+  # No mock bits are to be used in this class's tests.
+  # This needs to actually validate git output, and git behaviour, rather
+  # than test our assumptions about git's behaviour/output.
+
+  # Patch class under test; subclasses override to exercise other
+  # cros_patch flavours with the same scenarios.
+  patch_kls = cros_patch.GitRepoPatch
+
+  COMMIT_TEMPLATE = """\
+commit abcdefgh
+
+Author: Fake person
+Date:  Tue Oct 99
+
+I am the first commit.
+
+%(extra)s
+
+%(change-id)s
+"""
+
+  # Boolean controlling whether the target class natively knows its
+  # ChangeId; only GerritPatches do.
+  has_native_change_id = False
+
+  DEFAULT_TRACKING = (
+      'refs/remotes/%s/master' % site_config.params.EXTERNAL_REMOTE)
+
+  def _CreateSourceRepo(self, path):
+    """Generate a new repo with a single commit."""
+    tmp_path = '%s-tmp' % path
+    os.mkdir(path)
+    os.mkdir(tmp_path)
+    self._run(['git', 'init', '--separate-git-dir', path], cwd=tmp_path)
+
+    # Add an initial commit then wipe the working tree.
+    self._run(['git', 'commit', '--allow-empty', '-m', 'initial commit'],
+              cwd=tmp_path)
+    shutil.rmtree(tmp_path)
+
+  def setUp(self):
+    """Create the source repo and an unwritable default cwd."""
+    # Create an empty repo to work from.
+    self.source = os.path.join(self.tempdir, 'source.git')
+    self._CreateSourceRepo(self.source)
+    self.default_cwd = os.path.join(self.tempdir, 'unwritable')
+    self.original_cwd = os.getcwd()
+    os.mkdir(self.default_cwd)
+    os.chdir(self.default_cwd)
+    # Disallow write so as to smoke out any invalid writes to
+    # cwd.
+    os.chmod(self.default_cwd, 0o500)
+
+  def tearDown(self):
+    """Restore the working directory changed in setUp."""
+    if hasattr(self, 'original_cwd'):
+      os.chdir(self.original_cwd)
+
+  def _MkPatch(self, source, sha1, ref='refs/heads/master', **kwargs):
+    """Construct a patch_kls instance for |sha1| in repo |source|."""
+    return self.patch_kls(source, 'chromiumos/chromite', ref,
+                          '%s/master' % site_config.params.EXTERNAL_REMOTE,
+                          kwargs.pop('remote',
+                                     site_config.params.EXTERNAL_REMOTE),
+                          sha1=sha1, **kwargs)
+
+  def _run(self, cmd, cwd=None):
+    """Run |cmd| in |cwd| and return its stripped stdout."""
+    # Note that cwd is intentionally set to a location the user can't write
+    # to; this flushes out any bad usage in the tests that would work by
+    # fluke of being invoked from w/in a git repo.
+    if cwd is None:
+      cwd = self.default_cwd
+    return cros_build_lib.RunCommand(
+        cmd, cwd=cwd, print_cmd=False, capture_output=True).output.strip()
+
+  def _GetSha1(self, cwd, refspec):
+    """Return the sha1 that |refspec| resolves to in repo |cwd|."""
+    return self._run(['git', 'rev-list', '-n1', refspec], cwd=cwd)
+
+  def _MakeRepo(self, name, clone, remote=None, alternates=True):
+    """Clone |clone| into tempdir/|name|; return the new repo's path."""
+    path = os.path.join(self.tempdir, name)
+    cmd = ['git', 'clone', clone, path]
+    if alternates:
+      cmd += ['--reference', clone]
+    if remote is None:
+      remote = site_config.params.EXTERNAL_REMOTE
+    cmd += ['--origin', remote]
+    self._run(cmd)
+    return path
+
+  def _MakeCommit(self, repo, commit=None):
+    """Commit all staged/tracked changes in |repo|; return the new sha1."""
+    if commit is None:
+      commit = 'commit at %s' % (time.time(),)
+    self._run(['git', 'commit', '-a', '-m', commit], repo)
+    return self._GetSha1(repo, 'HEAD')
+
+  def CommitFile(self, repo, filename, content, commit=None, **kwargs):
+    """Write |content| to |filename|, commit it, and return a patch for it."""
+    osutils.WriteFile(os.path.join(repo, filename), content)
+    self._run(['git', 'add', filename], repo)
+    sha1 = self._MakeCommit(repo, commit=commit)
+    # Only patch classes that natively track a ChangeId accept one.
+    if not self.has_native_change_id:
+      kwargs.pop('ChangeId', None)
+    patch = self._MkPatch(repo, sha1, **kwargs)
+    self.assertEqual(patch.sha1, sha1)
+    return patch
+
+  def _CommonGitSetup(self):
+    """Clone two repos from source; land one commit in the first."""
+    git1 = self._MakeRepo('git1', self.source)
+    git2 = self._MakeRepo('git2', self.source)
+    patch = self.CommitFile(git1, 'monkeys', 'foon')
+    return git1, git2, patch
+
+  def MakeChangeId(self, how_many=1):
+    """Return one fabricated Change-Id, or a list of |how_many| of them."""
+    l = [cros_patch.MakeChangeId() for _ in xrange(how_many)]
+    if how_many == 1:
+      return l[0]
+    return l
+
+  def CommitChangeIdFile(self, repo, changeid=None, extra=None,
+                         filename='monkeys', content='flinging',
+                         raw_changeid_text=None, **kwargs):
+    """Commit a file using COMMIT_TEMPLATE, embedding a Change-Id footer."""
+    template = self.COMMIT_TEMPLATE
+    if changeid is None:
+      changeid = self.MakeChangeId()
+    if raw_changeid_text is None:
+      raw_changeid_text = 'Change-Id: %s' % (changeid,)
+    if extra is None:
+      extra = ''
+    commit = template % {'change-id': raw_changeid_text, 'extra': extra}
+
+    return self.CommitFile(repo, filename, content, commit=commit,
+                           ChangeId=changeid, **kwargs)
+
+
+class TestGitRepoPatch(GitRepoPatchTestCase):
+  """Unittests for git patch related methods."""
+
+  def testGetDiffStatus(self):
+    """Verify GetDiffStatus reports per-file A/M/D statuses."""
+    git1, _, patch1 = self._CommonGitSetup()
+    # Ensure that it can work on the first commit, even if it
+    # doesn't report anything (no delta; it's the first files).
+    patch1 = self._MkPatch(git1, self._GetSha1(git1, self.DEFAULT_TRACKING))
+    self.assertEqual({}, patch1.GetDiffStatus(git1))
+    patch2 = self.CommitFile(git1, 'monkeys', 'blah')
+    self.assertEqual({'monkeys': 'M'}, patch2.GetDiffStatus(git1))
+    git.RunGit(git1, ['mv', 'monkeys', 'monkeys2'])
+    patch3 = self._MkPatch(git1, self._MakeCommit(git1, commit='mv'))
+    self.assertEqual({'monkeys': 'D', 'monkeys2': 'A'},
+                     patch3.GetDiffStatus(git1))
+    patch4 = self.CommitFile(git1, 'monkey2', 'blah')
+    self.assertEqual({'monkey2': 'A'}, patch4.GetDiffStatus(git1))
+
+  def testFetch(self):
+    """Verify Fetch pulls the patch rev and reuses local alternates."""
+    _, git2, patch = self._CommonGitSetup()
+    patch.Fetch(git2)
+    self.assertEqual(patch.sha1, self._GetSha1(git2, 'FETCH_HEAD'))
+    # Verify reuse; specifically that Fetch doesn't actually run since
+    # the rev is already available locally via alternates.
+    patch.project_url = '/dev/null'
+    git3 = self._MakeRepo('git3', git2)
+    patch.Fetch(git3)
+    self.assertEqual(patch.sha1, self._GetSha1(git3, patch.sha1))
+
+  def testFetchFirstPatchInSeries(self):
+    """Verify fetching a patch that has a successor in its series."""
+    git1, git2, patch = self._CommonGitSetup()
+    self.CommitFile(git1, 'monkeys', 'foon2')
+    patch.Fetch(git2)
+
+  def testFetchWithoutSha1(self):
+    """Verify Fetch fills in a missing sha1 from the fetched ref."""
+    git1, git2, _ = self._CommonGitSetup()
+    patch2 = self.CommitFile(git1, 'monkeys', 'foon2')
+    sha1, patch2.sha1 = patch2.sha1, None
+    patch2.Fetch(git2)
+    self.assertEqual(sha1, patch2.sha1)
+
+  def testAlreadyApplied(self):
+    """Verify applying an already-present change raises PatchIsEmpty."""
+    git1 = self._MakeRepo('git1', self.source)
+    patch1 = self._MkPatch(git1, self._GetSha1(git1, 'HEAD'))
+    self.assertRaises2(cros_patch.PatchIsEmpty, patch1.Apply, git1,
+                       self.DEFAULT_TRACKING, check_attrs={'inflight': False})
+    patch2 = self.CommitFile(git1, 'monkeys', 'rule')
+    self.assertRaises2(cros_patch.PatchIsEmpty, patch2.Apply, git1,
+                       self.DEFAULT_TRACKING, check_attrs={'inflight': True})
+
+  def testDeleteEbuildTwice(self):
+    """Test that double-deletes of ebuilds are flagged as conflicts."""
+    # Create monkeys.ebuild for testing.
+    git1 = self._MakeRepo('git1', self.source)
+    patch1 = self.CommitFile(git1, 'monkeys.ebuild', 'rule')
+    git.RunGit(git1, ['rm', 'monkeys.ebuild'])
+    patch2 = self._MkPatch(git1, self._MakeCommit(git1, commit='rm'))
+
+    # Delete an ebuild that does not exist in TOT.
+    check_attrs = {'inflight': False, 'files': ('monkeys.ebuild',)}
+    self.assertRaises2(cros_patch.EbuildConflict, patch2.Apply, git1,
+                       self.DEFAULT_TRACKING, check_attrs=check_attrs)
+
+    # Delete an ebuild that exists in TOT, but does not exist in the current
+    # patch series.
+    check_attrs['inflight'] = True
+    self.assertRaises2(cros_patch.EbuildConflict, patch2.Apply, git1,
+                       patch1.sha1, check_attrs=check_attrs)
+
+  def testCleanlyApply(self):
+    """Verify a clean Apply, and that alternates satisfy the fetch."""
+    _, git2, patch = self._CommonGitSetup()
+    # Clone git3 before we modify git2; else we'll just wind up
+    # cloning its master.
+    git3 = self._MakeRepo('git3', git2)
+    patch.Apply(git2, self.DEFAULT_TRACKING)
+    # Verify reuse; specifically that Fetch doesn't actually run since
+    # the object is available in alternates.  testFetch partially
+    # validates this; the Apply usage here fully validates it via
+    # ensuring that the attempted Apply goes boom if it can't get the
+    # required sha1.
+    patch.project_url = '/dev/null'
+    patch.Apply(git3, self.DEFAULT_TRACKING)
+
+  def testFailsApply(self):
+    """Verify a conflicting patch raises ApplyPatchException."""
+    _, git2, patch1 = self._CommonGitSetup()
+    patch2 = self.CommitFile(git2, 'monkeys', 'not foon')
+    # Note that Apply creates it's own branch, resetting to master
+    # thus we have to re-apply (even if it looks stupid, it's right).
+    patch2.Apply(git2, self.DEFAULT_TRACKING)
+    self.assertRaises2(cros_patch.ApplyPatchException,
+                       patch1.Apply, git2, self.DEFAULT_TRACKING,
+                       exact_kls=True, check_attrs={'inflight': True})
+
+  def testTrivial(self):
+    """Exercise trivial-merge semantics of Apply, in and out of flight."""
+    _, git2, patch1 = self._CommonGitSetup()
+    # Throw in a bunch of newlines so that content-merging would work.
+    content = 'not foon%s' % ('\n' * 100)
+    patch1 = self._MkPatch(git2, self._GetSha1(git2, 'HEAD'))
+    patch1 = self.CommitFile(git2, 'monkeys', content)
+    git.RunGit(
+        git2, ['update-ref', self.DEFAULT_TRACKING, patch1.sha1])
+    patch2 = self.CommitFile(git2, 'monkeys', '%sblah' % content)
+    patch3 = self.CommitFile(git2, 'monkeys', '%sblahblah' % content)
+    # Get us a back to the basic, then derive from there; this is used to
+    # verify that even if content merging works, trivial is flagged.
+    self.CommitFile(git2, 'monkeys', 'foon')
+    patch4 = self.CommitFile(git2, 'monkeys', content)
+    patch5 = self.CommitFile(git2, 'monkeys', '%sfoon' % content)
+    # Reset so we derive the next changes from patch1.
+    git.RunGit(git2, ['reset', '--hard', patch1.sha1])
+    patch6 = self.CommitFile(git2, 'blah', 'some-other-file')
+    self.CommitFile(git2, 'monkeys',
+                    '%sblah' % content.replace('not', 'bot'))
+
+    self.assertRaises2(cros_patch.PatchIsEmpty,
+                       patch1.Apply, git2, self.DEFAULT_TRACKING, trivial=True,
+                       check_attrs={'inflight': False, 'trivial': False})
+
+    # Now test conflicts since we're still at ToT; note that this is an actual
+    # conflict because the fuzz anchors have changed.
+    self.assertRaises2(cros_patch.ApplyPatchException,
+                       patch3.Apply, git2, self.DEFAULT_TRACKING, trivial=True,
+                       check_attrs={'inflight': False, 'trivial': False},
+                       exact_kls=True)
+
+    # Now test trivial conflict; this would've merged fine were it not for
+    # trivial.
+    self.assertRaises2(cros_patch.PatchIsEmpty,
+                       patch4.Apply, git2, self.DEFAULT_TRACKING, trivial=True,
+                       check_attrs={'inflight': False, 'trivial': False},
+                       exact_kls=True)
+
+    # Move us into inflight testing.
+    patch2.Apply(git2, self.DEFAULT_TRACKING, trivial=True)
+
+    # Repeat the tests from above; should still be the same.
+    self.assertRaises2(cros_patch.PatchIsEmpty,
+                       patch4.Apply, git2, self.DEFAULT_TRACKING, trivial=True,
+                       check_attrs={'inflight': False, 'trivial': False})
+
+    # Actual conflict merge conflict due to inflight; non trivial induced.
+    self.assertRaises2(cros_patch.ApplyPatchException,
+                       patch5.Apply, git2, self.DEFAULT_TRACKING, trivial=True,
+                       check_attrs={'inflight': True, 'trivial': False},
+                       exact_kls=True)
+
+    self.assertRaises2(cros_patch.PatchIsEmpty,
+                       patch1.Apply, git2, self.DEFAULT_TRACKING, trivial=True,
+                       check_attrs={'inflight': False})
+
+    self.assertRaises2(cros_patch.ApplyPatchException,
+                       patch5.Apply, git2, self.DEFAULT_TRACKING, trivial=True,
+                       check_attrs={'inflight': True, 'trivial': False},
+                       exact_kls=True)
+
+    # And this should apply without issue, despite the differing history.
+    patch6.Apply(git2, self.DEFAULT_TRACKING, trivial=True)
+
+  def _assertLookupAliases(self, remote):
+    """Assert LookupAliases covers sha1/number/original-sha1/Change-Id."""
+    git1 = self._MakeRepo('git1', self.source)
+    patch = self.CommitChangeIdFile(git1, remote=remote)
+    prefix = '*' if patch.internal else ''
+    vals = [patch.sha1, getattr(patch, 'gerrit_number', None),
+            getattr(patch, 'original_sha1', None)]
+    # Append full Change-ID if it exists.
+    if patch.project and patch.tracking_branch and patch.change_id:
+      vals.append('%s~%s~%s' % (
+          patch.project, patch.tracking_branch, patch.change_id))
+    vals = [x for x in vals if x is not None]
+    self.assertEqual(set(prefix + x for x in vals), set(patch.LookupAliases()))
+
+  def testExternalLookupAliases(self):
+    """Verify aliases for a patch on the external remote."""
+    self._assertLookupAliases(site_config.params.EXTERNAL_REMOTE)
+
+  def testInternalLookupAliases(self):
+    """Verify aliases for a patch on the internal remote."""
+    self._assertLookupAliases(site_config.params.INTERNAL_REMOTE)
+
+  def _CheckPaladin(self, repo, master_id, ids, extra):
+    """Commit with CQ-DEPEND text |extra| and check the parsed deps |ids|."""
+    patch = self.CommitChangeIdFile(
+        repo, master_id, extra=extra,
+        filename='paladincheck', content=str(_GetNumber()))
+    deps = patch.PaladinDependencies(repo)
+    # Assert that our parsing unique'ifies the results.
+    self.assertEqual(len(deps), len(set(deps)))
+    # Verify that we have the correct dependencies.
+    dep_ids = []
+    dep_ids += [(dep.remote, dep.change_id) for dep in deps
+                if dep.change_id is not None]
+    dep_ids += [(dep.remote, dep.gerrit_number) for dep in deps
+                if dep.gerrit_number is not None]
+    dep_ids += [(dep.remote, dep.sha1) for dep in deps
+                if dep.sha1 is not None]
+    for input_id in ids:
+      change_tuple = cros_patch.StripPrefix(input_id)
+      self.assertIn(change_tuple, dep_ids)
+
+    return patch
+
+  def testPaladinDependencies(self):
+    """Exercise CQ-DEPEND parsing: valid forms, dedup, and rejects."""
+    git1 = self._MakeRepo('git1', self.source)
+    cid1, cid2, cid3, cid4 = self.MakeChangeId(4)
+    # Verify it handles nonexistant CQ-DEPEND.
+    self._CheckPaladin(git1, cid1, [], '')
+    # Single key, single value.
+    self._CheckPaladin(git1, cid1, [cid2],
+                       'CQ-DEPEND=%s' % cid2)
+    # Single key, gerrit number.
+    self._CheckPaladin(git1, cid1, ['123'],
+                       'CQ-DEPEND=%s' % 123)
+    # Single key, gerrit number.
+    self._CheckPaladin(git1, cid1, ['123456'],
+                       'CQ-DEPEND=%s' % 123456)
+    # Single key, gerrit number; ensure it
+    # cuts off before a million changes (this
+    # is done to avoid collisions w/ sha1 when
+    # we're using shortened versions).
+    self.assertRaises(cros_patch.BrokenCQDepends,
+                      self._CheckPaladin, git1, cid1,
+                      ['123456789'], 'CQ-DEPEND=%s' % '123456789')
+    # Single key, gerrit number, internal.
+    self._CheckPaladin(git1, cid1, ['*123'],
+                       'CQ-DEPEND=%s' % '*123')
+    # Ensure SHA1's aren't allowed.
+    sha1 = '0' * 40
+    self.assertRaises(cros_patch.BrokenCQDepends,
+                      self._CheckPaladin, git1, cid1,
+                      [sha1], 'CQ-DEPEND=%s' % sha1)
+
+    # Single key, multiple values
+    self._CheckPaladin(git1, cid1, [cid2, '1223'],
+                       'CQ-DEPEND=%s %s' % (cid2, '1223'))
+    # Dumb comma behaviour
+    self._CheckPaladin(git1, cid1, [cid2, cid3],
+                       'CQ-DEPEND=%s, %s,' % (cid2, cid3))
+    # Multiple keys.
+    self._CheckPaladin(git1, cid1, [cid2, '*245', cid4],
+                       'CQ-DEPEND=%s, %s\nCQ-DEPEND=%s' % (cid2, '*245', cid4))
+
+    # Ensure it goes boom on invalid data.
+    self.assertRaises(cros_patch.BrokenCQDepends, self._CheckPaladin,
+                      git1, cid1, [], 'CQ-DEPEND=monkeys')
+    self.assertRaises(cros_patch.BrokenCQDepends, self._CheckPaladin,
+                      git1, cid1, [], 'CQ-DEPEND=%s monkeys' % (cid2,))
+    # Validate numeric is allowed.
+    self._CheckPaladin(git1, cid1, [cid2, '1'], 'CQ-DEPEND=1 %s' % cid2)
+    # Validate that it unique'ifies the results.
+    self._CheckPaladin(git1, cid1, ['1'], 'CQ-DEPEND=1 1')
+
+    # Invalid syntax
+    self.assertRaises(cros_patch.BrokenCQDepends, self._CheckPaladin,
+                      git1, cid1, [], 'CQ-DEPENDS=1')
+    self.assertRaises(cros_patch.BrokenCQDepends, self._CheckPaladin,
+                      git1, cid1, [], 'CQ_DEPEND=1')
+    self.assertRaises(cros_patch.BrokenCQDepends, self._CheckPaladin,
+                      git1, cid1, [], ' CQ-DEPEND=1')
+
+  def testChangeIdMetadata(self):
+    """Verify Change-Id is set in git metadata."""
+    git1, git2, _ = self._CommonGitSetup()
+    changeid = 'I%s' % ('1'.rjust(40, '0'))
+    patch = self.CommitChangeIdFile(git1, changeid=changeid, change_id=changeid,
+                                    raw_changeid_text='')
+    patch.change_id = changeid
+    patch.Fetch(git1)
+    self.assertIn('Change-Id: %s\n' % changeid, patch.commit_message)
+    patch = self.CommitChangeIdFile(git2, changeid=changeid, change_id=changeid)
+    patch.Fetch(git2)
+    self.assertEqual(patch.change_id, changeid)
+    self.assertIn('Change-Id: %s\n' % changeid, patch.commit_message)
+
+
+
+class TestGetOptionLinesFromCommitMessage(cros_test_lib.TestCase):
+  """Tests of GetOptionLinesFromCommitMessage."""
+
+  # Body contains no 'jabberwocky:' option line (the first line is the
+  # commit subject, which testNoOption shows is ignored).
+  _M1 = """jabberwocky: by Lewis Carroll
+
+'Twas brillig, and the slithy toves
+did gyre and gimble in the wabe.
+"""
+
+  # One option line with a value.
+  _M2 = """jabberwocky: by Lewis Carroll
+
+All mimsy were the borogroves,
+And the mome wraths outgrabe.
+jabberwocky: Charles Lutwidge Dodgson
+"""
+
+  # One option line with an empty value.
+  _M3 = """jabberwocky: by Lewis Carroll
+
+He took his vorpal sword in hand:
+Long time the manxome foe he sought
+jabberwocky:
+"""
+
+  # Multiple option lines.
+  _M4 = """the poem continues...
+
+jabberwocky: O frabjuous day!
+jabberwocky: Calloh! Callay!
+"""
+
+  def testNoMessage(self):
+    """An empty message yields None."""
+    o = cros_patch.GetOptionLinesFromCommitMessage('', 'jabberwocky:')
+    self.assertEqual(None, o)
+
+  def testNoOption(self):
+    """A message whose body lacks the option yields None."""
+    o = cros_patch.GetOptionLinesFromCommitMessage(self._M1, 'jabberwocky:')
+    self.assertEqual(None, o)
+
+  def testYesOption(self):
+    """A single option line yields a one-element list of its value."""
+    o = cros_patch.GetOptionLinesFromCommitMessage(self._M2, 'jabberwocky:')
+    self.assertEqual(['Charles Lutwidge Dodgson'], o)
+
+  def testEmptyOption(self):
+    """An option line with no value yields an empty list."""
+    o = cros_patch.GetOptionLinesFromCommitMessage(self._M3, 'jabberwocky:')
+    self.assertEqual([], o)
+
+  def testMultiOption(self):
+    """Multiple option lines yield all values, in order."""
+    o = cros_patch.GetOptionLinesFromCommitMessage(self._M4, 'jabberwocky:')
+    self.assertEqual(['O frabjuous day!', 'Calloh! Callay!'], o)
+
+
+class TestApplyAgainstManifest(GitRepoPatchTestCase,
+                               cros_test_lib.MockTestCase):
+  """Test applying a patch against a manifest"""
+
+  MANIFEST_TEMPLATE = """\
+<?xml version="1.0" encoding="UTF-8"?>
+<manifest>
+  <remote name="cros" />
+  <default revision="refs/heads/master" remote="cros" />
+  %(projects)s
+</manifest>
+"""
+
+  def _CommonRepoSetup(self, *projects):
+    """Write a .repo/manifest.xml describing |projects|; return basedir.
+
+    Args:
+      *projects: Dicts of <project> element attributes; 'path' values
+        are rewritten to be relative to the tempdir.
+    """
+    basedir = self.tempdir
+    repodir = os.path.join(basedir, '.repo')
+    manifest_file = os.path.join(repodir, 'manifest.xml')
+    proj_pieces = []
+    for project in projects:
+      proj_pieces.append('<project')
+      for key, val in project.items():
+        if key == 'path':
+          val = os.path.relpath(os.path.realpath(val),
+                                os.path.realpath(self.tempdir))
+        proj_pieces.append(' %s="%s"' % (key, val))
+      proj_pieces.append(' />\n  ')
+    proj_str = ''.join(proj_pieces)
+    content = self.MANIFEST_TEMPLATE % {'projects': proj_str}
+    os.mkdir(repodir)
+    osutils.WriteFile(manifest_file, content)
+    return basedir
+
+  def testApplyAgainstManifest(self):
+    """Apply a stack of patches against a manifest-pinned revision."""
+    git1, git2, _ = self._CommonGitSetup()
+
+    readme_text = 'Dummy README text.'
+    readme1 = self.CommitFile(git1, 'README', readme_text)
+    readme_text += ' Even more dummy README text.'
+    readme2 = self.CommitFile(git1, 'README', readme_text)
+    readme_text += ' Even more README text.'
+    readme3 = self.CommitFile(git1, 'README', readme_text)
+
+    git1_proj = {
+        'path': git1,
+        'name': 'chromiumos/chromite',
+        'revision': str(readme1.sha1),
+        'upstream': 'refs/heads/master',
+    }
+    git2_proj = {
+        'path': git2,
+        'name': 'git2',
+    }
+    basedir = self._CommonRepoSetup(git1_proj, git2_proj)
+
+    # Avoid hitting a real manifest checkout for branch detection.
+    self.PatchObject(git.ManifestCheckout, '_GetManifestsBranch',
+                     return_value=None)
+    manifest = git.ManifestCheckout(basedir)
+
+    readme2.ApplyAgainstManifest(manifest)
+    readme3.ApplyAgainstManifest(manifest)
+
+    # Verify that both readme2 and readme3 are on the patch branch.
+    cmd = ['git', 'log', '--format=%T',
+           '%s..%s' % (readme1.sha1, constants.PATCH_BRANCH)]
+    trees = self._run(cmd, git1).splitlines()
+    self.assertEqual(trees, [str(readme3.tree_hash), str(readme2.tree_hash)])
+
+
+class TestLocalPatchGit(GitRepoPatchTestCase):
+  """Test Local patch handling."""
+
+  patch_kls = cros_patch.LocalPatch
+
+  def setUp(self):
+    """Define a sourceroot path inside the tempdir."""
+    self.sourceroot = os.path.join(self.tempdir, 'sourceroot')
+
+  def _MkPatch(self, source, sha1, ref='refs/heads/master', **kwargs):
+    """Construct a LocalPatch for |sha1| in repo |source|."""
+    remote = kwargs.pop('remote', site_config.params.EXTERNAL_REMOTE)
+    return self.patch_kls(source, 'chromiumos/chromite', ref,
+                          '%s/master' % remote, remote, sha1, **kwargs)
+
+  def testUpload(self):
+    """Verify Upload pushes the patch (plain and as a carbon copy)."""
+    def ProjectDirMock(_sourceroot):
+      return git1
+
+    git1, git2, patch = self._CommonGitSetup()
+
+    git2_sha1 = self._GetSha1(git2, 'HEAD')
+
+    patch.ProjectDir = ProjectDirMock
+    # First suppress carbon copy behaviour so we verify pushing plain works.
+    sha1 = patch.sha1
+    patch._GetCarbonCopy = lambda: sha1  # pylint: disable=protected-access
+    patch.Upload(git2, 'refs/testing/test1')
+    self.assertEqual(self._GetSha1(git2, 'refs/testing/test1'),
+                     patch.sha1)
+
+    # Enable CarbonCopy behaviour; verify it lands a different
+    # sha1.  Additionally verify it didn't corrupt the patch's sha1 locally.
+    del patch._GetCarbonCopy
+    patch.Upload(git2, 'refs/testing/test2')
+    self.assertNotEqual(self._GetSha1(git2, 'refs/testing/test2'),
+                        patch.sha1)
+    self.assertEqual(patch.sha1, sha1)
+    # Ensure the carbon creation didn't damage the target repo.
+    self.assertEqual(self._GetSha1(git1, 'HEAD'), sha1)
+
+    # Ensure we didn't damage the target repo's state at all.
+    self.assertEqual(git2_sha1, self._GetSha1(git2, 'HEAD'))
+    # Ensure the content is the same.
+    base = ['git', 'show']
+    self.assertEqual(
+        self._run(base + ['refs/testing/test1:monkeys'], git2),
+        self._run(base + ['refs/testing/test2:monkeys'], git2))
+    base = ['git', 'log', '--format=%B', '-n1']
+    self.assertEqual(
+        self._run(base + ['refs/testing/test1'], git2),
+        self._run(base + ['refs/testing/test2'], git2))
+
+
+class UploadedLocalPatchTestCase(GitRepoPatchTestCase):
+  """Test uploading of local git patches."""
+
+  PROJECT = 'chromiumos/chromite'
+  ORIGINAL_BRANCH = 'original_branch'
+  # Fake 40-character sha1: 'ffffffff' right-padded with zeros.
+  ORIGINAL_SHA1 = 'ffffffff'.ljust(40, '0')
+
+  patch_kls = cros_patch.UploadedLocalPatch
+
+  def _MkPatch(self, source, sha1, ref='refs/heads/master', **kwargs):
+    """Construct an UploadedLocalPatch whose carbon copy is |sha1|."""
+    return self.patch_kls(source, self.PROJECT, ref,
+                          '%s/master' % site_config.params.EXTERNAL_REMOTE,
+                          self.ORIGINAL_BRANCH,
+                          kwargs.pop('original_sha1', self.ORIGINAL_SHA1),
+                          kwargs.pop('remote',
+                                     site_config.params.EXTERNAL_REMOTE),
+                          carbon_copy_sha1=sha1, **kwargs)
+
+
+class TestUploadedLocalPatch(UploadedLocalPatchTestCase):
+  """Test uploading of local git patches."""
+
+  def testStringRepresentation(self):
+    """str(patch) should contain the project, branch and truncated sha1."""
+    _, _, patch = self._CommonGitSetup()
+    str_rep = str(patch).split(':')
+    for element in [self.PROJECT, self.ORIGINAL_BRANCH, self.ORIGINAL_SHA1[:8]]:
+      self.assertTrue(element in str_rep,
+                      msg="Couldn't find %s in %s" % (element, str_rep))
+
+
+# pylint: disable=protected-access
+class TestGerritPatch(TestGitRepoPatch):
+  """Test Gerrit patch handling."""
+
+  has_native_change_id = True
+
+  class patch_kls(cros_patch.GerritPatch):
+    """Test helper class to suppress pointing to actual gerrit."""
+    # Suppress the behaviour pointing the project url at actual gerrit,
+    # instead slaving it back to a local repo for tests.
+    def __init__(self, *args, **kwargs):
+      cros_patch.GerritPatch.__init__(self, *args, **kwargs)
+      assert hasattr(self, 'patch_dict')
+      self.project_url = self.patch_dict['_unittest_url_bypass']
+
+  @property
+  def test_json(self):
+    return copy.deepcopy(FAKE_PATCH_JSON)
+
+  def _MkPatch(self, source, sha1, ref='refs/heads/master', **kwargs):
+    json = self.test_json
+    remote = kwargs.pop('remote', site_config.params.EXTERNAL_REMOTE)
+    url_prefix = kwargs.pop('url_prefix',
+                            site_config.params.EXTERNAL_GERRIT_URL)
+    suppress_branch = kwargs.pop('suppress_branch', False)
+    change_id = kwargs.pop('ChangeId', None)
+    if change_id is None:
+      change_id = self.MakeChangeId()
+    json.update(kwargs)
+    change_num, patch_num = _GetNumber(), _GetNumber()
+    # Note we intentionally use a gerrit like refspec here; we want to
+    # ensure that none of our common code pathways puke on a non head/tag.
+    refspec = gerrit.GetChangeRef(change_num + 1000, patch_num)
+    json['currentPatchSet'].update(
+        dict(number=patch_num, ref=refspec, revision=sha1))
+    json['branch'] = os.path.basename(ref)
+    json['_unittest_url_bypass'] = source
+    json['id'] = change_id
+
+    obj = self.patch_kls(json.copy(), remote, url_prefix)
+    self.assertEqual(obj.patch_dict, json)
+    self.assertEqual(obj.remote, remote)
+    self.assertEqual(obj.url_prefix, url_prefix)
+    self.assertEqual(obj.project, json['project'])
+    self.assertEqual(obj.ref, refspec)
+    self.assertEqual(obj.change_id, change_id)
+    self.assertEqual(obj.id, '%s%s~%s~%s' % (
+        site_config.params.CHANGE_PREFIX[remote], json['project'],
+        json['branch'], change_id))
+
+    # Now make the fetching actually work, if desired.
+    if not suppress_branch:
+      # Note that a push is needed here, rather than a branch; branch
+      # will just make it under refs/heads, we want it literally in
+      # refs/changes/
+      self._run(['git', 'push', source, '%s:%s' % (sha1, refspec)], source)
+    return obj
+
+  def testApprovalTimestamp(self):
+    """Test that the approval timestamp is correctly extracted from JSON."""
+    repo = self._MakeRepo('git', self.source)
+    for approvals, expected in [(None, 0), ([], 0), ([1], 1), ([1, 3, 2], 3)]:
+      currentPatchSet = copy.deepcopy(FAKE_PATCH_JSON['currentPatchSet'])
+      if approvals is not None:
+        currentPatchSet['approvals'] = [{'grantedOn': x} for x in approvals]
+      patch = self._MkPatch(repo, self._GetSha1(repo, self.DEFAULT_TRACKING),
+                            currentPatchSet=currentPatchSet)
+      msg = 'Expected %r, but got %r (approvals=%r)' % (
+          expected, patch.approval_timestamp, approvals)
+      self.assertEqual(patch.approval_timestamp, expected, msg)
+
+  def _assertGerritDependencies(self,
+                                remote=site_config.params.EXTERNAL_REMOTE):
+    convert = str
+    if remote == site_config.params.INTERNAL_REMOTE:
+      convert = lambda val: '*%s' % (val,)
+    git1 = self._MakeRepo('git1', self.source, remote=remote)
+    patch = self._MkPatch(git1, self._GetSha1(git1, 'HEAD'), remote=remote)
+    cid1, cid2 = '1', '2'
+
+    # Test cases with no dependencies, 1 dependency, and 2 dependencies.
+    self.assertEqual(patch.GerritDependencies(), [])
+    patch.patch_dict['dependsOn'] = [{'number': cid1}]
+    self.assertEqual(
+        [cros_patch.AddPrefix(x, x.gerrit_number)
+         for x in patch.GerritDependencies()],
+        [convert(cid1)])
+    patch.patch_dict['dependsOn'].append({'number': cid2})
+    self.assertEqual(
+        [cros_patch.AddPrefix(x, x.gerrit_number)
+         for x in patch.GerritDependencies()],
+        [convert(cid1), convert(cid2)])
+
+  def testExternalGerritDependencies(self):
+    self._assertGerritDependencies()
+
+  def testInternalGerritDependencies(self):
+    self._assertGerritDependencies(site_config.params.INTERNAL_REMOTE)
+
+  def testReviewedOnMetadata(self):
+    """Verify Change-Id and Reviewed-On are set in git metadata."""
+    git1, _, patch = self._CommonGitSetup()
+    patch.Apply(git1, self.DEFAULT_TRACKING)
+    reviewed_on = '/'.join([site_config.params.EXTERNAL_GERRIT_URL,
+                            patch.gerrit_number])
+    self.assertIn('Reviewed-on: %s\n' % reviewed_on, patch.commit_message)
+
+  def _MakeFooters(self):
+    return (
+        (),
+        (('Footer-1', 'foo'),),
+        (('Change-id', '42'),),
+        (('Footer-1', 'foo'), ('Change-id', '42')),)
+
+  def _MakeCommitMessages(self):
+    headers = (
+        'A standard commit message header',
+        '',
+        'Footer-1: foo',
+        'Change-id: 42')
+
+    bodies = (
+        '',
+        '\n',
+        'Lots of comments\n about the commit\n' * 100)
+
+    for header, body, preexisting in itertools.product(headers,
+                                                       bodies,
+                                                       self._MakeFooters()):
+      yield '\n'.join((header,
+                       body,
+                       '\n'.join('%s: %s' for tag, ident in preexisting)))
+
+  def testAddFooters(self):
+    repo = self._MakeRepo('git', self.source)
+    patch = self._MkPatch(repo, self._GetSha1(repo, 'HEAD'))
+    approval = {'type': 'VRIF', 'value': '1', 'grantedOn': 1391733002}
+
+    for msg in self._MakeCommitMessages():
+      for footers in self._MakeFooters():
+        ctx = contextlib.nested(
+            mock.patch('chromite.lib.patch.FooterForApproval',
+                       new=mock.Mock(side_effect=itertools.cycle(footers))),
+            mock.patch.object(patch, '_approvals',
+                              new=[approval] * len(footers)))
+
+        with ctx:
+          patch._commit_message = msg
+
+          # Idempotence
+          self.assertEqual(patch._AddFooters(msg),
+                           patch._AddFooters(patch._AddFooters(msg)))
+
+          # there may be pre-existing footers.  This asserts that we
+          # can Get all of the footers after we Set them.
+          self.assertFalse(bool(
+              set(footers) -
+              set(patch._GetFooters(patch._AddFooters(msg)))))
+
+          if set(footers) - set(patch._GetFooters(msg)):
+            self.assertNotEqual(msg, patch._AddFooters(msg))
+
+
+
+class PrepareRemotePatchesTest(cros_test_lib.TestCase):
+  """Test preparing remote patches."""
+
+  def MkRemote(self,
+               project='my/project', original_branch='my-local',
+               ref='refs/tryjobs/elmer/patches', tracking_branch='master',
+               internal=False):
+    """Return a colon-delimited remote patch spec string."""
+
+    # Last element is the INTERNAL_/EXTERNAL_PATCH_TAG constant.
+    l = [project, original_branch, ref, tracking_branch,
+         getattr(constants, ('%s_PATCH_TAG' %
+                             ('INTERNAL' if internal else 'EXTERNAL')))]
+    return ':'.join(l)
+
+  def assertRemote(self, patch, project='my/project',
+                   original_branch='my-local',
+                   ref='refs/tryjobs/elmer/patches', tracking_branch='master',
+                   internal=False):
+    """Assert |patch| fields match the given expected values."""
+    self.assertEqual(patch.project, project)
+    self.assertEqual(patch.original_branch, original_branch)
+    self.assertEqual(patch.ref, ref)
+    self.assertEqual(patch.tracking_branch, tracking_branch)
+    self.assertEqual(patch.internal, internal)
+
+  def test(self):
+    """Exercise parsing of single/multiple specs and input validation."""
+    # Check handling of a single patch...
+    patches = cros_patch.PrepareRemotePatches([self.MkRemote()])
+    self.assertEqual(len(patches), 1)
+    self.assertRemote(patches[0])
+
+    # Check handling of a multiple...
+    patches = cros_patch.PrepareRemotePatches(
+        [self.MkRemote(), self.MkRemote(project='foon')])
+    self.assertEqual(len(patches), 2)
+    self.assertRemote(patches[0])
+    self.assertRemote(patches[1], project='foon')
+
+    # Ensure basic validation occurs:
+    chunks = self.MkRemote().split(':')
+    self.assertRaises(ValueError, cros_patch.PrepareRemotePatches,
+                      ':'.join(chunks[:-1]))
+    self.assertRaises(ValueError, cros_patch.PrepareRemotePatches,
+                      ':'.join(chunks[:-1] + ['monkeys']))
+    self.assertRaises(ValueError, cros_patch.PrepareRemotePatches,
+                      ':'.join(chunks + [':']))
+
+
+class PrepareLocalPatchesTests(cros_build_lib_unittest.RunCommandTestCase):
+  """Test preparing local patches."""
+
+  def setUp(self):
+    self.path, self.project, self.branch = 'mydir', 'my/project', 'mybranch'
+    self.tracking_branch = 'kernel'
+    self.patches = ['%s:%s' % (self.project, self.branch)]
+    # Fake manifest whose FindCheckouts always yields one matching checkout.
+    self.manifest = mock.MagicMock()
+    attrs = dict(tracking_branch=self.tracking_branch,
+                 local_path=self.path,
+                 remote='cros')
+    checkout = git.ProjectCheckout(attrs)
+    self.PatchObject(
+        self.manifest, 'FindCheckouts', return_value=[checkout]
+    )
+
+  def PrepareLocalPatches(self, output):
+    """Check the returned GitRepoPatchInfo against golden values."""
+    # |output| simulates the stdout of the underlying git invocations.
+    output_obj = mock.MagicMock()
+    output_obj.output = output
+    self.PatchObject(cros_patch.LocalPatch, 'Fetch', return_value=output_obj)
+    self.PatchObject(git, 'RunGit', return_value=output_obj)
+    patch_info = cros_patch.PrepareLocalPatches(self.manifest, self.patches)[0]
+    # NOTE(review): assertEquals is a deprecated alias of assertEqual.
+    self.assertEquals(patch_info.project, self.project)
+    self.assertEquals(patch_info.ref, self.branch)
+    self.assertEquals(patch_info.tracking_branch, self.tracking_branch)
+
+  def testBranchSpecifiedSuccessRun(self):
+    """Test success with branch specified by user."""
+    self.PrepareLocalPatches('12345'.rjust(40, '0'))
+
+  def testBranchSpecifiedNoChanges(self):
+    """Test when no changes on the branch specified by user."""
+    self.assertRaises(SystemExit, self.PrepareLocalPatches, '')
+
+
+class TestFormatting(cros_test_lib.TestCase):
+  """Test formatting of output."""
+
+  VALID_CHANGE_ID = 'I47ea30385af60ae4cc2acc5d1a283a46423bc6e1'
+
+  def _assertResult(self, functor, value, expected=None, raises=False,
+                    **kwargs):
+    """Assert functor(value) == expected, or raises ValueError if |raises|."""
+    if raises:
+      self.assertRaises2(ValueError, functor, value,
+                         msg='%s(%r) did not throw a ValueError'
+                         % (functor.__name__, value), **kwargs)
+    else:
+      self.assertEqual(functor(value, **kwargs), expected,
+                       msg='failed: %s(%r) != %r'
+                       % (functor.__name__, value, expected))
+
+  def _assertBad(self, functor, values, **kwargs):
+    """Assert functor raises ValueError for every value in |values|."""
+    for value in values:
+      self._assertResult(functor, value, raises=True, **kwargs)
+
+  def _assertGood(self, functor, values, **kwargs):
+    """Assert functor(value) == expected for each (value, expected) pair."""
+    for value, expected in values:
+      self._assertResult(functor, value, expected, **kwargs)
+
+  def testGerritNumber(self):
+    """Tests that we can parse a Gerrit number."""
+    self._assertGood(cros_patch.ParseGerritNumber,
+                     [('12345',) * 2, ('12',) * 2, ('123',) * 2])
+
+    self._assertBad(
+        cros_patch.ParseGerritNumber,
+        ['is', 'i1325', '01234567', '012345a', '**12345', '+123', '/0123'],
+        error_ok=False)
+
+  def testChangeID(self):
+    """Tests that we can parse a change-ID."""
+    self._assertGood(cros_patch.ParseChangeID, [(self.VALID_CHANGE_ID,) * 2])
+
+    # Change-IDs too short/long, with unexpected characters in it.
+    self._assertBad(
+        cros_patch.ParseChangeID,
+        ['is', '**i1325', 'i134'.ljust(41, '0'), 'I1234+'.ljust(41, '0'),
+         'I123'.ljust(42, '0')],
+        error_ok=False)
+
+  def testSHA1(self):
+    """Tests that we can parse a SHA1 hash."""
+    self._assertGood(cros_patch.ParseSHA1,
+                     [('1' * 40,) * 2,
+                      ('a' * 40,) * 2,
+                      ('1a7e034'.ljust(40, '0'),) * 2])
+
+    self._assertBad(
+        cros_patch.ParseSHA1,
+        ['0abcg', 'Z', '**a', '+123', '1234ab' * 10],
+        error_ok=False)
+
+  def testFullChangeID(self):
+    """Tests that we can parse a full change-ID."""
+    change_id = self.VALID_CHANGE_ID
+    self._assertGood(
+        cros_patch.ParseFullChangeID,
+        (('foo~bar~%s' % change_id,
+          cros_patch.FullChangeId('foo', 'bar', change_id)),
+         ('foo/bar/baz~refs/heads/_my-branch_~%s' % change_id,
+          cros_patch.FullChangeId('foo/bar/baz', 'refs/heads/_my-branch_',
+                                  change_id))))
+
+  def testInvalidFullChangeID(self):
+    """Should throw an error on bad inputs."""
+    change_id = self.VALID_CHANGE_ID
+    self._assertBad(
+        cros_patch.ParseFullChangeID,
+        ['foo', 'foo~bar', 'foo~bar~baz', 'foo~refs/bar~%s' % change_id],
+        error_ok=False)
+
+  def testParsePatchDeps(self):
+    """Tests that we can parse the dependency specified by the user."""
+    change_id = self.VALID_CHANGE_ID
+    vals = ['CL:12345', 'project~branch~%s' % change_id, change_id,
+            change_id[1:]]
+    for val in vals:
+      self.assertTrue(cros_patch.ParsePatchDep(val) is not None)
+
+    self._assertBad(cros_patch.ParsePatchDep,
+                    ['145462399', 'I47ea3', 'i47ea3'.ljust(41, '0')])
+
+
+class MockPatchBase(cros_test_lib.MockTestCase):
+  """Base test case with helper methods to generate mock patches."""
+
+  def setUp(self):
+    self.patch_mock = None
+    # Bound .next of an itertools counter: each call yields 1, 2, 3, ...
+    self._patch_counter = (itertools.count(1)).next
+
+  def MockPatch(self, change_id=None, patch_number=None, is_merged=False,
+                project='chromiumos/chromite',
+                remote=site_config.params.EXTERNAL_REMOTE,
+                tracking_branch='refs/heads/master', is_draft=False,
+                approvals=()):
+    """Helper function to create mock GerritPatch objects."""
+    if change_id is None:
+      change_id = self._patch_counter()
+    gerrit_number = str(change_id)
+    # Encode the integer as a zero-padded hex Change-Id; rstrip('L') drops
+    # the Python 2 long-literal suffix hex() can append.
+    change_id = hex(change_id)[2:].rstrip('L').lower()
+    change_id = 'I%s' % change_id.rjust(40, '0')
+    sha1 = hex(_GetNumber())[2:].rstrip('L').lower().rjust(40, '0')
+    patch_number = (patch_number if patch_number is not None else _GetNumber())
+    fake_url = 'http://foo/bar'
+    # Default to a fully-approved patch (verified, CR+2, CQ+1).
+    if not approvals:
+      approvals = [{'type': 'VRIF', 'value': '1', 'grantedOn': 1391733002},
+                   {'type': 'CRVW', 'value': '2', 'grantedOn': 1391733002},
+                   {'type': 'COMR', 'value': '1', 'grantedOn': 1391733002}]
+
+    current_patch_set = {
+        'number': patch_number,
+        'revision': sha1,
+        'draft': is_draft,
+        'approvals': approvals,
+    }
+    patch_dict = {
+        'currentPatchSet': current_patch_set,
+        'id': change_id,
+        'number': gerrit_number,
+        'project': project,
+        'branch': tracking_branch,
+        'owner': {'email': 'elmer.fudd@chromium.org'},
+        'remote': remote,
+        'status': 'MERGED' if is_merged else 'NEW',
+        'url': '%s/%s' % (fake_url, change_id),
+    }
+
+    patch = cros_patch.GerritPatch(patch_dict, remote, fake_url)
+    patch.pass_count = 0
+    patch.fail_count = 1
+    patch.total_fail_count = 3
+    return patch
+
+  def GetPatches(self, how_many=1, always_use_list=False, **kwargs):
+    """Get a sequential list of patches.
+
+    Args:
+      how_many: How many patches to return.
+      always_use_list: Whether to use a list for a single item list.
+      **kwargs: Keyword arguments for self.MockPatch.
+    """
+    patches = [self.MockPatch(**kwargs) for _ in xrange(how_many)]
+    if self.patch_mock:
+      # Chain each patch onto all of its predecessors as Gerrit deps.
+      for i, patch in enumerate(patches):
+        self.patch_mock.SetGerritDependencies(patch, patches[:i + 1])
+    if how_many == 1 and not always_use_list:
+      return patches[0]
+    return patches
diff --git a/lib/path_util.py b/lib/path_util.py
new file mode 100644
index 0000000..0990238
--- /dev/null
+++ b/lib/path_util.py
@@ -0,0 +1,343 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Handle path inference and translation."""
+
+from __future__ import print_function
+
+import collections
+import os
+import tempfile
+
+from chromite.cbuildbot import constants
+from chromite.lib import bootstrap_lib
+from chromite.lib import cros_build_lib
+from chromite.lib import git
+from chromite.lib import osutils
+
+
+# Cache directory names created under the respective checkout roots.
+GENERAL_CACHE_DIR = '.cache'
+CHROME_CACHE_DIR = '.cros_cache'
+
+# Checkout types reported by DetermineCheckout().
+CHECKOUT_TYPE_UNKNOWN = 'unknown'
+CHECKOUT_TYPE_GCLIENT = 'gclient'
+CHECKOUT_TYPE_REPO = 'repo'
+CHECKOUT_TYPE_SDK_BOOTSTRAP = 'bootstrap'
+
+# type: one of the CHECKOUT_TYPE_* values; root: checkout root path (or
+# None); chrome_src_dir: Chrome src/ path for gclient checkouts, else None.
+CheckoutInfo = collections.namedtuple(
+    'CheckoutInfo', ['type', 'root', 'chrome_src_dir'])
+
+
+class ChrootPathResolver(object):
+  """Perform path resolution to/from the chroot.
+
+  Args:
+    source_path: Value to override default source root inference.
+    source_from_path_repo: Whether to infer the source root from the converted
+      path's repo parent during inbound translation; overrides |source_path|.
+  """
+
+  # TODO(garnold) We currently infer the source root based on the path's own
+  # encapsulating repository. This is a heuristic catering to paths are being
+  # translated to be used in a chroot that's not associated with the currently
+  # executing code (for example, cbuildbot run on a build root or a foreign
+  # tree checkout). This approach might result in arbitrary repo-contained
+  # paths being translated to invalid chroot paths where they actually should
+  # not, and other valid source paths failing to translate because they are not
+  # repo-contained. Eventually we'll want to make this behavior explicit, by
+  # either passing a source_root value, or requesting to infer it from the path
+  # (source_from_path_repo=True), but otherwise defaulting to the executing
+  # code's source root in the normal case. When that happens, we'll be
+  # switching source_from_path_repo to False by default. See chromium:485746.
+
+  def __init__(self, source_path=None, source_from_path_repo=True):
+    self._inside_chroot = cros_build_lib.IsInsideChroot()
+    self._source_path = (constants.SOURCE_ROOT if source_path is None
+                         else source_path)
+    self._source_from_path_repo = source_from_path_repo
+
+    # The following are only needed if outside the chroot.
+    if self._inside_chroot:
+      self._chroot_path = None
+      self._chroot_to_host_roots = None
+    else:
+      self._chroot_path = self._GetSourcePathChroot(self._source_path)
+
+      # Initialize mapping of known root bind mounts.
+      # Values may be callables (resolved lazily by _TranslatePath).
+      self._chroot_to_host_roots = (
+          (constants.CHROOT_SOURCE_ROOT, self._source_path),
+          (constants.CHROOT_CACHE_ROOT, self._GetCachePath),
+      )
+
+  @classmethod
+  @cros_build_lib.MemoizedSingleCall
+  def _GetCachePath(cls):
+    """Returns the cache directory."""
+    return os.path.realpath(GetCacheDir())
+
+  def _GetSourcePathChroot(self, source_path):
+    """Returns path to the chroot directory of a given source root."""
+    if source_path is None:
+      return None
+    return os.path.join(source_path, constants.DEFAULT_CHROOT_DIR)
+
+  def _TranslatePath(self, path, src_root, dst_root_input):
+    """If |path| starts with |src_root|, replace it using |dst_root_input|.
+
+    Args:
+      path: An absolute path we want to convert to a destination equivalent.
+      src_root: The root that path needs to be contained in.
+      dst_root_input: The root we want to relocate the relative path into, or a
+        function returning this value.
+
+    Returns:
+      A translated path, or None if |src_root| is not a prefix of |path|.
+
+    Raises:
+      ValueError: If |src_root| is a prefix but |dst_root_input| yields None,
+        which means we don't have sufficient information to do the translation.
+    """
+    # os.path.join(src_root, '') appends a trailing separator, so 'src_rootX'
+    # does not falsely match 'src_root'; an exact match is handled separately.
+    if not path.startswith(os.path.join(src_root, '')) and path != src_root:
+      return None
+    dst_root = dst_root_input() if callable(dst_root_input) else dst_root_input
+    if dst_root is None:
+      raise ValueError('No target root to translate path to')
+    return os.path.join(dst_root, path[len(src_root):].lstrip(os.path.sep))
+
+  def _GetChrootPath(self, path):
+    """Translates a fully-expanded host |path| into a chroot equivalent.
+
+    This checks path prefixes in order from the most to least "contained": the
+    chroot itself, then the cache directory, and finally the source tree. The
+    idea is to return the shortest possible chroot equivalent.
+
+    Args:
+      path: A host path to translate.
+
+    Returns:
+      An equivalent chroot path.
+
+    Raises:
+      ValueError: If |path| is not reachable from the chroot.
+    """
+    new_path = None
+
+    # Preliminary: compute the actual source and chroot paths to use. These are
+    # generally the precomputed values, unless we're inferring the source root
+    # from the path itself.
+    source_path = self._source_path
+    chroot_path = self._chroot_path
+    if self._source_from_path_repo:
+      path_repo_dir = git.FindRepoDir(path)
+      if path_repo_dir is not None:
+        source_path = os.path.abspath(os.path.join(path_repo_dir, '..'))
+      chroot_path = self._GetSourcePathChroot(source_path)
+
+    # First, check if the path happens to be in the chroot already.
+    if chroot_path is not None:
+      new_path = self._TranslatePath(path, chroot_path, '/')
+
+    # Second, check the cache directory.
+    if new_path is None:
+      new_path = self._TranslatePath(path, self._GetCachePath(),
+                                     constants.CHROOT_CACHE_ROOT)
+
+    # Finally, check the current SDK checkout tree.
+    if new_path is None and source_path is not None:
+      new_path = self._TranslatePath(path, source_path,
+                                     constants.CHROOT_SOURCE_ROOT)
+
+    if new_path is None:
+      raise ValueError('Path is not reachable from the chroot')
+
+    return new_path
+
+  def _GetHostPath(self, path):
+    """Translates a fully-expanded chroot |path| into a host equivalent.
+
+    We first attempt translation of known roots (source). If any is successful,
+    we check whether the result happens to point back to the chroot, in which
+    case we trim the chroot path prefix and recurse. If neither was successful,
+    just prepend the chroot path.
+
+    Args:
+      path: A chroot path to translate.
+
+    Returns:
+      An equivalent host path.
+
+    Raises:
+      ValueError: If |path| could not be mapped to a proper host destination.
+    """
+    new_path = None
+
+    # Attempt resolution of known roots.
+    for src_root, dst_root in self._chroot_to_host_roots:
+      new_path = self._TranslatePath(path, src_root, dst_root)
+      if new_path is not None:
+        break
+
+    if new_path is None:
+      # If no known root was identified, just prepend the chroot path.
+      new_path = self._TranslatePath(path, '', self._chroot_path)
+    else:
+      # Check whether the resolved path happens to point back at the chroot, in
+      # which case trim the chroot path prefix and continue recursively.
+      path = self._TranslatePath(new_path, self._chroot_path, '/')
+      if path is not None:
+        new_path = self._GetHostPath(path)
+
+    return new_path
+
+  def _ConvertPath(self, path, get_converted_path):
+    """Expands |path|; if outside the chroot, applies |get_converted_path|.
+
+    Args:
+      path: A path to be converted.
+      get_converted_path: A conversion function.
+
+    Returns:
+      An expanded and (if needed) converted path.
+
+    Raises:
+      ValueError: If path conversion failed.
+    """
+    # NOTE: We do not want to expand wrapper script symlinks because this
+    # prevents them from working. Therefore, if the path points to a file we
+    # only resolve its dirname but leave the basename intact. This means our
+    # path resolution might return unusable results for file symlinks that
+    # point outside the reachable space. These are edge cases in which the user
+    # is expected to resolve the realpath themselves in advance.
+    expanded_path = os.path.expanduser(path)
+    if os.path.isfile(expanded_path):
+      expanded_path = os.path.join(
+          os.path.realpath(os.path.dirname(expanded_path)),
+          os.path.basename(expanded_path))
+    else:
+      expanded_path = os.path.realpath(expanded_path)
+
+    if self._inside_chroot:
+      return expanded_path
+
+    try:
+      return get_converted_path(expanded_path)
+    except ValueError as e:
+      # Re-raise with the original (pre-expansion) path for clearer errors.
+      raise ValueError('%s: %s' % (e, path))
+
+  def ToChroot(self, path):
+    """Resolves current environment |path| for use in the chroot."""
+    return self._ConvertPath(path, self._GetChrootPath)
+
+  def FromChroot(self, path):
+    """Resolves chroot |path| for use in the current environment."""
+    return self._ConvertPath(path, self._GetHostPath)
+
+
+def _IsSdkBootstrapCheckout(path):
+  """Return True if |path| is an SDK bootstrap.
+
+  A bootstrap is a lone git checkout of chromite. It cannot be managed by repo.
+  Underneath this bootstrap chromite, there are several SDK checkouts, each
+  managed by repo.
+
+  Args:
+    path: Directory to examine.
+
+  Returns:
+    True iff |path| is a chromite git checkout not managed by repo.
+  """
+  submodule_git = os.path.join(path, '.git')
+  if not git.IsSubmoduleCheckoutRoot(submodule_git, 'origin',
+                                     constants.CHROMITE_URL):
+    # Not a git checkout of chromite.
+    return False
+
+  # This could be an SDK under sdk_checkouts or the parent bootstrap.
+  # It'll be an SDK checkout if it has a parent ".repo".
+  if git.FindRepoDir(path):
+    # It is managed by repo, therefore it is a child SDK checkout.
+    return False
+
+  return True
+
+
+def DetermineCheckout(cwd):
+  """Gather information on the checkout we are in.
+
+  There are several checkout types, as defined by CHECKOUT_TYPE_XXX variables.
+  This function determines what checkout type |cwd| is in, for example, if |cwd|
+  belongs to a `repo` checkout.
+
+  There is a special case when |cwd| is a child SDK checkout of a bootstrap
+  chromite (e.g. something under chromite/sdk_checkouts/xxx.yyy.zzz/). This
+  case should report that |cwd| belongs to a bootstrap checkout instead of the
+  `repo` checkout of the "xxx.yyy.zzz" child SDK.
+
+  Args:
+    cwd: Directory whose enclosing checkout should be identified.
+
+  Returns:
+    A CheckoutInfo object with these attributes:
+      type: The type of checkout.  Valid values are CHECKOUT_TYPE_*.
+      root: The root of the checkout.
+      chrome_src_dir: If the checkout is a Chrome checkout, the path to the
+        Chrome src/ directory.
+  """
+  checkout_type = CHECKOUT_TYPE_UNKNOWN
+  root, path = None, None
+
+  # Check for SDK bootstrap first because it goes top to bottom.
+  # If we do it bottom to top, we'll hit chromite/sdk_checkouts/*/.repo first
+  # and will wrongly conclude that this is a repo checkout. So we go top down
+  # to visit chromite/ first.
+  for path in osutils.IteratePaths(cwd):
+    if _IsSdkBootstrapCheckout(path):
+      checkout_type = CHECKOUT_TYPE_SDK_BOOTSTRAP
+      break
+  else:
+    # Not a bootstrap: walk bottom-up looking for gclient or repo markers.
+    for path in osutils.IteratePathParents(cwd):
+      gclient_file = os.path.join(path, '.gclient')
+      if os.path.exists(gclient_file):
+        checkout_type = CHECKOUT_TYPE_GCLIENT
+        break
+      repo_dir = os.path.join(path, '.repo')
+      if os.path.isdir(repo_dir):
+        checkout_type = CHECKOUT_TYPE_REPO
+        break
+
+  if checkout_type != CHECKOUT_TYPE_UNKNOWN:
+    root = path
+
+  # Determine the chrome src directory.
+  chrome_src_dir = None
+  if checkout_type == CHECKOUT_TYPE_GCLIENT:
+    chrome_src_dir = os.path.join(root, 'src')
+
+  return CheckoutInfo(checkout_type, root, chrome_src_dir)
+
+
+def FindCacheDir():
+  """Returns the cache directory location based on the checkout type."""
+  cwd = os.getcwd()
+  checkout = DetermineCheckout(cwd)
+  path = None
+  if checkout.type == CHECKOUT_TYPE_REPO:
+    path = os.path.join(checkout.root, GENERAL_CACHE_DIR)
+  elif checkout.type == CHECKOUT_TYPE_SDK_BOOTSTRAP:
+    path = os.path.join(checkout.root, bootstrap_lib.SDK_CHECKOUTS,
+                        GENERAL_CACHE_DIR)
+  elif checkout.type == CHECKOUT_TYPE_GCLIENT:
+    path = os.path.join(checkout.root, CHROME_CACHE_DIR)
+  elif checkout.type == CHECKOUT_TYPE_UNKNOWN:
+    # No recognized checkout: fall back to a system temp location.
+    path = os.path.join(tempfile.gettempdir(), 'chromeos-cache')
+  else:
+    raise AssertionError('Unexpected type %s' % checkout.type)
+
+  return path
+
+
+def GetCacheDir():
+  """Returns the current cache dir."""
+  # The environment variable overrides checkout-based inference.
+  return os.environ.get(constants.SHARED_CACHE_ENVVAR, FindCacheDir())
+
+
+def ToChrootPath(path):
+  """Resolves current environment |path| for use in the chroot."""
+  return ChrootPathResolver().ToChroot(path)
+
+
+def FromChrootPath(path):
+  """Resolves chroot |path| for use in the current environment."""
+  return ChrootPathResolver().FromChroot(path)
diff --git a/lib/path_util_unittest b/lib/path_util_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/path_util_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/path_util_unittest.py b/lib/path_util_unittest.py
new file mode 100644
index 0000000..3fe93d1
--- /dev/null
+++ b/lib/path_util_unittest.py
@@ -0,0 +1,332 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the path_util module."""
+
+from __future__ import print_function
+
+import itertools
+import mock
+import os
+import tempfile
+
+from chromite.cbuildbot import constants
+from chromite.lib import bootstrap_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+from chromite.lib import partial_mock
+from chromite.lib import path_util
+
+
+FAKE_SOURCE_PATH = '/path/to/source/tree'
+FAKE_REPO_PATH = '/path/to/repo'
+CUSTOM_SOURCE_PATH = '/custom/source/path'
+
+
class DetermineCheckoutTest(cros_test_lib.MockTempDirTestCase):
  """Verify functionality for figuring out what checkout we're in."""

  def setUp(self):
    self.rc_mock = cros_build_lib_unittest.RunCommandMock()
    self.StartPatcher(self.rc_mock)
    self.rc_mock.SetDefaultCmdResult()

  def RunTest(self, dir_struct, cwd, expected_root, expected_type,
              expected_src):
    """Run a test with specific parameters and expected results.

    Args:
      dir_struct: List describing an on-disk hierarchy to create under
                  self.tempdir (see cros_test_lib.CreateOnDiskHierarchy).
      cwd: Directory, relative to self.tempdir, to run detection from.
      expected_root: Expected checkout root relative to self.tempdir, or None.
      expected_type: Expected path_util.CHECKOUT_TYPE_* value.
      expected_src: Expected chrome src dir relative to self.tempdir, or None.
    """
    cros_test_lib.CreateOnDiskHierarchy(self.tempdir, dir_struct)
    cwd = os.path.join(self.tempdir, cwd)
    checkout_info = path_util.DetermineCheckout(cwd)
    full_root = expected_root
    if expected_root is not None:
      full_root = os.path.join(self.tempdir, expected_root)
    full_src = expected_src
    if expected_src is not None:
      full_src = os.path.join(self.tempdir, expected_src)

    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual(checkout_info.root, full_root)
    self.assertEqual(checkout_info.type, expected_type)
    self.assertEqual(checkout_info.chrome_src_dir, full_src)

  def testGclientRepo(self):
    """Recognizes a GClient repo checkout."""
    dir_struct = [
        'a/.gclient',
        'a/b/.repo/',
        'a/b/c/.gclient',
        'a/b/c/d/somefile',
    ]
    self.RunTest(dir_struct, 'a/b/c', 'a/b/c',
                 path_util.CHECKOUT_TYPE_GCLIENT,
                 'a/b/c/src')
    self.RunTest(dir_struct, 'a/b/c/d', 'a/b/c',
                 path_util.CHECKOUT_TYPE_GCLIENT,
                 'a/b/c/src')
    self.RunTest(dir_struct, 'a/b', 'a/b',
                 path_util.CHECKOUT_TYPE_REPO,
                 None)
    self.RunTest(dir_struct, 'a', 'a',
                 path_util.CHECKOUT_TYPE_GCLIENT,
                 'a/src')

  def testGitUnderGclient(self):
    """Recognizes a chrome git checkout by gclient."""
    self.rc_mock.AddCmdResult(
        partial_mock.In('config'), output=constants.CHROMIUM_GOB_URL)
    dir_struct = [
        'a/.gclient',
        'a/src/.git/',
    ]
    self.RunTest(dir_struct, 'a/src', 'a',
                 path_util.CHECKOUT_TYPE_GCLIENT,
                 'a/src')

  def testGitUnderRepo(self):
    """Recognizes a chrome git checkout by repo."""
    self.rc_mock.AddCmdResult(
        partial_mock.In('config'), output=constants.CHROMIUM_GOB_URL)
    dir_struct = [
        'a/.repo/',
        'a/b/.git/',
    ]
    self.RunTest(dir_struct, 'a/b', 'a',
                 path_util.CHECKOUT_TYPE_REPO,
                 None)

  def testBadGit1(self):
    """.git is not a directory."""
    self.RunTest(['a/.git'], 'a', None,
                 path_util.CHECKOUT_TYPE_UNKNOWN, None)

  def testBadGit2(self):
    """'git config' returns nothing."""
    self.RunTest(['a/.repo/', 'a/b/.git/'], 'a/b', 'a',
                 path_util.CHECKOUT_TYPE_REPO, None)

  def testBadGit3(self):
    """'git config' returns error."""
    self.rc_mock.AddCmdResult(partial_mock.In('config'), returncode=5)
    self.RunTest(['a/.git/'], 'a', None,
                 path_util.CHECKOUT_TYPE_UNKNOWN, None)

  def testSdkBootstrap(self):
    """Recognizes an SDK bootstrap case."""
    self.rc_mock.AddCmdResult(
        partial_mock.In('config'), output=constants.CHROMITE_URL)
    dir_struct = [
        'a/.git/',
        'a/sdk_checkouts/1.0.0/.repo',
        'a/sdk_checkouts/1.0.0/chromite/.git',
    ]
    self.RunTest(dir_struct, 'a', 'a',
                 path_util.CHECKOUT_TYPE_SDK_BOOTSTRAP, None)
    self.RunTest(dir_struct, 'a/b', 'a',
                 path_util.CHECKOUT_TYPE_SDK_BOOTSTRAP, None)
    self.RunTest(dir_struct, 'a/sdk_checkouts', 'a',
                 path_util.CHECKOUT_TYPE_SDK_BOOTSTRAP, None)
    self.RunTest(dir_struct, 'a/sdk_checkouts/1.0.0', 'a',
                 path_util.CHECKOUT_TYPE_SDK_BOOTSTRAP, None)
    self.RunTest(dir_struct, 'a/sdk_checkouts/1.0.0/chromite', 'a',
                 path_util.CHECKOUT_TYPE_SDK_BOOTSTRAP, None)
+
+
class FindCacheDirTest(cros_test_lib.WorkspaceTestCase):
  """Test cache dir specification and finding functionality."""

  def setUp(self):
    dir_struct = [
        'repo/.repo/',
        'repo/manifest/',
        'gclient/.gclient',
    ]
    cros_test_lib.CreateOnDiskHierarchy(self.tempdir, dir_struct)
    self.repo_root = os.path.join(self.tempdir, 'repo')
    self.gclient_root = os.path.join(self.tempdir, 'gclient')
    self.nocheckout_root = os.path.join(self.tempdir, 'nothing')
    self.CreateBootstrap('1.0.0')
    # Expected cache location when running from the bootstrap checkout.
    self.bootstrap_cache = os.path.join(
        self.bootstrap_path, bootstrap_lib.SDK_CHECKOUTS,
        path_util.GENERAL_CACHE_DIR)

    self.rc_mock = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
    # FindCacheDir keys off os.getcwd(); patch it per test.
    self.cwd_mock = self.PatchObject(os, 'getcwd')

  def testRepoRoot(self):
    """Test when we are inside a repo checkout."""
    self.cwd_mock.return_value = self.repo_root
    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual(
        path_util.FindCacheDir(),
        os.path.join(self.repo_root, path_util.GENERAL_CACHE_DIR))

  def testGclientRoot(self):
    """Test when we are inside a gclient checkout."""
    self.cwd_mock.return_value = self.gclient_root
    self.assertEqual(
        path_util.FindCacheDir(),
        os.path.join(self.gclient_root, path_util.CHROME_CACHE_DIR))

  def testTempdir(self):
    """Test when we are not in any checkout."""
    self.cwd_mock.return_value = self.nocheckout_root
    self.assertStartsWith(
        path_util.FindCacheDir(),
        os.path.join(tempfile.gettempdir(), ''))

  def testBootstrap(self):
    """Test when running from bootstrap."""
    self.cwd_mock.return_value = self.bootstrap_path
    self.rc_mock.AddCmdResult(
        partial_mock.In('config'), output=constants.CHROMITE_URL)
    self.assertEqual(
        path_util.FindCacheDir(),
        self.bootstrap_cache)

  def testSdkCheckoutsInsideBootstrap(self):
    """Test when in the bootstrap SDK checkout location."""
    self.cwd_mock.return_value = os.path.join(
        self.bootstrap_path, bootstrap_lib.SDK_CHECKOUTS)
    self.rc_mock.AddCmdResult(
        partial_mock.In('config'), output=constants.CHROMITE_URL)
    self.assertEqual(
        path_util.FindCacheDir(),
        self.bootstrap_cache)

  def testSdkInsideBootstrap(self):
    """Test when in an SDK checkout inside the bootstrap."""
    self.cwd_mock.return_value = os.path.join(
        self.bootstrap_path, bootstrap_lib.SDK_CHECKOUTS, '1.0.0', 'chromite')
    self.rc_mock.AddCmdResult(
        partial_mock.In('config'), output=constants.CHROMITE_URL)
    self.assertEqual(
        path_util.FindCacheDir(),
        self.bootstrap_cache)
+
+
class TestPathResolver(cros_test_lib.MockTestCase):
  """Tests of ChrootPathResolver class."""

  def setUp(self):
    self.PatchObject(constants, 'SOURCE_ROOT', new=FAKE_SOURCE_PATH)
    self.PatchObject(path_util, 'GetCacheDir', return_value='/path/to/cache')
    self.PatchObject(git, 'FindRepoDir',
                     return_value=os.path.join(FAKE_REPO_PATH, '.fake_repo'))
    self.chroot_path = None

  def FakeCwd(self, base_path):
    """Returns a fake working directory nested under |base_path|."""
    return os.path.join(base_path, 'somewhere/in/there')

  def SetChrootPath(self, source_path):
    """Set and fake the chroot path."""
    self.chroot_path = os.path.join(source_path, constants.DEFAULT_CHROOT_DIR)

  @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=True)
  def testInsideChroot(self, _):
    """Tests {To,From}Chroot() call from inside the chroot."""
    self.SetChrootPath(constants.SOURCE_ROOT)
    resolver = path_util.ChrootPathResolver()

    # Inside the chroot, resolution is just path canonicalization.
    self.assertEqual(os.path.realpath('some/path'),
                     resolver.ToChroot('some/path'))
    self.assertEqual(os.path.realpath('/some/path'),
                     resolver.ToChroot('/some/path'))
    self.assertEqual(os.path.realpath('some/path'),
                     resolver.FromChroot('some/path'))
    self.assertEqual(os.path.realpath('/some/path'),
                     resolver.FromChroot('/some/path'))

  @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False)
  def testOutsideChrootInbound(self, _):
    """Tests ToChroot() calls from outside the chroot."""
    for source_path, source_from_path_repo in itertools.product(
        (None, CUSTOM_SOURCE_PATH), (False, True)):
      if source_from_path_repo:
        actual_source_path = FAKE_REPO_PATH
      else:
        actual_source_path = source_path or constants.SOURCE_ROOT

      fake_cwd = self.FakeCwd(actual_source_path)
      self.PatchObject(os, 'getcwd', return_value=fake_cwd)
      self.SetChrootPath(actual_source_path)
      resolver = path_util.ChrootPathResolver(
          source_path=source_path,
          source_from_path_repo=source_from_path_repo)
      source_rel_cwd = os.path.relpath(fake_cwd, actual_source_path)

      # Case: path inside the chroot space.
      self.assertEqual(
          '/some/path',
          resolver.ToChroot(os.path.join(self.chroot_path, 'some/path')))

      # Case: path inside the cache directory.
      self.assertEqual(
          os.path.join(constants.CHROOT_CACHE_ROOT, 'some/path'),
          resolver.ToChroot(os.path.join(path_util.GetCacheDir(),
                                         'some/path')))

      # Case: absolute path inside the source tree. Note that when
      # source_from_path_repo is True, actual_source_path is FAKE_REPO_PATH,
      # so a single assertion covers both sub-cases (the former if/else
      # branches were identical).
      self.assertEqual(
          os.path.join(constants.CHROOT_SOURCE_ROOT, 'some/path'),
          resolver.ToChroot(os.path.join(actual_source_path, 'some/path')))

      # Case: relative path inside the source tree. The expectation is the
      # same regardless of source_from_path_repo (the former if/else
      # branches were identical).
      self.assertEqual(
          os.path.join(constants.CHROOT_SOURCE_ROOT, source_rel_cwd,
                       'some/path'),
          resolver.ToChroot('some/path'))

      # Case: unreachable, path with improper source root prefix.
      with self.assertRaises(ValueError):
        resolver.ToChroot(os.path.join(actual_source_path + '-foo',
                                       'some/path'))

      # Case: unreachable (random).
      with self.assertRaises(ValueError):
        resolver.ToChroot('/some/path')

  @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False)
  def testOutsideChrootOutbound(self, _):
    """Tests FromChroot() calls from outside the chroot."""
    self.PatchObject(os, 'getcwd', return_value=self.FakeCwd(FAKE_SOURCE_PATH))
    self.SetChrootPath(constants.SOURCE_ROOT)
    resolver = path_util.ChrootPathResolver()

    # Case: source root path.
    self.assertEqual(
        os.path.join(constants.SOURCE_ROOT, 'some/path'),
        resolver.FromChroot(os.path.join(constants.CHROOT_SOURCE_ROOT,
                                         'some/path')))

    # Case: cyclic source/chroot sub-path elimination.
    self.assertEqual(
        os.path.join(constants.SOURCE_ROOT, 'some/path'),
        resolver.FromChroot(os.path.join(
            constants.CHROOT_SOURCE_ROOT,
            constants.DEFAULT_CHROOT_DIR,
            constants.CHROOT_SOURCE_ROOT.lstrip(os.path.sep),
            constants.DEFAULT_CHROOT_DIR,
            constants.CHROOT_SOURCE_ROOT.lstrip(os.path.sep),
            'some/path')))

    # Case: path inside the cache directory.
    self.assertEqual(
        os.path.join(path_util.GetCacheDir(), 'some/path'),
        resolver.FromChroot(os.path.join(constants.CHROOT_CACHE_ROOT,
                                         'some/path')))

    # Case: non-rooted chroot paths.
    self.assertEqual(
        os.path.join(self.chroot_path, 'some/path'),
        resolver.FromChroot('/some/path'))
diff --git a/lib/paygen/__init__.py b/lib/paygen/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/lib/paygen/__init__.py
diff --git a/lib/paygen/download_cache.py b/lib/paygen/download_cache.py
new file mode 100644
index 0000000..cc40e3f
--- /dev/null
+++ b/lib/paygen/download_cache.py
@@ -0,0 +1,395 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Downloads files upon request in a thread/process safe way.
+
+DEPRECATED: Should be merged into chromite.lib.cache.
+"""
+
+from __future__ import print_function
+
import hashlib
import md5
import os
import shutil
import stat
import tempfile
import time

from chromite.lib import locking
from chromite.lib import osutils
from chromite.lib.paygen import urilib
from chromite.lib.paygen import utils
+
+
+FETCH_RETRY_COUNT = 10
+DEFAULT_DAYS_TO_KEEP = 1
+ONE_DAY = 24 * 60 * 60
+
+
class RetriesExhaustedError(Exception):
  """Raised after exceeding the retry limit while fetching a single URI."""
+
+
def _DefaultFetchFunc(uri, cache_file):
  """Default fetch implementation: download |uri| via urilib.

  Args:
    uri: The URI to download.
    cache_file: The path to put the downloaded file in.
  """
  urilib.Copy(uri, cache_file)
+
+
class DownloadCache(object):
  """This class downloads files into a local directory upon request.

  This class uses locking to make this safe across processes, and
  threads.

  Example usage:

    # This will create the cache dir, and purge old contents.
    cache = DownloadCache('/tmp/my_cache')

    # File is copied into '/tmp/foo', blocking for download if needed.
    cache.GetFileCopy('gs://bucket/foo', '/tmp/foo')

    # File is loaded into cache, but not locked.
    tempfile = cache.GetFileInTempFile('gs://bucket/foo')
    tempfile.close()
  """

  # Name of the purge management lock over the entire cache.
  _CACHE_LOCK = 'cache.lock'
  # Subdirectory holding cached file contents.
  _FILE_DIR = 'cache'
  # Subdirectory holding per-file lock files.
  _LOCK_DIR = 'lock'

  # Seconds to sleep between retries inside GetFileObject.
  _GET_FILE_SPIN_DELAY = 2

  def __init__(self, cache_dir, max_age=ONE_DAY, cache_size=None):
    """Create a DownloadCache.

    Since Purging is not performed very often, we can exceed max_age or
    cache_size.

    Args:
      cache_dir: The directory in which to create the cache.
      max_age: Purge files not used for this number of seconds. None for no
               max_age.
      cache_size: Purge the least recently used files until the cache is
                  below this size in bytes. None for no size limit.

      If no condition is provided, we purge all files unused for one full day.
    """
    # One directory for cached files, one for lock files.
    self._cache_dir = os.path.realpath(cache_dir)
    self._file_dir = os.path.join(self._cache_dir, self._FILE_DIR)
    self._lock_dir = os.path.join(self._cache_dir, self._LOCK_DIR)

    self._max_age = max_age
    self._cache_size = cache_size

    self._SetupCache()

  def _SetupCache(self):
    """Make sure that our cache contains only files/directories we expect."""
    try:
      osutils.SafeMakedirs(self._cache_dir)
      # The purge lock ensures nobody else is modifying the cache in any way.
      with self._PurgeLock(blocking=False, shared=False):
        # We have changed the layout of our cache directories over time.
        # Clean up any left over files.
        expected = (self._CACHE_LOCK, self._FILE_DIR, self._LOCK_DIR)
        unexpected = set(os.listdir(self._cache_dir)).difference(expected)

        for name in unexpected:
          filename = os.path.join(self._cache_dir, name)
          if os.path.isdir(filename):
            shutil.rmtree(filename)
          else:
            os.unlink(filename)

        # Create the cache file dir if needed.
        if not os.path.exists(self._file_dir):
          os.makedirs(self._file_dir)

        # Create the lock dir if needed.
        if not os.path.exists(self._lock_dir):
          os.makedirs(self._lock_dir)
    except locking.LockNotAcquiredError:
      # If we can't get an exclusive lock on the cache, someone else set it up.
      pass

  def _UriToCacheFile(self, uri):
    """Convert a URI to a cache file (full path).

    Args:
      uri: The uri of the file to be cached locally.

    Returns:
      The full path file name of the cache file associated with a given URI.
    """
    # We use the md5 hash of the URI as our file name. This allows us to
    # store all cache files in a single directory, which removes race
    # conditions around directories.
    # hashlib replaces the deprecated md5 module; hexdigest() produces the
    # same value as the old digest().encode('hex').
    return os.path.join(self._file_dir, hashlib.md5(uri).hexdigest())

  def _PurgeLock(self, blocking=False, shared=False):
    """Acquire a lock on the cache as a whole.

    An exclusive lock proves nobody else will modify anything, and nobody
    else will hold any _CacheFileLocks. A shared lock is required before
    getting any kind of _CacheFileLock.

    Args:
      blocking: Block until the lock is available?
      shared: Get a shared lock, or an exclusive lock?

    Returns:
      Locking.FileLock (acquired)
    """
    lock_file = os.path.join(self._cache_dir, self._CACHE_LOCK)
    lock = locking.FileLock(lock_file, locktype=locking.FLOCK,
                            blocking=blocking)
    return lock.lock(shared)

  def _CacheFileLock(self, cache_file, blocking=False, shared=False):
    """Acquire a lock on a file in the cache.

    A shared lock will ensure no other processes are modifying the file, but
    getting it does not ensure that the file in question actually exists.

    An exclusive lock is required to modify a cache file, this usually means
    downloading it.

    A shared _PurgeLock should be held before trying to acquire any type
    of cache file lock.

    Args:
      cache_file: The full path of file in cache to lock.
      blocking: Block until the lock is available?
      shared: Get a shared lock, or an exclusive lock?

    Returns:
      Locking.FileLock (acquired)
    """
    lock_file = os.path.join(self._lock_dir, os.path.basename(cache_file))
    lock = locking.FileLock(lock_file, locktype=locking.FLOCK,
                            blocking=blocking)
    return lock.lock(shared)

  def Purge(self, max_age=None, cache_size=None):
    """Attempts to clean up the cache contents.

    Is a no-op if cache lock is not acquirable.

    Args:
      max_age: Overrides the __init__ max_age for this one purge.
               Mostly intended for unittests.
      cache_size: Overrides the __init__ cache_size for this one purge.
                  Mostly intended for unittests.
    """
    max_age = self._max_age if max_age is None else max_age
    cache_size = self._cache_size if cache_size is None else cache_size

    try:
      # Prevent other changes while we purge the cache.
      with self._PurgeLock(shared=False, blocking=False):

        # Purge files based on age, if specified.
        if max_age is not None:
          now = time.time()
          for f in utils.ListdirFullpath(self._file_dir):
            if (now - os.path.getmtime(f)) > max_age:
              os.unlink(f)

        # Purge files based on size, if specified.
        if cache_size is not None:
          # Find cache files, and sort them so the oldest are first.
          # This defines which ones we will purge first.
          cache_files = utils.ListdirFullpath(self._file_dir)
          cache_files.sort(key=os.path.getmtime)

          sizes = [os.path.getsize(f) for f in cache_files]
          total_size = sum(sizes)

          # Remove files until we are small enough to fit.
          for f, size in zip(cache_files, sizes):
            if total_size < cache_size:
              break
            total_size -= size
            os.unlink(f)

        # Just remove all lock files. They will be recreated as needed.
        shutil.rmtree(self._lock_dir)
        os.makedirs(self._lock_dir)

    except locking.LockNotAcquiredError:
      # If we can't get an exclusive lock on the file, it's in use, leave it.
      pass

  def _FetchIntoCache(self, uri, cache_file, fetch_func=_DefaultFetchFunc):
    """This function downloads the specified file (if not already local).

    You must hold the PurgeLock when calling this method.

    If it can't get an exclusive lock, or if the file is already present,
    it does nothing.

    Args:
      uri: The uri of the file.
      cache_file: The location in the cache to download to.
      fetch_func: Function to get the file.

    Returns:
      True if a file was downloaded, False otherwise. (used in unittests)

    Raises:
      May raise any download error associated with the URI's protocol.
    """
    try:
      # Write protect the file before modifying it.
      with self._CacheFileLock(cache_file, shared=False, blocking=False):
        if os.path.exists(cache_file):
          return False

        try:
          fetch_func(uri, cache_file)
          # Make the file read-only by everyone.
          os.chmod(cache_file, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
        except:
          # Intentionally bare: on *any* failure (including interrupts) make
          # sure no partial file is left behind, then re-raise.
          if os.path.exists(cache_file):
            os.unlink(cache_file)
          raise

    except locking.LockNotAcquiredError:
      # In theory, if it's already locked, that either means a download is in
      # progress, or there is a shared lock which means it's already present.
      return False

    # Try to cleanup the cache after we just grew it.
    self.Purge()
    return True

  # TODO: Instead of hooking in fetch functions in the cache here, we could
  # set up protocol handlers which would know how to handle special cases
  # generally, identified by a protocol prefix like "prepimage://" or
  # "decompress://". That would help make sure they're handled consistently.
  def GetFileObject(self, uri, fetch_func=_DefaultFetchFunc):
    """Get an open readonly File object for the file in the cache.

    This method will populate the cache with the requested file if it's
    not already present, and will return an already opened read only file
    object for the cache contents.

    Even if the file is purged, this File object will remain valid until
    closed. Since this method is the only legitimate way to get access to
    a file in the cache, and it returns read only Files, cache files should
    never be modified.

    This method may block while trying to download and/or lock the file.

    Args:
      uri: The uri of the file to access.
      fetch_func: A function to produce the file if it isn't already in the
                  cache.

    Returns:
      File object opened with 'rb' mode.

    Raises:
      Exceptions from a failed download are passed through 'as is' from
      the underlying download mechanism.

      RetriesExhaustedError if we need a large number of attempts to
      download the same file.
    """
    cache_file = self._UriToCacheFile(uri)

    # We keep trying until we succeed, or throw an exception.
    for _ in xrange(FETCH_RETRY_COUNT):
      with self._PurgeLock(shared=True, blocking=True):
        # Attempt to download the file, if needed.
        self._FetchIntoCache(uri, cache_file, fetch_func)

        # Get a shared lock on the file. This can block if another process
        # has a non-shared lock (ie: they are downloading).
        with self._CacheFileLock(cache_file, shared=True, blocking=True):

          if os.path.exists(cache_file):
            fd = open(cache_file, 'rb')

            # Touch the timestamp on cache file to help purging logic.
            os.utime(cache_file, None)

            return fd
          else:
            # We don't have the file in our cache. There are three ways this
            # can happen:
            #
            # A) Another process was trying to download, blocked our download,
            #    then got a download error.
            # B) Another process removed the file (illegally). We will recover
            #    as soon as all read-only locks are released.
            # C) Our download failed without throwing an exception. We will
            #    block forever if this continues to happen.

            # Sleep so we don't spin too quickly, then try again.
            time.sleep(self._GET_FILE_SPIN_DELAY)

    raise RetriesExhaustedError(uri)

  def GetFileCopy(self, uri, filepath):
    """Copy a cache file into your file (downloading as needed).

    Copy the file into your specified filename (creating or overriding). It
    will be downloaded into the cache first, if needed. It is your
    responsibility to manage filepath after it is populated.

    Args:
      uri: The uri of the file to access.
      filepath: The name of the file to copy uri contents into.

    Raises:
      Exceptions from a failed download are passed through 'as is' from
      the underlying download mechanism.
    """
    with self.GetFileObject(uri) as src:
      with open(filepath, 'w+b') as dest:
        shutil.copyfileobj(src, dest)

  def GetFileInTempFile(self, uri):
    """Copy a cache file into a tempfile (downloading as needed).

    The cache file is copied into a tempfile.NamedTemporaryFile.

    This file is owned strictly by the caller and can be modified/deleted as
    needed. Closing the NamedTemporaryFile will delete it.

    Args:
      uri: The uri of the file to access.

    Returns:
      tempfile.NamedTemporaryFile containing the requested file.
      NamedTemporaryFile.name will contain the file's name.

    Raises:
      Exceptions from a failed download are passed through 'as is' from
      the underlying download mechanism.
    """
    temp = tempfile.NamedTemporaryFile()
    self.GetFileCopy(uri, temp.name)
    return temp

  # Cache objects can be used with "with" statements.
  def __enter__(self):
    return self

  def __exit__(self, _type, _value, _traceback):
    self.Purge()
diff --git a/lib/paygen/download_cache_unittest b/lib/paygen/download_cache_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/lib/paygen/download_cache_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/paygen/download_cache_unittest.py b/lib/paygen/download_cache_unittest.py
new file mode 100644
index 0000000..46883d3
--- /dev/null
+++ b/lib/paygen/download_cache_unittest.py
@@ -0,0 +1,444 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test download_cache library.
+
+DEPRECATED: Should be migrated to chromite.lib.cache_unittest.
+"""
+
+from __future__ import print_function
+
+import multiprocessing
+import os
+import pickle
+import traceback
+
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib.paygen import download_cache
+from chromite.lib.paygen import gslib
+
+
+# We access a lot of protected members during testing.
+# pylint: disable=W0212
+
+# The inProcess methods have to be standalone to be pickleable.
def _inProcessFetchIntoCache(uri_tempdir):
  """In a sub-process, fetch a URI into a fresh DownloadCache.

  Args:
    uri_tempdir: Tuple of (uri, tempdir) — pickled as a single argument so
                 this can be used with multiprocessing map-style calls.
  """
  try:
    uri, tempdir = uri_tempdir
    cache = download_cache.DownloadCache(tempdir)
    cache_file = cache._UriToCacheFile(uri)
    with cache._PurgeLock(shared=True, blocking=True):
      return cache._FetchIntoCache(uri, cache_file)
  except Exception:
    # Print the traceback here: exceptions crossing process boundaries
    # lose their stack.
    traceback.print_exc()
    raise
+
+
def _inProcessGetFile(uri_tempdir):
  """In a sub-process, call DownloadCache.GetFileObject (or Purge).

  Args:
    uri_tempdir: Tuple of (uri, tempdir). A falsy uri means "wipe the
                 cache" instead of fetching.

  Returns:
    The fetched file's contents, or None when purging.
  """
  try:
    uri, tempdir = uri_tempdir
    cache = download_cache.DownloadCache(tempdir, cache_size=0)

    if not uri:
      # No URI: wipe the cache instead of fetching.
      cache.Purge()
      return None

    with cache.GetFileObject(uri) as f:
      return f.read()
  except Exception:
    # Print the traceback here: exceptions crossing process boundaries
    # lose their stack.
    traceback.print_exc()
    raise
+
+
class DownloadCachePickleTest(cros_test_lib.TempDirTestCase):
  """Test pickle/unpickle the download cache."""

  def testPickleUnpickle(self):
    """Round-trip a DownloadCache through pickle."""
    # pylint: disable=E1101
    cache = download_cache.DownloadCache(self.tempdir)
    pickle_path = os.path.join(self.tempdir, 'cache.pickle')

    # Pickle data is binary; use 'wb'/'rb' (text mode corrupts the stream
    # on platforms with newline translation and fails outright on Python 3).
    with open(pickle_path, 'wb') as pickle_fh:
      pickle.dump(cache, pickle_fh)

    # Load pickle file.
    with open(pickle_path, 'rb') as pickle_fh:
      pickle.load(pickle_fh)
+
+
class FetchFuncTest(cros_test_lib.TempDirTestCase):
  """Test getting files with a custom fetch function."""

  dummy_uri = 'dummy URI'
  dummy_uri2 = 'dummy URI 2'

  def testFetchFunc(self):
    """Test getting files with a custom fetch function."""
    fetch_calls = []

    def recordingFetch(uri, cache_file):
      """Write the uri into the file to have verifiable content"""
      fetch_calls.append(uri)
      osutils.WriteFile(cache_file, uri)

    cache = download_cache.DownloadCache(self.tempdir)
    self.assertEqual(len(fetch_calls), 0)

    # First access to each URI triggers exactly one fetch.
    cache.GetFileObject(self.dummy_uri, recordingFetch)
    self.assertEqual(len(fetch_calls), 1)
    with cache.GetFileObject(self.dummy_uri, recordingFetch) as f:
      self.assertEqual(f.read(), self.dummy_uri)
    self.assertEqual(len(fetch_calls), 1)

    cache.GetFileObject(self.dummy_uri2, recordingFetch)
    self.assertEqual(len(fetch_calls), 2)
    with cache.GetFileObject(self.dummy_uri2, recordingFetch) as f:
      self.assertEqual(f.read(), self.dummy_uri2)
    self.assertEqual(len(fetch_calls), 2)

    # Cached entries must not trigger further fetches.
    with cache.GetFileObject(self.dummy_uri, recordingFetch) as f:
      self.assertEqual(f.read(), self.dummy_uri)
    with cache.GetFileObject(self.dummy_uri2, recordingFetch) as f:
      self.assertEqual(f.read(), self.dummy_uri2)
    self.assertEqual(len(fetch_calls), 2)
+
+
+class DownloadCacheTest(cros_test_lib.TempDirTestCase):
+  """Test DownloadCache helper class."""
+
+  uri_large = 'gs://chromeos-releases-test/download_cache/file_large'
+  uri_a = 'gs://chromeos-releases-test/download_cache/file_a'
+  uri_b = 'gs://chromeos-releases-test/download_cache/file_b'
+
+  hash_large = 'ce11166b2742c12c93efa307c4c4adbf'
+  hash_a = '591430f83b55355d9233babd172baea5'
+  hash_b = '22317eb6cccea8c87f960c45ecec3478'
+
+  def setUp(self):
+    # Use a subdir specifically for the cache so we can use the tempdir for
+    # other things (including tempfiles by gsutil/etc...).
+    self.cache_dir = os.path.join(self.tempdir, 'unittest-cache')
+
+  def _verifyFileContents(self, cache, uri):
+    """Test helper to make sure a cached file contains correct contents."""
+
+    # Fetch it
+    with cache.GetFileObject(uri) as f:
+      contents = f.read()
+
+    # Make sure the contents are valid.
+    self.assertEqual(contents, gslib.Cat(uri))
+
+    # Make sure the cache file exists where expected.
+    cache_file = cache._UriToCacheFile(uri)
+
+    self.assertTrue(cache_file.startswith(self.cache_dir))
+    self.assertTrue(os.path.exists(cache_file))
+
+  def _validateCacheContents(self, cache, expected_contents):
+    """Test helper to make sure the cache holds what we expect."""
+
+    expected_contents = set(expected_contents)
+    expected_top_contents = set(['cache', 'cache.lock', 'lock'])
+
+    cache_top_contents = set(os.listdir(cache._cache_dir))
+    file_dir_contents = set(os.listdir(cache._file_dir))
+    lock_dir_contents = set(os.listdir(cache._lock_dir))
+
+    # We should always have exactly the expected files in the top dir.
+    self.assertEqual(cache_top_contents, expected_top_contents)
+
+    # Cache contents should match the expected list.
+    self.assertEqual(file_dir_contents, expected_contents)
+
+    # The lock directory should contain no files not in the file_dir.
+    self.assertTrue(lock_dir_contents.issubset(file_dir_contents))
+
+  def testCacheFileNames(self):
+    """Make sure that some of the files we create have the expected names."""
+    cache = download_cache.DownloadCache(self.cache_dir)
+
+    expected_cache_lock = os.path.join(self.cache_dir, 'cache.lock')
+    expected_cache = os.path.join(self.cache_dir,
+                                  'cache/3ba505fc7774455169af6f50b7964dff')
+
+    expected_lock = os.path.join(self.cache_dir,
+                                 'lock/3ba505fc7774455169af6f50b7964dff')
+
+    # Make sure a cache content file is named as expected.
+    self.assertEqual(cache._UriToCacheFile('gs://bucket/of/awesome'),
+                     expected_cache)
+
+    # Make sure the lock file for a cache content file is named as expected.
+    with cache._CacheFileLock(expected_cache) as file_lock:
+      self.assertEqual(file_lock.path, expected_lock)
+
+    with cache._PurgeLock() as purge_lock:
+      self.assertEqual(purge_lock.path, expected_cache_lock)
+
+    with cache._CacheFileLock(expected_cache) as cache_file_lock:
+      self.assertEqual(cache_file_lock.path, expected_lock)
+
+  def testSetupCacheClean(self):
+    """Test _SetupCache with a clean directory."""
+    # Create a cache, and see if it has expected contents.
+    cache = download_cache.DownloadCache(self.cache_dir)
+    self._validateCacheContents(cache, ())
+
+  def testSetupCacheDirty(self):
+    """Test _SetupCache with a dirty directory."""
+    # Create some unexpected directories.
+    for make_dir in ['foo/bar/stuff', 'bar']:
+      os.makedirs(os.path.join(self.cache_dir, make_dir))
+
+    # Touch some unexpected files.
+    for touch_file in ['bogus', 'foo/bogus']:
+      file(os.path.join(self.cache_dir, touch_file), 'w').close()
+
+    # Create a cache, and see if it has expected contents.
+    cache = download_cache.DownloadCache(self.cache_dir)
+    self._validateCacheContents(cache, ())
+
+  @cros_test_lib.NetworkTest()
+  def testGetFileObject(self):
+    """Just create a download cache, and GetFile on it."""
+
+    cache = download_cache.DownloadCache(self.cache_dir)
+
+    # Fetch a file
+    with cache.GetFileObject(self.uri_a) as f:
+      self.assertIsInstance(f, file)
+    self._verifyFileContents(cache, self.uri_a)
+    self._validateCacheContents(cache, (self.hash_a,))
+
+    # Fetch a different file
+    with cache.GetFileObject(self.uri_b) as f:
+      self.assertIsInstance(f, file)
+    self._verifyFileContents(cache, self.uri_b)
+    self._validateCacheContents(cache, (self.hash_a, self.hash_b))
+
+    # Fetch the first file a second time.
+    cache.GetFileObject(self.uri_a).close()
+    self._verifyFileContents(cache, self.uri_a)
+
+    # There should be only 2 files in the cache.
+    self._validateCacheContents(cache, (self.hash_a, self.hash_b))
+
+    # Fetch a larger file
+    cache.GetFileObject(self.uri_large).close()
+    self._verifyFileContents(cache, self.uri_large)
+
+    # There should be 3 files in the cache.
+    self._validateCacheContents(cache,
+                                (self.hash_a, self.hash_b, self.hash_large))
+
+  @cros_test_lib.NetworkTest()
+  def testGetFileCopy(self):
+    """Just create a download cache, and GetFileCopy from it."""
+
+    file_a = os.path.join(self.tempdir, 'foo')
+    file_b = os.path.join(self.tempdir, 'bar')
+
+    cache = download_cache.DownloadCache(self.cache_dir)
+
+    # Fetch non-existent files.
+    cache.GetFileCopy(self.uri_a, file_a)
+    cache.GetFileCopy(self.uri_a, file_b)
+
+    with open(file_a, 'r') as f:
+      contents_a = f.read()
+
+    with open(file_b, 'r') as f:
+      contents_b = f.read()
+
+    self.assertEqual(contents_a, contents_b)
+
+    # Fetch and overwrite existent files.
+    cache.GetFileCopy(self.uri_b, file_a)
+    cache.GetFileCopy(self.uri_b, file_b)
+
+    with open(file_a, 'r') as f:
+      contents_a = f.read()
+
+    with open(file_b, 'r') as f:
+      contents_b = f.read()
+
+    self.assertEqual(contents_a, contents_b)
+
+  @cros_test_lib.NetworkTest()
+  def testGetFileInTempFile(self):
+    """Just create a download cache, and GetFileInTempFile on it."""
+
+    cache = download_cache.DownloadCache(self.cache_dir)
+
+    # Fetch a file
+    file_t = cache.GetFileInTempFile(self.uri_a)
+
+    with cache.GetFileObject(self.uri_a) as f:
+      contents_a = f.read()
+
+    with file_t as f:
+      contents_t = f.read()
+
+    self.assertEqual(contents_t, contents_a)
+    self.assertEqual(contents_t, gslib.Cat(self.uri_a))
+
+  @cros_test_lib.NetworkTest()
+  def testPurgeLogic(self):
+    cache = download_cache.DownloadCache(self.cache_dir)
+
+    cache.GetFileObject(self.uri_a).close()
+    cache.GetFileObject(self.uri_b).close()
+
+    # The default cache logic should leave these files untouched, since
+    # they are less than a day old.
+    cache.Purge()
+    self._validateCacheContents(cache, (self.hash_a, self.hash_b))
+
+    # Purge until the cache is empty.
+    cache.Purge(cache_size=0)
+    self._validateCacheContents(cache, ())
+
+    # Refetch two files.
+    cache.GetFileObject(self.uri_a).close()
+    cache.GetFileObject(self.uri_b).close()
+
+    # Change the timestamp so uri_a hasn't been used for a very long time.
+    os.utime(os.path.join(self.cache_dir, 'cache', self.hash_a),
+             (2, 2))
+
+    # Purge files that haven't been used recently.
+    cache.Purge(max_age=1000)
+    self._validateCacheContents(cache, (self.hash_b,))
+
+  @cros_test_lib.NetworkTest()
+  def testContextMgr(self):
+    """Make sure we behave properly with 'with'."""
+
+    # Create an instance, and use it in a with
+    precache = download_cache.DownloadCache(self.cache_dir, cache_size=0)
+
+    with precache as cache:
+      # Assert the instance didn't change.
+      self.assertIs(precache, cache)
+
+      # Download a file.
+      cache.GetFileObject(self.uri_a).close()
+
+      self._validateCacheContents(cache, (self.hash_a,))
+
+    # Exiting the 'with' block should have purged everything.
+    self._validateCacheContents(cache, ())
+
+  @cros_test_lib.NetworkTest()
+  def testThreadedDownloads(self):
+    """Spin off multiple processes and fetch a file.
+
+       Ensure the process locking allows the file to be downloaded exactly
+       once.
+    """
+    pool = multiprocessing.Pool(processes=10)
+
+    # Create a tuple of the three args we want to pass to inProcess test,
+    # use map semantics as a convenient way to run in parallel.
+    results = pool.map(_inProcessFetchIntoCache,
+                       [(self.uri_large, self.cache_dir)] * 20)
+
+    # Results contains a list of booleans showing which instances actually
+    # performed the download. Exactly one of them should have. The list could
+    # also contain exceptions if one of the downloads failed.
+    results.sort()
+    self.assertEqual(results, [False] * 19 + [True])
+
+  @cros_test_lib.NetworkTest()
+  def testThreadedGetFile(self):
+    """Spin off multiple processes and call GetFile.
+
+       Ensure all processes complete, and return the same local file.
+    """
+    pool = multiprocessing.Pool(processes=10)
+
+    # Create a tuple of the three args we want to pass to inProcess test,
+    # use map semantics as a convenient way to run in parallel.
+    results = pool.map(_inProcessGetFile,
+                       [(self.uri_a, self.cache_dir)] * 20)
+
+    # Fetch it ourselves and verify the results.
+    cache = download_cache.DownloadCache(self.cache_dir)
+    self._verifyFileContents(cache, self.uri_a)
+
+    with cache.GetFileObject(self.uri_a) as f:
+      contents_a = f.read()
+
+    # Ensure that every process gave back the expected result.
+    expected = [contents_a] * 20
+    self.assertEqual(results, expected)
+
+  @cros_test_lib.NetworkTest()
+  def testThreadedGetFileMultiple(self):
+    """Spin off multiple processes and call GetFile with multiple uris.
+
+       Ensure all processes complete, and return the right local file.
+    """
+    pool = multiprocessing.Pool(processes=20)
+
+    # Create a tuple of the three args we want to pass to inProcess test,
+    # use map semantics as a convenient way to run in parallel.
+    results = pool.map(_inProcessGetFile,
+                       [(self.uri_a, self.cache_dir),
+                        (self.uri_b, self.cache_dir)] * 10)
+
+    # Fetch it ourselves and verify the results.
+    cache = download_cache.DownloadCache(self.cache_dir)
+
+    with cache.GetFileObject(self.uri_a) as f:
+      contents_a = f.read()
+
+    with cache.GetFileObject(self.uri_b) as f:
+      contents_b = f.read()
+
+    self._verifyFileContents(cache, self.uri_a)
+    self._verifyFileContents(cache, self.uri_b)
+
+    # Ensure that every process gave back the expected result.
+    expected = [contents_a, contents_b] * 10
+    self.assertEqual(results, expected)
+
+  @cros_test_lib.NetworkTest()
+  def testThreadedGetFileMultiplePurge(self):
+    """Do fetches and purges in a multiprocess environment.
+
+       Ensure all processes complete, and return the right local file.
+    """
+    pool = multiprocessing.Pool(processes=30)
+
+    requests = [(self.uri_a, self.cache_dir),
+                (self.uri_b, self.cache_dir),
+                (None, self.cache_dir)] * 10
+
+    # Create a tuple of the three args we want to pass to inProcess test,
+    # use map semantics as a convenient way to run in parallel.
+    results = pool.map(_inProcessGetFile, requests)
+
+    # Fetch it ourselves and verify the results.
+    cache = download_cache.DownloadCache(self.cache_dir)
+
+    with cache.GetFileObject(self.uri_a) as f:
+      contents_a = f.read()
+
+    with cache.GetFileObject(self.uri_b) as f:
+      contents_b = f.read()
+
+    self._verifyFileContents(cache, self.uri_a)
+    self._verifyFileContents(cache, self.uri_b)
+
+    # Ensure that every process gave back the expected result.
+    expected = [contents_a, contents_b, None] * 10
+    self.assertEqual(results, expected)
diff --git a/lib/paygen/dryrun_lib.py b/lib/paygen/dryrun_lib.py
new file mode 100644
index 0000000..1928b26
--- /dev/null
+++ b/lib/paygen/dryrun_lib.py
@@ -0,0 +1,85 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Library for dry_run utilities."""
+
+from __future__ import print_function
+
+from chromite.lib import cros_logging as logging
+
+
+class DryRunMgr(object):
+  """Manage the calling of functions that make real changes.
+
+  We'll automatically disable things when in dry_run mode.
+  """
+
+  __slots__ = (
+      'dry_run',   # Boolean.  See __init__ docstring.
+      'quiet',     # Boolean.  See __init__ docstring.
+  )
+
+  def __init__(self, dry_run, quiet=False):
+    """Create a DryRunMgr object.
+
+    Args:
+      dry_run: If True then this DryRunMgr will not execute the functions
+        given to the Run method.
+      quiet: If False, then when Run method skips functions (because of
+        dry_run), then give a log message about skipping.
+    """
+    self.dry_run = dry_run
+    self.quiet = quiet
+
+  def __nonzero__(self):
+    """This allows a DryRunMgr to serve as a Boolean proxy for self.dry_run."""
+    return self.dry_run
+
+  def __call__(self, func, *args, **kwargs):
+    """See Run method, which this forwards to.
+
+    This makes a DryRunMgr object callable.  Example:
+    drm(os.remove, '/some/file')
+    """
+    return self.Run(func, *args, **kwargs)
+
+  def Run(self, func, *args, **kwargs):
+    """Run func(*args, **kwargs) if self.dry_run is not True.
+
+    Examples:
+    drm.Run(os.remove, '/some/file')
+
+    Args:
+      func: Must be a function object.
+      args: Index-based arguments to pass to func.
+      kwargs: Keyword-based arguments to pass to func.
+
+    Returns:
+      Whatever func returns if it is called, otherwise None.
+    """
+    func_name = None
+    try:
+      func_name = '%s.%s' % (func.__module__, func.__name__)
+    except AttributeError:
+      # This happens in unittests where func is a mocked function.
+      # pylint: disable=W0212
+      func_name = func._name
+
+    if self.dry_run:
+      return self._Skip(func_name, *args, **kwargs)
+    else:
+      return self._Call(func, *args, **kwargs)
+
+  def _Call(self, func, *args, **kwargs):
+    """Call func(*args, **kwargs)."""
+    return func(*args, **kwargs)
+
+  def _Skip(self, func_name, *args, **kwargs):
+    """If not quiet, give message about skipping func_name(*args, **kwargs)."""
+    if not self.quiet:
+      argstr_list = ([repr(a) for a in args] +
+                     ['%s=%r' % (k, v) for k, v in kwargs.iteritems()])
+      argstr = ', '.join(argstr_list)
+
+      logging.info('dry-run skipping %s(%s)', func_name, argstr)
diff --git a/lib/paygen/dryrun_lib_unittest b/lib/paygen/dryrun_lib_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/lib/paygen/dryrun_lib_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/paygen/dryrun_lib_unittest.py b/lib/paygen/dryrun_lib_unittest.py
new file mode 100644
index 0000000..1e5d36c
--- /dev/null
+++ b/lib/paygen/dryrun_lib_unittest.py
@@ -0,0 +1,86 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for dry_run library."""
+
+from __future__ import print_function
+
+from chromite.lib import cros_test_lib
+from chromite.lib.paygen import dryrun_lib
+
+
+# pylint: disable=W0212
+
+
+class FuncClass(object):
+  """Helper class with a Func to call."""
+  @staticmethod
+  def Func(func, *args, **kwargs):
+    """Dummy function."""
+
+
+class TestDryRunMgr(cros_test_lib.MoxTestCase):
+  """Test cases for dryrun_lib."""
+
+  def testNonzero(self):
+    self.assertTrue(dryrun_lib.DryRunMgr(True))
+    self.assertFalse(dryrun_lib.DryRunMgr(False))
+
+  def testCall(self):
+    self.mox.StubOutWithMock(FuncClass, 'Func')
+    drm = dryrun_lib.DryRunMgr(False)
+
+    # Set up the test replay script.
+    FuncClass.Func('arg1', 'arg2', keya='arga')
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    drm._Call(FuncClass.Func, 'arg1', 'arg2', keya='arga')
+    self.mox.VerifyAll()
+
+  def testSkip(self):
+    self.mox.StubOutWithMock(FuncClass, 'Func')
+    drm = dryrun_lib.DryRunMgr(True)
+
+    # Set up the test replay script.
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    drm._Skip(FuncClass.Func, 'arg1', 'arg2', keya='arga')
+    self.mox.VerifyAll()
+
+  def testRunCall(self):
+    mocked_drm = self.mox.CreateMock(dryrun_lib.DryRunMgr)
+    mocked_drm.dry_run = False
+    mocked_drm.quiet = False
+
+    args = ['arg1', 'arg2']
+    kwargs = {'keya': 'arga', 'keyb': 'argb'}
+
+    # Set up the test replay script.
+    mocked_drm._Call(FuncClass.Func, *args, **kwargs)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    dryrun_lib.DryRunMgr.Run(mocked_drm, FuncClass.Func,
+                             *args, **kwargs)
+    self.mox.VerifyAll()
+
+  def testRunSkip(self):
+    mocked_drm = self.mox.CreateMock(dryrun_lib.DryRunMgr)
+    mocked_drm.dry_run = True
+    mocked_drm.quiet = False
+
+    args = ['arg1', 'arg2']
+    kwargs = {'keya': 'arga', 'keyb': 'argb'}
+
+    # Set up the test replay script.
+    func_path = '%s.%s' % (FuncClass.Func.__module__, FuncClass.Func.__name__)
+    mocked_drm._Skip(func_path, *args, **kwargs)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    dryrun_lib.DryRunMgr.Run(mocked_drm, FuncClass.Func,
+                             *args, **kwargs)
+    self.mox.VerifyAll()
diff --git a/lib/paygen/filelib.py b/lib/paygen/filelib.py
new file mode 100644
index 0000000..81ebc70
--- /dev/null
+++ b/lib/paygen/filelib.py
@@ -0,0 +1,316 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common local file interface library."""
+
+from __future__ import print_function
+
+import base64
+import filecmp
+import fnmatch
+import hashlib
+import os
+import shutil
+
+
+class MissingFileError(RuntimeError):
+  """Raised when required file is missing."""
+
+
+class MissingDirectoryError(RuntimeError):
+  """Raised when required directory is missing."""
+
+
+def Cmp(path1, path2):
+  """Return True if paths hold identical files.
+
+  If either file is missing then always return False.
+
+  Args:
+    path1: Path to a local file.
+    path2: Path to a local file.
+
+  Returns:
+    True if files are the same, False otherwise.
+  """
+  return (os.path.exists(path1) and os.path.exists(path2) and
+          filecmp.cmp(path1, path2))
+
+
+def Copy(src_path, dest_path):
+  """Copy one path to another.
+
+  Automatically create the directory for dest_path, if necessary.
+
+  Args:
+    src_path: Path to local file to copy from.
+    dest_path: Path to local file to copy to.
+  """
+  dest_dir = os.path.dirname(dest_path)
+  if dest_dir and not Exists(dest_dir, as_dir=True):
+    Makedir(dest_dir, fill_path=True)
+
+  shutil.copy2(src_path, dest_path)
+
+
+def Size(path):
+  """Return size of file in bytes.
+
+  Args:
+    path: Path to a local file.
+
+  Returns:
+    Size of file in bytes.
+
+  Raises:
+    MissingFileError if file is missing.
+  """
+  if os.path.isfile(path):
+    return os.stat(path).st_size
+
+  raise MissingFileError('No file at %r.' % path)
+
+
+def Exists(path, as_dir=False):
+  """Return True if file exists at given path.
+
+  If path is a directory and as_dir is False then this will return False.
+
+  Args:
+    path: Path to a local file.
+    as_dir: If True then check path as a directory, otherwise check as a file.
+
+  Returns:
+    True if file (or directory) exists at path, False otherwise.
+  """
+  if as_dir:
+    return os.path.isdir(path)
+  else:
+    return os.path.isfile(path)
+
+
+def Makedir(*args, **kwargs):
+  """Make the directory at path or paths.
+
+  Args:
+    args: One or more local or /cns paths.
+    fill_path: Create parent directories as necessary.
+      Same as 'mkdir -p' option.  Defaults to False.
+
+  Raises:
+    MissingDirectoryError if fill_path not given and directory above a
+      given path does not exist.
+  """
+  fill_path = kwargs.pop('fill_path', False)
+
+  for path in args:
+    if not fill_path:
+      path_dir = os.path.dirname(path)
+      if not Exists(path_dir, as_dir=True):
+        raise MissingDirectoryError('Cannot create directory %r without'
+                                    ' fill_path option.' % path)
+
+    os.makedirs(path)
+
+
+def Remove(*args, **kwargs):
+  """Delete the file(s) at path_or_paths, or directory with recurse set.
+
+  The first path to fail to be removed will abort the command, unless
+  the failure is for a path that cannot be found and ignore_no_match is True.
+  For example, if paths is [pathA, pathB, pathC] and pathB fails to be removed
+  then pathC will also not be removed, but pathA will.
+
+  Args:
+    args: One or more paths to local files.
+    ignore_no_match: If True, then do not complain if anything was not
+      removed because no file was found at path.  Like rm -f.  Defaults to
+      False.
+    recurse: Remove recursively starting at path.  Same as rm -R.  Defaults
+      to False.
+
+  Returns:
+    True if everything was removed, False if anything was not removed (which can
+      only happen with no exception if ignore_no_match is True).
+
+  Raises:
+    MissingFileError if file is missing and ignore_no_match was False.
+  """
+  ignore_no_match = kwargs.pop('ignore_no_match', False)
+  recurse = kwargs.pop('recurse', False)
+
+  any_no_match = False
+
+  for path in args:
+    if os.path.isdir(path) and recurse:
+      shutil.rmtree(path)
+    elif os.path.exists(path):
+      # Note that a directory path with recurse==False will call os.remove here,
+      # which will fail, causing this function to fail.  As it should.
+      os.remove(path)
+    elif ignore_no_match:
+      any_no_match = True
+    else:
+      raise MissingFileError('No file at %r.' % path)
+
+  return not any_no_match
+
+
+def ListFiles(root_path, recurse=False, filepattern=None, sort=False):
+  """Return list of full file paths under given root path.
+
+  Directories are intentionally excluded.
+
+  Args:
+    root_path: e.g. /some/path/to/dir
+    recurse: Look for files in subdirectories, as well
+    filepattern: glob pattern to match against basename of file
+    sort: If True then do a default sort on paths.
+
+  Returns:
+    List of paths to files that matched
+  """
+  # Smoothly accept trailing '/' in root_path.
+  root_path = root_path.rstrip('/')
+
+  paths = []
+
+  if recurse:
+    # Recursively walk paths starting at root_path, filter for files.
+    for entry in os.walk(root_path):
+      dir_path, _, files = entry
+      for file_entry in files:
+        paths.append(os.path.join(dir_path, file_entry))
+
+  else:
+    # List paths directly in root_path, filter for files.
+    for filename in os.listdir(root_path):
+      path = os.path.join(root_path, filename)
+      if os.path.isfile(path):
+        paths.append(path)
+
+  # Filter by filepattern, if specified.
+  if filepattern:
+    paths = [p for p in paths
+             if fnmatch.fnmatch(os.path.basename(p), filepattern)]
+
+  # Sort results, if specified.
+  if sort:
+    paths = sorted(paths)
+
+  return paths
+
+
+def CopyFiles(src_dir, dst_dir):
+  """Recursively copy all files from src_dir into dst_dir
+
+  Args:
+    src_dir: directory to copy from.
+    dst_dir: directory to copy into.
+
+  Returns:
+    A list of absolute path files for all copied files.
+  """
+  dst_paths = []
+  src_paths = ListFiles(src_dir, recurse=True)
+  for src_path in src_paths:
+    dst_path = src_path.replace(src_dir, dst_dir)
+    Copy(src_path, dst_path)
+    dst_paths.append(dst_path)
+
+  return dst_paths
+
+
+def RemoveDirContents(base_dir):
+  """Remove all contents of a directory.
+
+  Args:
+    base_dir: directory to delete contents of.
+  """
+  for obj_name in os.listdir(base_dir):
+    Remove(os.path.join(base_dir, obj_name), recurse=True)
+
+
+def MD5Sum(file_path):
+  """Compute the MD5Sum of a file.
+
+  Args:
+    file_path: The full path to the file to compute the sum.
+
+  Returns:
+    A string of the md5sum if the file exists or
+    None if the file does not exist or is actually a directory.
+  """
+  # For some reason pylint refuses to accept that md5 is a function in
+  # the hashlib module, hence this pylint disable.
+  # pylint: disable=E1101
+  if not os.path.exists(file_path):
+    return None
+
+  if os.path.isdir(file_path):
+    return None
+
+  # Note that there is anecdotal evidence in other code that not using the
+  # binary flag with this open (open(file_path, 'rb')) can malfunction.  The
+  # problem has not shown up here, but be aware.
+  md5_hash = hashlib.md5()
+  with open(file_path) as file_fobj:
+    for line in file_fobj:
+      md5_hash.update(line)
+
+  return md5_hash.hexdigest()
+
+
+def ReadBlock(file_obj, size=1024):
+  """Generator function to read and return a specified number of bytes.
+
+  Args:
+    file_obj: The file object to read data from
+    size: The size in bytes to read in at a time.
+
+  Yields:
+    The block of data that was read.
+  """
+  while True:
+    data = file_obj.read(size)
+    if not data:
+      break
+
+    yield data
+
+
+def ShaSums(file_path):
+  """Calculate the SHA1 and SHA256 checksum of a file.
+
+  Args:
+    file_path: The full path to the file.
+
+  Returns:
+    A tuple of base64 encoded sha1 and sha256 hashes.
+  """
+  # pylint: disable=E1101
+  sha1 = hashlib.sha1()
+  sha256 = hashlib.sha256()
+  with open(file_path, mode='r') as file_fobj:
+    for block in ReadBlock(file_fobj):
+      sha1.update(block)
+      sha256.update(block)
+
+  # Encode in base 64 string.  Other bases could be supported here.
+  sha1_hex = base64.b64encode(sha1.digest())
+  sha256_hex = base64.b64encode(sha256.digest())
+
+  return sha1_hex, sha256_hex
+
+
+def TruncateToSize(file_path, size):
+  """Truncates a file down to a given size, if it is bigger.
+
+  Args:
+    file_path: path to the file to truncate
+    size: the size to truncate down to, in bytes
+  """
+  if size < os.path.getsize(file_path):
+    with open(file_path, 'r+') as file_obj:
+      file_obj.truncate(size)
diff --git a/lib/paygen/filelib_unittest b/lib/paygen/filelib_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/lib/paygen/filelib_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/paygen/filelib_unittest.py b/lib/paygen/filelib_unittest.py
new file mode 100644
index 0000000..b0b518d
--- /dev/null
+++ b/lib/paygen/filelib_unittest.py
@@ -0,0 +1,326 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the filelib module."""
+
+from __future__ import print_function
+
+import os
+import shutil
+import subprocess
+
+from chromite.lib import cros_test_lib
+from chromite.lib.paygen import filelib
+from chromite.lib.paygen import utils
+
+
+class TestFileManipulation(cros_test_lib.TestCase):
+  """Test cases for filelib."""
+
+  FILE1 = 'file1a'
+  FILE2 = 'file2'
+  SUBDIR = 'subdir'
+  SUBFILE = '%s/file1b' % SUBDIR
+  FILE_GLOB = 'file1*'
+
+  FILE1_CONTENTS = 'Howdy doody there dandy'
+  FILE2_CONTENTS = 'Once upon a time in a galaxy far far away.'
+  SUBFILE_CONTENTS = 'Five little monkeys jumped on the bed.'
+
+  def _SetUpTempdir(self, tempdir):
+    with open(os.path.join(tempdir, self.FILE1), 'w') as out1:
+      out1.write(self.FILE1_CONTENTS)
+
+    with open(os.path.join(tempdir, self.FILE2), 'w') as out2:
+      out2.write(self.FILE2_CONTENTS)
+
+    subdir = os.path.join(tempdir, self.SUBDIR)
+    filelib.Makedir(subdir)
+
+    with open(os.path.join(tempdir, self.SUBFILE), 'w') as out3:
+      out3.write(self.SUBFILE_CONTENTS)
+
+  def testIntegrationScript(self):
+    dir1 = None
+    dir2 = None
+    try:
+      dir1 = utils.CreateTmpDir('filelib_unittest1-')
+      dir2 = utils.CreateTmpDir('filelib_unittest2-')
+
+      self._SetUpTempdir(dir1)
+
+      dir1_file1 = os.path.join(dir1, self.FILE1)
+      dir1_file2 = os.path.join(dir1, self.FILE2)
+      dir1_subfile = os.path.join(dir1, self.SUBFILE)
+      dir1_top_files = [dir1_file1, dir1_file2]
+      dir1_deep_files = dir1_top_files + [dir1_subfile]
+
+      dir2_file1 = os.path.join(dir2, self.FILE1)
+      dir2_file2 = os.path.join(dir2, self.FILE2)
+      dir2_subdir = os.path.join(dir2, self.SUBDIR)
+      dir2_subfile = os.path.join(dir2, self.SUBFILE)
+      dir2_top_files = [dir2_file1, dir2_file2]
+      dir2_deep_files = dir2_top_files + [dir2_subfile]
+
+      # Test Exists.
+      for dir1_path in dir1_deep_files:
+        self.assertTrue(filelib.Exists(dir1_path))
+      for dir2_path in dir2_deep_files:
+        self.assertFalse(filelib.Exists(dir2_path))
+
+      # Test ListFiles with various options.
+      self.assertEqual(set(dir1_top_files),
+                       set(filelib.ListFiles(dir1)))
+      self.assertEqual(set(dir1_deep_files),
+                       set(filelib.ListFiles(dir1, recurse=True)))
+      self.assertEqual(sorted(dir1_deep_files),
+                       filelib.ListFiles(dir1, recurse=True, sort=True))
+      self.assertEqual(set([dir1_file1, dir1_subfile]),
+                       set(filelib.ListFiles(dir1, recurse=True,
+                                             filepattern=self.FILE_GLOB)))
+      # Test CopyFiles from dir1 to dir2.
+      self.assertEqual(set(dir2_deep_files),
+                       set(filelib.CopyFiles(dir1, dir2)))
+      for dir2_path in dir2_deep_files:
+        self.assertTrue(filelib.Exists(dir2_path))
+
+      # Test Cmp.
+      self.assertTrue(filelib.Cmp(dir1_file1, dir2_file1))
+      self.assertTrue(filelib.Cmp(dir2_file2, dir1_file2))
+      self.assertFalse(filelib.Cmp(dir1_file2, dir2_file1))
+
+      # Test RemoveDirContents.
+      filelib.RemoveDirContents(dir2_subdir)
+      self.assertTrue(filelib.Exists(dir2_subdir, as_dir=True))
+      self.assertFalse(filelib.Exists(dir2_subfile))
+      filelib.RemoveDirContents(dir2)
+      self.assertTrue(filelib.Exists(dir2, as_dir=True))
+      for dir2_path in dir2_deep_files:
+        self.assertFalse(filelib.Exists(dir2_path))
+
+      filelib.RemoveDirContents(dir1)
+      self.assertTrue(filelib.Exists(dir1, as_dir=True))
+      for dir1_path in dir1_deep_files:
+        self.assertFalse(filelib.Exists(dir1_path))
+
+    finally:
+      for d in (dir1, dir2):
+        if d and os.path.isdir(d):
+          shutil.rmtree(d)
+
+
+class TestFileLib(cros_test_lib.MoxTempDirTestCase):
+  """Test filelib module."""
+
+  def _MD5Sum(self, file_path):
+    """Use RunCommand to get the md5sum of a file."""
+    md5_path = '/usr/bin/md5sum'
+    if not os.path.exists(md5_path):
+      self.fail('%s is required to test MD5 logic.' % md5_path)
+    cmd = [md5_path, file_path]
+    return utils.RunCommand(cmd, redirect_stdout=True).split(' ')[0]
+
+  def _SHA1Sum(self, file_path):
+    """Use sha1sum utility to get SHA1 of a file."""
+    # The sha1sum utility gives SHA1 in base 16 encoding.  We need
+    # base 64, so use combination of xxd and base64 utilities.
+    proc1 = subprocess.Popen(['sha1sum', file_path], stdout=subprocess.PIPE)
+    proc2 = subprocess.Popen(['cut', '-f1', '-d', ' '], stdin=proc1.stdout,
+                             stdout=subprocess.PIPE)
+    proc3 = subprocess.Popen(['xxd', '-r', '-p'], stdin=proc2.stdout,
+                             stdout=subprocess.PIPE)
+    proc4 = subprocess.Popen(['base64'], stdin=proc3.stdout,
+                             stdout=subprocess.PIPE)
+    result = proc4.communicate()
+    return result[0][:-1]
+
+  def _SHA256Sum(self, file_path):
+    """Use sha256 utility to get SHA256 of a file."""
+    # The sha256sum utility gives SHA256 in base 16 encoding.  We need
+    # base 64, so use combination of xxd and base64 utilities.
+    proc1 = subprocess.Popen(['sha256sum', file_path], stdout=subprocess.PIPE)
+    proc2 = subprocess.Popen(['cut', '-f1', '-d', ' '], stdin=proc1.stdout,
+                             stdout=subprocess.PIPE)
+    proc3 = subprocess.Popen(['xxd', '-r', '-p'], stdin=proc2.stdout,
+                             stdout=subprocess.PIPE)
+    proc4 = subprocess.Popen(['base64'], stdin=proc3.stdout,
+                             stdout=subprocess.PIPE)
+    result = proc4.communicate()
+    return result[0][:-1]
+
+  def testMD5Sum(self):
+    """Test MD5Sum output with the /usr/bin/md5sum binary."""
+    file_path = os.path.abspath(__file__)
+    self.assertEqual(self._MD5Sum(file_path), filelib.MD5Sum(file_path))
+
+  def testShaSums(self):
+    file_path = os.path.abspath(__file__)
+    expected_sha1 = self._SHA1Sum(file_path)
+    expected_sha256 = self._SHA256Sum(file_path)
+    sha1, sha256 = filelib.ShaSums(file_path)
+    self.assertEqual(expected_sha1, sha1)
+    self.assertEqual(expected_sha256, sha256)
+
+  def testCmp(self):
+    path1 = '/some/local/path'
+    path2 = '/other/local/path'
+
+    self.mox.StubOutWithMock(filelib.os.path, 'exists')
+    self.mox.StubOutWithMock(filelib.filecmp, 'cmp')
+
+    # Set up the test replay script.
+    # Run 1, both exist, are different.
+    filelib.os.path.exists(path1).AndReturn(True)
+    filelib.os.path.exists(path2).AndReturn(True)
+    filelib.filecmp.cmp(path1, path2).AndReturn(True)
+    # Run 2, both exist, are different.
+    filelib.os.path.exists(path1).AndReturn(True)
+    filelib.os.path.exists(path2).AndReturn(True)
+    filelib.filecmp.cmp(path1, path2).AndReturn(False)
+    # Run 3, second file missing.
+    filelib.os.path.exists(path1).AndReturn(True)
+    filelib.os.path.exists(path2).AndReturn(False)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertTrue(filelib.Cmp(path1, path2))
+    self.assertFalse(filelib.Cmp(path1, path2))
+    self.assertFalse(filelib.Cmp(path1, path2))
+    self.mox.VerifyAll()
+
+  def testCopy(self):
+    path1 = '/some/local/path'
+    path2 = '/other/local/path'
+    relative_path = 'relative.bin'
+
+    self.mox.StubOutWithMock(filelib, 'Exists')
+    self.mox.StubOutWithMock(filelib, 'Makedir')
+    self.mox.StubOutWithMock(filelib.shutil, 'copy2')
+
+    # Set up the test replay script.
+    # Run 1, path2 directory exists.
+    filelib.Exists(os.path.dirname(path2), as_dir=True).AndReturn(True)
+    filelib.shutil.copy2(path1, path2)
+    # Run 2, path2 directory does not exist.
+    filelib.Exists(os.path.dirname(path2), as_dir=True).AndReturn(False)
+    filelib.Makedir(os.path.dirname(path2), fill_path=True)
+    filelib.shutil.copy2(path1, path2)
+
+    # Run 3, there is target directory is '.', don't test existence.
+    filelib.shutil.copy2(path1, relative_path)
+    self.mox.ReplayAll()
+
+    # Run the test verifications, three times.
+    filelib.Copy(path1, path2)
+    filelib.Copy(path1, path2)
+    filelib.Copy(path1, relative_path)
+    self.mox.VerifyAll()
+
+  def testSize(self):
+    path = '/some/local/path'
+    size = 100
+
+    self.mox.StubOutWithMock(filelib.os.path, 'isfile')
+    self.mox.StubOutWithMock(filelib.os, 'stat')
+
+    # Set up the test replay script.
+    # Run 1, success.
+    filelib.os.path.isfile(path).AndReturn(True)
+    filelib.os.stat(path).AndReturn(cros_test_lib.EasyAttr(st_size=size))
+    # Run 2, file not found.
+    filelib.os.path.isfile(path).AndReturn(False)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertEqual(size, filelib.Size(path))
+    self.assertRaises(filelib.MissingFileError, filelib.Size, path)
+    self.mox.VerifyAll()
+
+  def testExists(self):
+    path = '/some/local/path'
+    result = 'TheResult'
+
+    self.mox.StubOutWithMock(filelib.os.path, 'isdir')
+    self.mox.StubOutWithMock(filelib.os.path, 'isfile')
+
+    # Set up the test replay script.
+    # Run 1, as file.
+    filelib.os.path.isfile(path).AndReturn(result)
+    # Run 2, as dir.
+    filelib.os.path.isdir(path).AndReturn(result)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertEqual(result, filelib.Exists(path))
+    self.assertEqual(result, filelib.Exists(path, as_dir=True))
+    self.mox.VerifyAll()
+
+  def _CreateSimpleFile(self, *args):
+    contents = 'Not important, can be anything'
+    for path in args:
+      with open(path, 'w') as out:
+        out.write(contents)
+
+  def testRemove(self):
+    # pylint: disable=E1101
+    path1 = os.path.join(self.tempdir, 'file1')
+    path2 = os.path.join(self.tempdir, 'file2')
+    missing_path = os.path.join(self.tempdir, 'missing')
+    subdir = os.path.join(self.tempdir, 'subdir')
+    subpath1 = os.path.join(subdir, 'file3')
+    subpath2 = os.path.join(subdir, 'file4')
+
+    # Test remove on path that does not exist.
+    self.assertRaises(filelib.MissingFileError, filelib.Remove, path1)
+    self.assertFalse(filelib.Remove(path1, ignore_no_match=True))
+
+    # Test remove on simple file.
+    self._CreateSimpleFile(path1)
+    self.assertTrue(filelib.Remove(path1))
+    self.assertRaises(filelib.MissingFileError, filelib.Remove, path1)
+    self.assertFalse(filelib.Remove(path1, ignore_no_match=True))
+
+    # Test remove on more than one file.
+    self._CreateSimpleFile(path1, path2)
+    self.assertTrue(filelib.Remove(path1, path2))
+
+    # Test remove on multiple files, with one missing.
+    self._CreateSimpleFile(path1, path2)
+    self.assertRaises(filelib.MissingFileError, filelib.Remove,
+                      path1, missing_path, path2)
+    # First path1 removed, but path2 not because it was after missing.
+    self.assertFalse(filelib.Exists(path1))
+    self.assertTrue(filelib.Exists(path2))
+
+    # Test remove multiple files, one missing, with ignore_no_match True.
+    self._CreateSimpleFile(path1, path2)
+    self.assertFalse(filelib.Remove(path1, missing_path, path2,
+                                    ignore_no_match=True))
+    self.assertFalse(filelib.Exists(path1))
+    self.assertFalse(filelib.Exists(path2))
+
+    # Test recursive Remove.
+    os.makedirs(subdir)
+    self._CreateSimpleFile(path1, path2, subpath1, subpath2)
+    self.assertTrue(filelib.Remove(path1, path2, subdir, recurse=True))
+    self.assertFalse(filelib.Exists(path1))
+    self.assertFalse(filelib.Exists(subpath1))
+
+    # Test recursive Remove with one missing path.
+    os.makedirs(subdir)
+    self._CreateSimpleFile(path1, path2, subpath1, subpath2)
+    self.assertRaises(filelib.MissingFileError, filelib.Remove,
+                      path1, subdir, missing_path, path2, recurse=True)
+    self.assertFalse(filelib.Exists(path1))
+    self.assertTrue(filelib.Exists(path2))
+    self.assertFalse(filelib.Exists(subpath1))
+
+    # Test recursive Remove with one missing path and ignore_no_match True.
+    os.makedirs(subdir)
+    self._CreateSimpleFile(path1, path2, subpath1, subpath2)
+    self.assertFalse(filelib.Remove(path1, subdir, missing_path, path2,
+                                    recurse=True, ignore_no_match=True))
+    self.assertFalse(filelib.Exists(path1))
+    self.assertFalse(filelib.Exists(path2))
+    self.assertFalse(filelib.Exists(subpath1))
diff --git a/lib/paygen/gslib.py b/lib/paygen/gslib.py
new file mode 100644
index 0000000..ddbeef4
--- /dev/null
+++ b/lib/paygen/gslib.py
@@ -0,0 +1,876 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common Google Storage interface library."""
+
+from __future__ import print_function
+
+import base64
+import datetime
+import errno
+import os
+import re
+
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib.paygen import filelib
+from chromite.lib.paygen import utils
+
+
# URI scheme for Google Storage paths ('gs://...'); used by IsGsURI.
PROTOCOL = 'gs'
# Number of retries RetryGSLib performs after the initial attempt.
RETRY_ATTEMPTS = 2
# Extracts a numeric status code from 'gsutil ls' output.
# NOTE(review): no user visible in this module — confirm before removing.
GS_LS_STATUS_RE = re.compile(r'status=(\d+)')

# Gsutil is filled in by "FindGsUtil" on first invocation.
GSUTIL = None
+
+
def FindGsUtil():
  """Find which gsutil executable to use.

  This may download and cache the command if needed, and will return the
  version pinned by chromite for general use. Will cache the result after
  the first call.

  This function is multi-process safe, but NOT THREAD SAFE. If you need
  to use gsutil functionality in threads, call this function at least
  once before creating the threads. That way the value will be safely
  pre-cached.

  Returns:
    Full path to the gsutil command to use.
  """
  # TODO(dgarrett): This is a hack. Merge chromite and crostools to fix.

  global GSUTIL  # pylint: disable=global-statement
  if GSUTIL is None:
    GSUTIL = gs.GSContext.GetDefaultGSUtilBin()

  return GSUTIL
+
+
class GsutilError(Exception):
  """Base exception for errors where gsutil cannot be used for any reason."""


class GsutilMissingError(GsutilError):
  """Raised when the gsutil utility is missing from PATH."""

  def __init__(self, msg='The gsutil utility must be installed.'):
    # Use super() rather than naming the base class directly, so the call
    # stays correct if the hierarchy changes.
    super(GsutilMissingError, self).__init__(msg)


class GSLibError(Exception):
  """Raised when gsutil command runs but gives an error."""


class CopyFail(GSLibError):
  """Raised if Copy fails in any way."""


class MoveFail(GSLibError):
  """Raised if Move fails in any way."""


class RemoveFail(GSLibError):
  """Raised if Remove fails in any way."""


class AclFail(GSLibError):
  """Raised if SetAcl fails in any way."""


class CatFail(GSLibError):
  """Raised if Cat fails in any way."""


class StatFail(GSLibError):
  """Raised if Stat fails in any way."""


class BucketOperationError(GSLibError):
  """Raised when a delete or create bucket command fails."""


class URIError(GSLibError):
  """Raised when URI does not behave as expected."""


class ValidateGsutilFailure(GSLibError):
  """We are unable to validate that gsutil is working correctly."""
+
+
def RetryGSLib(func):
  """Decorator to retry function calls that throw an exception.

  If the decorated method throws a GSLibError exception, the exception
  will be thrown away and the function will be run again until all retries
  are exhausted. On the final attempt, the exception will be thrown normally.

  Three attempts in total will be made to run the function (one more
  than RETRY_ATTEMPTS).

  @RetryGSLib
  def MyFunctionHere(): pass
  """
  # Error snippets that indicate an interrupted resumable transfer, which is
  # worth retrying after clearing gsutil's tracker file.  Hoisted out of the
  # handler since it is loop-invariant.
  resumable_error_snippets = (
      gs.GSContext.RESUMABLE_DOWNLOAD_ERROR,
      gs.GSContext.RESUMABLE_UPLOAD_ERROR,
      'ResumableUploadException',
      'ResumableDownloadException',
      'ssl.SSLError: The read operation timed out',
  )

  def RetryHandler(*args, **kwargs):
    """Retry func with given args/kwargs RETRY_ATTEMPTS times."""
    warning_msgs = []
    for i in range(RETRY_ATTEMPTS + 1):
      try:
        result = func(*args, **kwargs)
      except GSLibError as ex:
        # On the last try just pass the exception on up.
        if i >= RETRY_ATTEMPTS:
          raise

        error_msg = str(ex)
        if (func.__name__ == 'Copy' and
            any(x in error_msg for x in resumable_error_snippets)):
          logging.info(
              'Resumable download/upload exception occured for %s', args[1])
          # Pass the dest_path to get the tracker filename.
          tracker_filenames = gs.GSContext.GetTrackerFilenames(args[1])
          # This part of the code is copied from chromite.lib.gs with
          # slight modifications. This is a temporary solution until
          # we can deprecate crostools.lib.gslib (crbug.com/322740).
          logging.info('Potential list of tracker files: %s',
                       tracker_filenames)
          for tracker_filename in tracker_filenames:
            tracker_file_path = os.path.join(
                gs.GSContext.DEFAULT_GSUTIL_TRACKER_DIR,
                tracker_filename)
            if os.path.exists(tracker_file_path):
              logging.info('Deleting gsutil tracker file %s before retrying.',
                           tracker_file_path)
              logging.info('The content of the tracker file: %s',
                           osutils.ReadFile(tracker_file_path))
              osutils.SafeUnlink(tracker_file_path)
        elif 'AccessDeniedException' in error_msg or 'NoSuchKey' in error_msg:
          # Permission and not-found errors will not be fixed by retrying.
          raise

        # Record a warning message to be issued if a retry actually helps.
        warning_msgs.append('Try %d failed with error message:\n%s' %
                            (i + 1, ex))
      else:
        # BUG FIX: the original 'try' body returned directly, so this 'else'
        # clause was unreachable and the accumulated warnings were never
        # logged.  Log them here, on the first successful attempt.
        if warning_msgs:
          logging.warning('Failed %s %d times before success:\n%s',
                          func.__name__, len(warning_msgs),
                          '\n'.join(warning_msgs))
        return result

  RetryHandler.__module__ = func.__module__
  RetryHandler.__name__ = func.__name__
  RetryHandler.__doc__ = func.__doc__
  return RetryHandler
+
+
def RunGsutilCommand(args,
                     redirect_stdout=True,
                     redirect_stderr=True,
                     failed_exception=GSLibError,
                     generation=None,
                     headers=None,
                     get_headers_from_stdout=False,
                     **kwargs):
  """Run gsutil with given args through RunCommand with given options.

  Generally this method is intended for use within this module, see the various
  command-specific wrappers provided for convenience.  However, it can be called
  directly if 'gsutil' needs to be called in a specific way.

  A few of the options for RunCommand have their default values switched for
  this function.  Those options are called out explicitly as options here, while
  additional RunCommand options can be used through kwargs.

  Args:
    args: List of arguments to use with 'gsutil'.
    redirect_stdout: Boolean option passed directly to RunCommand.
    redirect_stderr: Boolean option passed directly to RunCommand.
    failed_exception: Exception class to raise if CommandFailedException is
      caught.  It should be GSLibError or a subclass.
    generation: Only run the specified command if the generation matches.
       (See "Conditional Updates Using Object Versioning" in the gsutil docs.)
    headers: Fill in this dictionary with header values captured from stderr.
    get_headers_from_stdout: Whether header information is to be parsed from
      stdout (default: stderr).
    kwargs: Additional options to pass directly to RunCommand, beyond the
      explicit ones above.  See RunCommand itself.

  Returns:
    Anything that RunCommand returns, which should be a CommandResult object.

  Raises:
    GsutilMissingError if the gsutil utility cannot be found.
    GSLibError (or whatever is in failed_exception) if RunCommand failed (and
      error_ok was not True).
  """
  # The -d flag causes gsutil to dump various metadata, including user
  # credentials.  We therefore don't allow users to pass it in directly.
  assert '-d' not in args, 'Cannot pass in the -d flag directly'

  gsutil = FindGsUtil()

  if generation is not None:
    args = ['-h', 'x-goog-if-generation-match:%s' % generation] + args
  if headers is not None:
    # -d output (which includes credentials) goes to stderr; it must be
    # captured so it can be parsed below and stripped from the result.
    args.insert(0, '-d')
    assert redirect_stderr
  cmd = [gsutil] + args
  run_opts = {
      'redirect_stdout': redirect_stdout,
      'redirect_stderr': redirect_stderr,
  }
  run_opts.update(kwargs)

  # Always use RunCommand with return_result on, which will be the default
  # behavior for RunCommand itself someday.
  run_opts['return_result'] = True

  try:
    result = utils.RunCommand(cmd, **run_opts)
  except OSError as e:
    # ENOENT means the gsutil binary itself could not be executed.
    if e.errno == errno.ENOENT:
      raise GsutilMissingError()
    raise
  except utils.CommandFailedException as e:
    # If headers is set, we have to hide the output here because it may contain
    # credentials that we don't want to show in buildbot logs.
    raise failed_exception('%r failed' % cmd if headers else e)

  if headers is not None and result is not None:
    assert redirect_stdout if get_headers_from_stdout else redirect_stderr
    # Parse headers that look like this:
    # header: x-goog-generation: 1359148994758000
    # header: x-goog-metageneration: 1
    headers_source = result.output if get_headers_from_stdout else result.error
    for line in headers_source.splitlines():
      if line.startswith('header: '):
        # First partition drops the 'header: ' prefix; the second separates
        # the header name from its value.
        header, _, value = line.partition(': ')[-1].partition(': ')
        headers[header.replace('x-goog-', '')] = value

    # Strip out stderr entirely to avoid showing credentials in logs; for
    # commands that dump credentials to stdout, clobber that as well.
    result.error = '<stripped>'
    if get_headers_from_stdout:
      result.output = '<stripped>'

  return result
+
+
def ValidateGsutilWorking(bucket):
  """Validate that gsutil is working correctly.

  There is a failure mode for gsutil in which all operations fail, and this
  is indistinguishable from all gsutil ls operations matching nothing. We
  check that there is at least one file in the root of the bucket.

  Args:
    bucket: bucket we are about to test.

  Raises:
    ValidateGsutilFailure: If we are unable to find any files in the bucket.
  """
  bucket_url = 'gs://%s/' % bucket
  if List(bucket_url):
    return
  raise ValidateGsutilFailure('Unable to find anything in: %s' % bucket_url)
+
+
def GetGsutilVersion():
  """Return the version string for the installed gsutil utility.

  Returns:
    The version string.

  Raises:
    GsutilMissingError if gsutil cannot be found.
    GSLibError for any other error.
  """
  args = ['version']

  # As of version 3.26, a quirk of 'gsutil version' is that if gsutil is
  # outdated it will ask if you want to update (Y/n) before proceeding... but
  # do it only the first time (for a particular update?  I'm not exactly sure).
  # Prepare a 'n' answer just in case.
  user_input = 'n\n'

  result = RunGsutilCommand(args, error_ok=False, input=user_input)

  output = '\n'.join(o for o in [result.output, result.error] if o)

  if output:
    match = re.search(r'^\s*gsutil\s+version\s+([\d\.]+)', output,
                      re.IGNORECASE)
    if match:
      return match.group(1)
    else:
      logging.error('Unexpected output format from %r:\n%s',
                    result.cmdstr, output)
      raise GSLibError('Unexpected output format from %r.' % result.cmdstr)

  else:
    logging.error('No stdout output from %r.', result.cmdstr)
    # BUG FIX: the original passed cmdstr as a second positional argument to
    # the exception (logging-style) instead of %-formatting it into the
    # message, so the message was never interpolated.
    raise GSLibError('No stdout output from %r.' % result.cmdstr)
+
+
def UpdateGsutil():
  """Update the gsutil utility to the latest version.

  Returns:
    The updated version, if updated, otherwise None.

  Raises:
    GSLibError if any error occurs.
  """
  version_before = GetGsutilVersion()

  # If an update is available the 'gsutil update' command will ask
  # whether to continue.  Reply with 'y'.
  result = RunGsutilCommand(['update'], error_ok=True, input='y\n')

  if result.returncode == 0:
    version_after = GetGsutilVersion()
    return version_after if version_after != version_before else None

  # Oddly, 'gsutil update' exits with error if no update is needed.
  # Check the output to see if this is the situation, in which case the
  # error is harmless (and expected).  Last line in stderr will be:
  # "You already have the latest gsutil release installed."
  if not result.error:
    raise GSLibError('Failed command: %r' % result.cmdstr)

  if not result.error.splitlines()[-1].startswith('You already have'):
    raise GSLibError(result.error)

  return None
+
+
@RetryGSLib
def MD5Sum(gs_uri):
  """Read the gsutil md5 sum from etag and gsutil ls -L.

  Note that because this relies on 'gsutil ls -L' it suffers from the
  eventual consistency issue, meaning this function could fail to find
  the MD5 value for a recently created file in Google Storage.

  Args:
    gs_uri: An absolute Google Storage URI that refers directly to an object.
      No globs are supported.

  Returns:
    A string that is an md5sum, or None if no object found.

  Raises:
    GSLibError if the gsutil command fails.  If there is no object at that path
    that is not considered a failure.
  """
  md5_re = re.compile(r'.*?Hash \(md5\):\s+(.*)', re.IGNORECASE)

  result = RunGsutilCommand(['ls', '-L', gs_uri], error_ok=True)

  # When no object exists at the URI the output is completely empty.
  if not result.output:
    return None

  for ls_line in result.output.splitlines():
    match = md5_re.match(ls_line)
    if match:
      # gsutil now prints the MD5 sum in base64, but we want it in hex.
      return base64.b16encode(base64.b64decode(match.group(1))).lower()

  # Output was present but held no MD5 line: an actual command failure.
  raise GSLibError('Unable to determine MD5Sum for %r' % gs_uri)
+
+
@RetryGSLib
def Cmp(path1, path2):
  """Return True if paths hold identical files, according to MD5 sum.

  Note that this function relies on MD5Sum, which means it also can only
  promise eventual consistency.  A recently uploaded file in Google Storage
  may behave badly in this comparison function.

  If either file is missing then always return False.

  Args:
    path1: URI to a file.  Local paths also supported.
    path2: URI to a file.  Local paths also supported.

  Returns:
    True if files are the same, False otherwise.
  """
  def _Sum(path):
    # Dispatch to the GS or local MD5 implementation by path flavor.
    return MD5Sum(path) if IsGsURI(path) else filelib.MD5Sum(path)

  first_sum = _Sum(path1)
  if not first_sum:
    return False

  return first_sum == _Sum(path2)
+
+
@RetryGSLib
def Copy(src_path, dest_path, acl=None, **kwargs):
  """Run gsutil cp src_path dest_path supporting GS globs.

  e.g.
  gsutil cp /etc/* gs://etc/ where /etc/* is src_path with a glob and
  gs://etc is dest_path.

  This assumes that the src or dest path already exist.

  Args:
    src_path: The src of the path to copy, either a /unix/path or gs:// uri.
    dest_path: The dest of the path to copy, either a /unix/path or gs:// uri.
    acl: an ACL argument (predefined name or XML file) to pass to gsutil
    kwargs: Additional options to pass directly to RunGsutilCommand, beyond the
      explicit ones above.  See RunGsutilCommand itself.

  Raises:
    CopyFail: If the copy fails for any reason.
  """
  cmd_args = ['cp']
  if acl:
    cmd_args.extend(['-a', acl])
  cmd_args.extend([src_path, dest_path])

  RunGsutilCommand(cmd_args, failed_exception=CopyFail, **kwargs)
+
+
@RetryGSLib
def Move(src_path, dest_path, **kwargs):
  """Run gsutil mv src_path dest_path supporting GS globs.

  Note that the created time is changed to now for the moved object(s).

  Args:
    src_path: The src of the path to move, either a /unix/path or gs:// uri.
    dest_path: The dest of the path to move, either a /unix/path or gs:// uri.
    kwargs: Additional options to pass directly to RunGsutilCommand, beyond the
      explicit ones above.  See RunGsutilCommand itself.

  Raises:
    MoveFail: If the move fails for any reason.
  """
  RunGsutilCommand(['mv', src_path, dest_path], failed_exception=MoveFail,
                   **kwargs)
+
+
@RetryGSLib
def Remove(*paths, **kwargs):  # pylint: disable=docstring-misnamed-args
  """Run gsutil rm on path supporting GS globs.

  Args:
    paths: Local path or gs URI, or list of same.
    ignore_no_match: If True, then do not complain if anything was not
      removed because no URI match was found.  Like rm -f.  Defaults to False.
    recurse: Remove recursively starting at path.  Same as rm -R.  Defaults
      to False.
    kwargs: Additional options to pass directly to RunGsutilCommand, beyond the
      explicit ones above.  See RunGsutilCommand itself.

  Raises:
    RemoveFail: If the remove fails for any reason.
  """
  ignore_no_match = kwargs.pop('ignore_no_match', False)
  recurse = kwargs.pop('recurse', False)

  cmd_args = ['rm', '-R'] if recurse else ['rm']
  cmd_args.extend(paths)

  try:
    RunGsutilCommand(cmd_args, failed_exception=RemoveFail, **kwargs)
  except RemoveFail as e:
    # Tolerate "nothing matched" failures only when the caller asked us to.
    if not ignore_no_match or 'No URLs matched' not in str(e.args[0]):
      raise
+
+
def RemoveDirContents(gs_dir_uri):
  """Remove all contents of a directory.

  Args:
    gs_dir_uri: directory to delete contents of.
  """
  # The '**' glob matches everything under the directory; an already-empty
  # directory is not an error.
  Remove(os.path.join(gs_dir_uri, '**'), ignore_no_match=True)
+
+
def CreateWithContents(gs_uri, contents, **kwargs):
  """Creates the specified file with specified contents.

  Args:
    gs_uri: The URI of a file on Google Storage.
    contents: Contents to write to the file.
    kwargs: Additional options to pass directly to RunGsutilCommand, beyond the
      explicit ones above.  See RunGsutilCommand itself.

  Raises:
    CopyFail: If it fails for any reason.
  """
  # Stage the contents in a temp file, then upload it.
  with utils.CreateTempFileWithContents(contents) as staged:
    Copy(staged.name, gs_uri, **kwargs)
+
+
def Cat(gs_uri, **kwargs):
  """Return the contents of a file at the given GS URI.

  Args:
    gs_uri: The URI of a file on Google Storage.
    kwargs: Additional options to pass directly to RunGsutilCommand, beyond the
      explicit ones above.  See RunGsutilCommand itself.

  Returns:
    The file contents, as captured from gsutil's stdout.

  Raises:
    CatFail: If the cat fails for any reason.
  """
  return RunGsutilCommand(['cat', gs_uri], failed_exception=CatFail,
                          **kwargs).output
+
+
def Stat(gs_uri, **kwargs):
  """Stats a file at the given GS URI (returns nothing).

  Args:
    gs_uri: The URI of a file on Google Storage.
    kwargs: Additional options to pass directly to RunGsutilCommand, beyond the
      explicit ones above.  See RunGsutilCommand itself.

  Raises:
    StatFail: If the stat fails for any reason.
  """
  # IMPORTANT! With stat, header information is dumped to standard output,
  # rather than standard error, as with other gsutil commands. Hence,
  # get_headers_from_stdout must be True to ensure both correct parsing of
  # output and stripping of sensitive information.
  RunGsutilCommand(['stat', gs_uri], failed_exception=StatFail,
                   get_headers_from_stdout=True, **kwargs)
+
+
def IsGsURI(path):
  """Returns true if the path begins with gs://

  Args:
    path: An absolute Google Storage URI (may also be None or empty).

  Returns:
    True if path is really a google storage uri that begins with gs://
    False otherwise.
  """
  # BUG FIX: wrap in bool() so a falsy |path| ('' or None) yields False as
  # documented, instead of leaking '' or None through to the caller.
  return bool(path and path.startswith(PROTOCOL + '://'))
+
+
def SplitGSUri(gs_uri):
  """Returns tuple (bucket, uri_remainder) from GS URI.

  Examples: 1) 'gs://foo/hi/there' returns ('foo', 'hi/there')
            2) 'gs://foo/hi/there/' returns ('foo', 'hi/there/')
            3) 'gs://foo' returns ('foo', '')
            4) 'gs://foo/' returns ('foo', '')

  Args:
    gs_uri: A Google Storage URI.

  Returns:
    A tuple (bucket, uri_remainder)

  Raises:
    URIError if URI is not in recognized format
  """
  parsed = re.search(r'^gs://([^/]+)/?(.*)$', gs_uri)
  if not parsed:
    raise URIError('Bad GS URI: %r' % gs_uri)

  return (parsed.group(1), parsed.group(2))
+
+
# TODO(mtennant): Rename this "Size" for consistency.
@RetryGSLib
def FileSize(gs_uri, **kwargs):
  """Return the size of the given gsutil file in bytes.

  Args:
    gs_uri: Google Storage URI (beginning with 'gs://') pointing
      directly to a single file.
    kwargs: Additional options to pass directly to RunGsutilCommand, beyond the
      explicit ones above.  See RunGsutilCommand itself.

  Returns:
    Size of file in bytes.

  Raises:
    URIError: Raised when URI is unknown to Google Storage or when
      URI matches more than one file.
  """
  headers = {}
  try:
    Stat(gs_uri, headers=headers, **kwargs)
  except StatFail as e:
    raise URIError('Unable to stat file at URI %r: %s' % (gs_uri, e))

  try:
    size_str = headers['stored-content-length']
  except KeyError:
    raise URIError('Failed to get size of %r' % gs_uri)

  return int(size_str)
+
+
def FileTimestamp(gs_uri, **kwargs):
  """Return the timestamp of the given gsutil file.

  Args:
    gs_uri: Google Storage URI (beginning with 'gs://') pointing
      directly to a single file.
    kwargs: Additional options to pass directly to RunGsutilCommand, beyond the
      explicit ones above.  See RunGsutilCommand itself.

  Returns:
    datetime of the files creation, or None

  Raises:
    URIError: Raised when URI is unknown to Google Storage or when
      URI matches more than one file.
  """
  args = ['ls', '-l', gs_uri]
  try:
    result = RunGsutilCommand(args, **kwargs)
  except GSLibError:
    raise URIError('Unable to locate file at URI %r' % gs_uri)

  ls_lines = result.output.splitlines()

  # We expect one line per file and a summary line.
  # BUG FIX: this check must live outside the try block above.  URIError
  # subclasses GSLibError, so raising it inside the try was caught by the
  # except clause and replaced with the wrong "Unable to locate" message.
  if len(ls_lines) != 2:
    raise URIError('More than one file matched URI %r' % gs_uri)

  # Each listing line has the format:
  # <filesize> <date> <filepath>
  return datetime.datetime.strptime(ls_lines[0].split()[1],
                                    '%Y-%m-%dT%H:%M:%S')
+
+
def ExistsLazy(gs_uri, **kwargs):
  """Return True if object exists at given GS URI.

  Warning: This can return false negatives, because 'gsutil ls' relies on
  a cache that is only eventually consistent.  But it is faster to run, and
  it does accept URIs with glob expressions, where Exists does not.

  Args:
    gs_uri: Google Storage URI
    kwargs: Additional options to pass directly to RunGsutilCommand, beyond the
      explicit ones above.  See RunGsutilCommand itself.

  Returns:
    True if object exists and False otherwise.

  Raises:
    URIError if there is a problem with the URI other than the URI
      not being found.
  """
  try:
    RunGsutilCommand(['ls', gs_uri], **kwargs)
  except GSLibError as e:
    # A simple not-found failure reports something like:
    # CommandException: One or more URLs matched no objects.
    # Anything else indicates a real problem with the URI.
    if str(e).strip().startswith('CommandException: '):
      return False
    raise URIError(e)

  return True
+
+
def Exists(gs_uri, **kwargs):
  """Return True if object exists at given GS URI.

  Args:
    gs_uri: Google Storage URI.  Must be a fully-specified URI with
      no glob expression.  Even if a glob expression matches this
      method will return False.
    kwargs: Additional options to pass directly to RunGsutilCommand, beyond the
      explicit ones above.  See RunGsutilCommand itself.

  Returns:
    True if gs_uri points to an existing object, and False otherwise.
  """
  try:
    # Stat raises StatFail when the object is absent; its return is unused.
    Stat(gs_uri, **kwargs)
    return True
  except StatFail:
    return False
+
+
@RetryGSLib
def List(root_uri, recurse=False, filepattern=None, sort=False):
  """Return list of file and directory paths under given root URI.

  Args:
    root_uri: e.g. gs://foo/bar
    recurse: Look in subdirectories, as well
    filepattern: glob pattern to match against basename of path
    sort: If True then do a default sort on paths

  Returns:
    List of GS URIs to paths that matched
  """
  gs_uri = root_uri
  if recurse:
    # In gs file patterns '**' absorbs any number of directory names,
    # including none.
    gs_uri = gs_uri.rstrip('/') + '/**'

  # Now match the filename itself at the end of the URI.
  if filepattern:
    gs_uri = gs_uri.rstrip('/') + '/' + filepattern

  try:
    result = RunGsutilCommand(['ls', gs_uri])
  except GSLibError as e:
    # The ls command fails under normal operation when there was simply
    # nothing to find, reporting on stderr:
    # CommandException: One or more URLs matched no objects.
    # Treat that as an empty result rather than an error.
    # TODO(mtennant): It would be more functionally correct to do this
    # if and only if the error is identified as a "file not found" error.
    # We simply have to determine how to do that reliably.
    if 'CommandException: One or more URLs matched no objects.' in str(e):
      return []
    raise

  matched = [p for p in result.output.splitlines() if p]
  return sorted(matched) if sort else matched
+
+
+def ListFiles(root_uri, recurse=False, filepattern=None, sort=False):
+  """Return list of file paths under given root URI.
+
+  Directories are intentionally excluded.
+
+  Args:
+    root_uri: e.g. gs://foo/bar
+    recurse: Look for files in subdirectories, as well
+    filepattern: glob pattern to match against basename of file
+    sort: If True then do a default sort on paths
+
+  Returns:
+    List of GS URIs to files that matched
+  """
+  paths = List(root_uri, recurse=recurse, filepattern=filepattern, sort=sort)
+
+  # Directory paths should be excluded from output, per ListFiles guarantee.
+  return [path for path in paths if not path.endswith('/')]
+
+
+def ListDirs(root_uri, recurse=False, filepattern=None, sort=False):
+  """Return list of dir paths under given root URI.
+
+  File paths are intentionally excluded.  The root_uri itself is excluded.
+
+  Args:
+    root_uri: e.g. gs://foo/bar
+    recurse: Look for directories in subdirectories, as well
+    filepattern: glob pattern to match against basename of directory
+    sort: If True then do a default sort on paths
+
+  Returns:
+    List of GS URIs to directories that matched
+  """
+  paths = List(root_uri, recurse=recurse, filepattern=filepattern, sort=sort)
+
+  # Only include directory paths in output, per ListDirs guarantee.
+  return [path for path in paths if path.endswith('/')]
+
+
+@RetryGSLib
+def SetACL(gs_uri, acl_file, **kwargs):
+  """Set the ACLs of a file in Google Storage.
+
+  Args:
+    gs_uri: The GS URI to set the ACL on.
+    acl_file: A Google Storage xml ACL file.
+    kwargs: Additional options to pass directly to RunGsutilCommand, beyond the
+      explicit ones above.  See RunGsutilCommand itself.
+
+  Returns:
+    True if the ACL was successfully set
+
+  Raises:
+    AclFail: If SetACL fails for any reason.
+  """
+  args = ['setacl', acl_file, gs_uri]
+  RunGsutilCommand(args, failed_exception=AclFail, **kwargs)
+
+
+@RetryGSLib
+def CreateBucket(bucket, **kwargs):
+  """Create a Google Storage bucket using the user's default credentials.
+
+  Args:
+    bucket: The name of the bucket to create.
+    kwargs: Additional options to pass directly to RunGsutilCommand, beyond the
+      explicit ones above.  See RunGsutilCommand itself.
+
+  Returns:
+    The GS URI of the bucket created.
+
+  Raises:
+    BucketOperationError if the bucket is not created properly.
+  """
+  gs_uri = 'gs://%s' % bucket
+  args = ['mb', gs_uri]
+  try:
+    RunGsutilCommand(args, **kwargs)
+  except GSLibError as e:
+    raise BucketOperationError('Error creating bucket %s.\n%s' % (bucket, e))
+
+  return gs_uri
+
+
+@RetryGSLib
+def DeleteBucket(bucket):
+  """Delete a Google Storage bucket using the user's default credentials.
+
+  Warning: All contents will be deleted.
+
+  Args:
+    bucket: The name of the bucket to delete.
+
+  Raises:
+    BucketOperationError if the bucket is not deleted properly.
+  """
+  bucket = bucket.strip('/')
+  gs_uri = 'gs://%s' % bucket
+  try:
+    RunGsutilCommand(['rm', '%s/*' % gs_uri], error_ok=True)
+    RunGsutilCommand(['rb', gs_uri])
+
+  except GSLibError as e:
+    raise BucketOperationError('Error deleting bucket %s.\n%s' % (bucket, e))
diff --git a/lib/paygen/gslib_unittest b/lib/paygen/gslib_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/lib/paygen/gslib_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/paygen/gslib_unittest.py b/lib/paygen/gslib_unittest.py
new file mode 100644
index 0000000..6a39c28
--- /dev/null
+++ b/lib/paygen/gslib_unittest.py
@@ -0,0 +1,804 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the gslib module."""
+
+from __future__ import print_function
+
+import base64
+import datetime
+import errno
+import mox
+import os
+
+from chromite.lib import cros_test_lib
+
+from chromite.lib.paygen import gslib
+from chromite.lib.paygen import utils
+
+
+# Typical output for a GS failure that is not our fault, and we should retry.
+GS_RETRY_FAILURE = ('GSResponseError: status=403, code=InvalidAccessKeyId,'
+                    'reason="Forbidden", message="Blah Blah Blah"')
+# Typical output for a failure that we should not retry.
+GS_DONE_FAILURE = ('AccessDeniedException:')
+
+
+class TestGsLib(cros_test_lib.MoxTestCase):
+  """Test gslib module."""
+
+  def setUp(self):
+    self.bucket_name = 'somebucket'
+    self.bucket_uri = 'gs://%s' % self.bucket_name
+
+    # Because of autodetection, we no longer know which gsutil binary
+    # will be used.
+    self.gsutil = mox.IsA(str)
+
+  def testRetryGSLib(self):
+    """Test our retry decorator"""
+    @gslib.RetryGSLib
+    def Success():
+      pass
+
+    @gslib.RetryGSLib
+    def SuccessArguments(arg1, arg2=False, arg3=False):
+      self.assertEqual(arg1, 1)
+      self.assertEqual(arg2, 2)
+      self.assertEqual(arg3, 3)
+
+    class RetryTestException(gslib.GSLibError):
+      """Testing gslib.GSLibError exception for Retrying cases."""
+
+      def __init__(self):
+        super(RetryTestException, self).__init__(GS_RETRY_FAILURE)
+
+    class DoneTestException(gslib.GSLibError):
+      """Testing gslib.GSLibError exception for Done cases."""
+
+      def __init__(self):
+        super(DoneTestException, self).__init__(GS_DONE_FAILURE)
+
+    @gslib.RetryGSLib
+    def Fail():
+      raise RetryTestException()
+
+    @gslib.RetryGSLib
+    def FailCount(counter, exception):
+      """Pass in [count] times to fail before passing.
+
+      Using [] means the same object is used each retry, but its contents
+      are mutable.
+      """
+      counter[0] -= 1
+      if counter[0] >= 0:
+        raise exception()
+
+      if exception == RetryTestException:
+        # Make sure retries ran down to -1.
+        self.assertEquals(-1, counter[0])
+
+    Success()
+    SuccessArguments(1, 2, 3)
+    SuccessArguments(1, arg3=3, arg2=2)
+
+    FailCount([1], RetryTestException)
+    FailCount([2], RetryTestException)
+
+    self.assertRaises(RetryTestException, Fail)
+    self.assertRaises(DoneTestException, FailCount, [1], DoneTestException)
+    self.assertRaises(gslib.CopyFail, FailCount, [3], gslib.CopyFail)
+    self.assertRaises(gslib.CopyFail, FailCount, [4], gslib.CopyFail)
+
+  def testIsGsURI(self):
+    self.assertTrue(gslib.IsGsURI('gs://bucket/foo/bar'))
+    self.assertTrue(gslib.IsGsURI('gs://bucket'))
+    self.assertTrue(gslib.IsGsURI('gs://'))
+
+    self.assertFalse(gslib.IsGsURI('file://foo/bar'))
+    self.assertFalse(gslib.IsGsURI('/foo/bar'))
+
+  def testSplitGSUri(self):
+    self.assertEqual(('foo', 'hi/there'),
+                     gslib.SplitGSUri('gs://foo/hi/there'))
+    self.assertEqual(('foo', 'hi/there/'),
+                     gslib.SplitGSUri('gs://foo/hi/there/'))
+    self.assertEqual(('foo', ''),
+                     gslib.SplitGSUri('gs://foo'))
+    self.assertEqual(('foo', ''),
+                     gslib.SplitGSUri('gs://foo/'))
+    self.assertRaises(gslib.URIError, gslib.SplitGSUri,
+                      'file://foo/hi/there')
+    self.assertRaises(gslib.URIError, gslib.SplitGSUri,
+                      '/foo/hi/there')
+
+  def testRunGsutilCommand(self):
+    args = ['TheCommand', 'Arg1', 'Arg2']
+    cmd = [self.gsutil] + args
+
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+
+    # Set up the test replay script.
+    # Run 1.
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True).AndReturn(1)
+    # Run 2.
+    utils.RunCommand(cmd, redirect_stdout=False, redirect_stderr=True,
+                     return_result=True).AndReturn(2)
+    # Run 3.
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True, error_ok=True).AndReturn(3)
+    # Run 4.
+    utils.RunCommand(
+        cmd, redirect_stdout=True, redirect_stderr=True,
+        return_result=True).AndRaise(utils.CommandFailedException())
+    # Run 5.
+    utils.RunCommand(
+        cmd, redirect_stdout=True, redirect_stderr=True,
+        return_result=True).AndRaise(OSError(errno.ENOENT, 'errmsg'))
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertEqual(1, gslib.RunGsutilCommand(args))
+    self.assertEqual(2, gslib.RunGsutilCommand(args, redirect_stdout=False))
+    self.assertEqual(3, gslib.RunGsutilCommand(args, error_ok=True))
+    self.assertRaises(gslib.GSLibError, gslib.RunGsutilCommand, args)
+    self.assertRaises(gslib.GsutilMissingError, gslib.RunGsutilCommand, args)
+    self.mox.VerifyAll()
+
+  def testCopy(self):
+    src_path = '/path/to/some/file'
+    dest_path = 'gs://bucket/some/gs/path'
+
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+
+    # Set up the test replay script.
+    # Run 1, success.
+    cmd = [self.gsutil, 'cp', src_path, dest_path]
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True)
+    # Run 2, failure.
+    for _ix in xrange(gslib.RETRY_ATTEMPTS + 1):
+      cmd = [self.gsutil, 'cp', src_path, dest_path]
+      utils.RunCommand(
+          cmd, redirect_stdout=True, redirect_stderr=True, return_result=True
+      ).AndRaise(utils.CommandFailedException(GS_RETRY_FAILURE))
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    gslib.Copy(src_path, dest_path)
+    self.assertRaises(gslib.CopyFail, gslib.Copy, src_path, dest_path)
+    self.mox.VerifyAll()
+
+  def testMove(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    src_path = 'gs://bucket/some/gs/path'
+    dest_path = '/some/other/path'
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, 'mv', src_path, dest_path]
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    gslib.Move(src_path, dest_path)
+    self.mox.VerifyAll()
+
+  def testRemove(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    path1 = 'gs://bucket/some/gs/path'
+    path2 = 'gs://bucket/some/other/path'
+
+    # Set up the test replay script.
+    # Run 1, one path.
+    utils.RunCommand([self.gsutil, 'rm', path1],
+                     redirect_stdout=True, redirect_stderr=True,
+                     return_result=True)
+    # Run 2, two paths.
+    utils.RunCommand([self.gsutil, 'rm', path1, path2],
+                     redirect_stdout=True, redirect_stderr=True,
+                     return_result=True)
+    # Run 3, one path, recursive.
+    utils.RunCommand([self.gsutil, 'rm', '-R', path1],
+                     redirect_stdout=True, redirect_stderr=True,
+                     return_result=True)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    gslib.Remove(path1)
+    gslib.Remove(path1, path2)
+    gslib.Remove(path1, recurse=True)
+    self.mox.VerifyAll()
+
+  def testRemoveNoMatch(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    path = 'gs://bucket/some/gs/path'
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, 'rm', path]
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    gslib.Remove(path, ignore_no_match=True)
+    self.mox.VerifyAll()
+
+  def testRemoveFail(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    path = 'gs://bucket/some/gs/path'
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, 'rm', path]
+    for _ix in xrange(gslib.RETRY_ATTEMPTS + 1):
+      utils.RunCommand(
+          cmd, redirect_stdout=True, redirect_stderr=True, return_result=True,
+      ).AndRaise(utils.CommandFailedException(GS_RETRY_FAILURE))
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertRaises(gslib.RemoveFail,
+                      gslib.Remove, path)
+    self.mox.VerifyAll()
+
+  def testCreateWithContents(self):
+    gs_path = 'gs://chromeos-releases-test/create-with-contents-test'
+    contents = 'Stuff with Rocks In'
+
+    self.mox.StubOutWithMock(gslib, 'Copy')
+
+    gslib.Copy(mox.IsA(str), gs_path)
+    self.mox.ReplayAll()
+
+    gslib.CreateWithContents(gs_path, contents)
+    self.mox.VerifyAll()
+
+  def testCat(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    path = 'gs://bucket/some/gs/path'
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, 'cat', path]
+    result = cros_test_lib.EasyAttr(error='', output='TheContent')
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True).AndReturn(result)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    result = gslib.Cat(path)
+    self.assertEquals('TheContent', result)
+    self.mox.VerifyAll()
+
+  def testCatFail(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    path = 'gs://bucket/some/gs/path'
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, 'cat', path]
+    utils.RunCommand(
+        cmd, redirect_stdout=True, redirect_stderr=True,
+        return_result=True).AndRaise(utils.CommandFailedException())
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertRaises(gslib.CatFail, gslib.Cat, path)
+    self.mox.VerifyAll()
+
+  def testStat(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    path = 'gs://bucket/some/gs/path'
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, 'stat', path]
+    result = cros_test_lib.EasyAttr(error='', output='')
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True).AndReturn(result)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertIs(gslib.Stat(path), None)
+    self.mox.VerifyAll()
+
+  def testStatFail(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    path = 'gs://bucket/some/gs/path'
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, 'stat', path]
+    utils.RunCommand(
+        cmd, redirect_stdout=True, redirect_stderr=True,
+        return_result=True).AndRaise(utils.CommandFailedException())
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertRaises(gslib.StatFail, gslib.Stat, path)
+    self.mox.VerifyAll()
+
+  def testCreateBucket(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, 'mb', self.bucket_uri]
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    gslib.CreateBucket(self.bucket_name)
+    self.mox.VerifyAll()
+
+  def testCreateBucketFail(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, 'mb', self.bucket_uri]
+    for _ix in xrange(gslib.RETRY_ATTEMPTS + 1):
+      utils.RunCommand(
+          cmd, redirect_stdout=True, redirect_stderr=True, return_result=True
+      ).AndRaise(utils.CommandFailedException(GS_RETRY_FAILURE))
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertRaises(gslib.BucketOperationError,
+                      gslib.CreateBucket, self.bucket_name)
+    self.mox.VerifyAll()
+
+  def testDeleteBucket(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, 'rm', '%s/*' % self.bucket_uri]
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     error_ok=True, return_result=True)
+    cmd = [self.gsutil, 'rb', self.bucket_uri]
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    gslib.DeleteBucket(self.bucket_name)
+    self.mox.VerifyAll()
+
+  def testDeleteBucketFail(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, 'rm', '%s/*' % self.bucket_uri]
+    utils.RunCommand(
+        cmd, redirect_stdout=True, redirect_stderr=True, error_ok=True,
+        return_result=True).AndRaise(
+            utils.CommandFailedException(GS_DONE_FAILURE))
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertRaises(gslib.BucketOperationError,
+                      gslib.DeleteBucket, self.bucket_name)
+    self.mox.VerifyAll()
+
+  def testFileSize(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    gs_uri = '%s/%s' % (self.bucket_uri, 'some/file/path')
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, '-d', 'stat', gs_uri]
+    size = 96
+    output = '\n'.join(['header: x-goog-generation: 1386322968237000',
+                        'header: x-goog-metageneration: 1',
+                        'header: x-goog-stored-content-encoding: identity',
+                        'header: x-goog-stored-content-length: %d' % size,
+                        'header: Content-Type: application/octet-stream'])
+
+    utils.RunCommand(
+        cmd, redirect_stdout=True, redirect_stderr=True,
+        return_result=True).AndReturn(
+            cros_test_lib.EasyAttr(output=output))
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    result = gslib.FileSize(gs_uri)
+    self.assertEqual(size, result)
+    self.mox.VerifyAll()
+
+  def testFileTimestamp(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    gs_uri = '%s/%s' % (self.bucket_uri, 'some/file/path')
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, 'ls', '-l', gs_uri]
+    output = '\n'.join([
+        '        96  2012-05-17T14:00:33  gs://bucket/chromeos.bin.md5',
+        'TOTAL: 1 objects, 96 bytes (96.0 B)',
+    ])
+    cmd_result = cros_test_lib.EasyAttr(output=output)
+
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True).AndReturn(cmd_result)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    result = gslib.FileTimestamp(gs_uri)
+    self.assertEqual(datetime.datetime(2012, 5, 17, 14, 0, 33),
+                     result)
+    self.mox.VerifyAll()
+
+  def _TestCatWithHeaders(self, gs_uri, cmd_output, cmd_error):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+
+    # Set up the test replay script.
+    # Run 1, versioning not enabled in bucket, one line of output.
+    cmd = ['gsutil', '-d', 'cat', gs_uri]
+    cmd_result = cros_test_lib.EasyAttr(output=cmd_output,
+                                        error=cmd_error,
+                                        cmdstr=' '.join(cmd))
+    cmd[0] = mox.IsA(str)
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True).AndReturn(cmd_result)
+    self.mox.ReplayAll()
+
+  def testCatWithHeaders(self):
+    gs_uri = '%s/%s' % (self.bucket_uri, 'some/file/path')
+    generation = 123454321
+    metageneration = 2
+    error = '\n'.join([
+        'header: x-goog-generation: %d' % generation,
+        'header: x-goog-metageneration: %d' % metageneration,
+    ])
+    expected_output = 'foo'
+    self._TestCatWithHeaders(gs_uri, expected_output, error)
+
+    # Run the test verification.
+    headers = {}
+    result = gslib.Cat(gs_uri, headers=headers)
+    self.assertEqual(generation, int(headers['generation']))
+    self.assertEqual(metageneration, int(headers['metageneration']))
+    self.assertEqual(result, expected_output)
+    self.mox.VerifyAll()
+
+  def testFileSizeNoSuchFile(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    gs_uri = '%s/%s' % (self.bucket_uri, 'some/file/path')
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, '-d', 'stat', gs_uri]
+    for _ in xrange(0, gslib.RETRY_ATTEMPTS + 1):
+      utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                       return_result=True).AndRaise(
+                           utils.CommandFailedException)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertRaises(gslib.URIError, gslib.FileSize, gs_uri)
+    self.mox.VerifyAll()
+
+  def testListFiles(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    files = [
+        '%s/some/path' % self.bucket_uri,
+        '%s/some/file/path' % self.bucket_uri,
+    ]
+    directories = [
+        '%s/some/dir/' % self.bucket_uri,
+        '%s/some/dir/path/' % self.bucket_uri,
+    ]
+
+    gs_uri = '%s/**' % self.bucket_uri
+    cmd = [self.gsutil, 'ls', gs_uri]
+
+    # Prepare cmd_result for a good run.
+    # Fake a trailing empty line.
+    output = '\n'.join(files + directories + [''])
+    cmd_result_ok = cros_test_lib.EasyAttr(output=output, returncode=0)
+
+    # Prepare exception for a run that finds nothing.
+    stderr = 'CommandException: One or more URLs matched no objects.\n'
+    empty_exception = utils.CommandFailedException(stderr)
+
+    # Prepare exception for a run that triggers a GS failure.
+    failure_exception = utils.CommandFailedException(GS_RETRY_FAILURE)
+
+    # Set up the test replay script.
+    # Run 1, runs ok.
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True).AndReturn(cmd_result_ok)
+    # Run 2, runs ok, sorts files.
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True).AndReturn(cmd_result_ok)
+    # Run 3, finds nothing.
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True).AndRaise(empty_exception)
+    # Run 4, failure in GS.
+    for _ix in xrange(gslib.RETRY_ATTEMPTS + 1):
+      utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                       return_result=True).AndRaise(failure_exception)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    result = gslib.ListFiles(self.bucket_uri, recurse=True)
+    self.assertEqual(files, result)
+    result = gslib.ListFiles(self.bucket_uri, recurse=True, sort=True)
+    self.assertEqual(sorted(files), result)
+    result = gslib.ListFiles(self.bucket_uri, recurse=True)
+    self.assertEqual([], result)
+    self.assertRaises(gslib.GSLibError, gslib.ListFiles,
+                      self.bucket_uri, recurse=True)
+    self.mox.VerifyAll()
+
+  def testListDirs(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    files = [
+        '%s/some/path' % self.bucket_uri,
+        '%s/some/file/path' % self.bucket_uri,
+    ]
+    directories = [
+        '%s/some/dir/' % self.bucket_uri,
+        '%s/some/dir/path/' % self.bucket_uri,
+    ]
+
+    gs_uri = '%s/**' % self.bucket_uri
+    cmd = [self.gsutil, 'ls', gs_uri]
+
+    # Prepare cmd_result for a good run.
+    # Fake a trailing empty line.
+    output = '\n'.join(files + directories + [''])
+    cmd_result = cros_test_lib.EasyAttr(output=output, returncode=0)
+
+    # Set up the test replay script.
+    # Run 1.
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True).AndReturn(cmd_result)
+    # Run 2.
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True).AndReturn(cmd_result)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    result = gslib.ListDirs(self.bucket_uri, recurse=True)
+    self.assertEqual(directories, result)
+    result = gslib.ListDirs(self.bucket_uri, recurse=True, sort=True)
+    self.assertEqual(sorted(directories), result)
+    self.mox.VerifyAll()
+
+  def testCmp(self):
+    uri1 = 'gs://some/gs/path'
+    uri2 = 'gs://some/other/path'
+    local_path = '/some/local/path'
+    md5 = 'TheMD5Sum'
+
+    self.mox.StubOutWithMock(gslib, 'MD5Sum')
+    self.mox.StubOutWithMock(gslib.filelib, 'MD5Sum')
+
+    # Set up the test replay script.
+    # Run 1, same md5, both GS.
+    gslib.MD5Sum(uri1).AndReturn(md5)
+    gslib.MD5Sum(uri2).AndReturn(md5)
+    # Run 2, different md5, both GS.
+    gslib.MD5Sum(uri1).AndReturn(md5)
+    gslib.MD5Sum(uri2).AndReturn('Other' + md5)
+    # Run 3, same md5, one GS on local.
+    gslib.MD5Sum(uri1).AndReturn(md5)
+    gslib.filelib.MD5Sum(local_path).AndReturn(md5)
+    # Run 4, different md5, one GS on local.
+    gslib.MD5Sum(uri1).AndReturn(md5)
+    gslib.filelib.MD5Sum(local_path).AndReturn('Other' + md5)
+    # Run 5, missing file, both GS.
+    gslib.MD5Sum(uri1).AndReturn(None)
+    # Run 6, args are None.
+    gslib.filelib.MD5Sum(None).AndReturn(None)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertTrue(gslib.Cmp(uri1, uri2))
+    self.assertFalse(gslib.Cmp(uri1, uri2))
+    self.assertTrue(gslib.Cmp(uri1, local_path))
+    self.assertFalse(gslib.Cmp(uri1, local_path))
+    self.assertFalse(gslib.Cmp(uri1, uri2))
+    self.assertFalse(gslib.Cmp(None, None))
+    self.mox.VerifyAll()
+
+  def testMD5SumAccessError(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    gs_uri = 'gs://bucket/foo/bar/somefile'
+    crc32c = 'c96fd51e'
+    crc32c_64 = base64.b64encode(base64.b16decode(crc32c, casefold=True))
+    md5_sum = 'b026324c6904b2a9cb4b88d6d61c81d1'
+    md5_sum_64 = base64.b64encode(base64.b16decode(md5_sum, casefold=True))
+    output = '\n'.join([
+        '%s:' % gs_uri,
+        '        Creation time:          Tue, 04 Mar 2014 19:55:26 GMT',
+        '        Content-Language:       en',
+        '        Content-Length:         2',
+        '        Content-Type:           application/octet-stream',
+        '        Hash (crc32c):          %s' % crc32c_64,
+        '        Hash (md5):             %s' % md5_sum_64,
+        '        ETag:                   CMi938jU+bwCEAE=',
+        '        Generation:             1393962926989000',
+        '        Metageneration:         1',
+        '        ACL:                    ACCESS DENIED. Note: you need OWNER '
+        'permission',
+        '                                on the object to read its ACL.',
+    ])
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, 'ls', '-L', gs_uri]
+    utils.RunCommand(
+        cmd, redirect_stdout=True, redirect_stderr=True, error_ok=True,
+        return_result=True).AndReturn(
+            cros_test_lib.EasyAttr(output=output))
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    result = gslib.MD5Sum(gs_uri)
+    self.assertEqual(md5_sum, result)
+    self.mox.VerifyAll()
+
+  def testMD5SumAccessOK(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    gs_uri = 'gs://bucket/foo/bar/somefile'
+    crc32c = 'c96fd51e'
+    crc32c_64 = base64.b64encode(base64.b16decode(crc32c, casefold=True))
+    md5_sum = 'b026324c6904b2a9cb4b88d6d61c81d1'
+    md5_sum_64 = base64.b64encode(base64.b16decode(md5_sum, casefold=True))
+    output = '\n'.join([
+        '%s:' % gs_uri,
+        '        Creation time:          Tue, 04 Mar 2014 19:55:26 GMT',
+        '        Content-Language:       en',
+        '        Content-Length:         2',
+        '        Content-Type:           application/octet-stream',
+        '        Hash (crc32c):          %s' % crc32c_64,
+        '        Hash (md5):             %s' % md5_sum_64,
+        '        ETag:                   CMi938jU+bwCEAE=',
+        '        Generation:             1393962926989000',
+        '        Metageneration:         1',
+        '        ACL:            [',
+        '  {',
+        '    "entity": "project-owners-134157665460",',
+        '    "projectTeam": {',
+        '      "projectNumber": "134157665460",',
+        '      "team": "owners"',
+        '    },',
+        '    "role": "OWNER"',
+        '  }',
+        ']',
+    ])
+    # Set up the test replay script.
+    cmd = [self.gsutil, 'ls', '-L', gs_uri]
+    utils.RunCommand(
+        cmd, redirect_stdout=True, redirect_stderr=True, error_ok=True,
+        return_result=True).AndReturn(
+            cros_test_lib.EasyAttr(output=output))
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    result = gslib.MD5Sum(gs_uri)
+    self.assertEqual(md5_sum, result)
+    self.mox.VerifyAll()
+
+  def testSetACL(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    gs_uri = 'gs://bucket/foo/bar/somefile'
+    acl_file = '/some/gs/acl/file'
+
+    # Set up the test replay script.
+    cmd = [self.gsutil, 'setacl', acl_file, gs_uri]
+    utils.RunCommand(cmd, redirect_stdout=True, redirect_stderr=True,
+                     return_result=True)
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    gslib.SetACL(gs_uri, acl_file)
+    self.mox.VerifyAll()
+
+  def testSetACLFail(self):
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    gs_uri = 'gs://bucket/foo/bar/somefile'
+    acl_file = '/some/gs/acl/file'
+
+    # Set up the test replay script. (Multiple times because of retry logic)
+    cmd = [self.gsutil, 'setacl', acl_file, gs_uri]
+    utils.RunCommand(
+        cmd, redirect_stdout=True, redirect_stderr=True,
+        return_result=True).MultipleTimes().AndRaise(
+            utils.CommandFailedException())
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertRaises(gslib.AclFail,
+                      gslib.SetACL, gs_uri, acl_file)
+    self.mox.VerifyAll()
+
+
+class TestGsLibAccess(cros_test_lib.MoxTempDirTestCase):
+  """Test access to gs lib functionality.
+
+  The tests here require GS .boto access to the gs://chromeos-releases-public
+  bucket, which is world-readable.  Any .boto setup should do, but without
+  a .boto there will be failures.
+  """
+  small_gs_path = 'gs://chromeos-releases-public/small-test-file'
+
+  @cros_test_lib.NetworkTest()
+  def testCopyAndMD5Sum(self):
+    """Higher-level functional test.  Test MD5Sum OK:
+
+    1) List files on GS.
+    2) Select a small one by asking for byte size of files on GS.
+    3) Get MD5 sum of file on GS.
+    4) Copy file down to local file.
+    5) Recalculate MD5 sum for local file.
+    6) Verify that MD5 values are the same.
+    """
+    gs_md5 = gslib.MD5Sum(self.small_gs_path)
+    local_path = os.path.join(self.tempdir, 'md5-check-file')
+    gslib.Copy(self.small_gs_path, local_path)
+    local_md5 = gslib.filelib.MD5Sum(local_path)
+    self.assertEqual(gs_md5, local_md5)
+
+  @cros_test_lib.NetworkTest()
+  def testExistsLazy(self):
+    self.assertTrue(gslib.ExistsLazy(self.small_gs_path))
+
+    bogus_gs_path = 'gs://chromeos-releases/wert/sdgi/sadg/sdgi'
+    self.assertFalse(gslib.ExistsLazy(bogus_gs_path))
+
+  @cros_test_lib.NetworkTest()
+  def testExists(self):
+    self.assertTrue(gslib.Exists(self.small_gs_path))
+
+    bogus_gs_path = 'gs://chromeos-releases/wert/sdgi/sadg/sdgi'
+    self.assertFalse(gslib.Exists(bogus_gs_path))
+
+  @cros_test_lib.NetworkTest()
+  def testExistsFalse(self):
+    """Test Exists logic with non-standard output from gsutil."""
+    expected_output = ('GSResponseError: status=404, code=NoSuchKey,'
+                       ' reason="Not Found",'
+                       ' message="The specified key does not exist."')
+    err1 = gslib.StatFail(expected_output)
+    err2 = gslib.StatFail('You are using a deprecated alias, "getacl",'
+                          'for the "acl" command.\n' +
+                          expected_output)
+
+    uri = 'gs://any/fake/uri/will/do'
+    cmd = ['stat', uri]
+
+    self.mox.StubOutWithMock(gslib, 'RunGsutilCommand')
+
+    # Set up the test replay script.
+    # Run 1, normal.
+    gslib.RunGsutilCommand(cmd, failed_exception=gslib.StatFail,
+                           get_headers_from_stdout=True).AndRaise(err1)
+    # Run 2, extra output.
+    gslib.RunGsutilCommand(cmd, failed_exception=gslib.StatFail,
+                           get_headers_from_stdout=True).AndRaise(err2)
+    self.mox.ReplayAll()
+
+    # Run the test verification
+    self.assertFalse(gslib.Exists(uri))
+    self.assertFalse(gslib.Exists(uri))
+    self.mox.VerifyAll()
+
+  @cros_test_lib.NetworkTest()
+  def testMD5SumBadPath(self):
+    """Higher-level functional test.  Test MD5Sum bad path:
+
+    1) Make up random, non-existent gs path
+    2) Ask for MD5Sum.  Make sure it fails, but with no exception.
+    """
+
+    gs_path = 'gs://chromeos-releases/awsedrftgyhujikol'
+    gs_md5 = gslib.MD5Sum(gs_path)
+    self.assertTrue(gs_md5 is None)
+
+  @cros_test_lib.NetworkTest()
+  def testMD5SumBadBucket(self):
+    """Higher-level functional test.  Test MD5Sum bad bucket:
+
+    1) Make up random, non-existent gs bucket and path
+    2) Ask for MD5Sum.  Make sure it fails, with exception
+    """
+
+    gs_path = 'gs://lokijuhygtfrdesxcv/awsedrftgyhujikol'
+    gs_md5 = gslib.MD5Sum(gs_path)
+    self.assertTrue(gs_md5 is None)
diff --git a/lib/paygen/gslock.py b/lib/paygen/gslock.py
new file mode 100644
index 0000000..183cbce
--- /dev/null
+++ b/lib/paygen/gslock.py
@@ -0,0 +1,210 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This library can use Google Storage files as basis for locking.
+
+A lock is acquired by creating a GS file (file creation is atomic). This means
+the URL defines the lock.
+
+When we create a lock, we also find its 'generation', which is a version
+number for the file that will be unique over time. We can prefix most GS
+commands with our version number to ensure the file hasn't been changed
+underneath us somehow. This allows us to ensure our operations are atomic.
+
+Locks have a timeout value, and may be legally acquired by any one else after
+the timeout has expired. Generation values are important to making sure this is
+an atomic operation. This timeout is agreed upon in advance, and there can be
+confusion if different clients use different timeout values. The current lock
+owner is NOT notified if a lock is expired.
+
+A lock owner can 'Renew' the lock at any time to ensure it doesn't expire.
+
+Lock files will normally hold the hostname and pid of the process that created
+them, which can be useful for debugging purposes.
+
+Note that lock files will be left behind forever if not explicitly cleaned up by
+the creating server.
+"""
+
+from __future__ import print_function
+
+import datetime
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+
+
+class LockProbeError(Exception):
+  """Raised when there was an error probing a lock file."""
+
+
+class LockNotAcquired(Exception):
+  """Raised when the lock is already held by another process."""
+
+
+class Lock(object):
+  """This class manages a google storage file as a form of lock.
+
+  This class can be used in conjunction with a "with" clause to ensure
+  the lock is released, or directly.
+
+    try:
+      with gslock.Lock("gs://chromeos-releases/lock-file"):
+        # Protected code
+        ...
+    except LockNotAcquired:
+      # Error handling
+      ...
+
+    lock = gslock.Lock("gs://chromeos-releases/lock-file")
+    try:
+      lock.Acquire()
+    except LockNotAcquired:
+      # Error handling
+
+    # Protected code
+    ...
+
+    lock.Release()
+
+    Locking is strictly atomic, except when timeouts are involved.
+
+    It assumes that local server time is in sync with Google Storage server
+    time.
+  """
+
+  def __init__(self, gs_path, lock_timeout_mins=120, dry_run=False,
+               ctx=None):
+    """Initializer for the lock.
+
+    Args:
+      gs_path:
+        Path to the potential GS file we use for lock management.
+      lock_timeout_mins:
+        How long should an existing lock be considered valid? This timeout
+        should be long enough that it's never hit unless a server is
+        unexpectedly rebooted, lost network connectivity or had
+        some other catastrophic error.
+      dry_run: do nothing, always succeed
+      ctx: chromite.lib.gs.GSContext to use.
+    """
+    self._gs_path = gs_path
+    self._timeout = datetime.timedelta(minutes=lock_timeout_mins)
+    self._contents = cros_build_lib.MachineDetails()
+    self._generation = 0
+    self._dry_run = dry_run
+    self._ctx = ctx if ctx is not None else gs.GSContext(dry_run=dry_run)
+
+  def _LockExpired(self):
+    """Check to see if an existing lock has timed out.
+
+    Returns:
+      Tuple (expired, generation): expired is True if the lock timed out.
+    """
+    try:
+      stat_results = self._ctx.Stat(self._gs_path)
+    except gs.GSNoSuchKey:
+      # If we couldn't figure out when the file was last modified, it might
+      # have already been released. In any case, it's probably not safe to try
+      # to clear the lock, so we'll return False here.
+      return False, 0
+
+    modified = stat_results.creation_time
+    expired = datetime.datetime.utcnow() > modified + self._timeout
+
+    return expired, stat_results.generation
+
+  def _AcquireLock(self):
+    """Attempt to acquire the lock.
+
+    Raises:
+      LockNotAcquired: If the lock isn't acquired.
+    """
+    try:
+      self._generation = self._ctx.Copy(
+          '-', self._gs_path, input=self._contents, version=self._generation)
+      if self._generation is None:
+        self._generation = 0
+        if not self._dry_run:
+          raise LockProbeError('Unable to detect generation')
+    except gs.GSContextPreconditionFailed:
+      # Find the lock contents. Either use this for error reporting, or to find
+      # out if we already own it.
+      contents = 'Unknown'
+      try:
+        contents = self._ctx.Cat(self._gs_path)
+      except gs.GSContextException:
+        pass
+
+      # If we thought we were creating the file it's possible for us to already
+      # own it because the Copy command above can retry. If the first attempt
+      # works but returns a retryable error, it will fail with
+      # GSContextPreconditionFailed on the second attempt.
+      if self._generation == 0 and contents == self._contents:
+        # If the lock contains our contents, we own it, but don't know the
+        # generation.
+        try:
+          stat_results = self._ctx.Stat(self._gs_path)
+          self._generation = stat_results.generation
+          return
+        except gs.GSNoSuchKey:
+          # If we can't look up stats.... we didn't get the lock.
+          pass
+
+      # We didn't get the lock, raise the expected exception.
+      self._generation = 0
+      raise LockNotAcquired('Lock: %s held by: %s' % (self._gs_path, contents))
+
+  def Acquire(self):
+    """Attempt to acquire the lock.
+
+    Will remove an existing lock if it has timed out.
+
+    Raises:
+      LockNotAcquired if it is unable to get the lock.
+    """
+    try:
+      self._AcquireLock()
+    except LockNotAcquired:
+      # We failed to get the lock right away, try to expire then acquire.
+      expired, generation = self._LockExpired()
+      if not expired:
+        raise
+
+      # It is expired, grab it, but use its existing generation to close
+      # a race condition with someone else who is also expiring it.
+      logging.warning('Attempting to time out lock at %s.', self._gs_path)
+      self._generation = generation
+      self._AcquireLock()
+
+  def Release(self):
+    """Release the lock."""
+    try:
+      self._ctx.Remove(self._gs_path, version=self._generation,
+                       ignore_missing=True)
+    except gs.GSContextPreconditionFailed:
+      if not self._LockExpired():
+        raise
+      logging.warning('Lock at %s expired and was stolen.', self._gs_path)
+    self._generation = 0
+
+  def Renew(self):
+    """Resets the timeout on a lock you are holding.
+
+    Raises:
+      LockNotAcquired if it can't Renew the lock for any reason.
+    """
+    if int(self._generation) == 0:
+      raise LockNotAcquired('Lock not held')
+    self.Acquire()
+
+  def __enter__(self):
+    """Support for entering a with clause."""
+    self.Acquire()
+    return self
+
+  def __exit__(self, _type, _value, _traceback):
+    """Support for exiting a with clause."""
+    self.Release()
diff --git a/lib/paygen/gslock_unittest b/lib/paygen/gslock_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/lib/paygen/gslock_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/paygen/gslock_unittest.py b/lib/paygen/gslock_unittest.py
new file mode 100644
index 0000000..70a60a6
--- /dev/null
+++ b/lib/paygen/gslock_unittest.py
@@ -0,0 +1,267 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test gslock library."""
+
+from __future__ import print_function
+
+import multiprocessing
+import os
+import socket
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import gs
+
+from chromite.lib.paygen import gslock
+
+
+# We access a lot of protected members during testing.
+# pylint: disable=protected-access
+
+
+def _InProcessAcquire(lock_uri):
+  """Acquire a lock in a sub-process, but don't release.
+
+  This helper has to be pickleable, so can't be a member of the test class.
+
+  Args:
+    lock_uri: URI of the lock to acquire.
+
+  Returns:
+    boolean telling if this method got the lock.
+  """
+  lock = gslock.Lock(lock_uri)
+  try:
+    lock.Acquire()
+    return True
+  except gslock.LockNotAcquired:
+    return False
+
+
+def _InProcessDoubleAcquire(lock_uri):
+  """Acquire a lock in a sub-process, and reacquire it a second time.
+
+  Do not release the lock after acquiring.
+
+  This helper has to be pickleable, so can't be a member of the test class.
+
+  Args:
+    lock_uri: URI of the lock to acquire.
+
+  Returns:
+    int describing how many times it acquired a lock.
+  """
+  count = 0
+
+  lock = gslock.Lock(lock_uri)
+  try:
+    lock.Acquire()
+    count += 1
+    lock.Acquire()
+    count += 1
+  except gslock.LockNotAcquired:
+    pass
+
+  return count
+
+
+def _InProcessDataUpdate(lock_uri_data_uri):
+  """Increment a number in a GS file protected by a lock.
+
+  Keeps looking until the lock is acquired, so effectively, blocking. Stores
+  or increments an integer in the data_uri by one, once.
+
+  This helper has to be pickleable, so can't be a member of the test class.
+
+  Args:
+    lock_uri_data_uri: Tuple containing (lock_uri, data_uri). Passed as
+                       a tuple, since multiprocessing.Pool.map only allows
+                       a single argument in.
+
+    lock_uri: URI of the lock to acquire.
+    data_uri: URI of the data file to create/increment.
+
+  Returns:
+    boolean describing if this method got the lock.
+  """
+  lock_uri, data_uri = lock_uri_data_uri
+  ctx = gs.GSContext()
+
+  # Keep trying until the lock is acquired.
+  while True:
+    try:
+      with gslock.Lock(lock_uri):
+        if ctx.Exists(data_uri):
+          data = int(ctx.Cat(data_uri)) + 1
+        else:
+          data = 1
+
+        ctx.CreateWithContents(data_uri, str(data))
+        return True
+
+    except gslock.LockNotAcquired:
+      pass
+
+
+class GSLockTest(cros_test_lib.MockTestCase):
+  """This test suite covers the GSLock file."""
+
+  @cros_test_lib.NetworkTest()
+  def setUp(self):
+    self.ctx = gs.GSContext()
+
+    # Use the unique id to make sure the tests can be run multiple places.
+    unique_id = '%s.%d' % (socket.gethostname(), os.getpid())
+
+    self.lock_uri = 'gs://chromeos-releases-test/test-%s-gslock' % unique_id
+    self.data_uri = 'gs://chromeos-releases-test/test-%s-data' % unique_id
+
+    # Clear out any flags left from previous failure
+    self.ctx.Remove(self.lock_uri, ignore_missing=True)
+    self.ctx.Remove(self.data_uri, ignore_missing=True)
+
+  @cros_test_lib.NetworkTest()
+  def tearDown(self):
+    self.assertFalse(self.ctx.Exists(self.lock_uri))
+    self.assertFalse(self.ctx.Exists(self.data_uri))
+
+  @cros_test_lib.NetworkTest()
+  def testLock(self):
+    """Test getting a lock."""
+    # Force a known host name.
+    self.PatchObject(cros_build_lib, 'MachineDetails', return_value='TestHost')
+
+    lock = gslock.Lock(self.lock_uri)
+
+    self.assertFalse(self.ctx.Exists(self.lock_uri))
+    lock.Acquire()
+    self.assertTrue(self.ctx.Exists(self.lock_uri))
+
+    contents = self.ctx.Cat(self.lock_uri)
+    self.assertEqual(contents, 'TestHost')
+
+    lock.Release()
+    self.assertFalse(self.ctx.Exists(self.lock_uri))
+
+  @cros_test_lib.NetworkTest()
+  def testLockRepetition(self):
+    """Test acquiring same lock multiple times."""
+    # Force a known host name.
+    self.PatchObject(cros_build_lib, 'MachineDetails', return_value='TestHost')
+
+    lock = gslock.Lock(self.lock_uri)
+
+    self.assertFalse(self.ctx.Exists(self.lock_uri))
+    lock.Acquire()
+    self.assertTrue(self.ctx.Exists(self.lock_uri))
+
+    lock.Acquire()
+    self.assertTrue(self.ctx.Exists(self.lock_uri))
+
+    lock.Release()
+    self.assertFalse(self.ctx.Exists(self.lock_uri))
+
+    lock.Acquire()
+    self.assertTrue(self.ctx.Exists(self.lock_uri))
+
+    lock.Release()
+    self.assertFalse(self.ctx.Exists(self.lock_uri))
+
+  @cros_test_lib.NetworkTest()
+  def testLockConflict(self):
+    """Test lock conflict."""
+
+    lock1 = gslock.Lock(self.lock_uri)
+    lock2 = gslock.Lock(self.lock_uri)
+
+    # Manually lock 1, and ensure lock2 can't lock.
+    lock1.Acquire()
+    self.assertRaises(gslock.LockNotAcquired, lock2.Acquire)
+    lock1.Release()
+
+    # Use a with clause on 2, and ensure 1 can't lock.
+    with lock2:
+      self.assertRaises(gslock.LockNotAcquired, lock1.Acquire)
+
+    # Ensure we can renew a given lock.
+    lock1.Acquire()
+    lock1.Renew()
+    lock1.Release()
+
+    # Ensure we get an error renewing a lock we don't hold.
+    self.assertRaises(gslock.LockNotAcquired, lock1.Renew)
+
+  @cros_test_lib.NetworkTest()
+  def testLockTimeout(self):
+    """Test getting a lock when an old timed out one is present."""
+
+    # Both locks are always timed out.
+    lock1 = gslock.Lock(self.lock_uri, lock_timeout_mins=-1)
+    lock2 = gslock.Lock(self.lock_uri, lock_timeout_mins=-1)
+
+    lock1.Acquire()
+    lock2.Acquire()
+
+    self.ctx.Remove(self.lock_uri)
+
+  @cros_test_lib.NetworkTest()
+  def testRaceToAcquire(self):
+    """Have lots of processes race to acquire the same lock."""
+    count = 20
+    pool = multiprocessing.Pool(processes=count)
+    results = pool.map(_InProcessAcquire, [self.lock_uri] * count)
+
+    # Clean up the lock since the processes explicitly only acquire.
+    self.ctx.Remove(self.lock_uri)
+
+    # Ensure that only one of them got the lock.
+    self.assertEqual(results.count(True), 1)
+
+  @cros_test_lib.NetworkTest()
+  def testRaceToDoubleAcquire(self):
+    """Have lots of processes race to double acquire the same lock."""
+    count = 20
+    pool = multiprocessing.Pool(processes=count)
+    results = pool.map(_InProcessDoubleAcquire, [self.lock_uri] * count)
+
+    # Clean up the lock since the processes explicitly only acquire.
+    self.ctx.Remove(self.lock_uri)
+
+    # Ensure that only one of them got the lock (and got it twice).
+    self.assertEqual(results.count(0), count - 1)
+    self.assertEqual(results.count(2), 1)
+
+  @cros_test_lib.NetworkTest()
+  def testMultiProcessDataUpdate(self):
+    """Have lots of processes update a GS file protected by a lock."""
+    count = 20   # To really stress, bump up to 200.
+    pool = multiprocessing.Pool(processes=count)
+    results = pool.map(_InProcessDataUpdate,
+                       [(self.lock_uri, self.data_uri)] * count)
+
+    self.assertEqual(self.ctx.Cat(self.data_uri), str(count))
+
+    # Ensure that all report success
+    self.assertEqual(results.count(True), count)
+
+    # Clean up the data file.
+    self.ctx.Remove(self.data_uri)
+
+  @cros_test_lib.NetworkTest()
+  def testDryrunLock(self):
+    """Ensure that lock can be obtained and released in dry-run mode."""
+    lock = gslock.Lock(self.lock_uri, dry_run=True)
+    self.assertIsNone(lock.Acquire())
+    self.assertFalse(self.ctx.Exists(self.lock_uri))
+    self.assertIsNone(lock.Release())
+
+  def testDryrunLockRepetition(self):
+    """Test acquiring same lock multiple times in dry-run mode."""
+    lock = gslock.Lock(self.lock_uri, dry_run=True)
+    self.assertIsNone(lock.Acquire())
+    self.assertIsNone(lock.Acquire())
+    self.assertIsNone(lock.Release())
+    self.assertIsNone(lock.Acquire())
+    self.assertIsNone(lock.Release())
diff --git a/lib/paygen/gspaths.py b/lib/paygen/gspaths.py
new file mode 100644
index 0000000..efcc6d7
--- /dev/null
+++ b/lib/paygen/gspaths.py
@@ -0,0 +1,715 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Centralize knowledge about how to create standardized Google Storage paths.
+
+This includes definitions for various build flags:
+
+  SKIP - means a given build is bad and should not have payloads generated.
+  FINISHED - means that the payloads have been fully generated.
+  LOCK - means that payload processing is in progress on the host which
+         owns the locks. Locks have a timeout associated with them in
+         case of error, but are not 100% atomic when a lock is timing out.
+
+  Example file paths:
+    gs://chromeos-releases/blah-channel/board-name/1.2.3/payloads/SKIP_flag
+    gs://chromeos-releases/blah-channel/board-name/1.2.3/payloads/FINISHED_flag
+    gs://chromeos-releases/blah-channel/board-name/1.2.3/payloads/LOCK_flag
+"""
+
+from __future__ import print_function
+
+import hashlib
+import os
+import random
+import re
+
+from chromite.lib.paygen import utils
+
+
+class Build(utils.RestrictedAttrDict):
+  """Define a ChromeOS Build.
+
+  The order of attributes in self._slots dictates the order attributes
+  are printed in by __str__ method of super class.  Keep the attributes
+  that are more helpful in identifying this build earlier in the list,
+  because this string ends up cut off in email subjects.
+
+  Fields:
+    board: The board of the image "x86-mario", etc.
+    bucket: The bucket of the image. "chromeos-releases" as default.
+    channel: The channel of the image "stable-channel", "nplusone", etc.
+    uri: The URI of the build directory.
+    version: The version of the image. "0.14.23.2", "3401.0.0", etc.
+  """
+  _slots = ('board', 'version', 'channel', 'bucket', 'uri')
+  _name = 'Build definition'
+
+  def __init__(self, *args, **kwargs):
+    super(Build, self).__init__(*args, **kwargs)
+
+    # If these match defaults, set to None.
+    self._clear_if_default('bucket', ChromeosReleases.BUCKET)
+
+
+class Image(utils.RestrictedAttrDict):
+  """Define a ChromeOS Image.
+
+  Fields:
+    board: The board of the image "x86-mario", etc.
+    bucket: The bucket of the image. "chromeos-releases" as default.
+    channel: The channel of the image "stable-channel", "nplusone", etc.
+    image_channel: Sometimes an image has a different channel than the build
+                   directory it's in. (ie: nplusone). None otherwise.
+    image_version: Sometimes an image has a different version than the build
+                   directory it's in. (ie: nplusone). None otherwise.
+    image_type: The type of the image. Currently, "recovery" or "base" types
+                are supported.
+    key: The key the image was signed with. "premp", "mp", "mp-v2"
+         This is not the board specific key name, but the general value used
+         in image/payload names.
+    uri: The URI of the image. This URI can be any format understood by
+         urilib.
+    version: The version of the image. "0.14.23.2", "3401.0.0", etc.
+  """
+  _name = 'Image definition'
+  _slots = ('board', 'version', 'channel', 'image_type', 'key',
+            'image_channel', 'image_version', 'bucket',
+            'uri')
+  DEFAULT_IMAGE_TYPE = 'recovery'
+
+  def __init__(self, *args, **kwargs):
+    super(Image, self).__init__(*args, **kwargs)
+
+    # If these match defaults, set to None.
+    self._clear_if_default('bucket', ChromeosReleases.BUCKET)
+    self._clear_if_default('image_channel', self['channel'])
+    self._clear_if_default('image_version', self['version'])
+    # Force a default image_type if unspecified.
+    if not self['image_type']:
+      self['image_type'] = Image.DEFAULT_IMAGE_TYPE
+
+  def __str__(self):
+    if self.uri:
+      return '%s' % self.uri.split('/')[-1]
+    else:
+      return ('Image: %s:%s/%s%s/%s%s/%s/%s (no uri)' %
+              (self.bucket, self.board, self.channel,
+               '(%s)' % self.image_channel if self.image_channel else '',
+               self.version,
+               '(%s)' % self.image_version if self.image_version else '',
+               self.image_type, self.key))
+
+
+class UnsignedImageArchive(utils.RestrictedAttrDict):
+  """Define an unsigned ChromeOS image archive.
+
+  Fields:
+    bucket: The bucket of the image. "chromeos-releases" as default.
+    channel: The channel of the image "stable-channel", "nplusone", etc.
+    board: The board of the image "x86-mario", etc.
+    version: The version of the image. "0.14.23.2", "3401.0.0", etc.
+    milestone: the most recent branch corresponding to the version; "R19" etc
+    image_type: "test", "recovery" or "base"
+    uri: The URI of the image. This URI can be any format understood by
+         urilib.
+  """
+  _name = 'Unsigned image archive definition'
+  _slots = ('bucket', 'channel', 'board', 'version', 'milestone', 'image_type',
+            'uri')
+
+  def __str__(self):
+    if self.uri:
+      return '%s' % self.uri.split('/')[-1]
+    else:
+      return ('Unsigned image archive: %s:%s/%s/%s-%s/%s (no uri)' %
+              (self.bucket, self.board, self.channel,
+               self.milestone, self.version,
+               self.image_type))
+
+
+class Payload(utils.RestrictedAttrDict):
+  """Define a ChromeOS Payload.
+
+  Fields:
+    tgt_image: A representation of image the payload updates to, either
+               Image or UnsignedImageArchive.
+    src_image: A representation of image it updates from. None for
+               Full updates, or the same type as tgt_image otherwise.
+    uri: The URI of the payload. This can be any format understood by urilib.
+  """
+  _name = 'Payload definition'
+  _slots = ('tgt_image', 'src_image', 'uri')
+
+  def __str__(self):
+    if self.uri:
+      return self.uri.split('/')[-1]
+    else:
+      return '%s -> %s (no uri)' % (self.src_image or 'any', self.tgt_image)
+
+
+class ChromeosReleases(object):
+  """Name space class for static methods for URIs in chromeos-releases."""
+
+  BUCKET = 'chromeos-releases'
+
+  # Build flags
+  SKIP = 'SKIP'
+  FINISHED = 'FINISHED'
+  LOCK = 'LOCK'
+
+  FLAGS = (SKIP, FINISHED, LOCK)
+
+  UNSIGNED_IMAGE_TYPES = ('test', 'recovery', 'base')
+
+  @staticmethod
+  def BuildUri(channel, board, version, bucket=None):
+    """Creates the gspath for a given build.
+
+    Args:
+      channel: What channel does the build belong to. Usually "xxx-channel".
+      board: What board is the build for? "x86-alex", "lumpy", etc.
+      version: What is the build version. "3015.0.0", "1945.76.3", etc
+      bucket: What bucket is the build in? (None means ChromeosReleases.BUCKET)
+
+    Returns:
+      The url for the specified build artifacts. Should be of the form:
+      gs://chromeos-releases/blah-channel/board-name/1.2.3
+    """
+
+    if not bucket:
+      bucket = ChromeosReleases.BUCKET
+
+    return 'gs://%s/%s/%s/%s' % (bucket, channel, board, version)
+
+  @staticmethod
+  def GeneratorUri(channel, board, version, bucket=None):
+    """Creates the gspath for a given build image.
+
+    Args:
+      channel: What channel does the build belong to. Usually "xxx-channel".
+      board: What board is the build for? "x86-alex", "lumpy", etc.
+      version: What is the build version. "3015.0.0", "1945.76.3", etc
+      bucket: What bucket is the build in? Usually "chromeos-releases".
+
+    Returns:
+      The url for the specified build's delta generator zip file.
+    """
+    return os.path.join(ChromeosReleases.BuildUri(channel,
+                                                  board,
+                                                  version,
+                                                  bucket=bucket),
+                        'au-generator.zip')
+
+  @staticmethod
+  def BuildPayloadsUri(channel, board, version, bucket=None):
+    """Creates the gspath for the payloads of a given build.
+
+    Args:
+      channel: What channel does the build belong to. Usually "xxx-channel".
+      board: What board is the build for? "x86-alex", "lumpy", etc.
+      version: What is the build version. "3015.0.0", "1945.76.3", etc
+      bucket: What bucket is the build in? (None means ChromeosReleases.BUCKET)
+
+    Returns:
+      The url for the specified build's payloads. Should be of the form:
+        gs://chromeos-releases/blah-channel/board-name/1.2.3/payloads
+    """
+    return os.path.join(ChromeosReleases.BuildUri(channel,
+                                                  board,
+                                                  version,
+                                                  bucket=bucket),
+                        'payloads')
+
+  @staticmethod
+  def BuildPayloadsSigningUri(channel, board, version, bucket=None):
+    """Creates the base gspath for payload signing files.
+
+    We create a number of files during signer interaction. This method creates
+    the base path for all such files associated with a given build. There
+    should still be subdirectories per-payload to avoid collisions, but by
+    using this uniform base pass clean up can be more reliable.
+
+    Args:
+      channel: What channel does the build belong to. Usually "xxx-channel".
+      board: What board is the build for? "x86-alex", "lumpy", etc.
+      version: What is the build version. "3015.0.0", "1945.76.3", etc
+      bucket: What bucket is the build in? (None means ChromeosReleases.BUCKET)
+
+    Returns:
+      The url for the specified build's payloads. Should be of the form:
+      gs://chromeos-releases/blah-channel/board-name/1.2.3/payloads/signing
+    """
+    return os.path.join(ChromeosReleases.BuildPayloadsUri(channel,
+                                                          board,
+                                                          version,
+                                                          bucket=bucket),
+                        'signing')
+
+  @staticmethod
+  def BuildPayloadsFlagUri(channel, board, version, flag, bucket=None):
+    """Creates the gspath for a given build flag.
+
+    SKIP - means a given build is bad and should not have payloads generated.
+    FINISHED - means that the payloads have been fully generated.
+    LOCK - means that payload processing is in progress on the host which
+           owns the locks. Locks have a timeout associated with them in
+           case of error, but are not 100% atomic when a lock is timing out.
+
+    Args:
+      channel: What channel does the build belong to. Usually "xxx-channel".
+      board: What board is the build for? "x86-alex", "lumpy", etc.
+      version: What is the build version. "3015.0.0", "1945.76.3", etc
+      flag: gs_paths.SKIP, gs_paths.FINISHED, or gs_paths.LOCK
+      bucket: What bucket is the build in? (None means ChromeosReleases.BUCKET)
+
+    Returns:
+      The url for the specified build's payloads. Should be of the form:
+      gs://chromeos-releases/blah-channel/board-name/1.2.3/payloads/SKIP_FLAG
+    """
+    assert flag in ChromeosReleases.FLAGS
+    return os.path.join(ChromeosReleases.BuildPayloadsUri(channel,
+                                                          board,
+                                                          version,
+                                                          bucket=bucket),
+                        '%s_flag' % flag)
+
+  @staticmethod
+  def ImageName(channel, board, version, key, image_type):
+    """Creates the base file name for a given build image.
+
+    Args:
+      channel: What channel does the build belong to. Usually xxx-channel.
+      board: What board is the build for? "x86-alex", "lumpy", etc.
+      version: What is the build version. "3015.0.0", "1945.76.3", etc
+      key: What is the signing key. "premp", "mp", "mp-v2", etc
+      image_type: The type of image.  It can be either "recovery" or "base".
+
+    Returns:
+      The name of the specified image. Should be of the form:
+        chromeos_1.2.3_board-name_recovery_blah-channel_key.bin
+    """
+
+    template = ('chromeos_%(version)s_%(board)s_%(image_type)s'
+                + '_%(channel)s_%(key)s.bin')
+
+    return template % {
+        'channel': channel,
+        'board': board,
+        'version': version,
+        'key': key,
+        'image_type': image_type,
+    }
+
+  @staticmethod
+  def UnsignedImageArchiveName(board, version, milestone, image_type):
+    """The base name for the tarball containing an unsigned build image.
+
+    Args:
+      board: What board is the build for? "x86-alex", "lumpy", etc.
+      version: What is the build version? "3015.0.0", "1945.76.3", etc
+      milestone: the most recent branch corresponding to the version; "R19" etc
+      image_type: either "recovery" or "test", currently
+
+    Returns:
+      The name of the specified image archive. Should be of the form:
+        ChromeOS-type-R19-1.2.3-board-name.tar.xz
+    """
+
+    template = (
+        'ChromeOS-%(image_type)s-%(milestone)s-%(version)s-%(board)s.tar.xz')
+
+    return template % {
+        'board': board,
+        'version': version,
+        'milestone': milestone,
+        'image_type': image_type,
+    }
+
+  @staticmethod
+  def ImageUri(channel, board, version, key, image_type,
+               image_channel=None, image_version=None,
+               bucket=None):
+    """Creates the gspath for a given build image.
+
+    Args:
+      channel: What channel does the build belong to? Usually "xxx-channel"
+      board: What board is the build for? "x86-alex", "lumpy", etc
+      version: What is the build version? "3015.0.0", "1945.76.3", etc
+      key: What is the signing key? "premp", "mp", "mp-v2", etc
+      image_type: The type of image.  It can be either "recovery" or "base".
+      image_channel: Sometimes an image has a different channel than the build
+                     directory it's in. (ie: nplusone).
+      image_version: Sometimes an image has a different version than the build
+                     directory it's in. (ie: nplusone).
+      bucket: What bucket is the build in? (None means ChromeosReleases.BUCKET)
+
+    Returns:
+      The url for the specified build's image. Should be of the form:
+        gs://chromeos-releases/blah-channel/board-name/1.2.3/
+          chromeos_1.2.3_board-name_recovery_blah-channel_key.bin
+    """
+    if not image_channel:
+      image_channel = channel
+
+    if not image_version:
+      image_version = version
+
+    return os.path.join(
+        ChromeosReleases.BuildUri(channel, board, version, bucket=bucket),
+        ChromeosReleases.ImageName(image_channel, board, image_version, key,
+                                   image_type))
+
+  @staticmethod
+  def UnsignedImageArchiveUri(channel, board, version, milestone, image_type,
+                              bucket=None):
+    """Creates the gspath for a given unsigned build image archive.
+
+    Args:
+      channel: What channel does the build belong to? Usually "xxx-channel"
+      board: What board is the build for? "x86-alex", "lumpy", etc
+      version: What is the build version? "3015.0.0", "1945.76.3", etc
+      milestone: the most recent branch corresponding to the version; "R19" etc
+      image_type: either "recovery" or "test", currently
+      bucket: What bucket is the build in? (None means ChromeosReleases.BUCKET)
+
+    Returns:
+      The url for the specified build's image. Should be of the form:
+        gs://chromeos-releases/blah-channel/board-name/1.2.3/
+          ChromeOS-type-R19-1.2.3-board-name.tar.xz
+    """
+    return os.path.join(
+        ChromeosReleases.BuildUri(channel, board, version, bucket=bucket),
+        ChromeosReleases.UnsignedImageArchiveName(board, version,
+                                                  milestone, image_type))
+
+  @classmethod
+  def ParseImageUri(cls, image_uri):
+    """Parse the URI of an image into an Image object."""
+
+    # The named values in this regex must match the arguments to gspaths.Image.
+    exp = (r'^gs://(?P<bucket>.*)/(?P<channel>.*)/(?P<board>.*)/'
+           r'(?P<version>.*)/chromeos_(?P<image_version>[^_]+)_'
+           r'(?P=board)_(?P<image_type>[^_]+)_(?P<image_channel>[^_]+)_'
+           '(?P<key>[^_]+).bin$')
+
+    m = re.match(exp, image_uri)
+
+    if not m:
+      return None
+
+    values = m.groupdict()
+
+    # Insert the URI
+    values['uri'] = image_uri
+
+    # Create an Image object using the values we parsed out.
+    return Image(values)
+
+  @classmethod
+  def ParseUnsignedImageArchiveUri(cls, image_uri):
+    """Parse the URI of an image into an UnsignedImageArchive object."""
+
+    # The named values in this regex must match the arguments to gspaths.Image.
+    exp = (r'gs://(?P<bucket>[^/]+)/(?P<channel>[^/]+)/'
+           r'(?P<board>[^/]+)/(?P<version>[^/]+)/'
+           r'ChromeOS-(?P<image_type>%s)-(?P<milestone>R[0-9]+)-'
+           r'(?P=version)-(?P=board).tar.xz' %
+           '|'.join(cls.UNSIGNED_IMAGE_TYPES))
+
+    m = re.match(exp, image_uri)
+
+    if not m:
+      return None
+
+    values = m.groupdict()
+
+    # Insert the URI
+    values['uri'] = image_uri
+
+    # Reset values if they match their defaults.
+    if values['bucket'] == cls.BUCKET:
+      values['bucket'] = None
+
+    # Create an Image object using the values we parsed out.
+    return UnsignedImageArchive(values)
+
+  @staticmethod
+  def PayloadName(channel, board, version, key=None, random_str=None,
+                  src_version=None, unsigned_image_type='test'):
+    """Creates the gspath for a payload associated with a given build.
+
+    Args:
+      channel: What channel does the build belong to? Usually "xxx-channel".
+      board: What board is the build for? "x86-alex", "lumpy", etc.
+      version: What is the build version? "3015.0.0", "1945.76.3", etc
+      key: What is the signing key? "premp", "mp", "mp-v2", etc; None (default)
+           indicates that the image is not signed, e.g. a test image
+      image_channel: Sometimes an image has a different channel than the build
+                     directory it's in. (ie: nplusone).
+      image_version: Sometimes an image has a different version than the build
+                     directory it's in. (ie: nplusone).
+      random_str: Force a given random string. None means generate one.
+      src_version: If this payload is a delta, this is the version of the image
+                   it updates from.
+      unsigned_image_type: the type descriptor (string) of an unsigned image;
+                           significant iff key is None (default: "test")
+
+    Returns:
+      The name for the specified build's payloads. Should be of the form:
+
+        chromeos_0.12.433.257-2913.377.0_x86-alex_stable-channel_
+        delta_mp-v3.bin-b334762d0f6b80f471069153bbe8b97a.signed
+
+        chromeos_2913.377.0_x86-alex_stable-channel_full_mp-v3.
+        bin-610c97c30fae8561bde01a6116d65cb9.signed
+    """
+    if random_str is None:
+      random.seed()
+      random_str = hashlib.md5(str(random.getrandbits(128))).hexdigest()
+
+    if key is None:
+      signed_ext = ''
+      key = unsigned_image_type
+    else:
+      signed_ext = '.signed'
+
+    if src_version:
+      template = ('chromeos_%(src_version)s-%(version)s_%(board)s_%(channel)s_'
+                  'delta_%(key)s.bin-%(random_str)s%(signed_ext)s')
+
+      return template % {
+          'channel': channel,
+          'board': board,
+          'version': version,
+          'key': key,
+          'random_str': random_str,
+          'src_version': src_version,
+          'signed_ext': signed_ext,
+      }
+    else:
+      template = ('chromeos_%(version)s_%(board)s_%(channel)s_'
+                  'full_%(key)s.bin-%(random_str)s%(signed_ext)s')
+
+      return template % {
+          'channel': channel,
+          'board': board,
+          'version': version,
+          'key': key,
+          'random_str': random_str,
+          'signed_ext': signed_ext,
+      }
+
+  @staticmethod
+  def PayloadUri(channel, board, version, random_str, key=None,
+                 image_channel=None, image_version=None,
+                 src_version=None, bucket=None):
+    """Creates the gspath for a payload associated with a given build.
+
+    Args:
+      channel: What channel does the build belong to? Usually "xxx-channel"
+      board: What board is the build for? "x86-alex", "lumpy", etc.
+      version: What is the build version? "3015.0.0", "1945.76.3", etc
+      key: What is the signing key? "premp", "mp", "mp-v2", etc; None means
+           that the image is unsigned (e.g. a test image)
+      image_channel: Sometimes an image has a different channel than the build
+                     directory it's in. (ie: nplusone).
+      image_version: Sometimes an image has a different version than the build
+                     directory it's in. (ie: nplusone).
+      random_str: Force a given random string. None means generate one.
+      src_version: If this payload is a delta, this is the version of the image
+                   it updates from.
+      bucket: What bucket is the build in? (None means ChromeosReleases.BUCKET)
+
+    Returns:
+      The url for the specified build's payloads. Should be of the form:
+
+        gs://chromeos-releases/stable-channel/x86-alex/2913.377.0/payloads/
+          chromeos_0.12.433.257-2913.377.0_x86-alex_stable-channel_
+          delta_mp-v3.bin-b334762d0f6b80f471069153bbe8b97a.signed
+
+        gs://chromeos-releases/stable-channel/x86-alex/2913.377.0/payloads/
+          chromeos_2913.377.0_x86-alex_stable-channel_full_mp-v3.
+          bin-610c97c30fae8561bde01a6116d65cb9.signed
+    """
+
+    if image_channel is None:
+      image_channel = channel
+
+    if image_version is None:
+      image_version = version
+
+    return os.path.join(ChromeosReleases.BuildPayloadsUri(channel,
+                                                          board,
+                                                          version,
+                                                          bucket=bucket),
+
+                        ChromeosReleases.PayloadName(image_channel,
+                                                     board,
+                                                     image_version,
+                                                     key,
+                                                     random_str,
+                                                     src_version))
+
  @classmethod
  def ParsePayloadUri(cls, payload_uri):
    """Parse the URI of a payload into a Payload object.

    Args:
      payload_uri: GS URI of a full or delta payload (see samples below).

    Returns:
      A Payload object; for deltas, src_image is populated as well as
      tgt_image. Returns None if the URI matches neither payload form.
    """

    # Sample Delta URI:
    #   gs://chromeos-releases/stable-channel/x86-mario/4731.72.0/payloads/
    #   chromeos_4537.147.0-4731.72.0_x86-mario_stable-channel_delta_mp-v3.bin-
    #   3a90d8666d1d42b7a7367660b897e8c9.signed

    # Sample Full URI:
    # gs://chromeos-releases/stable-channel/x86-mario/4731.72.0/payloads/
    #   chromeos_4731.72.0_x86-mario_stable-channel_full_mp-v3.bin-
    #   969f24ba8cbf2096ebe3c57d5f0253b7.signed

    # Handle FULL payload URIs.
    full_exp = (r'^gs://(?P<bucket>.*)/(?P<channel>.*)/(?P<board>.*)/'
                r'(?P<version>.*)/payloads/chromeos_(?P<image_version>[^_]+)_'
                r'(?P=board)_(?P<image_channel>[^_]+)_full_(?P<key>[^_]+)\.bin'
                r'-[0-9A-Fa-f]+\.signed$')

    m = re.match(full_exp, payload_uri)

    if m:
      image_values = m.groupdict()

      # The image URIs can't be discovered from the payload URI.
      image_values['uri'] = None

      # Create the Payload.
      tgt_image = Image(image_values)
      return Payload(tgt_image=tgt_image, uri=payload_uri)

    # Handle DELTA payload URIs.
    delta_exp = (r'^gs://(?P<bucket>.*)/(?P<channel>.*)/(?P<board>.*)/'
                 r'(?P<version>.*)/payloads/chromeos_(?P<src_version>[^_]+)-'
                 r'(?P<image_version>[^_]+)_(?P=board)_'
                 r'(?P<image_channel>[^_]+)_delta_(?P<key>[^_]+)\.bin'
                 r'-[0-9A-Fa-f]+\.signed$')

    m = re.match(delta_exp, payload_uri)

    if m:
      image_values = m.groupdict()

      # The image URIs can't be discovered from the payload URI.
      image_values['uri'] = None

      # Remember the src_version for the src_image.
      src_version = image_values['src_version']
      del image_values['src_version']

      # Create the payload.
      # NOTE(review): this assumes Image() copies |image_values|; tgt_image
      # must not alias the dict mutated below — confirm in Image's ctor.
      tgt_image = Image(image_values)

      # Set the values which are different for src versions.
      image_values['version'] = src_version

      # The payload URI doesn't tell us any of these values. However, it's
      # a mostly safe bet that the src version has no
      # image_version/image_channel.
      # Not knowing the source key is problematic.
      image_values['image_version'] = None
      image_values['image_channel'] = None
      image_values['key'] = None

      src_image = Image(image_values)

      return Payload(src_image=src_image, tgt_image=tgt_image, uri=payload_uri)

    # The URI didn't match.
    return None
+
+
class ChromeosImageArchive(object):
  """Name space class for static methods for URIs in chromeos-image-archive."""

  BUCKET = 'chromeos-image-archive'

  @classmethod
  def BuildUri(cls, board, milestone, version, bucket=None):
    """Creates the gspath for a given build.

    Args:
      board: What board is the build for? "x86-alex", "lumpy", etc.
      milestone: a number that defines the milestone mark, e.g. 19 for R19.
      version: What is the build version? "3015.0.0", "1945.76.3", etc.
      bucket: the bucket the build is in (None means cls.BUCKET).

    Returns:
      The url for the specified build artifacts. Should be of the form:
      gs://chromeos-image-archive/board-release/R23-4.5.6
    """
    template = 'gs://%(bucket)s/%(board)s-release/R%(milestone)s-%(version)s'
    return template % {
        'bucket': bucket or cls.BUCKET,
        'board': board,
        'milestone': milestone,
        'version': version,
    }
+
+
def VersionKey(version):
  """Convert a version string to a comparable value.

  All old style values are considered older than all new style values.
  The actual values returned should only be used for comparison against
  other VersionKey results.

  Args:
    version: String with a build version "1.2.3" or "0.12.3.4"

  Returns:
    A value comparable against other version strings.
  """
  parts = [int(piece) for piece in version.split('.')]

  # New style versions have 3 components, old style have 4.
  assert len(parts) in (3, 4)

  # Prefix with flags so every new style version sorts after every old style
  # one: "1.2.3" -> [1, 0, 1, 2, 3] and "0.12.3.4" -> [0, 0, 12, 3, 4].
  prefix = [1, 0] if len(parts) == 3 else [0]
  return prefix + parts
+
+
def VersionGreater(left, right):
  """Compare two version strings. left > right

  Args:
    left: String with lefthand version string "1.2.3" or "0.12.3.4"
    right: String with righthand version string "1.2.3" or "0.12.3.4"

  Returns:
    left > right taking into account new style versions versus old style.
  """
  left_key = VersionKey(left)
  right_key = VersionKey(right)
  return left_key > right_key
+
+
def IsImage(a):
  """Check whether an object is an Image.

  Args:
    a: object whose type needs to be checked

  Returns:
    True if |a| is of Image type, False otherwise
  """
  return isinstance(a, Image)
+
+
def IsUnsignedImageArchive(a):
  """Check whether an object is an UnsignedImageArchive.

  Args:
    a: object whose type needs to be checked

  Returns:
    True if |a| is of UnsignedImageArchive type, False otherwise
  """
  return isinstance(a, UnsignedImageArchive)
diff --git a/lib/paygen/gspaths_unittest b/lib/paygen/gspaths_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/lib/paygen/gspaths_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/paygen/gspaths_unittest.py b/lib/paygen/gspaths_unittest.py
new file mode 100644
index 0000000..bc506ec
--- /dev/null
+++ b/lib/paygen/gspaths_unittest.py
@@ -0,0 +1,645 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test gspaths library."""
+
+from __future__ import print_function
+
+from chromite.lib import cros_test_lib
+from chromite.lib.paygen import gspaths
+
+
class GsPathsDataTest(cros_test_lib.TestCase):
  """Tests for structs defined in GsPaths."""

  def testBuild(self):
    """Verify Build construction, field defaults and string form."""
    raw_values = {
        'channel': 'foo-channel',
        'board': 'board-name',
        'version': '1.2.3',
    }
    # Unspecified fields should default to None.
    expected_values = dict(raw_values, bucket=None, uri=None)
    expected_str = ("Build definition (board='board-name',"
                    " version='1.2.3', channel='foo-channel')")

    build = gspaths.Build(raw_values)

    self.assertEqual(build, expected_values)
    self.assertEqual(expected_str, str(build))
+
+
+class GsPathsChromeosReleasesTest(cros_test_lib.TestCase):
+  """Tests for gspaths.ChromeosReleases."""
  # Standard Chrome OS releases names.
  _CHROMEOS_RELEASES_BUCKET = 'chromeos-releases'
  _AU_GENERATOR_FILE_NAME = 'au-generator.zip'

  # Google Storage path, image and payload name base templates.
  # These must stay in sync with the name formats produced by
  # gspaths.ChromeosReleases; the tests compare generated names/URIs against
  # these independently-written templates.
  _GS_BUILD_PATH_TEMPLATE = 'gs://%(bucket)s/%(channel)s/%(board)s/%(version)s'
  _IMAGE_NAME_TEMPLATE = (
      'chromeos_%(image_version)s_%(board)s_%(signed_image_type)s_'
      '%(image_channel)s_%(key)s.bin')
  _UNSIGNED_IMAGE_ARCHIVE_NAME_TEMPLATE = (
      'ChromeOS-%(unsigned_image_type)s-%(milestone)s-%(image_version)s-'
      '%(board)s.tar.xz')
  _FULL_PAYLOAD_NAME_TEMPLATE = (
      'chromeos_%(image_version)s_%(board)s_%(image_channel)s_full_%(key)s.bin-'
      '%(random_str)s.signed')
  _DELTA_PAYLOAD_NAME_TEMPLATE = (
      'chromeos_%(src_version)s-%(image_version)s_%(board)s_%(image_channel)s_'
      'delta_%(key)s.bin-%(random_str)s.signed')
  _UNSIGNED_FULL_PAYLOAD_NAME_TEMPLATE = (
      'chromeos_%(image_version)s_%(board)s_%(image_channel)s_full_'
      '%(unsigned_image_type)s.bin-%(random_str)s')
  _UNSIGNED_DELTA_PAYLOAD_NAME_TEMPLATE = (
      'chromeos_%(src_version)s-%(image_version)s_%(board)s_%(image_channel)s_'
      'delta_%(unsigned_image_type)s.bin-%(random_str)s')

  # Compound templates, built by joining the build path with a name template.
  _GS_IMAGE_PATH_TEMPLATE = '/'.join(
      (_GS_BUILD_PATH_TEMPLATE, _IMAGE_NAME_TEMPLATE))
  _GS_UNSIGNED_IMAGE_ARCHIVE_PATH_TEMPLATE = '/'.join(
      (_GS_BUILD_PATH_TEMPLATE, _UNSIGNED_IMAGE_ARCHIVE_NAME_TEMPLATE))
  _GS_PAYLOADS_PATH_TEMPLATE = '/'.join((_GS_BUILD_PATH_TEMPLATE, 'payloads'))
  _GS_PAYLOADS_SIGNING_PATH_TEMPLATE = '/'.join((_GS_BUILD_PATH_TEMPLATE,
                                                 'payloads', 'signing'))
  _GS_FULL_PAYLOAD_PATH_TEMPLATE = '/'.join(
      (_GS_PAYLOADS_PATH_TEMPLATE, _FULL_PAYLOAD_NAME_TEMPLATE))
  _GS_DELTA_PAYLOAD_PATH_TEMPLATE = '/'.join(
      (_GS_PAYLOADS_PATH_TEMPLATE, _DELTA_PAYLOAD_NAME_TEMPLATE))
+
  def setUp(self):
    """Set up fixture attributes shared by all tests in this class."""
    # Shared attributes (signed + unsigned images).
    self.bucket = 'crt'
    self.channel = 'foo-channel'
    self.board = 'board-name'
    self.version = '1.2.3'

    # Signed image attributes.
    self.key = 'mp-v3'
    self.signed_image_type = 'base'

    # Unsigned (test) image attributes.
    self.milestone = 'R12'
    self.unsigned_image_type = 'test'

    # Attributes used for payload testing.
    self.src_version = '1.1.1'
    self.random_str = '1234567890'

    # Dictionaries for populating templates.
    self.image_attrs = dict(
        bucket=self.bucket,
        channel=self.channel,
        image_channel=self.channel,
        board=self.board,
        version=self.version,
        image_version=self.version,
        key=self.key,
        signed_image_type=self.signed_image_type)
    self.unsigned_image_archive_attrs = dict(
        bucket=self.bucket,
        channel=self.channel,
        image_channel=self.channel,
        board=self.board,
        version=self.version,
        image_version=self.version,
        milestone=self.milestone,
        unsigned_image_type=self.unsigned_image_type)
    # Union of the two dicts above plus the payload-specific attributes;
    # used by _Populate as the default substitution values.
    self.all_attrs = dict(self.image_attrs,
                          src_version=self.src_version,
                          random_str=self.random_str,
                          **self.unsigned_image_archive_attrs)
+
+  def _Populate(self, template, **kwargs):
+    """Populates a template string with override attributes.
+
+    This will use the default test attributes to populate a given string
+    template. It will further override default field values with the values
+    provided by the optional named arguments.
+
+    Args:
+      template: a string with named substitution fields
+      kwargs: named attributes to override the defaults
+    """
+    attrs = dict(self.all_attrs, **kwargs)
+    return template % attrs
+
+  def _PopulateGsPath(self, base_path, suffix=None, **kwargs):
+    """Populates a Google Storage path template w/ optional suffix.
+
+    Args:
+      base_path: a path string template with named substitution fields
+      suffix: a path suffix to append to the given base path
+      kwargs: named attributes to override the defaults
+    """
+    template = base_path
+    if suffix:
+      template += '/' + suffix
+
+    return self._Populate(template, **kwargs)
+
+  def testBuildUri(self):
+    self.assertEquals(
+        gspaths.ChromeosReleases.BuildUri(self.channel,
+                                          self.board,
+                                          self.version,
+                                          bucket=self.bucket),
+        self._PopulateGsPath(self._GS_BUILD_PATH_TEMPLATE))
+
+    self.assertEquals(
+        gspaths.ChromeosReleases.BuildUri(self.channel,
+                                          self.board,
+                                          self.version),
+        self._PopulateGsPath(self._GS_BUILD_PATH_TEMPLATE,
+                             bucket=self._CHROMEOS_RELEASES_BUCKET))
+
+  def testGeneratorUri(self):
+    self.assertEquals(
+        gspaths.ChromeosReleases.GeneratorUri(self.channel,
+                                              self.board,
+                                              self.version,
+                                              bucket=self.bucket),
+        self._PopulateGsPath(self._GS_BUILD_PATH_TEMPLATE,
+                             suffix=self._AU_GENERATOR_FILE_NAME))
+
+    self.assertEquals(
+        gspaths.ChromeosReleases.GeneratorUri(self.channel,
+                                              self.board,
+                                              self.version),
+        self._PopulateGsPath(self._GS_BUILD_PATH_TEMPLATE,
+                             suffix=self._AU_GENERATOR_FILE_NAME,
+                             bucket=self._CHROMEOS_RELEASES_BUCKET))
+
+  def testBuildPayloadsUri(self):
+    self.assertEquals(
+        gspaths.ChromeosReleases.BuildPayloadsUri(self.channel,
+                                                  self.board,
+                                                  self.version,
+                                                  bucket=self.bucket),
+        self._PopulateGsPath(self._GS_PAYLOADS_PATH_TEMPLATE))
+
+    self.assertEquals(
+        gspaths.ChromeosReleases.BuildPayloadsUri(self.channel,
+                                                  self.board,
+                                                  self.version),
+        self._PopulateGsPath(self._GS_PAYLOADS_PATH_TEMPLATE,
+                             bucket=self._CHROMEOS_RELEASES_BUCKET))
+
+  def testBuildPayloadsSigningUri(self):
+    self.assertEquals(
+        gspaths.ChromeosReleases.BuildPayloadsSigningUri(self.channel,
+                                                         self.board,
+                                                         self.version,
+                                                         bucket=self.bucket),
+        self._PopulateGsPath(self._GS_PAYLOADS_SIGNING_PATH_TEMPLATE))
+
+    self.assertEquals(
+        gspaths.ChromeosReleases.BuildPayloadsSigningUri(self.channel,
+                                                         self.board,
+                                                         self.version),
+        self._PopulateGsPath(self._GS_PAYLOADS_SIGNING_PATH_TEMPLATE,
+                             bucket=self._CHROMEOS_RELEASES_BUCKET))
+
+  def testBuildPayloadsFlagUri(self):
+    self.assertEquals(
+        gspaths.ChromeosReleases.BuildPayloadsFlagUri(
+            self.channel,
+            self.board,
+            self.version,
+            gspaths.ChromeosReleases.SKIP,
+            bucket=self.bucket),
+        self._PopulateGsPath(self._GS_PAYLOADS_PATH_TEMPLATE,
+                             suffix='SKIP_flag'))
+
+    self.assertEquals(
+        gspaths.ChromeosReleases.BuildPayloadsFlagUri(
+            self.channel,
+            self.board,
+            self.version,
+            gspaths.ChromeosReleases.FINISHED,
+            bucket=self.bucket),
+        self._PopulateGsPath(self._GS_PAYLOADS_PATH_TEMPLATE,
+                             suffix='FINISHED_flag'))
+
+    self.assertEquals(
+        gspaths.ChromeosReleases.BuildPayloadsFlagUri(
+            self.channel,
+            self.board,
+            self.version,
+            gspaths.ChromeosReleases.LOCK,
+            bucket=self.bucket),
+        self._PopulateGsPath(self._GS_PAYLOADS_PATH_TEMPLATE,
+                             suffix='LOCK_flag'))
+
+    # Default bucket
+    self.assertEquals(
+        gspaths.ChromeosReleases.BuildPayloadsFlagUri(
+            self.channel,
+            self.board,
+            self.version,
+            gspaths.ChromeosReleases.SKIP),
+        self._PopulateGsPath(self._GS_PAYLOADS_PATH_TEMPLATE,
+                             suffix='SKIP_flag',
+                             bucket=self._CHROMEOS_RELEASES_BUCKET))
+
+  def testImageName(self):
+    self.assertEquals(
+        gspaths.ChromeosReleases.ImageName(self.channel,
+                                           self.board,
+                                           self.version,
+                                           self.key,
+                                           self.signed_image_type),
+        self._Populate(self._IMAGE_NAME_TEMPLATE))
+
+  def testUnsignedImageArchiveName(self):
+    self.assertEquals(
+        gspaths.ChromeosReleases.UnsignedImageArchiveName(
+            self.board,
+            self.version,
+            self.milestone,
+            self.unsigned_image_type),
+        self._Populate(self._UNSIGNED_IMAGE_ARCHIVE_NAME_TEMPLATE))
+
+  def testImageUri(self):
+    self.assertEquals(
+        gspaths.ChromeosReleases.ImageUri(self.channel,
+                                          self.board,
+                                          self.version,
+                                          self.key,
+                                          self.signed_image_type,
+                                          bucket=self.bucket),
+        self._Populate(self._GS_IMAGE_PATH_TEMPLATE))
+
+  def testUnsignedImageArchiveUri(self):
+    self.assertEquals(
+        gspaths.ChromeosReleases.UnsignedImageArchiveUri(
+            self.channel, self.board, self.version, self.milestone,
+            self.unsigned_image_type, bucket=self.bucket),
+        self._Populate(self._GS_UNSIGNED_IMAGE_ARCHIVE_PATH_TEMPLATE))
+
+  @staticmethod
+  def _IncrementVersion(version, inc_amount=1):
+    version_part = version.rpartition('.')
+    return '.'.join((version_part[0], str(int(version_part[2]) + inc_amount)))
+
  def testParseImageUri(self):
    """ParseImageUri round-trips basic and nplusone image URIs."""
    # An nplusone image lives in a build dir with a different
    # channel/version than its own.
    npo_version = self._IncrementVersion(self.version)
    npo_channel = 'nplusone-channel'

    basic_dict = dict(self.image_attrs)
    npo_dict = dict(self.image_attrs,
                    bucket=self._CHROMEOS_RELEASES_BUCKET,
                    image_version=npo_version,
                    image_channel=npo_channel)
    basic_dict['uri'] = uri_basic = self._GS_IMAGE_PATH_TEMPLATE % basic_dict
    npo_dict['uri'] = uri_npo = self._GS_IMAGE_PATH_TEMPLATE % npo_dict

    expected_basic = gspaths.Image(bucket=self.bucket,
                                   channel=self.channel,
                                   board=self.board,
                                   version=self.version,
                                   image_type=self.signed_image_type,
                                   key=self.key,
                                   uri=uri_basic)
    expected_basic_str = gspaths.ChromeosReleases.ImageName(
        expected_basic.channel, expected_basic.board, expected_basic.version,
        expected_basic.key, expected_basic.image_type)

    expected_npo = gspaths.Image(channel=self.channel,
                                 board=self.board,
                                 version=self.version,
                                 key=self.key,
                                 image_type=self.signed_image_type,
                                 image_channel=npo_channel,
                                 image_version=npo_version,
                                 uri=uri_npo)

    expected_npo_str = gspaths.ChromeosReleases.ImageName(
        expected_npo.image_channel, expected_npo.board,
        expected_npo.image_version, expected_npo.key, expected_npo.image_type)

    basic_image = gspaths.ChromeosReleases.ParseImageUri(uri_basic)
    self.assertEqual(basic_image, expected_basic)
    self.assertEqual(str(basic_image), expected_basic_str)

    npo_image = gspaths.ChromeosReleases.ParseImageUri(uri_npo)
    self.assertEqual(npo_image, expected_npo)
    self.assertEqual(str(npo_image), expected_npo_str)

    # A signer intermediate file must not parse as an image.
    signer_output = ('gs://chromeos-releases/dev-channel/link/4537.7.0/'
                     'chromeos_4537.7.1_link_recovery_nplusone-channel_'
                     'mp-v4.bin.1.payload.hash.update_signer.signed.bin')

    bad_image = gspaths.ChromeosReleases.ParseImageUri(signer_output)
    self.assertEqual(bad_image, None)
+
  def testParseUnsignedImageArchiveUri(self):
    """ParseUnsignedImageArchiveUri round-trips an archive URI."""
    attr_dict = dict(self.unsigned_image_archive_attrs)
    attr_dict['uri'] = uri = (
        self._GS_UNSIGNED_IMAGE_ARCHIVE_PATH_TEMPLATE % attr_dict)

    expected = gspaths.UnsignedImageArchive(bucket=self.bucket,
                                            channel=self.channel,
                                            board=self.board,
                                            version=self.version,
                                            milestone=self.milestone,
                                            image_type=self.unsigned_image_type,
                                            uri=uri)
    expected_str = gspaths.ChromeosReleases.UnsignedImageArchiveName(
        expected.board, expected.version, expected.milestone,
        expected.image_type)

    image = gspaths.ChromeosReleases.ParseUnsignedImageArchiveUri(uri)
    self.assertEqual(image, expected)
    self.assertEqual(str(image), expected_str)
+
  def testPayloadNamePreset(self):
    """PayloadName output matches the templates when random_str is preset."""
    full = gspaths.ChromeosReleases.PayloadName(channel=self.channel,
                                                board=self.board,
                                                version=self.version,
                                                key=self.key,
                                                random_str=self.random_str)

    delta = gspaths.ChromeosReleases.PayloadName(channel=self.channel,
                                                 board=self.board,
                                                 version=self.version,
                                                 key=self.key,
                                                 src_version=self.src_version,
                                                 random_str=self.random_str)

    # Omitting |key| selects the unsigned-payload name forms.
    full_unsigned = gspaths.ChromeosReleases.PayloadName(
        channel=self.channel,
        board=self.board,
        version=self.version,
        random_str=self.random_str,
        unsigned_image_type=self.unsigned_image_type)

    delta_unsigned = gspaths.ChromeosReleases.PayloadName(
        channel=self.channel,
        board=self.board,
        version=self.version,
        src_version=self.src_version,
        random_str=self.random_str,
        unsigned_image_type=self.unsigned_image_type)

    self.assertEqual(full, self._Populate(self._FULL_PAYLOAD_NAME_TEMPLATE))
    self.assertEqual(delta, self._Populate(self._DELTA_PAYLOAD_NAME_TEMPLATE))
    self.assertEqual(full_unsigned,
                     self._Populate(self._UNSIGNED_FULL_PAYLOAD_NAME_TEMPLATE))
    self.assertEqual(delta_unsigned,
                     self._Populate(self._UNSIGNED_DELTA_PAYLOAD_NAME_TEMPLATE))
+
  def testPayloadNameRandom(self):
    """PayloadName with no random_str still matches the templates."""
    full = gspaths.ChromeosReleases.PayloadName(channel=self.channel,
                                                board=self.board,
                                                version=self.version,
                                                key=self.key)

    delta = gspaths.ChromeosReleases.PayloadName(channel=self.channel,
                                                 board=self.board,
                                                 version=self.version,
                                                 key=self.key,
                                                 src_version=self.src_version)

    # Isolate the actual random string, transplant it in the reference template.
    full_random_str = full.split('-')[-1].partition('.')[0]
    self.assertEqual(
        full,
        self._Populate(self._FULL_PAYLOAD_NAME_TEMPLATE,
                       random_str=full_random_str))
    delta_random_str = delta.split('-')[-1].partition('.')[0]
    self.assertEqual(
        delta,
        self._Populate(self._DELTA_PAYLOAD_NAME_TEMPLATE,
                       random_str=delta_random_str))
+
  def testPayloadUri(self):
    """PayloadUri covers full/delta payloads with min/max argument sets."""
    test_random_channel = 'test_random_channel'
    test_max_version = '4.5.6'
    test_min_version = '0.12.1.0'

    # Minimal args: full payload, default bucket and image channel/version.
    min_full = gspaths.ChromeosReleases.PayloadUri(
        channel=self.channel,
        board=self.board,
        version=self.version,
        random_str=self.random_str,
        key=self.key)

    self.assertEqual(
        min_full,
        self._Populate(self._GS_FULL_PAYLOAD_PATH_TEMPLATE,
                       bucket=self._CHROMEOS_RELEASES_BUCKET))

    # Maximal args: full payload with explicit bucket and image overrides.
    max_full = gspaths.ChromeosReleases.PayloadUri(
        channel=self.channel,
        board=self.board,
        version=self.version,
        random_str=self.random_str,
        key=self.key,
        image_channel=test_random_channel,
        image_version=test_max_version,
        bucket=self.bucket)

    self.assertEqual(
        max_full,
        self._Populate(self._GS_FULL_PAYLOAD_PATH_TEMPLATE,
                       image_channel=test_random_channel,
                       image_version=test_max_version))

    # Minimal args: delta payload (src_version selects the delta form).
    min_delta = gspaths.ChromeosReleases.PayloadUri(
        channel=self.channel,
        board=self.board,
        version=self.version,
        random_str=self.random_str,
        key=self.key,
        src_version=test_min_version)

    self.assertEqual(
        min_delta,
        self._Populate(self._GS_DELTA_PAYLOAD_PATH_TEMPLATE,
                       bucket=self._CHROMEOS_RELEASES_BUCKET,
                       src_version=test_min_version))

    # Maximal args: delta payload with every override supplied.
    max_delta = gspaths.ChromeosReleases.PayloadUri(
        channel=self.channel,
        board=self.board,
        version=self.version,
        random_str=self.random_str,
        key=self.key,
        image_channel=test_random_channel,
        image_version=test_max_version,
        src_version=test_min_version,
        bucket=self.bucket)

    self.assertEqual(
        max_delta,
        self._Populate(self._GS_DELTA_PAYLOAD_PATH_TEMPLATE,
                       src_version=test_min_version,
                       image_version=test_max_version,
                       image_channel=test_random_channel))
+
+  def testParsePayloadUri(self):
+    """Test gsutils.ChromeosReleases.ParsePayloadUri()."""
+
+    image_version = '1.2.4'
+
+    full_uri = self._Populate(self._GS_FULL_PAYLOAD_PATH_TEMPLATE,
+                              bucket='chromeos-releases')
+
+    delta_uri = self._Populate(self._GS_DELTA_PAYLOAD_PATH_TEMPLATE,
+                               bucket='chromeos-releases')
+
+    max_full_uri = self._Populate(self._GS_FULL_PAYLOAD_PATH_TEMPLATE,
+                                  image_channel='image-channel',
+                                  image_version=image_version)
+
+    max_delta_uri = self._Populate(self._GS_DELTA_PAYLOAD_PATH_TEMPLATE,
+                                   image_channel='image-channel',
+                                   image_version=image_version)
+
+    self.assertDictEqual(
+        gspaths.ChromeosReleases.ParsePayloadUri(full_uri),
+        {
+            'tgt_image': gspaths.Image(board=self.board,
+                                       channel=self.channel,
+                                       version=self.version,
+                                       key=self.key),
+            'src_image': None,
+            'uri': full_uri,
+        })
+
+    self.assertDictEqual(
+        gspaths.ChromeosReleases.ParsePayloadUri(delta_uri),
+        {
+            'src_image': gspaths.Image(board=self.board,
+                                       channel=self.channel,
+                                       version=self.src_version),
+            'tgt_image': gspaths.Image(board=self.board,
+                                       channel=self.channel,
+                                       version=self.version,
+                                       key=self.key),
+            'uri': delta_uri,
+        })
+
+    self.assertDictEqual(
+        gspaths.ChromeosReleases.ParsePayloadUri(max_full_uri),
+        {
+            'tgt_image': gspaths.Image(bucket=self.bucket,
+                                       board=self.board,
+                                       channel=self.channel,
+                                       version=self.version,
+                                       key=self.key,
+                                       image_version=image_version,
+                                       image_channel='image-channel'),
+            'src_image': None,
+            'uri': max_full_uri,
+        })
+
+    self.assertDictEqual(
+        gspaths.ChromeosReleases.ParsePayloadUri(max_delta_uri),
+        {
+            'src_image': gspaths.Image(bucket=self.bucket,
+                                       board=self.board,
+                                       channel=self.channel,
+                                       version=self.src_version),
+            'tgt_image': gspaths.Image(bucket=self.bucket,
+                                       board=self.board,
+                                       channel=self.channel,
+                                       version=self.version,
+                                       key=self.key,
+                                       image_version=image_version,
+                                       image_channel='image-channel'),
+            'uri': max_delta_uri,
+        })
+
+
class GsPathsTest(cros_test_lib.TestCase):
  """Test general gspaths utilities."""

  def testVersionKey(self):
    """Test VersionKey, especially for new-style versus old-style."""

    values = ['1.2.3', '1.2.2', '2.0.0', '1.1.4',
              '1.2.3.4', '1.2.3.3', '1.2.4.4', '1.2.4.5', '1.3.3.4',
              '0.1.2.3', '0.14.45.32']

    sorted_values = sorted(values, key=gspaths.VersionKey)
    reverse_sorted_values = sorted(reversed(values), key=gspaths.VersionKey)

    # All 4-component (old-style) versions sort before all 3-component
    # (new-style) versions; within a style, ordering is numeric.
    expected_values = ['0.1.2.3', '0.14.45.32',
                       '1.2.3.3', '1.2.3.4', '1.2.4.4', '1.2.4.5', '1.3.3.4',
                       '1.1.4', '1.2.2', '1.2.3', '2.0.0']

    # assertEqual, not the deprecated assertEquals alias (removed in
    # Python 3.12).
    self.assertEqual(sorted_values, expected_values)
    self.assertEqual(reverse_sorted_values, expected_values)

  def testVersionGreater(self):
    """Test VersionGreater, especially for new-style versus old-style."""

    # New-style vs new-style comparisons.
    self.assertTrue(gspaths.VersionGreater('1.2.3', '1.2.2'))
    self.assertTrue(gspaths.VersionGreater('1.2.3', '1.1.4'))
    self.assertTrue(gspaths.VersionGreater('2.0.0', '1.2.3'))

    self.assertFalse(gspaths.VersionGreater('1.2.3', '1.2.3'))

    self.assertFalse(gspaths.VersionGreater('1.2.2', '1.2.3'))
    self.assertFalse(gspaths.VersionGreater('1.1.4', '1.2.3'))
    self.assertFalse(gspaths.VersionGreater('1.2.3', '2.0.0'))

    # Old-style vs old-style comparisons.
    self.assertTrue(gspaths.VersionGreater('1.2.3.4', '1.2.3.3'))
    self.assertTrue(gspaths.VersionGreater('1.2.4.4', '1.2.3.4'))
    self.assertTrue(gspaths.VersionGreater('1.3.3.4', '1.2.4.5'))
    self.assertTrue(gspaths.VersionGreater('2.0.0.0', '1.2.3.4'))

    self.assertFalse(gspaths.VersionGreater('1.2.3.4', '1.2.3.4'))

    self.assertFalse(gspaths.VersionGreater('1.2.3.3', '1.2.3.4'))
    self.assertFalse(gspaths.VersionGreater('1.2.3.4', '1.2.4.4'))
    self.assertFalse(gspaths.VersionGreater('1.2.4.5', '1.3.3.4'))
    self.assertFalse(gspaths.VersionGreater('1.2.3.4', '2.0.0.0'))

    # Mixed-style: any new-style version is greater than any old-style one.
    self.assertTrue(gspaths.VersionGreater('1.2.3', '1.2.3.4'))
    self.assertTrue(gspaths.VersionGreater('1.2.3', '0.1.2.3'))

    self.assertFalse(gspaths.VersionGreater('1.2.3.4', '1.2.3'))
    self.assertFalse(gspaths.VersionGreater('0.1.2.3', '1.2.3'))

  def testIsImage(self):
    """IsImage() is True only for gspaths.Image instances."""
    self.assertFalse(gspaths.IsImage(3.14))
    self.assertTrue(gspaths.IsImage(gspaths.Image()))

  def testIsUnsignedImageArchive(self):
    """IsUnsignedImageArchive() is True only for UnsignedImageArchive."""
    self.assertFalse(gspaths.IsUnsignedImageArchive(3.14))
    self.assertTrue(
        gspaths.IsUnsignedImageArchive(gspaths.UnsignedImageArchive()))
+
+
class ImageTest(cros_test_lib.TestCase):
  """Test Image class implementation."""

  def testImage_DefaultImageType(self):
    """An Image built without image_type defaults to 'recovery'."""
    default_image = gspaths.Image(bucket='bucket',
                                  board='board',
                                  channel='channel',
                                  version='version')
    # assertEqual, not the deprecated assertEquals alias.
    self.assertEqual('recovery', default_image.image_type)

  def testImage_CustomImageType(self):
    """An explicitly passed image_type is preserved."""
    custom_image_type = 'base'
    custom_image = gspaths.Image(bucket='bucket',
                                 board='board',
                                 channel='channel',
                                 version='version',
                                 image_type=custom_image_type)
    self.assertEqual(custom_image_type, custom_image.image_type)
diff --git a/lib/paygen/paygen_build_lib.py b/lib/paygen/paygen_build_lib.py
new file mode 100644
index 0000000..d86add2
--- /dev/null
+++ b/lib/paygen/paygen_build_lib.py
@@ -0,0 +1,1465 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""PayGen - Automatic Payload Generation.
+
+This library processes a single build at a time, and decides which payloads
+need to be generated. It then calls paygen_payload to generate each payload.
+
This library is responsible for locking builds during processing, and checking
+and setting flags to show that a build has been processed.
+"""
+
+from __future__ import print_function
+
+import ConfigParser
+import json
+import operator
+import os
+import shutil
+import socket
+import sys
+import tempfile
+import urlparse
+
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import failures_lib
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import parallel
+from chromite.lib import retry_util
+from chromite.lib.paygen import download_cache
+from chromite.lib.paygen import dryrun_lib
+from chromite.lib.paygen import gslib
+from chromite.lib.paygen import gslock
+from chromite.lib.paygen import gspaths
+from chromite.lib.paygen import paygen_payload_lib
+from chromite.lib.paygen import urilib
+
+
+# For crostools access.
+sys.path.insert(0, constants.SOURCE_ROOT)
+
+AUTOTEST_DIR = os.path.join(constants.SOURCE_ROOT, 'src', 'third_party',
+                            'autotest', 'files')
+sys.path.insert(0, AUTOTEST_DIR)
+
# If we are an external-only checkout, or a bootstrap environment, these
# imports will fail. We quietly ignore the failure, but leave bombs around
# that will explode if people try to really use this library.
+try:
+  from crostools.config import config
+  from crostools.omaha import query
+
+  # pylint: disable=F0401
+  from site_utils.autoupdate.lib import test_params
+  from site_utils.autoupdate.lib import test_control
+  # pylint: enable=F0401
+
+except ImportError:
+  config = None
+  query = None
+  test_params = None
+  test_control = None
+
+
+# The oldest release milestone for which run_suite should be attempted.
+RUN_SUITE_MIN_MSTONE = 30
+
+# Used to format timestamps on archived paygen.log file names in GS.
+PAYGEN_LOG_TIMESTAMP_FORMAT = '%Y%m%d-%H%M%S-UTC'
+
+# Used to lookup all FSIs for all boards.
+FSI_URI = 'gs://chromeos-build-release-console/fsis.json'
+
+
class Error(Exception):
  """Exception base class for this module.

  All exceptions raised by paygen_build_lib derive from this class.
  """
+
+
class EarlyExit(Error):
  """Base class for paygen_build 'normal' errors.

  There are a number of cases in which a paygen run fails for reasons that
  require special reporting, but which are normal enough to avoid raising
  big alarms. We signal these results using exceptions derived from this
  class.

  Note that the doc strings on the subclasses may be displayed directly
  to the user, and RESULT may be returned as an exit code.
  """

  def __str__(self):
    """Return the doc string to the user as the exception description."""
    return self.__doc__
+
+
class BuildFinished(EarlyExit):
  """This build has already been marked as finished, no need to process."""
  # Numeric result/exit code reported for this condition (see EarlyExit).
  RESULT = 22
+
+
class BuildLocked(EarlyExit):
  """This build is locked and already being processed elsewhere."""
  # Numeric result/exit code reported for this condition (see EarlyExit).
  RESULT = 23
+
+
class BuildSkip(EarlyExit):
  """This build has been marked as skip, and should not be processed."""
  # Numeric result/exit code reported for this condition (see EarlyExit).
  RESULT = 24
+
+
class BuildNotReady(EarlyExit):
  """Not all images for this build are uploaded, don't process it yet."""
  # Numeric result/exit code reported for this condition (see EarlyExit).
  RESULT = 25
+
+
class BoardNotConfigured(EarlyExit):
  """The board does not exist in the crostools release config."""
  # Numeric result/exit code reported for this condition (see EarlyExit).
  RESULT = 26
+
+
class BuildCorrupt(Error):
  """Exception raised if a build has unexpected images.

  Raised by image discovery/validation (see _ValidateExpectedBuildImages).
  Not an EarlyExit: this condition should raise alarms.
  """
+
+
class ImageMissing(Error):
  """Exception raised if a build doesn't have expected images.

  For example: no basic image, or an NPO image without its matching basic
  image (see _ValidateExpectedBuildImages).
  """
+
+
class PayloadTestError(Error):
  """Raised when an error is encountered with generation of test artifacts.

  Not an EarlyExit: this condition should raise alarms.
  """
+
+
class ArchiveError(Error):
  """Raised when there was a failure to map a build to the images archive.

  See _MapToArchive for the mapping logic that raises this.
  """
+
+
+def _LogList(title, obj_list):
+  """Helper for logging a list of objects.
+
+  Generates:
+    1: ObjA
+    2: ObjB
+    3: ObjC
+    ...
+
+  Args:
+    title: Title string for the list.
+    obj_list: List of objects to convert to string and log, one per line.
+  """
+  logging.info('%s:', title)
+
+  if not obj_list:
+    logging.info(' (no objects listed)')
+    return
+
+  index = 0
+
+  for obj in obj_list:
+    index += 1
+    logging.info(' %2d: %s', index, obj)
+
+
def _FilterForImages(artifacts):
  """Return only the gspaths.Image entries from a list of artifacts."""
  return [artifact for artifact in artifacts if gspaths.IsImage(artifact)]
+
+
def _FilterForMp(artifacts):
  """Return the MP keyed images in a list of artifacts.

  This returns all images with key names of the form "mp", "mp-v3", etc.

  Args:
    artifacts: The list of artifacts to filter.

  Returns:
    List of MP images.
  """
  images = _FilterForImages(artifacts)
  return [image for image in images if 'mp' in image.key.split('-')]
+
+
def _FilterForPremp(artifacts):
  """Return the PreMp keyed images in a list of artifacts.

  The key for an image is expected to be of the form "premp", "mp", or
  "mp-vX". This filter returns everything that is "premp".

  Args:
    artifacts: The list of artifacts to filter.

  Returns:
    List of PreMP images.
  """
  images = _FilterForImages(artifacts)
  return [image for image in images if 'premp' in image.key.split('-')]
+
+
def _FilterForBasic(artifacts):
  """Return the basic (not NPO) images in a list of artifacts.

  As an example, an image for a stable channel build might be in the
  "stable-channel", or it might be in the "npo-channel". This only returns
  the basic images that match "stable-channel".

  Args:
    artifacts: The list of artifacts to filter.

  Returns:
    List of basic images (those with no image_channel override).
  """
  images = _FilterForImages(artifacts)
  return [image for image in images if image.image_channel is None]
+
+
def _FilterForNpo(artifacts):
  """Return the NPO (N Plus One) images in a list of artifacts.

  Args:
    artifacts: The list of artifacts to filter.

  Returns:
    List of NPO images (those in 'nplusone-channel').
  """
  images = _FilterForImages(artifacts)
  return [image for image in images
          if image.image_channel == 'nplusone-channel']
+
+
def _FilterForUnsignedImageArchives(artifacts):
  """Return only the UnsignedImageArchive entries from a list of artifacts."""
  return [artifact for artifact in artifacts
          if gspaths.IsUnsignedImageArchive(artifact)]
+
+
+def _FilterForImageType(artifacts, image_type):
+  """Return only images for given |image_type|."""
+  return [i for i in artifacts if i.image_type == image_type]
+
+
def _FilterForValidImageType(artifacts):
  """Return only images with image types that paygen supports.

  Args:
    artifacts: The list of artifacts to filter.

  Returns:
    List of images whose image_type is in
    gspaths.ChromeosReleases.UNSIGNED_IMAGE_TYPES.
  """
  # Flatten with a comprehension rather than reduce(operator.add, ...):
  # reduce without an initializer raises TypeError if the list of valid
  # types is ever empty, and reduce is not a builtin in Python 3.
  valid_types = gspaths.ChromeosReleases.UNSIGNED_IMAGE_TYPES
  return [artifact for image_type in valid_types
          for artifact in _FilterForImageType(artifacts, image_type)]
+
+
def _FilterForTest(artifacts):
  """Return only test image archives."""
  archives = _FilterForUnsignedImageArchives(artifacts)
  return _FilterForImageType(archives, 'test')
+
+
def _GenerateSinglePayload(payload, work_dir, sign, au_generator_uri, dry_run):
  """Generate a single payload.

  This is intended to be safe to call inside a new process.

  Args:
    payload: gspath.Payload object defining the payloads to generate.
    work_dir: Working directory for payload generation.
    sign: boolean to decide if payload should be signed.
    au_generator_uri: URI of the au_generator.zip to use, None for the default.
    dry_run: boolean saying if this is a dry run.
  """
  # This cache dir will be shared with other processes, but we need our
  # own instance of the cache manager to properly coordinate.
  # The cache size is bounded by _PaygenBuild.CACHE_SIZE (50 GB).
  cache_dir = paygen_payload_lib.FindCacheDir()
  with download_cache.DownloadCache(
      cache_dir, cache_size=_PaygenBuild.CACHE_SIZE) as cache:
    # Actually generate the payload.
    paygen_payload_lib.CreateAndUploadPayload(
        payload,
        cache,
        work_dir=work_dir,
        sign=sign,
        au_generator_uri=au_generator_uri,
        dry_run=dry_run)
+
+
+class _PaygenBuild(object):
+  """This class is responsible for generating the payloads for a given build.
+
+  It operates across a single build at a time, and is responsible for locking
+  that build and for flagging it as finished when all payloads are generated.
+  """
+  # 50 GB of cache.
+  CACHE_SIZE = 50 * 1024 * 1024 * 1024
+
+  # Relative subpath for dumping control files inside the temp directory.
+  CONTROL_FILE_SUBDIR = os.path.join('autotest', 'au_control_files')
+
+  # The name of the suite of paygen-generated Autotest tests.
+  PAYGEN_AU_SUITE_TEMPLATE = 'paygen_au_%s'
+
+  # Name of the Autotest control file tarball.
+  CONTROL_TARBALL_TEMPLATE = PAYGEN_AU_SUITE_TEMPLATE + '_control.tar.bz2'
+
+  # Sleep time used in _DiscoverRequiredPayloads. Export so tests can change.
+  BUILD_DISCOVER_RETRY_SLEEP = 90
+
+  # Cache of full test payloads for a given version.
+  _version_to_full_test_payloads = {}
+
+  class PayloadTest(object):
+    """A payload test definition.
+
+    You must either use a delta payload, or specify both the src_channel and
+    src_version.
+
+    Attrs:
+      payload: A gspaths.Payload object describing the payload to be tested.
+
+      src_channel: The channel of the image to test updating from. Required
+                   if the payload is a full payload, required to be None if
+                   it's a delta.
+      src_version: The version of the image to test updating from. Required
+                   if the payload is a full payload, required to be None if
+                   it's a delta.
+        for a delta payload, as it already encodes the source version.
+    """
+    def __init__(self, payload, src_channel=None, src_version=None):
+      self.payload = payload
+
+      assert bool(src_channel) == bool(src_version), (
+          'src_channel(%s), src_version(%s) must both be set, or not set' %
+          (src_channel, src_version))
+
+      assert bool(src_channel and src_version) ^ bool(payload.src_image), (
+          'src_channel(%s), src_version(%s) required for full, not allowed'
+          ' for deltas. src_image: %s ' %
+          (src_channel, src_version, payload.src_image))
+
+      self.src_channel = src_channel or payload.src_image.channel
+      self.src_version = src_version or payload.src_image.version
+
+    def __str__(self):
+      return ('<test for %s%s>' %
+              (self.payload,
+               (' from version %s' % self.src_version)
+               if self.src_version else ''))
+
+    def __repr__(self):
+      return str(self)
+
+    def __eq__(self, other):
+      return (self.payload == other.payload and
+              self.src_channel == other.src_channel and
+              self.src_version == other.src_version)
+
  def __init__(self, build, work_dir, site_config,
               dry_run=False, ignore_finished=False,
               skip_full_payloads=False, skip_delta_payloads=False,
               skip_test_payloads=False, skip_nontest_payloads=False,
               control_dir=None, output_dir=None,
               run_parallel=False, run_on_builder=False, au_generator_uri=None):
    """Initializer.

    Args:
      build: gspaths.Build describing the build to generate payloads for.
      work_dir: Scratch directory for payload generation.
      site_config: Site configuration; used to map release board names to
        their chromeos-image-archive equivalents (see _MapToArchive).
      dry_run: If True, wrapped actions are logged, not executed (DryRunMgr).
      ignore_finished: If True, process the build even if it is already
        flagged as finished (also wrapped in a DryRunMgr).
      skip_full_payloads: If True, don't generate full payloads.
      skip_delta_payloads: If True, don't generate delta payloads.
      skip_test_payloads: If True, don't generate test payloads.
      skip_nontest_payloads: If True, don't generate non-test payloads.
      control_dir: Directory used for dumping autotest control files; when
        set, a missing test image becomes an error (see
        _DiscoverTestImageArchives).
      output_dir: Directory for output artifacts, or None.
      run_parallel: If True, payloads are generated in parallel processes.
      run_on_builder: True when running inside a builder environment.
      au_generator_uri: URI of the au_generator.zip to use, or None for the
        default.
    """
    self._build = build
    self._work_dir = work_dir
    self._site_config = site_config
    # DryRunMgr wrappers: calls through them are suppressed/logged when set.
    self._drm = dryrun_lib.DryRunMgr(dry_run)
    self._ignore_finished = dryrun_lib.DryRunMgr(ignore_finished)
    self._skip_full_payloads = skip_full_payloads
    self._skip_delta_payloads = skip_delta_payloads
    self._skip_test_payloads = skip_test_payloads
    self._skip_nontest_payloads = skip_nontest_payloads
    self._control_dir = control_dir
    self._output_dir = output_dir
    # Filled in by _DiscoverNmoBuild for later use.
    self._previous_version = None
    self._run_parallel = run_parallel
    self._run_on_builder = run_on_builder
    # Lazily-computed archive mapping (see _MapToArchive).
    self._archive_board = None
    self._archive_build = None
    self._archive_build_uri = None
    self._au_generator_uri = au_generator_uri
+
+  def _GetFlagURI(self, flag):
+    """Find the URI of the lock file associated with this build.
+
+    Args:
+      flag: Should be a member of gspaths.ChromeosReleases.FLAGS
+
+    Returns:
+      Returns a google storage path to the build flag requested.
+    """
+    return gspaths.ChromeosReleases.BuildPayloadsFlagUri(
+        self._build.channel, self._build.board, self._build.version, flag,
+        bucket=self._build.bucket)
+
+  def _MapToArchive(self, board, version):
+    """Returns the chromeos-image-archive equivalents for the build.
+
+    Args:
+      board: The board name (per chromeos-releases).
+      version: The build version.
+
+    Returns:
+      A tuple consisting of the archive board name, build name and build URI.
+
+    Raises:
+      ArchiveError: if we could not compute the mapping.
+    """
+    # Map chromeos-releases board name to its chromeos-image-archive equivalent.
+    archive_board_candidates = set([
+        archive_board for archive_board in self._site_config.GetBoards()
+        if archive_board.replace('_', '-') == board])
+    if len(archive_board_candidates) == 0:
+      raise ArchiveError('could not find build board name for %s' % board)
+    elif len(archive_board_candidates) > 1:
+      raise ArchiveError('found multiple build board names for %s: %s' %
+                         (board, ', '.join(archive_board_candidates)))
+
+    archive_board = archive_board_candidates.pop()
+
+    # Find something in the respective chromeos-image-archive build directory.
+    archive_build_search_uri = gspaths.ChromeosImageArchive.BuildUri(
+        archive_board, '*', version)
+    archive_build_file_uri_list = urilib.ListFiles(archive_build_search_uri)
+    if not archive_build_file_uri_list:
+      raise ArchiveError('cannot find archive build directory for %s' %
+                         archive_build_search_uri)
+
+    # Use the first search result.
+    uri_parts = urlparse.urlsplit(archive_build_file_uri_list[0])
+    archive_build_path = os.path.dirname(uri_parts.path)
+    archive_build = archive_build_path.strip('/')
+    archive_build_uri = urlparse.urlunsplit((uri_parts.scheme,
+                                             uri_parts.netloc,
+                                             archive_build_path,
+                                             '', ''))
+
+    return archive_board, archive_build, archive_build_uri
+
+  def _ValidateExpectedBuildImages(self, build, images):
+    """Validate that we got the expected images for a build.
+
+    We expect that for any given build will have at most the following four
+    builds:
+
+      premp basic build.
+      mp basic build.
+      premp NPO build.
+      mp NPO build.
+
+    We also expect that it will have at least one basic build, and never have
+    an NPO build for which it doesn't have a matching basic build.
+
+    Args:
+      build: The build the images are from.
+      images: The images discovered associated with the build.
+
+    Raises:
+      BuildCorrupt: Raised if unexpected images are found.
+      ImageMissing: Raised if expected images are missing.
+    """
+
+    premp_basic = _FilterForBasic(_FilterForPremp(images))
+    premp_npo = _FilterForNpo(_FilterForPremp(images))
+    mp_basic = _FilterForBasic(_FilterForMp(images))
+    mp_npo = _FilterForNpo(_FilterForMp(images))
+
+    # Make sure there is no more than one of each of our basic types.
+    for i in (premp_basic, premp_npo, mp_basic, mp_npo):
+      if len(i) > 1:
+        msg = '%s has unexpected filtered images: %s.' % (build, i)
+        raise BuildCorrupt(msg)
+
+    # Make sure there were no unexpected types of images.
+    if len(images) != len(premp_basic + premp_npo + mp_basic + mp_npo):
+      msg = '%s has unexpected unfiltered images: %s' % (build, images)
+      raise BuildCorrupt(msg)
+
+    # Make sure there is at least one basic image.
+    if not premp_basic and not mp_basic:
+      msg = '%s has no basic images.' % build
+      raise ImageMissing(msg)
+
+    # Can't have a premp NPO with the match basic image.
+    if premp_npo and not premp_basic:
+      msg = '%s has a premp NPO, but not a premp basic image.' % build
+      raise ImageMissing(msg)
+
+    # Can't have an mp NPO with the match basic image.
+    if mp_npo and not mp_basic:
+      msg = '%s has a mp NPO, but not a mp basic image.' % build
+      raise ImageMissing(msg)
+
  def _DiscoverImages(self, build):
    """Return a list of images associated with a given build.

    Args:
      build: The build (gspaths.Build) to find images for.

    Returns:
      A list of images associated with the build. This may include premp, mp,
      and premp/mp NPO images. We don't currently ever expect more than these
      four combinations to be present.

    Raises:
      BuildCorrupt: Raised if unexpected images are found.
      ImageMissing: Raised if expected images are missing.
    """
    # Ideally, |image_type| below should be constrained to the type(s) expected
    # for the board. But the board signing configs are not easily accessible at
    # this point, so we use the wildcard here and rely on the signers to upload
    # the expected artifacts.
    search_uri = gspaths.ChromeosReleases.ImageUri(
        build.channel, build.board, build.version, key='*', image_type='*',
        image_channel='*', image_version='*', bucket=build.bucket)

    image_uris = urilib.ListFiles(search_uri)
    images = [gspaths.ChromeosReleases.ParseImageUri(uri) for uri in image_uris]

    # Unparsable URIs will result in Nones; filter them out.
    images = [i for i in images if i]

    # We only care about recovery and test image types, ignore all others.
    images = _FilterForValidImageType(images)

    # Raises BuildCorrupt/ImageMissing on unexpected image combinations.
    self._ValidateExpectedBuildImages(build, images)

    return images
+
  def _DiscoverTestImageArchives(self, build):
    """Return a list of unsigned image archives associated with a given build.

    Args:
      build: The build (gspaths.Build) to find image archives for.

    Returns:
      A list of test image archives associated with the build. Normally, there
      should be exactly one such item.

    Raises:
      BuildCorrupt: Raised if unexpected images are found.
      ImageMissing: Raised if expected images are missing (only enforced when
        self._control_dir is set, i.e. when test artifacts are needed).
    """
    search_uri = gspaths.ChromeosReleases.UnsignedImageArchiveUri(
        build.channel, build.board, build.version, milestone='*',
        image_type='test', bucket=build.bucket)

    image_uris = urilib.ListFiles(search_uri)
    images = [gspaths.ChromeosReleases.ParseUnsignedImageArchiveUri(uri)
              for uri in image_uris]

    # Unparsable URIs will result in Nones; filter them out.
    images = [i for i in images if i]

    # Make sure we found the expected number of build images (1).
    if len(images) > 1:
      raise BuildCorrupt('%s has multiple test images: %s' % (build, images))

    # A missing test image is only an error when control files are requested.
    if self._control_dir and len(images) < 1:
      raise ImageMissing('%s has no test image' % build)

    return images
+
+  def _DiscoverActiveFsiBuilds(self):
+    """Read fsi_images in release.conf.
+
+    fsi_images is a list of chromeos versions. We assume each one is
+    from the same build/channel as we are and use it to identify a new
+    build. The values in release.conf are only valid for the stable-channel.
+
+    These results only include 'active' FSIs which are still generating a lot
+    of update requests. We normally expect to generate delta payloads for
+    these FSIs.
+
+    Returns:
+      List of gspaths.Build instances for each build so discovered. The list
+      may be empty.
+    """
+    # TODO(dgarrett): Switch to JSON mechanism in _DiscoverAllFsiBuilds
+    #   after it's in production, and after we clear the change with the TPMs.
+    #   At that time, check and ignore FSIs without the is_delta_supported flag.
+    # TODO(pprabhu): Can't switch to _DiscoverAllFsiBuilds till the HACK there
+    #   is removed.
+
+    # FSI versions are only defined for the stable-channel.
+    if self._build.channel != 'stable-channel':
+      return []
+
+    try:
+      fsi_versions = config.GetListValue(self._build.board, 'fsi_images')
+    except ConfigParser.NoOptionError:
+      # fsi_images is an optional field.
+      return []
+
+    results = []
+    for version in fsi_versions:
+      results.append(gspaths.Build(version=version,
+                                   board=self._build.board,
+                                   channel=self._build.channel,
+                                   bucket=self._build.bucket))
+    return results
+
  def _DiscoverAllFsiBuilds(self):
    """Pull FSI list from Golden Eye.

    Returns a list of chromeos versions. We assume each one is
    from the same build/channel as we are and use it to identify a new
    build. This assumption is currently valid, but not 100% safe.

    Returns a list of all FSI images for a given board, even 'inactive' values.

    Returns:
      List of version strings (e.g. '4.5.6') for each FSI so discovered; the
      list may be empty. NOTE: unlike _DiscoverActiveFsiBuilds, these are NOT
      gspaths.Build instances — the values appended below are the raw
      'chrome_os_version' strings from the JSON feed.
    """
    results = []
    # XXX:HACK -- FSI builds for this board is known to brick the DUTs in the
    # lab. As a workaround, we're dropping test coverage for this board
    # temporarily (crbug.com/460174).
    # TODO(pprabhu) Remove hack once we have a real solution (crbug.com/462320).
    if self._build.board == 'peach-pit':
      return results

    contents = json.loads(gslib.Cat(FSI_URI))

    for fsi in contents.get('fsis', []):
      fsi_board = fsi['board']['public_codename']
      fsi_version = fsi['chrome_os_version']

      if fsi_board == self._build.board:
        results.append(fsi_version)

    return results
+
+  def _DiscoverNmoBuild(self):
+    """Find the currently published version to our channel/board.
+
+    We assume it was actually built with our current channel/board. This also
+    updates an object member with the previous build, in the case that
+    subsequent logic needs to make use of this knowledge.
+
+    Returns:
+      List of gspaths.Build for previously published builds. Since we can only
+      know about the currently published version, this always contain zero or
+      one entries.
+    """
+    self._previous_version = query.FindLatestPublished(self._build.channel,
+                                                       self._build.board)
+
+    if self._previous_version:
+      return [gspaths.Build(gspaths.Build(version=self._previous_version,
+                                          board=self._build.board,
+                                          channel=self._build.channel,
+                                          bucket=self._build.bucket))]
+
+    return []
+
+  def _DiscoverRequiredFullPayloads(self, images):
+    """Find the Payload objects for the images from the current build.
+
+    In practice, this creates a full payload definition for every image passed
+    in.
+
+    Args:
+      images: The images for the current build.
+
+    Returns:
+      A list of gspaths.Payload objects for full payloads for every image.
+    """
+    return [gspaths.Payload(tgt_image=i) for i in images]
+
  def _DiscoverRequiredNpoDeltas(self, images):
    """Find the NPO deltas for the images from the current build.

    Images from the current build, already filtered to be all MP or all PREMP.

    Args:
      images: The key-filtered images (gspaths.Image) for the current build.

    Returns:
      A list of gspaths.Payload objects for the deltas needed for NPO testing.
      May be empty.
    """
    basics = _FilterForBasic(images)
    # If previously filtered for premp, and filtered for npo, there can only
    # be one of each.
    assert len(basics) <= 1, 'Unexpected images found %s' % basics
    if basics:
      # Only consider NPO images whose image_type matches the basic image.
      npos = _FilterForImageType(_FilterForNpo(images), basics[0].image_type)
      assert len(npos) <= 1, 'Unexpected NPO images found %s' % npos
      if npos:
        return [gspaths.Payload(tgt_image=npos[0], src_image=basics[0])]

    # No basic image, or no matching NPO image: nothing to generate.
    return []
+
+  # TODO(garnold) The reason we need this separately from
+  # _DiscoverRequiredNpoDeltas is that, with test images, we generate
+  # a current -> current delta rather than a real current -> NPO one (there are
+  # no test NPO images generated, unfortunately). Also, the naming of signed
+  # images is different from that of test image archives, so we need different
+  # filtering logic. In all likelihood, we will stop generating NPO deltas with
+  # signed images once this feature stabilizes; at this point, there will no
+  # longer be any use for a signed NPO.
+  def _DiscoverRequiredTestNpoDeltas(self, images):
+    """Find the NPO deltas test-equivalent for images from the current build.
+
+    Args:
+      images: The pre-filtered test images for the current build.
+
+    Returns:
+      A (possibly empty) list of gspaths.Payload objects representing NPO
+      deltas of test images.
+    """
+    # If previously filtered for test images, there must be at most one image.
+    assert len(images) <= 1, 'Unexpected test images found %s' % images
+
+    if images:
+      return [gspaths.Payload(tgt_image=images[0], src_image=images[0])]
+
+    return []
+
+  def _DiscoverRequiredFromPreviousDeltas(self, images, previous_images):
+    """Find the deltas from previous builds.
+
+    All arguments should already be filtered to be all MP or all PREMP.
+
+    Args:
+      images: The key-filtered images for the current build.
+      previous_images: The key-filtered images from previous builds from
+                       which delta payloads should be generated.
+
+    Returns:
+      A list of gspaths.Payload objects for the deltas needed from the previous
+      builds, which may be empty.
+    """
+    # If we have no images to delta to, no results.
+    if not images:
+      return []
+
+    # After filtering for NPO, and for MP/PREMP, there can be only one!
+    assert len(images) == 1, 'Unexpected images found %s.' % images
+    image = images[0]
+    # Filter artifacts that have the same |image_type| as that of |image|.
+    previous_images_by_type = _FilterForImageType(previous_images,
+                                                  image.image_type)
+
+    results = []
+
+    # We should never generate downgrades, they are unsafe. Deltas to the
+    # same images are useless. Neither case normally happens unless
+    # we are re-generating payloads for old builds.
+    for prev in previous_images_by_type:
+      if gspaths.VersionGreater(image.version, prev.version):
+        # A delta from each previous image to current image.
+        results.append(gspaths.Payload(tgt_image=image, src_image=prev))
+      else:
+        logging.info('Skipping %s is not older than target', prev)
+
+    return results
+
+  def _DiscoverRequiredPayloads(self):
+    """Find the payload definitions for the current build.
+
+    This method finds the images for the current build, and for all builds we
+    need deltas from, and decides what payloads are needed.
+
+    IMPORTANT: The order in which payloads are listed is significant as it
+    reflects on the payload generation order. The current way is to list test
+    payloads last, as they are of lesser importance from the release process
+    standpoint, and may incur failures that do not affect the signed payloads
+    and may be otherwise detrimental to the release schedule.
+
+    Returns:
+      A list of tuples of the form (payload, skip), where payload is an
+      instance of gspath.Payload and skip is a Boolean that says whether it
+      should be skipped (i.e. not generated).
+
+    Raises:
+      BuildNotReady: If the current build doesn't seem to have all of its
+          images available yet. This commonly happens because the signer hasn't
+          finished signing the current build.
+      BuildCorrupt: If current or previous builds have unexpected images.
+      ImageMissing: Raised if expected images are missing for previous builds.
+    """
+    # Initiate a list that will contain lists of payload subsets, along with a
+    # Boolean stating whether or not we need to skip generating them.
+    payload_sublists_skip = []
+
+    try:
+      # When discovering the images for our current build, they might not
+      # be discoverable right away (GS eventual consistency). So, we retry.
+      images = retry_util.RetryException(ImageMissing, 3,
+                                         self._DiscoverImages, self._build,
+                                         sleep=self.BUILD_DISCOVER_RETRY_SLEEP)
+      images += self._DiscoverTestImageArchives(self._build)
+    except ImageMissing as e:
+      # If the main build doesn't have the final build images, then it's
+      # not ready.
+      logging.info(e)
+      raise BuildNotReady()
+
+    _LogList('Images found', images)
+
+    # Discover active FSI builds we need deltas from.
+    fsi_builds = self._DiscoverActiveFsiBuilds()
+    if fsi_builds:
+      _LogList('Active FSI builds considered', fsi_builds)
+    else:
+      logging.info('No active FSI builds found')
+
+    # Discover other previous builds we need deltas from.
+    previous_builds = [b for b in self._DiscoverNmoBuild()
+                       if b not in fsi_builds]
+    if previous_builds:
+      _LogList('Other previous builds considered', previous_builds)
+    else:
+      logging.info('No other previous builds found')
+
+    # Discover the images from those previous builds, and put them into
+    # a single list. Raises ImageMissing if no images are found.
+    previous_images = []
+    for b in previous_builds:
+      try:
+        previous_images += self._DiscoverImages(b)
+      except ImageMissing as e:
+        # Temporarily allow generation of delta payloads to fail because of
+        # a missing previous build until crbug.com/243916 is addressed.
+        # TODO(mtennant): Remove this when bug is fixed properly.
+        logging.warning('Previous build image is missing, skipping: %s', e)
+
+        # We also clear the previous version field so that subsequent code does
+        # not attempt to generate a full update test from the N-1 version;
+        # since this version has missing images, no payloads were generated for
+        # it and test generation is bound to fail.
+        # TODO(garnold) This should be reversed together with the rest of this
+        # block.
+        self._previous_version = None
+
+        # In this case, we should also skip test image discovery; since no
+        # signed deltas will be generated from this build, we don't need to
+        # generate test deltas from it.
+        continue
+
+      previous_images += self._DiscoverTestImageArchives(b)
+
+    for b in fsi_builds:
+      previous_images += self._DiscoverImages(b)
+      previous_images += self._DiscoverTestImageArchives(b)
+
+    # Only consider base (signed) and test previous images.
+    filtered_previous_images = _FilterForBasic(previous_images)
+    filtered_previous_images += _FilterForTest(previous_images)
+    previous_images = filtered_previous_images
+
+    # Generate full payloads for all non-test images in the current build.
+    # Include base, NPO, premp, and mp (if present).
+    payload_sublists_skip.append(
+        (self._skip_full_payloads or self._skip_nontest_payloads,
+         self._DiscoverRequiredFullPayloads(_FilterForImages(images))))
+
+    # Deltas for current -> NPO (pre-MP and MP).
+    payload_sublists_skip.append(
+        (self._skip_delta_payloads or self._skip_nontest_payloads,
+         self._DiscoverRequiredNpoDeltas(_FilterForPremp(images))))
+    payload_sublists_skip.append(
+        (self._skip_delta_payloads or self._skip_nontest_payloads,
+         self._DiscoverRequiredNpoDeltas(_FilterForMp(images))))
+
+    # Deltas for previous -> current (pre-MP and MP).
+    payload_sublists_skip.append(
+        (self._skip_delta_payloads or self._skip_nontest_payloads,
+         self._DiscoverRequiredFromPreviousDeltas(
+             _FilterForPremp(_FilterForBasic(images)),
+             _FilterForPremp(previous_images))))
+    payload_sublists_skip.append(
+        (self._skip_delta_payloads or self._skip_nontest_payloads,
+         self._DiscoverRequiredFromPreviousDeltas(
+             _FilterForMp(_FilterForBasic(images)),
+             _FilterForMp(previous_images))))
+
+    # Only discover test payloads if Autotest is not disabled.
+    if self._control_dir:
+      # Full test payloads.
+      payload_sublists_skip.append(
+          (self._skip_full_payloads or self._skip_test_payloads,
+           self._DiscoverRequiredFullPayloads(_FilterForTest(images))))
+
+      # Delta for current -> NPO (test payloads).
+      payload_sublists_skip.append(
+          (self._skip_delta_payloads or self._skip_test_payloads,
+           self._DiscoverRequiredTestNpoDeltas(_FilterForTest(images))))
+
+      # Deltas for previous -> current (test payloads).
+      payload_sublists_skip.append(
+          (self._skip_delta_payloads or self._skip_test_payloads,
+           self._DiscoverRequiredFromPreviousDeltas(
+               _FilterForTest(images), _FilterForTest(previous_images))))
+
+    # Organize everything into a single list of (payload, skip) pairs; also, be
+    # sure to fill in a URL for each payload.
+    payloads_skip = []
+    for (do_skip, payloads) in payload_sublists_skip:
+      for payload in payloads:
+        paygen_payload_lib.FillInPayloadUri(payload)
+        payloads_skip.append((payload, do_skip))
+
+    return payloads_skip
+
+  def _GeneratePayloads(self, payloads, lock=None):
+    """Generate the payloads called for by a list of payload definitions.
+
+    It will keep going, even if there is a failure.
+
+    Args:
+      payloads: gspath.Payload objects defining all of the payloads to generate.
+      lock: gslock protecting this paygen_build run.
+
+    Raises:
+      Any arbitrary exception raised by CreateAndUploadPayload.
+    """
+    payloads_args = [(payload,
+                      self._work_dir,
+                      isinstance(payload.tgt_image, gspaths.Image),
+                      self._au_generator_uri,
+                      bool(self._drm))
+                     for payload in payloads]
+
+    if self._run_parallel:
+      parallel.RunTasksInProcessPool(_GenerateSinglePayload, payloads_args)
+    else:
+      for args in payloads_args:
+        _GenerateSinglePayload(*args)
+
+        # This can raise LockNotAcquired, if the lock timed out during a
+        # single payload generation.
+        if lock:
+          lock.Renew()
+
+  def _FindFullTestPayloads(self, channel, version):
+    """Returns a list of full test payloads for a given version.
+
+    Uses the current build's board and bucket values. This method caches the
+    full test payloads previously discovered as we may be using them for
+    multiple tests in a single run.
+
+    Args:
+      channel: Channel to look in for payload.
+      version: A build version whose payloads to look for.
+
+    Returns:
+      A (possibly empty) list of payload URIs.
+    """
+    assert channel
+    assert version
+
+    if (channel, version) in self._version_to_full_test_payloads:
+      # Serve from cache, if possible.
+      return self._version_to_full_test_payloads[(channel, version)]
+
+    payload_search_uri = gspaths.ChromeosReleases.PayloadUri(
+        channel, self._build.board, version, '*',
+        bucket=self._build.bucket)
+
+    payload_candidate = urilib.ListFiles(payload_search_uri)
+
+    # We create related files for each payload that have the payload name
+    # plus these extensions. Skip these files.
+    NOT_PAYLOAD = ('.json', '.log')
+    full_test_payloads = [u for u in payload_candidate
+                          if not any([u.endswith(n) for n in NOT_PAYLOAD])]
+    # Store in cache.
+    self._version_to_full_test_payloads[(channel, version)] = full_test_payloads
+    return full_test_payloads
+
+  def _EmitControlFile(self, payload_test, suite_name, control_dump_dir):
+    """Emit an Autotest control file for a given payload test."""
+    # Figure out the source version for the test.
+    payload = payload_test.payload
+    src_version = payload_test.src_version
+    src_channel = payload_test.src_channel
+
+    # Discover the full test payload that corresponds to the source version.
+    src_payload_uri_list = self._FindFullTestPayloads(src_channel, src_version)
+    if not src_payload_uri_list:
+      logging.error('Cannot find full test payload for source version (%s), '
+                    'control file not generated', src_version)
+      raise PayloadTestError('cannot find source payload for testing %s' %
+                             payload)
+
+    if len(src_payload_uri_list) != 1:
+      logging.error('Found multiple (%d) full test payloads for source version '
+                    '(%s), control file not generated:\n%s',
+                    len(src_payload_uri_list), src_version,
+                    '\n'.join(src_payload_uri_list))
+      raise PayloadTestError('multiple source payloads found for testing %s' %
+                             payload)
+
+    src_payload_uri = src_payload_uri_list[0]
+    logging.info('Source full test payload found at %s', src_payload_uri)
+
+    release_archive_uri = gspaths.ChromeosReleases.BuildUri(
+        src_channel, self._build.board, src_version)
+
+    # TODO(dgarrett): Remove if block after finishing crbug.com/523122
+    if not urilib.Exists(os.path.join(release_archive_uri, 'stateful.tgz')):
+      logging.warning('Falling back to chromeos-image-archive: %s', payload)
+      try:
+        _, _, source_archive_uri = self._MapToArchive(
+            payload.tgt_image.board, src_version)
+      except ArchiveError as e:
+        raise PayloadTestError(
+            'error mapping source build to images archive: %s' % e)
+      stateful_archive_uri = os.path.join(source_archive_uri, 'stateful.tgz')
+      logging.info('Copying stateful.tgz from %s -> %s',
+                   stateful_archive_uri, release_archive_uri)
+      urilib.Copy(stateful_archive_uri, release_archive_uri)
+
+    test = test_params.TestConfig(
+        self._archive_board,
+        suite_name,               # Name of the test (use the suite name).
+        False,                    # Using test images.
+        bool(payload.src_image),  # Whether this is a delta.
+        src_version,
+        payload.tgt_image.version,
+        src_payload_uri,
+        payload.uri,
+        suite_name=suite_name,
+        source_archive_uri=release_archive_uri)
+
+    with open(test_control.get_control_file_name()) as f:
+      control_code = f.read()
+    control_file = test_control.dump_autotest_control_file(
+        test, None, control_code, control_dump_dir)
+    logging.info('Control file emitted at %s', control_file)
+    return control_file
+
+  def _ScheduleAutotestTests(self, suite_name):
+    """Run the appropriate command to schedule the Autotests we have prepped.
+
+    Args:
+      suite_name: The name of the test suite.
+    """
+    # Because of crbug.com/383481, if we run delta updates against
+    # a source payload earlier than R38, the DUTs assigned to the
+    # test may fail when rebooting.  The failed devices can be hard
+    # to recover.  The bug affects spring and skate, but as of this
+    # writing, only spring is still losing devices.
+    #
+    # So, until it's all sorted, we're going to skip testing on
+    # spring devices.
+    if self._archive_board == 'daisy_spring':
+      logging.info('Skipping payload autotest for board %s',
+                   self._archive_board)
+      return
+    timeout_mins = config_lib.HWTestConfig.DEFAULT_HW_TEST_TIMEOUT / 60
+    if self._run_on_builder:
+      try:
+        commands.RunHWTestSuite(board=self._archive_board,
+                                build=self._archive_build,
+                                suite=suite_name,
+                                file_bugs=True,
+                                pool='bvt',
+                                priority=constants.HWTEST_BUILD_PRIORITY,
+                                retry=True,
+                                wait_for_results=True,
+                                timeout_mins=timeout_mins,
+                                suite_min_duts=2,
+                                debug=bool(self._drm))
+      except failures_lib.TestWarning as e:
+        logging.warning('Warning running test suite; error output:\n%s', e)
+    else:
+      # Run run_suite.py locally.
+      cmd = [
+          os.path.join(AUTOTEST_DIR, 'site_utils', 'run_suite.py'),
+          '--board', self._archive_board,
+          '--build', self._archive_build,
+          '--suite_name', suite_name,
+          '--file_bugs', 'True',
+          '--pool', 'bvt',
+          '--retry', 'True',
+          '--timeout_mins', str(timeout_mins),
+          '--no_wait', 'False',
+          '--suite_min_duts', '2',
+      ]
+      logging.info('Running autotest suite: %s', ' '.join(cmd))
+      try:
+        cros_build_lib.RunCommand(cmd)
+      except cros_build_lib.RunCommandError as e:
+        if e.result.returncode:
+          logging.error('Error (%d) running test suite; error output:\n%s',
+                        e.result.returncode, e.result.error)
+          raise PayloadTestError('failed to run test (return code %d)' %
+                                 e.result.returncode)
+
+  def _AutotestPayloads(self, payload_tests):
+    """Create necessary test artifacts and initiate Autotest runs.
+
+    Args:
+      payload_tests: An iterable of PayloadTest objects defining payload tests.
+    """
+    # Create inner hierarchy for dumping Autotest control files.
+    control_dump_dir = os.path.join(self._control_dir,
+                                    self.CONTROL_FILE_SUBDIR)
+    os.makedirs(control_dump_dir)
+
+    # Customize the test suite's name based on this build's channel.
+    test_channel = self._build.channel.rpartition('-')[0]
+    suite_name = (self.PAYGEN_AU_SUITE_TEMPLATE % test_channel)
+
+    # Emit a control file for each payload.
+    logging.info('Emitting control files into %s', control_dump_dir)
+    for payload_test in payload_tests:
+      self._EmitControlFile(payload_test, suite_name, control_dump_dir)
+
+    tarball_name = self.CONTROL_TARBALL_TEMPLATE % test_channel
+
+    # Must use an absolute tarball path since tar is run in a different cwd.
+    tarball_path = os.path.join(self._control_dir, tarball_name)
+
+    # Create the tarball.
+    logging.info('Packing %s in %s into %s', self.CONTROL_FILE_SUBDIR,
+                 self._control_dir, tarball_path)
+    cmd_result = cros_build_lib.CreateTarball(
+        tarball_path, self._control_dir,
+        compression=cros_build_lib.COMP_BZIP2,
+        inputs=[self.CONTROL_FILE_SUBDIR])
+    if cmd_result.returncode != 0:
+      logging.error('Error (%d) when tarring control files',
+                    cmd_result.returncode)
+      raise PayloadTestError(
+          'failed to create autotest tarball (return code %d)' %
+          cmd_result.returncode)
+
+    # Upload the tarball, be sure to make it world-readable.
+    upload_target = os.path.join(self._archive_build_uri, tarball_name)
+    logging.info('Uploading autotest control tarball to %s', upload_target)
+    gslib.Copy(tarball_path, upload_target, acl='public-read')
+
+    # Do not run the suite for older builds whose suite staging logic is
+    # broken.  We use the build's milestone number as a rough estimate to
+    # whether or not it's recent enough. We derive the milestone number from
+    # the archive build name, which takes the form
+    # boardname-release/R12-3456.78.9 (in this case it is 12).
+    try:
+      build_mstone = int(self._archive_build.partition('/')[2]
+                         .partition('-')[0][1:])
+      if build_mstone < RUN_SUITE_MIN_MSTONE:
+        logging.warning('Build milestone < %s, test suite scheduling skipped',
+                        RUN_SUITE_MIN_MSTONE)
+        return
+    except ValueError:
+      raise PayloadTestError(
+          'Failed to infer archive build milestone number (%s)' %
+          self._archive_build)
+
+    # Actually have the tests run.
+    self._ScheduleAutotestTests(suite_name)
+
+  @staticmethod
+  def _IsTestDeltaPayload(payload):
+    """Returns True iff a given payload is a test delta one."""
+    return (payload.tgt_image.get('image_type', 'signed') != 'signed' and
+            payload.src_image is not None)
+
+  def _CreateFsiPayloadTests(self, payload, fsi_versions):
+    """Create PayloadTests against a list of board FSIs.
+
+    Args:
+      payload: The payload we are trying to test.
+      fsi_versions: The list of known FSIs for this board.
+
+    Returns:
+      A list of PayloadTest objects to test with, may be empty.
+    """
+    # Make sure we try oldest FSIs first for testing.
+    fsi_versions = sorted(fsi_versions, key=gspaths.VersionKey)
+    logging.info('Considering FSI tests against: %s', ', '.join(fsi_versions))
+
+    for fsi in fsi_versions:
+      # If the FSI is newer than what we are generating, skip it.
+      if gspaths.VersionGreater(fsi, payload.tgt_image.version):
+        logging.info(
+            '  FSI newer than payload, Skipping FSI test against: %s', fsi)
+        continue
+
+      # Validate that test artifacts exist. The results are thrown away.
+      if not self._FindFullTestPayloads('stable-channel', fsi):
+        # Some of our old FSIs have no test artifacts, so not finding them
+        # isn't an error. Skip that FSI and try the next.
+        logging.info('  No artifacts, skipping FSI test against: %s', fsi)
+        continue
+
+      logging.info('  Scheduling FSI test against: %s', fsi)
+      return [self.PayloadTest(
+          payload, src_channel='stable-channel', src_version=fsi)]
+
+    # If there are no FSIs, or no testable FSIs, no tests.
+    logging.info('No FSIs with artifacts, not scheduling FSI update test.')
+    return []
+
+  def _CreatePayloadTests(self, payloads):
+    """Returns a list of test configurations for a given list of payloads.
+
+    Args:
+      payloads: A list of (already generated) build payloads.
+
+    Returns:
+      A list of PayloadTest objects defining payload test cases.
+    """
+    payload_tests = []
+    for payload in payloads:
+      # We are only testing test payloads.
+      if payload.tgt_image.get('image_type', 'signed') == 'signed':
+        continue
+
+      # Distinguish between delta (source version encoded) and full payloads.
+      if payload.src_image is None:
+        # Create a full update test from NMO, if we are newer.
+        if not self._previous_version:
+          logging.warning('No previous build, not testing full update %s from '
+                          'NMO', payload)
+        elif gspaths.VersionGreater(
+            self._previous_version, payload.tgt_image.version):
+          logging.warning(
+              'NMO (%s) is newer than target (%s), skipping NMO full '
+              'update test.', self._previous_version, payload)
+        else:
+          payload_tests.append(self.PayloadTest(
+              payload, src_channel=self._build.channel,
+              src_version=self._previous_version))
+
+        # Create a full update test from the current version to itself.
+        payload_tests.append(self.PayloadTest(
+            payload,
+            src_channel=self._build.channel,
+            src_version=self._build.version))
+
+        # Create a full update test from oldest viable FSI.
+        payload_tests += self._CreateFsiPayloadTests(
+            payload, self._DiscoverAllFsiBuilds())
+      else:
+        # Create a delta update test.
+        payload_tests.append(self.PayloadTest(payload))
+
+    return payload_tests
+
+  def _CleanupBuild(self):
+    """Clean up any leaked temp files associated with this build in GS."""
+    # Clean up any signer client files that leaked on this or previous
+    # runs.
+    self._drm(gslib.Remove,
+              gspaths.ChromeosReleases.BuildPayloadsSigningUri(
+                  self._build.channel, self._build.board, self._build.version,
+                  bucket=self._build.bucket),
+              recurse=True, ignore_no_match=True)
+
+  def CreatePayloads(self):
+    """Get lock on this build, and Process if we succeed.
+
+    While holding the lock, check assorted build flags to see if we should
+    process this build.
+
+    Raises:
+      BuildSkip: If the build was marked with a skip flag.
+      BuildFinished: If the build was already marked as finished.
+      BuildLocked: If the build is locked by another server or process.
+    """
+    lock_uri = self._GetFlagURI(gspaths.ChromeosReleases.LOCK)
+    skip_uri = self._GetFlagURI(gspaths.ChromeosReleases.SKIP)
+    finished_uri = self._GetFlagURI(gspaths.ChromeosReleases.FINISHED)
+
+    logging.info('Examining: %s', self._build)
+
+    try:
+      with gslock.Lock(lock_uri, dry_run=bool(self._drm)) as build_lock:
+        # If the build was marked to skip, skip
+        if gslib.Exists(skip_uri):
+          raise BuildSkip()
+
+        # If the build was already marked as finished, we're finished.
+        if self._ignore_finished(gslib.Exists, finished_uri):
+          raise BuildFinished()
+
+        logging.info('Starting: %s', self._build)
+
+        payloads_skip = self._DiscoverRequiredPayloads()
+
+        # Assume we can finish the build until we find a reason we can't.
+        can_finish = True
+
+        if self._output_dir:
+          can_finish = False
+
+        # Find out which payloads already exist, updating the payload object's
+        # URI accordingly. In doing so we're creating a list of all payload
+        # objects and their skip/exist attributes. We're also recording whether
+        # this run will be skipping any actual work.
+        payloads_attrs = []
+        for payload, skip in payloads_skip:
+          if self._output_dir:
+            # output_dir means we are forcing all payloads to be generated
+            # with a new destination.
+            result = [os.path.join(self._output_dir,
+                                   os.path.basename(payload.uri))]
+            exists = False
+          else:
+            result = paygen_payload_lib.FindExistingPayloads(payload)
+            exists = bool(result)
+
+          if result:
+            paygen_payload_lib.SetPayloadUri(payload, result[0])
+          elif skip:
+            can_finish = False
+
+          payloads_attrs.append((payload, skip, exists))
+
+        # Display payload generation list, including payload name and whether
+        # or not it already exists or will be skipped.
+        log_items = []
+        for payload, skip, exists in payloads_attrs:
+          desc = str(payload)
+          if exists:
+            desc += ' (exists)'
+          elif skip:
+            desc += ' (skipped)'
+          log_items.append(desc)
+
+        _LogList('All payloads for the build', log_items)
+
+        # Generate new payloads.
+        new_payloads = [payload for payload, skip, exists in payloads_attrs
+                        if not (skip or exists)]
+        if new_payloads:
+          logging.info('Generating %d new payload(s)', len(new_payloads))
+          self._GeneratePayloads(new_payloads, build_lock)
+        else:
+          logging.info('No new payloads to generate')
+
+        # Test payloads.
+        if not self._control_dir:
+          logging.info('Payload autotesting disabled.')
+        elif not can_finish:
+          logging.warning('Not all payloads were generated/uploaded, '
+                          'skipping payload autotesting.')
+        else:
+          try:
+            # Check that the build has a corresponding archive directory. If it
+            # does not, then testing should not be attempted.
+            archive_board, archive_build, archive_build_uri = (
+                self._MapToArchive(self._build.board, self._build.version))
+            self._archive_board = archive_board
+            self._archive_build = archive_build
+            self._archive_build_uri = archive_build_uri
+
+            # We have a control file directory and all payloads have been
+            # generated. Lets create the list of tests to conduct.
+            payload_tests = self._CreatePayloadTests(
+                [payload for payload, _, _ in payloads_attrs])
+            if payload_tests:
+              logging.info('Initiating %d payload tests', len(payload_tests))
+              self._drm(self._AutotestPayloads, payload_tests)
+          except ArchiveError as e:
+            logging.warning('Cannot map build to images archive, skipping '
+                            'payload autotesting.')
+            can_finish = False
+
+        self._CleanupBuild()
+        if can_finish:
+          self._drm(gslib.CreateWithContents, finished_uri,
+                    socket.gethostname())
+        else:
+          logging.warning('Not all payloads were generated, uploaded or '
+                          'tested; not marking build as finished')
+
+        logging.info('Finished: %s', self._build)
+
+    except gslock.LockNotAcquired as e:
+      logging.info('Build already being processed: %s', e)
+      raise BuildLocked()
+
+    except EarlyExit:
+      logging.info('Nothing done: %s', self._build)
+      raise
+
+    except Exception:
+      logging.error('Failed: %s', self._build)
+      raise
+
+
+def _FindControlFileDir(work_dir):
+  """Decide the directory for emitting control files.
+
+  If a working directory is passed in, we create a unique directory inside
+  it; otherwise we use Python's default tempdir.
+
+  Args:
+    work_dir: Create the control file directory here (None for the default).
+
+  Returns:
+    Path to a unique directory that the caller is responsible for cleaning up.
+  """
+  # Setup assorted working directories.
+  # It is safe for multiple parallel instances of paygen_payload to share the
+  # same working directory.
+  if work_dir and not os.path.exists(work_dir):
+    os.makedirs(work_dir)
+
+  # If work_dir is None, then mkdtemp will use '/tmp'
+  return tempfile.mkdtemp(prefix='paygen_build-control_files.', dir=work_dir)
+
+
+def ValidateBoardConfig(board):
+  """Validate that we have config values for the specified |board|.
+
+  Args:
+    board: Name of board to check.
+
+  Raises:
+    BoardNotConfigured if the board is unknown.
+  """
+  # Right now, we just validate that the board exists.
+  if board not in config.GetCompleteBoardSet():
+    raise BoardNotConfigured(board)
+
+
+def CreatePayloads(build, work_dir, site_config, dry_run=False,
+                   ignore_finished=False, skip_full_payloads=False,
+                   skip_delta_payloads=False, skip_test_payloads=False,
+                   skip_nontest_payloads=False, disable_tests=False,
+                   output_dir=None, run_parallel=False, run_on_builder=False,
+                   au_generator_uri=None):
+  """Helper method that generates payloads for a given build.
+
+  Args:
+    build: gspaths.Build instance describing the build to generate payloads for.
+    work_dir: Directory to contain both scratch and long-term work files.
+    site_config: A valid SiteConfig. Only used to map board names.
+    dry_run: Do not generate payloads (optional).
+    ignore_finished: Ignore the FINISHED flag (optional).
+    skip_full_payloads: Do not generate full payloads.
+    skip_delta_payloads: Do not generate delta payloads.
+    skip_test_payloads: Do not generate test payloads.
+    skip_nontest_payloads: Do not generate non-test payloads.
+    disable_tests: Do not attempt generating test artifacts or running tests.
+    output_dir: Directory for payload files, or None for GS default locations.
+    run_parallel: Generate payloads in parallel processes.
+    run_on_builder: Running in a cbuildbot environment on a builder.
+    au_generator_uri: URI of au_generator.zip to use, None to use the default.
+  """
+  ValidateBoardConfig(build.board)
+
+  control_dir = None
+  try:
+    if not disable_tests:
+      control_dir = _FindControlFileDir(work_dir)
+
+    _PaygenBuild(build, work_dir, site_config,
+                 dry_run=dry_run,
+                 ignore_finished=ignore_finished,
+                 skip_full_payloads=skip_full_payloads,
+                 skip_delta_payloads=skip_delta_payloads,
+                 skip_test_payloads=skip_test_payloads,
+                 skip_nontest_payloads=skip_nontest_payloads,
+                 control_dir=control_dir, output_dir=output_dir,
+                 run_parallel=run_parallel,
+                 run_on_builder=run_on_builder,
+                 au_generator_uri=au_generator_uri).CreatePayloads()
+
+  finally:
+    if control_dir:
+      shutil.rmtree(control_dir)
diff --git a/lib/paygen/paygen_build_lib_unittest b/lib/paygen/paygen_build_lib_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/lib/paygen/paygen_build_lib_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/paygen/paygen_build_lib_unittest.py b/lib/paygen/paygen_build_lib_unittest.py
new file mode 100644
index 0000000..39e7439
--- /dev/null
+++ b/lib/paygen/paygen_build_lib_unittest.py
@@ -0,0 +1,1780 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for paygen_build_lib."""
+
+from __future__ import print_function
+
+import itertools
+import mox
+import os
+import shutil
+import tempfile
+import unittest
+
+from chromite.cbuildbot import commands
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import config_lib_unittest
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import parallel
+
+from chromite.lib.paygen import download_cache
+from chromite.lib.paygen import gslock
+from chromite.lib.paygen import gslib
+from chromite.lib.paygen import gspaths
+from chromite.lib.paygen import urilib
+from chromite.lib.paygen import paygen_build_lib
+from chromite.lib.paygen import paygen_payload_lib
+from chromite.lib.paygen import utils
+
+
+# We access a lot of protected members during testing.
+# pylint: disable=protected-access
+
+
+class BasePaygenBuildLibTest(cros_test_lib.MoxTempDirTestCase):
+  """Base class for testing PaygenBuildLib class."""
+
+  def _GetPaygenBuildInstance(self, skip_test_payloads=False,
+                              disable_tests=False):
+    """Helper method to create a standard Paygen instance."""
+    control_dir = None if disable_tests else '/tmp/foo'
+
+    return paygen_build_lib._PaygenBuild(self.foo_build, self.work_dir,
+                                         config_lib_unittest.MockSiteConfig(),
+                                         control_dir=control_dir,
+                                         skip_test_payloads=skip_test_payloads)
+
+  def _GetBuildTestImage(self, build):
+    """Returns a test image object for the build.
+
+    Args:
+      build: gspaths.Build object describing the build to create fake images
+        for.
+    """
+    return gspaths.UnsignedImageArchive(bucket=build.bucket,
+                                        channel=build.channel,
+                                        board=build.board,
+                                        version=build.version,
+                                        milestone='R12',
+                                        image_type='test')
+
+  def _TestDiscoverArtifacts(self, list_files_uri, list_files_result,
+                             test_func, test_args, should_succeed,
+                             expected_result):
+    """Test artifact discovery using mocked gsutil results."""
+    self.mox.StubOutWithMock(urilib, 'ListFiles')
+    urilib.ListFiles(list_files_uri).AndReturn(list_files_result)
+    self.mox.ReplayAll()
+
+    if should_succeed:
+      self.assertEqual(test_func(*test_args), expected_result)
+    else:
+      self.assertRaises(expected_result, test_func, *test_args)
+
+
+class PaygenBuildLibTest(BasePaygenBuildLibTest):
+  """Test PaygenBuildLib class."""
+
+  def setUp(self):
+    self.work_dir = '/work/foo'
+
+    self.prev_image = gspaths.Image(channel='foo-channel',
+                                    board='foo-board',
+                                    version='1.0.0',
+                                    key='mp')
+    self.prev2_image = gspaths.Image(channel='foo-channel',
+                                     board='foo-board',
+                                     version='1.1.0',
+                                     key='mp')
+
+    self.foo_build = gspaths.Build(bucket='crt',
+                                   channel='foo-channel',
+                                   board='foo-board',
+                                   version='1.2.3')
+
+    # Create an additional 'special' image like NPO that isn't NPO,
+    # and keyed with a weird key. It should match none of the filters.
+    self.special_image = gspaths.Image(bucket='crt',
+                                       channel='foo-channel',
+                                       board='foo-board',
+                                       version='1.2.3',
+                                       key='foo-key',
+                                       image_channel='special-channel')
+
+    self.images = self._GetBuildImages(self.foo_build)
+    (self.basic_image, self.premp_image,
+     self.npo_image, self.premp_npo_image) = self.images
+
+    self.test_image = self._GetBuildTestImage(self.foo_build)
+
+  def _GetBuildImages(self, build):
+    """Create basic_image, premp_image, npo_image, premp_npo_image.
+
+    Args:
+      build: gspaths.Build object describing the build to create fake images
+        for.
+    """
+    # NPOs should have image_version incremented, but it doesn't matter for our
+    # testing.
+    basic_image = gspaths.Image(key='mp-v2', **build)
+    npo_image = gspaths.Image(key='mp-v2',
+                              image_channel='nplusone-channel',
+                              image_version=build.version,
+                              **build)
+    premp_image = gspaths.Image(key='premp', **build)
+    premp_npo_image = gspaths.Image(key='premp',
+                                    image_channel='nplusone-channel',
+                                    image_version=build.version,
+                                    **build)
+
+    # Code in several places depends on the order.
+    return [basic_image, premp_image, npo_image, premp_npo_image]
+
+  def testGetFlagURI(self):
+    """Validate the helper method to create flag URIs for our current build."""
+    paygen = self._GetPaygenBuildInstance()
+
+    self.assertEqual(
+        paygen._GetFlagURI(gspaths.ChromeosReleases.LOCK),
+        'gs://crt/foo-channel/foo-board/1.2.3/payloads/LOCK_flag')
+    self.assertEqual(
+        paygen._GetFlagURI(gspaths.ChromeosReleases.SKIP),
+        'gs://crt/foo-channel/foo-board/1.2.3/payloads/SKIP_flag')
+    self.assertEqual(
+        paygen._GetFlagURI(gspaths.ChromeosReleases.FINISHED),
+        'gs://crt/foo-channel/foo-board/1.2.3/payloads/FINISHED_flag')
+
+  def testFilterHelpers(self):
+    """Test the _FilterFor{Mp,Premp,Basic,Npo} helper methods."""
+
+    # All of the filter helpers should handle empty list.
+    self.assertEqual(paygen_build_lib._FilterForMp([]), [])
+    self.assertEqual(paygen_build_lib._FilterForPremp([]), [])
+    self.assertEqual(paygen_build_lib._FilterForBasic([]), [])
+    self.assertEqual(paygen_build_lib._FilterForNpo([]), [])
+
+    # prev_image lets us test with an 'mp' key, instead of an 'mp-v2' key.
+    images = list(self.images) + [self.special_image, self.prev_image]
+
+    self.assertEqual(paygen_build_lib._FilterForMp(images),
+                     [self.basic_image, self.npo_image, self.prev_image])
+
+    self.assertEqual(paygen_build_lib._FilterForPremp(images),
+                     [self.premp_image, self.premp_npo_image])
+
+    self.assertEqual(paygen_build_lib._FilterForBasic(images),
+                     [self.basic_image, self.premp_image, self.prev_image])
+
+    self.assertEqual(paygen_build_lib._FilterForNpo(images),
+                     [self.npo_image, self.premp_npo_image])
+
+  def testValidateExpectedBuildImages(self):
+    """Test a function that validates expected images are found on a build."""
+    paygen = self._GetPaygenBuildInstance()
+
+    # Test with basic mp image only.
+    paygen._ValidateExpectedBuildImages(self.foo_build, (self.basic_image,))
+
+    # Test with basic mp and mp npo images.
+    paygen._ValidateExpectedBuildImages(self.foo_build, (self.basic_image,
+                                                         self.npo_image))
+    # Test with basic mp and premp images.
+    paygen._ValidateExpectedBuildImages(self.foo_build, (self.basic_image,
+                                                         self.premp_image))
+
+    # Test with basic mp and premp images.
+    paygen._ValidateExpectedBuildImages(self.foo_build, (self.basic_image,
+                                                         self.premp_image,
+                                                         self.npo_image))
+
+    # Test with 4 different images.
+    paygen._ValidateExpectedBuildImages(self.foo_build, (self.basic_image,
+                                                         self.premp_image,
+                                                         self.npo_image,
+                                                         self.premp_npo_image))
+
+    # No images isn't valid.
+    with self.assertRaises(paygen_build_lib.ImageMissing):
+      paygen._ValidateExpectedBuildImages(self.foo_build, [])
+
+    # NPO image only isn't valid.
+    with self.assertRaises(paygen_build_lib.ImageMissing):
+      paygen._ValidateExpectedBuildImages(self.foo_build, (self.npo_image,))
+
+    # NPO without matching basic isn't valid.
+    with self.assertRaises(paygen_build_lib.ImageMissing):
+      paygen._ValidateExpectedBuildImages(self.foo_build,
+                                          (self.premp_image,
+                                           self.npo_image,
+                                           self.premp_npo_image))
+
+    # More than one of the same type of image should trigger BuildCorrupt
+    with self.assertRaises(paygen_build_lib.BuildCorrupt):
+      paygen._ValidateExpectedBuildImages(self.foo_build, (self.basic_image,
+                                                           self.basic_image))
+
+    # Unexpected images should trigger BuildCorrupt
+    with self.assertRaises(paygen_build_lib.BuildCorrupt):
+      paygen._ValidateExpectedBuildImages(self.foo_build,
+                                          (self.basic_image,
+                                           self.npo_image,
+                                           self.special_image))
+
+  def testDiscoverImages(self):
+    """Test _DiscoverImages."""
+    paygen = self._GetPaygenBuildInstance()
+    uri_base = 'gs://crt/foo-channel/foo-board/1.2.3'
+
+    uri_basic = os.path.join(
+        uri_base, 'chromeos_1.2.3_foo-board_recovery_foo-channel_mp-v3.bin')
+    uri_premp = os.path.join(
+        uri_base, 'chromeos_1.2.3_foo-board_recovery_foo-channel_premp.bin')
+    uri_npo = os.path.join(
+        uri_base,
+        'chromeos_1.2.4_foo-board_recovery_nplusone-channel_mp-v3.bin')
+    file_list_result = [uri_basic, uri_premp, uri_npo]
+
+    base_image_params = {'channel': 'foo-channel',
+                         'board': 'foo-board',
+                         'version': '1.2.3',
+                         'bucket': 'crt'}
+    expected_basic = gspaths.Image(key='mp-v3', uri=uri_basic,
+                                   **base_image_params)
+    expected_premp = gspaths.Image(key='premp', uri=uri_premp,
+                                   **base_image_params)
+    expected_npo = gspaths.Image(key='mp-v3', image_channel='nplusone-channel',
+                                 image_version='1.2.4', uri=uri_npo,
+                                 **base_image_params)
+    expected_result = [expected_basic, expected_premp, expected_npo]
+
+    self._TestDiscoverArtifacts(
+        os.path.join(uri_base, 'chromeos_*_foo-board_*_*_*.bin'),
+        file_list_result,
+        paygen._DiscoverImages,
+        [self.foo_build],
+        True,
+        expected_result)
+
+  def testDiscoverTestImageArchives(self):
+    """Test _DiscoverTestImageArchives (success)."""
+    paygen = self._GetPaygenBuildInstance()
+    uri_base = 'gs://crt/foo-channel/foo-board/1.2.3'
+
+    uri_test_archive = os.path.join(
+        uri_base, 'ChromeOS-test-R12-1.2.3-foo-board.tar.xz')
+    file_list_result = [uri_test_archive]
+
+    expected_test_archive = gspaths.UnsignedImageArchive(
+        channel='foo-channel',
+        board='foo-board',
+        version='1.2.3',
+        bucket='crt',
+        uri=uri_test_archive,
+        milestone='R12',
+        image_type='test')
+    expected_result = [expected_test_archive]
+
+    self._TestDiscoverArtifacts(
+        os.path.join(uri_base, 'ChromeOS-test-*-1.2.3-foo-board.tar.xz'),
+        file_list_result,
+        paygen._DiscoverTestImageArchives,
+        [self.foo_build],
+        True,
+        expected_result)
+
+  def testDiscoverTestImageArchivesMultipleResults(self):
+    """Test _DiscoverTestImageArchives (fails due to multiple results)."""
+    paygen = self._GetPaygenBuildInstance()
+    uri_base = 'gs://crt/foo-channel/foo-board/1.2.3'
+
+    uri_test_archive1 = os.path.join(
+        uri_base, 'ChromeOS-test-R12-1.2.3-foo-board.tar.xz')
+    uri_test_archive2 = os.path.join(
+        uri_base, 'ChromeOS-test-R13-1.2.3-foo-board.tar.xz')
+    file_list_result = [uri_test_archive1, uri_test_archive2]
+
+    self._TestDiscoverArtifacts(
+        os.path.join(uri_base, 'ChromeOS-test-*-1.2.3-foo-board.tar.xz'),
+        file_list_result,
+        paygen._DiscoverTestImageArchives,
+        [self.foo_build],
+        False,
+        paygen_build_lib.BuildCorrupt)
+
+  def testDiscoverTestImageArchivesMissing(self):
+    """Test _DiscoverTestImageArchives (fails due to missing images)."""
+    paygen = self._GetPaygenBuildInstance()
+    uri_base = 'gs://crt/foo-channel/foo-board/1.2.3'
+
+    self._TestDiscoverArtifacts(
+        os.path.join(uri_base, 'ChromeOS-test-*-1.2.3-foo-board.tar.xz'),
+        [],
+        paygen._DiscoverTestImageArchives,
+        [self.foo_build],
+        False,
+        paygen_build_lib.ImageMissing)
+
+  @unittest.skipIf(not paygen_build_lib.config, 'Internal crostools required.')
+  def testDiscoverActiveFsiBuilds(self):
+    """Using test release.conf values, test _DiscoverActiveFsiBuilds."""
+
+    test_config = """
+[valid-board]
+fsi_images: 2913.331.0,2465.105.0
+
+[no-fsi-board]
+"""
+    paygen_build_lib.config.LoadTestConfig(test_config)
+
+    # Test a board with FSI values on stable-channel.
+    paygen = paygen_build_lib._PaygenBuild(
+        gspaths.Build(channel='stable-channel', board='valid-board',
+                      version='1.2.3'),
+        self.work_dir,
+        config_lib_unittest.MockSiteConfig())
+
+    self.assertEqual(
+        sorted(paygen._DiscoverActiveFsiBuilds()),
+        [gspaths.Build(board='valid-board',
+                       channel='stable-channel',
+                       version='2465.105.0'),
+         gspaths.Build(board='valid-board',
+                       channel='stable-channel',
+                       version='2913.331.0')])
+
+    # Test a board without FSI values on stable-channel.
+    paygen = paygen_build_lib._PaygenBuild(
+        gspaths.Build(channel='stable-channel', board='no-fsi-board',
+                      version='1.2.3'),
+        self.work_dir,
+        config_lib_unittest.MockSiteConfig())
+
+    self.assertEqual(paygen._DiscoverActiveFsiBuilds(), [])
+
+    # Test a board with FSI values on non-stable-channel.
+    paygen = paygen_build_lib._PaygenBuild(
+        gspaths.Build(channel='beta-channel', board='valid-board',
+                      version='1.2.3'),
+        self.work_dir,
+        config_lib_unittest.MockSiteConfig())
+
+    self.assertEqual(paygen._DiscoverActiveFsiBuilds(), [])
+
+    paygen_build_lib.config.LoadGlobalConfig()
+
+  @cros_test_lib.NetworkTest()
+  @unittest.skipIf(not paygen_build_lib.config, 'Internal crostools required.')
+  def testDiscoverAllFsiBuilds(self):
+    """Test _DiscoverAllFsiBuilds against live FSI data for a real board."""
+    paygen = paygen_build_lib._PaygenBuild(
+        gspaths.Build(channel='stable-channel', board='x86-alex-he',
+                      version='1.2.3'),
+        self.work_dir,
+        config_lib_unittest.MockSiteConfig())
+
+    # Search for real FSIs for an older/live board.
+    self.assertEqual(paygen._DiscoverAllFsiBuilds(),
+                     ['0.12.433.257', '0.14.811.132', '1412.205.0'])
+
+  @unittest.skipIf(not paygen_build_lib.query, 'Internal crostools required.')
+  def testDiscoverNmoBuild(self):
+    """Test _DiscoverNmoBuild (N minus One)."""
+    paygen = self._GetPaygenBuildInstance()
+
+    self.mox.StubOutWithMock(paygen_build_lib.query, 'FindLatestPublished')
+
+    # Set up the test replay script.
+    paygen_build_lib.query.FindLatestPublished(
+        'foo-channel', 'foo-board').AndReturn('1.0.0')
+
+    paygen_build_lib.query.FindLatestPublished(
+        'foo-channel', 'foo-board').AndReturn(None)
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    self.assertEqual(paygen._DiscoverNmoBuild(),
+                     [gspaths.Build(bucket='crt',
+                                    channel='foo-channel',
+                                    board='foo-board',
+                                    version='1.0.0')])
+
+    self.assertEqual(paygen._DiscoverNmoBuild(), [])
+
+  def testDiscoverRequiredFullPayloads(self):
+    """Test _DiscoverRequiredFullPayloads."""
+    paygen = self._GetPaygenBuildInstance()
+
+    self.assertEqual(paygen._DiscoverRequiredFullPayloads([]), [])
+
+    self.assertItemsEqual(
+        paygen._DiscoverRequiredFullPayloads(self.images + [self.test_image]),
+        [gspaths.Payload(tgt_image=self.basic_image),
+         gspaths.Payload(tgt_image=self.npo_image),
+         gspaths.Payload(tgt_image=self.premp_image),
+         gspaths.Payload(tgt_image=self.premp_npo_image),
+         gspaths.Payload(tgt_image=self.test_image)])
+
+  def testDiscoverRequiredNpoDeltas(self):
+    """Test _DiscoverRequiredNpoDeltas."""
+    paygen = self._GetPaygenBuildInstance()
+
+    self.assertEqual(paygen._DiscoverRequiredNpoDeltas([]), [])
+
+    self.assertEqual(paygen._DiscoverRequiredNpoDeltas([self.basic_image]), [])
+
+    self.assertEqual(paygen._DiscoverRequiredNpoDeltas([self.npo_image]), [])
+
+    expected = [gspaths.Payload(tgt_image=self.npo_image,
+                                src_image=self.basic_image)]
+    self.assertEqual(paygen._DiscoverRequiredNpoDeltas([self.basic_image,
+                                                        self.npo_image]),
+                     expected)
+
+    self.assertEqual(paygen._DiscoverRequiredNpoDeltas([self.npo_image,
+                                                        self.basic_image]),
+                     expected)
+
+    self.assertEqual(paygen._DiscoverRequiredNpoDeltas([self.premp_image,
+                                                        self.premp_npo_image]),
+                     [gspaths.Payload(tgt_image=self.premp_npo_image,
+                                      src_image=self.premp_image)])
+
+  def testDiscoverRequiredTestNpoDeltas(self):
+    """Test _DiscoverRequiredTestNpoDeltas."""
+    paygen = self._GetPaygenBuildInstance()
+
+    self.assertEqual(
+        paygen._DiscoverRequiredTestNpoDeltas([]), [])
+    self.assertItemsEqual(
+        paygen._DiscoverRequiredTestNpoDeltas([self.test_image]),
+        [gspaths.Payload(tgt_image=self.test_image,
+                         src_image=self.test_image)])
+
+  def testDiscoverRequiredFromPreviousDeltas(self):
+    """Test _DiscoverRequiredFromPreviousDeltas."""
+    paygen = self._GetPaygenBuildInstance()
+
+    images = [self.basic_image]
+    prevs = [self.prev_image, self.prev2_image]
+
+    # Empty lists.
+    results = paygen._DiscoverRequiredFromPreviousDeltas([], [])
+    expected = []
+    self.assertEqual(results, expected)
+
+    # Empty previous list.
+    results = paygen._DiscoverRequiredFromPreviousDeltas(images, [])
+    expected = []
+    self.assertEqual(results, expected)
+
+    # Empty target list.
+    results = paygen._DiscoverRequiredFromPreviousDeltas([], prevs)
+    expected = []
+    self.assertEqual(results, expected)
+
+    # Basic list.
+    results = paygen._DiscoverRequiredFromPreviousDeltas(images, prevs)
+    expected = [gspaths.Payload(tgt_image=self.basic_image,
+                                src_image=self.prev_image),
+                gspaths.Payload(tgt_image=self.basic_image,
+                                src_image=self.prev2_image)]
+    self.assertEqual(results, expected)
+
+    # Inverted order (should return nothing).
+    results = paygen._DiscoverRequiredFromPreviousDeltas(
+        [self.prev_image], images)
+    expected = []
+    self.assertEqual(results, expected)
+
+  def testDiscoverRequiredPayloadsIncompleteBuild(self):
+    """Test _DiscoverRequiredPayloads when build images stay missing."""
+
+    paygen = self._GetPaygenBuildInstance()
+
+    self.mox.StubOutWithMock(paygen, '_DiscoverImages')
+    self.mox.StubOutWithMock(paygen, '_DiscoverNmoBuild')
+    self.mox.StubOutWithMock(paygen, '_DiscoverActiveFsiBuilds')
+
+    paygen.BUILD_DISCOVER_RETRY_SLEEP = 0
+
+    # Check that we retry 3 times.
+    paygen._DiscoverImages(paygen._build).AndRaise(
+        paygen_build_lib.ImageMissing())
+    paygen._DiscoverImages(paygen._build).AndRaise(
+        paygen_build_lib.ImageMissing())
+    paygen._DiscoverImages(paygen._build).AndRaise(
+        paygen_build_lib.ImageMissing())
+    paygen._DiscoverImages(paygen._build).AndRaise(
+        paygen_build_lib.ImageMissing())
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    with self.assertRaises(paygen_build_lib.BuildNotReady):
+      paygen._DiscoverRequiredPayloads()
+
+  def testDiscoverRequiredPayloads(self):
+    """Test _DiscoverRequiredPayloads."""
+
+    paygen = self._GetPaygenBuildInstance()
+
+    output_uri = 'gs://foo'
+
+    self.mox.StubOutWithMock(paygen, '_DiscoverImages')
+    self.mox.StubOutWithMock(paygen, '_DiscoverTestImageArchives')
+    self.mox.StubOutWithMock(paygen, '_DiscoverNmoBuild')
+    self.mox.StubOutWithMock(paygen, '_DiscoverActiveFsiBuilds')
+    self.mox.StubOutWithMock(paygen_payload_lib, 'DefaultPayloadUri')
+
+    nmo_build = gspaths.Build(bucket='crt',
+                              channel='foo-channel',
+                              board='foo-board',
+                              version='1.2.2')
+    fsi1_build = gspaths.Build(bucket='crt',
+                               channel='foo-channel',
+                               board='foo-board',
+                               version='1.0.0')
+    fsi2_build = gspaths.Build(bucket='crt',
+                               channel='foo-channel',
+                               board='foo-board',
+                               version='1.1.0')
+
+    nmo_images = self._GetBuildImages(nmo_build)
+    nmo_test_image = self._GetBuildTestImage(nmo_build)
+    fsi1_images = self._GetBuildImages(fsi1_build)
+    fsi1_test_image = self._GetBuildTestImage(fsi1_build)
+    fsi2_images = self._GetBuildImages(fsi2_build)
+    fsi2_test_image = self._GetBuildTestImage(fsi2_build)
+
+    paygen._DiscoverImages(paygen._build).AndReturn(self.images)
+    paygen._DiscoverTestImageArchives(paygen._build).AndReturn(
+        [self.test_image])
+    paygen._DiscoverNmoBuild().AndReturn([nmo_build])
+    paygen._DiscoverActiveFsiBuilds().AndReturn([fsi1_build, fsi2_build])
+    paygen._DiscoverImages(nmo_build).AndReturn(nmo_images)
+    paygen._DiscoverTestImageArchives(nmo_build).AndReturn([nmo_test_image])
+    paygen._DiscoverImages(fsi1_build).AndReturn(fsi1_images)
+    paygen._DiscoverTestImageArchives(fsi1_build).AndReturn([fsi1_test_image])
+    paygen._DiscoverImages(fsi2_build).AndReturn(fsi2_images)
+    paygen._DiscoverTestImageArchives(fsi2_build).AndReturn([fsi2_test_image])
+
+    # Simplify the output URIs, so it's easy to check them below.
+    paygen_payload_lib.DefaultPayloadUri(
+        mox.IsA(gspaths.Payload), None).MultipleTimes().AndReturn(output_uri)
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    results = paygen._DiscoverRequiredPayloads()
+
+    expected = [gspaths.Payload(tgt_image=self.basic_image, uri=output_uri),
+                gspaths.Payload(tgt_image=self.npo_image, uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_image, uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_npo_image, uri=output_uri),
+                # NPO Deltas
+                gspaths.Payload(tgt_image=self.npo_image,
+                                src_image=self.basic_image,
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_npo_image,
+                                src_image=self.premp_image,
+                                uri=output_uri),
+                # NMO Delta
+                gspaths.Payload(tgt_image=self.basic_image,
+                                src_image=nmo_images[0],
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_image,
+                                src_image=nmo_images[1],
+                                uri=output_uri),
+                # FSI Deltas
+                gspaths.Payload(tgt_image=self.basic_image,
+                                src_image=fsi1_images[0],
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_image,
+                                src_image=fsi1_images[1],
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.basic_image,
+                                src_image=fsi2_images[0],
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_image,
+                                src_image=fsi2_images[1],
+                                uri=output_uri),
+
+                # Test full payload.
+                gspaths.Payload(tgt_image=self.test_image,
+                                uri=output_uri),
+
+                # Test NPO delta.
+                gspaths.Payload(tgt_image=self.test_image,
+                                src_image=self.test_image,
+                                uri=output_uri),
+
+                # Test NMO delta.
+                gspaths.Payload(tgt_image=self.test_image,
+                                src_image=nmo_test_image,
+                                uri=output_uri),
+
+                # Test FSI deltas.
+                gspaths.Payload(tgt_image=self.test_image,
+                                src_image=fsi1_test_image,
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.test_image,
+                                src_image=fsi2_test_image,
+                                uri=output_uri)]
+    expected = zip(expected, itertools.repeat(False))
+
+    self.assertItemsEqual(sorted(results), sorted(expected))
+
+  def testDiscoverRequiredPayloadsPreviousSkipped(self):
+    """Test _DiscoverRequiredPayloads.
+
+    Ensures that no test delta payload is generated if generation of a
+    signed delta from the same build was skipped.
+    """
+
+    paygen = self._GetPaygenBuildInstance()
+
+    output_uri = 'gs://foo'
+
+    self.mox.StubOutWithMock(paygen, '_DiscoverImages')
+    self.mox.StubOutWithMock(paygen, '_DiscoverTestImageArchives')
+    self.mox.StubOutWithMock(paygen, '_DiscoverNmoBuild')
+    self.mox.StubOutWithMock(paygen, '_DiscoverActiveFsiBuilds')
+    self.mox.StubOutWithMock(paygen_payload_lib, 'DefaultPayloadUri')
+
+    nmo_build = gspaths.Build(bucket='crt',
+                              channel='foo-channel',
+                              board='foo-board',
+                              version='1.2.2')
+    fsi1_build = gspaths.Build(bucket='crt',
+                               channel='foo-channel',
+                               board='foo-board',
+                               version='1.0.0')
+    fsi2_build = gspaths.Build(bucket='crt',
+                               channel='foo-channel',
+                               board='foo-board',
+                               version='1.1.0')
+
+    fsi1_images = self._GetBuildImages(fsi1_build)
+    fsi1_test_image = self._GetBuildTestImage(fsi1_build)
+    fsi2_images = self._GetBuildImages(fsi2_build)
+    fsi2_test_image = self._GetBuildTestImage(fsi2_build)
+
+    paygen._DiscoverImages(paygen._build).AndReturn(self.images)
+    paygen._DiscoverTestImageArchives(paygen._build).AndReturn(
+        [self.test_image])
+    paygen._DiscoverNmoBuild().AndReturn([nmo_build])
+    paygen._DiscoverActiveFsiBuilds().AndReturn([fsi1_build, fsi2_build])
+    paygen._DiscoverImages(nmo_build).AndRaise(
+        paygen_build_lib.ImageMissing('nmo build is missing some image'))
+    # _DiscoverTestImageArchives(nmo_build) should NOT be called.
+    paygen._DiscoverImages(fsi1_build).AndReturn(fsi1_images)
+    paygen._DiscoverTestImageArchives(fsi1_build).AndReturn([fsi1_test_image])
+    paygen._DiscoverImages(fsi2_build).AndReturn(fsi2_images)
+    paygen._DiscoverTestImageArchives(fsi2_build).AndReturn([fsi2_test_image])
+
+    # Simplify the output URIs, so it's easy to check them below.
+    paygen_payload_lib.DefaultPayloadUri(
+        mox.IsA(gspaths.Payload), None).MultipleTimes().AndReturn(output_uri)
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    results = paygen._DiscoverRequiredPayloads()
+
+    # IMPORTANT: we intentionally omit the NMO payload from the expected list
+    # of payloads as it is a duplicate of one of the FSIs.
+    expected = [gspaths.Payload(tgt_image=self.basic_image, uri=output_uri),
+                gspaths.Payload(tgt_image=self.npo_image, uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_image, uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_npo_image, uri=output_uri),
+                # NPO Deltas
+                gspaths.Payload(tgt_image=self.npo_image,
+                                src_image=self.basic_image,
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_npo_image,
+                                src_image=self.premp_image,
+                                uri=output_uri),
+                # FSI Deltas
+                gspaths.Payload(tgt_image=self.basic_image,
+                                src_image=fsi1_images[0],
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_image,
+                                src_image=fsi1_images[1],
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.basic_image,
+                                src_image=fsi2_images[0],
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_image,
+                                src_image=fsi2_images[1],
+                                uri=output_uri),
+
+                # Test full payload.
+                gspaths.Payload(tgt_image=self.test_image,
+                                uri=output_uri),
+
+                # Test NPO delta.
+                gspaths.Payload(tgt_image=self.test_image,
+                                src_image=self.test_image,
+                                uri=output_uri),
+
+                # Test FSI deltas.
+                gspaths.Payload(tgt_image=self.test_image,
+                                src_image=fsi1_test_image,
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.test_image,
+                                src_image=fsi2_test_image,
+                                uri=output_uri)]
+    expected = zip(expected, itertools.repeat(False))
+
+    self.assertItemsEqual(sorted(results), sorted(expected))
+
+  def testDiscoverRequiredPayloadsNmoIsAlsoFsi(self):
+    """Test _DiscoverRequiredPayloads when the NMO build is also an FSI."""
+
+    paygen = self._GetPaygenBuildInstance()
+
+    output_uri = 'gs://foo'
+
+    self.mox.StubOutWithMock(paygen, '_DiscoverImages')
+    self.mox.StubOutWithMock(paygen, '_DiscoverTestImageArchives')
+    self.mox.StubOutWithMock(paygen, '_DiscoverNmoBuild')
+    self.mox.StubOutWithMock(paygen, '_DiscoverActiveFsiBuilds')
+    self.mox.StubOutWithMock(paygen_payload_lib, 'DefaultPayloadUri')
+
+    nmo_build = gspaths.Build(bucket='crt',
+                              channel='foo-channel',
+                              board='foo-board',
+                              version='1.2.2')
+    fsi1_build = gspaths.Build(bucket='crt',
+                               channel='foo-channel',
+                               board='foo-board',
+                               version='1.0.0')
+    fsi2_build = gspaths.Build(bucket='crt',
+                               channel='foo-channel',
+                               board='foo-board',
+                               version='1.2.2')
+
+    fsi1_images = self._GetBuildImages(fsi1_build)
+    fsi1_test_image = self._GetBuildTestImage(fsi1_build)
+    fsi2_images = self._GetBuildImages(fsi2_build)
+    fsi2_test_image = self._GetBuildTestImage(fsi2_build)
+
+    paygen._DiscoverImages(paygen._build).AndReturn(self.images)
+    paygen._DiscoverTestImageArchives(paygen._build).AndReturn(
+        [self.test_image])
+    paygen._DiscoverActiveFsiBuilds().AndReturn([fsi1_build, fsi2_build])
+    paygen._DiscoverNmoBuild().AndReturn([nmo_build])
+    paygen._DiscoverImages(fsi1_build).AndReturn(fsi1_images)
+    paygen._DiscoverImages(fsi2_build).AndReturn(fsi2_images)
+    paygen._DiscoverTestImageArchives(fsi1_build).AndReturn([fsi1_test_image])
+    paygen._DiscoverTestImageArchives(fsi2_build).AndReturn([fsi2_test_image])
+
+    # Simplify the output URIs, so it's easy to check them below.
+    paygen_payload_lib.DefaultPayloadUri(
+        mox.IsA(gspaths.Payload), None).MultipleTimes().AndReturn(output_uri)
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    results = paygen._DiscoverRequiredPayloads()
+
+    expected = [gspaths.Payload(tgt_image=self.basic_image, uri=output_uri),
+                gspaths.Payload(tgt_image=self.npo_image, uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_image, uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_npo_image, uri=output_uri),
+                # NPO Deltas
+                gspaths.Payload(tgt_image=self.npo_image,
+                                src_image=self.basic_image,
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_npo_image,
+                                src_image=self.premp_image,
+                                uri=output_uri),
+                # FSI Deltas
+                gspaths.Payload(tgt_image=self.basic_image,
+                                src_image=fsi1_images[0],
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_image,
+                                src_image=fsi1_images[1],
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.basic_image,
+                                src_image=fsi2_images[0],
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.premp_image,
+                                src_image=fsi2_images[1],
+                                uri=output_uri),
+
+                # Test full payload.
+                gspaths.Payload(tgt_image=self.test_image,
+                                uri=output_uri),
+
+                # Test NPO delta.
+                gspaths.Payload(tgt_image=self.test_image,
+                                src_image=self.test_image,
+                                uri=output_uri),
+
+                # Test FSI deltas.
+                gspaths.Payload(tgt_image=self.test_image,
+                                src_image=fsi1_test_image,
+                                uri=output_uri),
+                gspaths.Payload(tgt_image=self.test_image,
+                                src_image=fsi2_test_image,
+                                uri=output_uri)]
+
+    expected = zip(expected, itertools.repeat(False))
+
+    self.assertItemsEqual(sorted(results), sorted(expected))
+
+  def testFindFullTestPayloads(self):
+    """Test _FindFullTestPayloads lookup, filtering and caching."""
+    paygen = self._GetPaygenBuildInstance()
+
+    self.mox.StubOutWithMock(urilib, 'ListFiles')
+
+    # Each distinct (channel, version) pair triggers exactly one GS listing.
+    urilib.ListFiles(
+        'gs://crt/find_channel/foo-board/find_full_version/payloads/'
+        'chromeos_find_full_version_foo-board_find_channel_full_test.bin-*'
+    ).AndReturn(['foo', 'foo.json', 'foo.log', 'bar'])
+
+    urilib.ListFiles(
+        'gs://crt/diff_channel/foo-board/find_full_version/payloads/'
+        'chromeos_find_full_version_foo-board_diff_channel_full_test.bin-*'
+    ).AndReturn(['foo'])
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    # Call once and use mocked look up. Make sure we filter properly
+    # (the .json and .log entries are dropped from the result).
+    self.assertEqual(
+        paygen._FindFullTestPayloads('find_channel', 'find_full_version'),
+        ['foo', 'bar'])
+
+    # Call with different channel, which does a different lookup.
+    self.assertEqual(
+        paygen._FindFullTestPayloads('diff_channel', 'find_full_version'),
+        ['foo'])
+
+
+    # Call a second time to verify we get cached results (no lookup).
+    self.assertEqual(
+        paygen._FindFullTestPayloads('find_channel', 'find_full_version'),
+        ['foo', 'bar'])
+
+  def DoGeneratePayloadsTest(self, run_parallel, test_dry_run):
+    """Test paygen_build_lib._GeneratePayloads.
+
+    Args:
+      run_parallel: Whether generation should fan out through
+        parallel.RunTasksInProcessPool, or call _GenerateSinglePayload
+        directly once per payload.
+      test_dry_run: dry_run value to construct the _PaygenBuild with; it
+        must be forwarded to every _GenerateSinglePayload call.
+    """
+    paygen = paygen_build_lib._PaygenBuild(
+        self.foo_build, self.tempdir,
+        config_lib_unittest.MockSiteConfig(),
+        dry_run=test_dry_run,
+        run_parallel=run_parallel)
+
+    basic_payload = gspaths.Payload(tgt_image=self.npo_image,
+                                    src_image=self.basic_image)
+    premp_payload = gspaths.Payload(tgt_image=self.premp_npo_image,
+                                    src_image=self.premp_image)
+
+    self.mox.StubOutWithMock(parallel, 'RunTasksInProcessPool')
+    self.mox.StubOutWithMock(paygen_build_lib, '_GenerateSinglePayload')
+
+    expected_payload_args = [
+        (basic_payload, mox.IsA(str), True, None, test_dry_run),
+        (premp_payload, mox.IsA(str), True, None, test_dry_run)
+    ]
+
+    if run_parallel:
+      parallel.RunTasksInProcessPool(paygen_build_lib._GenerateSinglePayload,
+                                     expected_payload_args)
+    else:
+      paygen_build_lib._GenerateSinglePayload(basic_payload, mox.IsA(str),
+                                              True, None, test_dry_run)
+
+      paygen_build_lib._GenerateSinglePayload(premp_payload, mox.IsA(str),
+                                              True, None, test_dry_run)
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    paygen._GeneratePayloads((basic_payload, premp_payload), lock=None)
+
+    # Unset stubs so this helper can be invoked several times from a single
+    # test method.
+    self.mox.UnsetStubs()
+
+  def testGeneratePayloads(self):
+    """Test paygen_build_lib._GeneratePayloads, no dry_run."""
+
+    # Test every combination of the boolean arguments.
+    for run_parallel in (True, False):
+      for test_dry_run in (True, False):
+        self.DoGeneratePayloadsTest(run_parallel, test_dry_run)
+
+  def testGeneratePayloadInProcess(self):
+    """Make sure the _GenerateSinglePayload calls into paygen_payload_lib."""
+
+    basic_payload = gspaths.Payload(tgt_image=self.npo_image,
+                                    src_image=self.basic_image)
+
+    self.mox.StubOutWithMock(paygen_payload_lib, 'CreateAndUploadPayload')
+
+    # Verify that we actually generate the payload. The positional args
+    # passed to _GenerateSinglePayload below (False, 'foo.zip', True) must
+    # surface here as sign=False, au_generator_uri='foo.zip', dry_run=True.
+    paygen_payload_lib.CreateAndUploadPayload(
+        basic_payload,
+        mox.IsA(download_cache.DownloadCache),
+        work_dir=self.tempdir,
+        sign=False,
+        dry_run=True,
+        au_generator_uri='foo.zip')
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    paygen_build_lib._GenerateSinglePayload(basic_payload, self.tempdir,
+                                            False, 'foo.zip', True)
+
+  def testCleanupBuild(self):
+    """Test _PaygenBuild._CleanupBuild."""
+    paygen = self._GetPaygenBuildInstance()
+
+    # Cleanup removes the build's 'signing' directory recursively, and must
+    # tolerate it not existing (ignore_no_match).
+    self.mox.StubOutWithMock(gslib, 'Remove')
+    gslib.Remove('gs://crt/foo-channel/foo-board/1.2.3/payloads/signing',
+                 recurse=True, ignore_no_match=True)
+    self.mox.ReplayAll()
+
+    paygen._CleanupBuild()
+
+  def _CreatePayloadsSetup(self, skip_test_payloads=False, disable_tests=False):
+    """Helper method for related CreatePayloads tests."""
+    paygen = self._GetPaygenBuildInstance(skip_test_payloads=skip_test_payloads,
+                                          disable_tests=disable_tests)
+
+    self.mox.StubOutWithMock(gslock, 'Lock')
+    self.mox.StubOutWithMock(gslib, 'CreateWithContents')
+    self.mox.StubOutWithMock(gslib, 'Exists')
+    self.mox.StubOutWithMock(gslib, 'Remove')
+    self.mox.StubOutWithMock(paygen, '_DiscoverRequiredPayloads')
+    self.mox.StubOutWithMock(paygen, '_MapToArchive')
+    self.mox.StubOutWithMock(paygen, '_GeneratePayloads')
+    self.mox.StubOutWithMock(paygen, '_AutotestPayloads')
+    self.mox.StubOutWithMock(paygen, '_CreatePayloadTests')
+    self.mox.StubOutWithMock(paygen, '_CleanupBuild')
+
+    return paygen
+
+  def testCreatePayloadsLockedBuild(self):
+    """Test CreatePayloads when the build lock is held elsewhere."""
+    paygen = self._CreatePayloadsSetup()
+    lock_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.LOCK)
+
+    # Failing to acquire the build lock surfaces as BuildLocked.
+    gslock.Lock(lock_uri, dry_run=False).AndRaise(gslock.LockNotAcquired())
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    with self.assertRaises(paygen_build_lib.BuildLocked):
+      paygen.CreatePayloads()
+
+  def testCreatePayloadsSkipBuild(self):
+    """Test CreatePayloads when the build is marked with the skip flag."""
+    paygen = self._CreatePayloadsSetup()
+    lock_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.LOCK)
+    skip_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.SKIP)
+
+    lock = self.mox.CreateMockAnything()
+
+    # The lock is acquired, the skip flag is found set, and the lock must
+    # still be released on the way out.
+    gslock.Lock(lock_uri, dry_run=False).AndReturn(lock)
+    lock.__enter__().AndReturn(lock)
+    gslib.Exists(skip_uri).AndReturn(True)
+    lock.__exit__(
+        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(None)
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    with self.assertRaises(paygen_build_lib.BuildSkip):
+      paygen.CreatePayloads()
+
+  def testCreatePayloadsFinishedBuild(self):
+    """Test CreatePayloads when the build is already marked finished."""
+    paygen = self._CreatePayloadsSetup()
+
+    lock_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.LOCK)
+    skip_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.SKIP)
+    finished_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.FINISHED)
+
+    lock = self.mox.CreateMockAnything()
+
+    # Skip flag is absent but the finished flag exists, so the build is
+    # reported as already done; the lock is still released.
+    gslock.Lock(lock_uri, dry_run=False).AndReturn(lock)
+    lock.__enter__().AndReturn(lock)
+    gslib.Exists(skip_uri).AndReturn(False)
+    gslib.Exists(finished_uri).AndReturn(True)
+    lock.__exit__(
+        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(None)
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    with self.assertRaises(paygen_build_lib.BuildFinished):
+      paygen.CreatePayloads()
+
+  def testCreatePayloadsBuildNotReady(self):
+    """Test CreatePayloads when not all images are present yet."""
+    paygen = self._CreatePayloadsSetup()
+
+    lock_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.LOCK)
+    skip_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.SKIP)
+    finished_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.FINISHED)
+
+    lock = self.mox.CreateMockAnything()
+
+    gslock.Lock(lock_uri, dry_run=False).AndReturn(lock)
+    lock.__enter__().AndReturn(lock)
+    gslib.Exists(skip_uri).AndReturn(False)
+    gslib.Exists(finished_uri).AndReturn(False)
+    # Discovery raising BuildNotReady must propagate, with the lock still
+    # being released.
+    paygen._DiscoverRequiredPayloads(
+        ).AndRaise(paygen_build_lib.BuildNotReady())
+    lock.__exit__(
+        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(None)
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    with self.assertRaises(paygen_build_lib.BuildNotReady):
+      paygen.CreatePayloads()
+
+  def testCreatePayloadsCreateFailed(self):
+    """Test CreatePayloads when payload generation fails."""
+    paygen = self._CreatePayloadsSetup()
+
+    lock_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.LOCK)
+    skip_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.SKIP)
+    finished_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.FINISHED)
+
+    lock = self.mox.CreateMockAnything()
+    payload = 'foo'
+    payload_list = [payload]
+    # _DiscoverRequiredPayloads returns (payload, skip_flag) pairs.
+    payload_skip_list = [(payload, False)]
+    mock_exception = Exception()
+
+    gslock.Lock(lock_uri, dry_run=False).AndReturn(lock)
+    lock.__enter__().AndReturn(lock)
+    gslib.Exists(skip_uri).AndReturn(False)
+    gslib.Exists(finished_uri).AndReturn(False)
+    paygen._DiscoverRequiredPayloads(
+        ).AndReturn(payload_skip_list)
+    self.mox.StubOutWithMock(paygen_payload_lib, 'FindExistingPayloads')
+    paygen_payload_lib.FindExistingPayloads(payload).AndReturn([])
+    paygen._GeneratePayloads(payload_list, lock).AndRaise(mock_exception)
+    # Even on failure the lock must be released.
+    lock.__exit__(
+        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(None)
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    with self.assertRaises(Exception):
+      paygen.CreatePayloads()
+
+  def testCreatePayloadsSuccess(self):
+    """Test the CreatePayloads end-to-end success path."""
+    paygen = self._CreatePayloadsSetup()
+
+    lock_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.LOCK)
+    skip_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.SKIP)
+    finished_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.FINISHED)
+
+    lock = self.mox.CreateMockAnything()
+    payload = 'foo'
+    payload_list = [payload]
+    # _DiscoverRequiredPayloads returns (payload, skip_flag) pairs.
+    payload_skip_list = [(payload, False)]
+
+    gslock.Lock(lock_uri, dry_run=False).AndReturn(lock)
+    lock.__enter__().AndReturn(lock)
+    gslib.Exists(skip_uri).AndReturn(False)
+    gslib.Exists(finished_uri).AndReturn(False)
+    paygen._DiscoverRequiredPayloads(
+        ).AndReturn(payload_skip_list)
+    self.mox.StubOutWithMock(paygen_payload_lib, 'FindExistingPayloads')
+    paygen_payload_lib.FindExistingPayloads(payload).AndReturn([])
+    paygen._GeneratePayloads(payload_list, lock)
+    paygen._MapToArchive('foo-board', '1.2.3').AndReturn(
+        ('archive_board', 'archive_build', 'archive_build_uri'))
+    paygen._CreatePayloadTests(['foo']).AndReturn(['Test Payloads'])
+    paygen._AutotestPayloads(['Test Payloads'])
+
+    # On success, cleanup runs and the 'finished' flag is written before
+    # the lock is released.
+    paygen._CleanupBuild()
+    gslib.CreateWithContents(finished_uri, mox.IgnoreArg())
+    lock.__exit__(
+        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(None)
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    paygen.CreatePayloads()
+
+  def testCreatePayloadsAlreadyExists(self):
+    """Test CreatePayloads when one payload has already been generated."""
+    paygen = self._CreatePayloadsSetup()
+
+    lock_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.LOCK)
+    skip_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.SKIP)
+    finished_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.FINISHED)
+
+    lock = self.mox.CreateMockAnything()
+    self.mox.StubOutWithMock(paygen_payload_lib, 'FindExistingPayloads')
+    self.mox.StubOutWithMock(paygen_payload_lib, 'SetPayloadUri')
+    payload_existing = 'foo'
+    payload_new = 'bar'
+    payload_list = [(payload_existing, False), (payload_new, False)]
+
+    gslock.Lock(lock_uri, dry_run=False).AndReturn(lock)
+    lock.__enter__().AndReturn(lock)
+    gslib.Exists(skip_uri).AndReturn(False)
+    gslib.Exists(finished_uri).AndReturn(False)
+    paygen._DiscoverRequiredPayloads(
+        ).AndReturn(payload_list)
+    # The existing payload is reused (only its URI is recorded); just the
+    # new payload is generated.
+    paygen_payload_lib.FindExistingPayloads(payload_existing).AndReturn(
+        [payload_existing])
+    paygen_payload_lib.FindExistingPayloads(payload_new).AndReturn([])
+    paygen_payload_lib.SetPayloadUri(payload_existing, payload_existing)
+    paygen._GeneratePayloads([payload_new], lock)
+    paygen._MapToArchive('foo-board', '1.2.3').AndReturn(
+        ('archive_board', 'archive_build', 'archive_build_uri'))
+    # Both the existing and the new payload are handed to test creation.
+    paygen._CreatePayloadTests(['foo', 'bar']).AndReturn(['Test Payloads'])
+    paygen._AutotestPayloads(['Test Payloads'])
+    gslib.CreateWithContents(finished_uri, mox.IgnoreArg())
+    paygen._CleanupBuild()
+    lock.__exit__(
+        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(None)
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    paygen.CreatePayloads()
+
+  def testCreatePayloadsSkipTests(self):
+    """Test CreatePayloads with test payloads and autotests disabled."""
+    paygen = self._CreatePayloadsSetup(skip_test_payloads=True,
+                                       disable_tests=True)
+
+    lock_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.LOCK)
+    skip_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.SKIP)
+    finished_uri = paygen._GetFlagURI(gspaths.ChromeosReleases.FINISHED)
+
+    lock = self.mox.CreateMockAnything()
+    payload = 'foo'
+    payload_list = [payload]
+    # _DiscoverRequiredPayloads returns (payload, skip_flag) pairs.
+    payload_skip_list = [(payload, False)]
+
+    gslock.Lock(lock_uri, dry_run=False).AndReturn(lock)
+    lock.__enter__().AndReturn(lock)
+    gslib.Exists(skip_uri).AndReturn(False)
+    gslib.Exists(finished_uri).AndReturn(False)
+    paygen._DiscoverRequiredPayloads(
+        ).AndReturn(payload_skip_list)
+    self.mox.StubOutWithMock(paygen_payload_lib, 'FindExistingPayloads')
+    paygen_payload_lib.FindExistingPayloads(payload).AndReturn([])
+    paygen._GeneratePayloads(payload_list, lock)
+    # With tests disabled we go straight to cleanup: no archive mapping,
+    # test creation, or autotest scheduling is expected.
+    paygen._CleanupBuild()
+    gslib.CreateWithContents(finished_uri, mox.IgnoreArg())
+    lock.__exit__(
+        mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(None)
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    paygen.CreatePayloads()
+
+  def setupCreatePayloadTests(self):
+    paygen = self._GetPaygenBuildInstance()
+
+    self.mox.StubOutWithMock(paygen, '_DiscoverAllFsiBuilds')
+    self.mox.StubOutWithMock(paygen, '_FindFullTestPayloads')
+
+    return paygen
+
+  def testCreatePayloadTestsEmpty(self):
+
+    payloads = []
+    paygen = self.setupCreatePayloadTests()
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    expected = paygen._CreatePayloadTests(payloads)
+    self.assertEqual(expected, [])
+
+  def testCreatePayloadTestsPopulated(self):
+    """Test _CreatePayloadTests for one full and one delta payload."""
+
+    payloads = [
+        gspaths.Payload(tgt_image=self.test_image),
+        gspaths.Payload(tgt_image=self.prev_image, src_image=self.test_image)
+    ]
+    paygen = self.setupCreatePayloadTests()
+
+    # We search for FSIs once for each full payload. Only FSI versions with
+    # a full test payload available (1.0.0 here) produce an FSI source test.
+    paygen._DiscoverAllFsiBuilds().AndReturn(['0.9.9', '1.0.0'])
+    paygen._FindFullTestPayloads('stable-channel', '0.9.9').AndReturn(False)
+    paygen._FindFullTestPayloads('stable-channel', '1.0.0').AndReturn(True)
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    self.maxDiff = None
+
+    # The full payload gets a same-version test plus one per usable FSI;
+    # the delta payload gets a single test with no explicit source.
+    expected = paygen._CreatePayloadTests(payloads)
+    self.assertEqual(expected, [
+        paygen.PayloadTest(
+            payloads[0], src_channel='foo-channel', src_version='1.2.3'),
+        paygen.PayloadTest(
+            payloads[0], src_channel='stable-channel', src_version='1.0.0'),
+        paygen.PayloadTest(
+            payloads[1]),
+    ])
+
+
+  def testFindControlFileDir(self):
+    """Test that we find control files in the proper directory."""
+    # Test default dir in /tmp.
+    result = paygen_build_lib._FindControlFileDir(None)
+    self.assertTrue(os.path.isdir(result))
+    tempdir = tempfile.tempdir or '/tmp'
+    self.assertTrue(result.startswith(tempdir + '/'))
+    shutil.rmtree(result)
+
+    # Test in specified dir.
+    result = paygen_build_lib._FindControlFileDir(self.tempdir)
+    self.assertTrue(os.path.isdir(result))
+    self.assertTrue(result.startswith(
+        os.path.join(self.tempdir, 'paygen_build-control_files')))
+
+  @unittest.skipIf(not paygen_build_lib.config,
+                   'Internal crostools repository needed.')
+  @unittest.skipIf(not paygen_build_lib.test_control,
+                   'Autotest repository needed.')
+  def testEmitControlFile(self):
+    """Test that we emit control files correctly.
+
+    A template control file is written to disk, then _EmitControlFile is
+    expected to prepend the paygen-specific variables and rewrite NAME.
+    """
+    payload = gspaths.Payload(tgt_image=self.npo_image,
+                              src_image=self.basic_image)
+
+    site_config = config_lib_unittest.MockSiteConfig()
+    site_config.Add('build_to_introduce_boards',
+                    boards=['foo_board'])
+
+    suite_name = 'paygen_foo'
+    control_dir = tempfile.mkdtemp(prefix='control_dir-')
+    paygen = paygen_build_lib._PaygenBuild(
+        self.foo_build, self.tempdir, site_config,
+        control_dir=control_dir)
+    # Template control file; _EmitControlFile reads this back.
+    with tempfile.NamedTemporaryFile(prefix='control_file-', delete=False) as f:
+      control_file_name = f.name
+      f.write("""
+AUTHOR = "Chromium OS"
+NAME = "autoupdate_EndToEndTest"
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+DOC = "Faux doc"
+
+""")
+
+    self.mox.StubOutWithMock(urilib, 'ListFiles')
+    self.mox.StubOutWithMock(urilib, 'Exists')
+
+    urilib.Exists(
+        'gs://chromeos-releases/foo-channel/foo-board/1.2.3/stateful.tgz'
+        ).AndReturn(True)
+
+    # The source image's payload directory is listed to find its URI.
+    urilib.ListFiles(
+        gspaths.ChromeosReleases.PayloadUri(
+            self.basic_image.channel, self.basic_image.board,
+            self.basic_image.version,
+            '*', bucket=self.basic_image.bucket)).AndReturn(
+                ['gs://foo/bar.tar.bz2'])
+
+    self.mox.StubOutWithMock(
+        paygen_build_lib.test_control, 'get_control_file_name')
+    paygen_build_lib.test_control.get_control_file_name().AndReturn(
+        control_file_name)
+
+    self.mox.ReplayAll()
+
+    payload_test = paygen_build_lib._PaygenBuild.PayloadTest(payload)
+    cf = paygen._EmitControlFile(payload_test, suite_name, control_dir)
+
+    control_contents = osutils.ReadFile(cf)
+
+    # The emitted file is the paygen variables followed by the template,
+    # with NAME expanded to include suite, update type and version.
+    self.assertEqual(control_contents, '''name = 'paygen_foo'
+image_type = 'test'
+update_type = 'delta'
+source_release = '1.2.3'
+target_release = '1.2.3'
+source_image_uri = 'gs://foo/bar.tar.bz2'
+target_payload_uri = 'None'
+SUITE = 'paygen_foo'
+source_archive_uri = 'gs://chromeos-releases/foo-channel/foo-board/1.2.3'
+
+AUTHOR = "Chromium OS"
+NAME = "autoupdate_EndToEndTest_paygen_foo_delta_1.2.3"
+TIME = "MEDIUM"
+TEST_CATEGORY = "Functional"
+TEST_CLASS = "platform"
+TEST_TYPE = "server"
+DOC = "Faux doc"
+
+''')
+
+    shutil.rmtree(control_dir)
+    os.remove(control_file_name)
+
+  def testAutotestPayloads(self):
+    """Test the process of scheduling HWLab tests.
+
+    Covers emitting control files, tarring them up, uploading the tarball
+    next to the image archive, and invoking run_suite.py.
+    """
+    control_dir = '/tmp/control_dir'
+    paygen = paygen_build_lib._PaygenBuild(
+        self.foo_build, self.tempdir, config_lib_unittest.MockSiteConfig(),
+        control_dir=control_dir)
+    control_dump_dir = os.path.join(control_dir, paygen.CONTROL_FILE_SUBDIR)
+    payloads = ['foo', 'bar']
+    test_channel = self.foo_build.channel.rpartition('-')[0]
+    suite_name = paygen.PAYGEN_AU_SUITE_TEMPLATE % test_channel
+    tarball_name = paygen.CONTROL_TARBALL_TEMPLATE % test_channel
+    tarball_path = os.path.join(control_dir, tarball_name)
+    test_archive_build = '%s-release/R99-%s' % (self.foo_build.board,
+                                                self.foo_build.version)
+    test_archive_build_uri = ('gs://chromeos-image-archive/%s' %
+                              test_archive_build)
+    test_upload_path = os.path.join(test_archive_build_uri, tarball_name)
+
+    self.mox.StubOutWithMock(os, 'makedirs')
+    os.makedirs(os.path.join(control_dir, paygen.CONTROL_FILE_SUBDIR))
+
+    # One control file is emitted per payload.
+    self.mox.StubOutWithMock(paygen, '_EmitControlFile')
+    paygen._EmitControlFile('foo', suite_name, control_dump_dir)
+    paygen._EmitControlFile('bar', suite_name, control_dump_dir)
+
+    self.mox.StubOutWithMock(cros_build_lib, 'CreateTarball')
+    cros_build_lib.CreateTarball(
+        tarball_path, control_dir,
+        compression=cros_build_lib.COMP_BZIP2,
+        inputs=[paygen.CONTROL_FILE_SUBDIR]).AndReturn(
+            cros_build_lib.CommandResult(returncode=0))
+
+    # Setup preliminary values needed for running autotests.
+    paygen._archive_board = self.foo_build.board
+    paygen._archive_build = test_archive_build
+    paygen._archive_build_uri = test_archive_build_uri
+
+    self.mox.StubOutWithMock(gslib, 'Copy')
+    gslib.Copy(tarball_path, test_upload_path, acl='public-read')
+
+    # Both utils and cros_build_lib versions of RunCommand exist. For now, stub
+    # them both out just to be safe (don't want unit tests running actual
+    # commands).
+    # TODO(garnold) remove the dryrun argument.
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    self.mox.StubOutWithMock(cros_build_lib, 'RunCommand')
+
+    timeout_mins = config_lib.HWTestConfig.DEFAULT_HW_TEST_TIMEOUT / 60
+    expected_command = [
+        mox.StrContains('site_utils/run_suite.py'),
+        '--board', 'foo-board',
+        '--build', 'foo-board-release/R99-1.2.3',
+        '--suite_name', 'paygen_au_foo',
+        '--file_bugs', 'True',
+        '--pool', 'bvt',
+        '--retry', 'True',
+        '--timeout_mins', str(timeout_mins),
+        '--no_wait', 'False',
+        '--suite_min_duts', '2']
+
+    # Realistic run_suite.py output, including the created suite job link.
+    job_id_output = '''
+Autotest instance: cautotest
+02-23-2015 [06:26:51] Submitted create_suite_job rpc
+02-23-2015 [06:26:53] Created suite job: http://cautotest.corp.google.com/afe/#tab_id=view_job&object_id=26960110
+@@@STEP_LINK@Suite created@http://cautotest.corp.google.com/afe/#tab_id=view_job&object_id=26960110@@@
+The suite job has another 3:09:50.012887 till timeout.
+The suite job has another 2:39:39.789250 till timeout.
+    '''
+
+    cros_build_lib.RunCommand(
+        expected_command).AndReturn(
+            utils.CommandResult(returncode=0, output=job_id_output))
+
+    self.mox.ReplayAll()
+
+    paygen._AutotestPayloads(payloads)
+
+  def testScheduleAutotestTestsNormal(self):
+    """Test scheduling autotest tests with run_suite.py."""
+    paygen = paygen_build_lib._PaygenBuild(
+        self.foo_build, self.tempdir,
+        config_lib_unittest.MockSiteConfig())
+
+    self.mox.StubOutWithMock(commands, 'RunHWTestSuite')
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    self.mox.StubOutWithMock(cros_build_lib, 'RunCommand')
+
+    # Not on a builder, so a direct run_suite.py invocation with exactly
+    # these flags is expected.
+    timeout_mins = config_lib.HWTestConfig.DEFAULT_HW_TEST_TIMEOUT / 60
+    expected_command = [
+        mox.StrContains('site_utils/run_suite.py'),
+        '--board', 'foo-board',
+        '--build', 'foo-board-release/R99-1.2.3',
+        '--suite_name', 'paygen_au_foo',
+        '--file_bugs', 'True',
+        '--pool', 'bvt',
+        '--retry', 'True',
+        '--timeout_mins', str(timeout_mins),
+        '--no_wait', 'False',
+        '--suite_min_duts', '2']
+    cros_build_lib.RunCommand(
+        expected_command).AndReturn(
+            utils.CommandResult(returncode=0, output=''))
+
+    self.mox.ReplayAll()
+
+    # Setup preliminary values needed for scheduling autotests.
+    paygen._archive_board = 'foo-board'
+    paygen._archive_build = 'foo-board-release/R99-1.2.3'
+
+    paygen._ScheduleAutotestTests('paygen_au_foo')
+
+  def testScheduleAutotestTestsBuilderEnvironment(self):
+    """Test scheduling autotest tests with build autotest proxy."""
+    paygen = paygen_build_lib._PaygenBuild(
+        self.foo_build, self.tempdir,
+        config_lib_unittest.MockSiteConfig(), run_on_builder=True)
+
+    self.mox.StubOutWithMock(commands, 'RunHWTestSuite')
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    self.mox.StubOutWithMock(cros_build_lib, 'RunCommand')
+
+    # With run_on_builder=True, scheduling goes through RunHWTestSuite
+    # instead of invoking run_suite.py directly.
+    timeout_mins = config_lib.HWTestConfig.DEFAULT_HW_TEST_TIMEOUT / 60
+    paygen_build_lib.commands.RunHWTestSuite(
+        board='foo-board', build='foo-board-release/R99-1.2.3', file_bugs=True,
+        pool='bvt', priority=constants.HWTEST_BUILD_PRIORITY,
+        suite='paygen_au_foo', timeout_mins=timeout_mins,
+        retry=True, wait_for_results=True, suite_min_duts=2, debug=False)
+
+    self.mox.ReplayAll()
+
+    # Setup preliminary values needed for scheduling autotests.
+    paygen._archive_board = 'foo-board'
+    paygen._archive_build = 'foo-board-release/R99-1.2.3'
+
+    paygen._ScheduleAutotestTests('paygen_au_foo')
+
+  def testScheduleAutotestTestsBuilderEnvironmentWarn(self):
+    """Test that a TestWarning from the suite proxy is tolerated."""
+    paygen = paygen_build_lib._PaygenBuild(
+        self.foo_build, self.tempdir,
+        config_lib_unittest.MockSiteConfig(), run_on_builder=True)
+
+    self.mox.StubOutWithMock(commands, 'RunHWTestSuite')
+    self.mox.StubOutWithMock(utils, 'RunCommand')
+    self.mox.StubOutWithMock(cros_build_lib, 'RunCommand')
+
+    # A suite that passes with a warning raises TestWarning; scheduling
+    # must absorb it rather than propagate.
+    timeout_mins = config_lib.HWTestConfig.DEFAULT_HW_TEST_TIMEOUT / 60
+    paygen_build_lib.commands.RunHWTestSuite(
+        board='foo-board', build='foo-board-release/R99-1.2.3', file_bugs=True,
+        pool='bvt', priority=constants.HWTEST_BUILD_PRIORITY,
+        suite='paygen_au_foo', timeout_mins=timeout_mins,
+        retry=True, wait_for_results=True, suite_min_duts=2,
+        debug=False).AndRaise(
+            failures_lib.TestWarning('** Suite passed with a warning code **'))
+
+    self.mox.ReplayAll()
+
+    # Setup preliminary values needed for scheduling autotests.
+    paygen._archive_board = 'foo-board'
+    paygen._archive_build = 'foo-board-release/R99-1.2.3'
+
+    paygen._ScheduleAutotestTests('paygen_au_foo')
+
+  def testMapToArchive(self):
+    """Test that mapping to images archive names/locations works."""
+    self.mox.StubOutWithMock(urilib, 'ListFiles')
+    urilib.ListFiles(
+        gspaths.ChromeosImageArchive.BuildUri(
+            'foo_board', '*', '1.2.3')).AndReturn(
+                ['gs://foo-archive/foo_board/R11-1.2.3/somefile'])
+
+    self.mox.ReplayAll()
+
+    # 'bar_board' and 'bar-board' both normalize to the same payload board
+    # name, which sets up the ambiguity for case 2.
+    site_config = config_lib_unittest.MockSiteConfig()
+    site_config.Add('build_to_introduce_boards',
+                    boards=['foo_board', 'bar_board', 'bar-board'])
+
+    paygen = paygen_build_lib._PaygenBuild(
+        self.foo_build, self.tempdir, site_config)
+
+    # Case 1: mapping successful.
+    self.assertEqual(
+        paygen._MapToArchive('foo-board', '1.2.3'),
+        ('foo_board', 'foo_board/R11-1.2.3',
+         'gs://foo-archive/foo_board/R11-1.2.3'))
+
+    # Case 2: failure, too many build board names found.
+    with self.assertRaises(paygen_build_lib.ArchiveError):
+      paygen._MapToArchive('bar-board', '1.2.3')
+
+    # Case 3: failure, build board name not found.
+    with self.assertRaises(paygen_build_lib.ArchiveError):
+      paygen._MapToArchive('baz-board', '1.2.3')
+
+  def testValidateBoardConfig(self):
+    """Test ValidateBoardConfig."""
+
+    # If we are running on an external builder, we can't see the config.
+    # Without the config, we can't validate.
+    if not paygen_build_lib.config:
+      return
+
+    # Test a known board works.
+    paygen_build_lib.ValidateBoardConfig('x86-mario')
+
+    # Test an unknown board doesn't.
+    self.assertRaises(paygen_build_lib.BoardNotConfigured,
+                      paygen_build_lib.ValidateBoardConfig, 'goofy-board')
+
+
+class PaygenBuildLibTest_ImageTypes(BasePaygenBuildLibTest):
+  """Test PaygenBuildLib class for mixed image types."""
+
+  def setUp(self):
+    """Create a canned target build plus prior-version images for the tests."""
+    self.work_dir = '/work/foo'
+
+    # Three earlier builds: prev is a plain (signed) image, prev2/prev3 are
+    # 'base' type images like the current build's basic/premp images.
+    self.prev_image = gspaths.Image(channel='foo-channel',
+                                    board='foo-board',
+                                    version='1.0.0',
+                                    key='mp')
+    self.prev2_image = gspaths.Image(channel='foo-channel',
+                                     board='foo-board',
+                                     version='1.1.0',
+                                     key='mp',
+                                     image_type='base')
+    self.prev3_image = gspaths.Image(channel='foo-channel',
+                                     board='foo-board',
+                                     version='1.2.0',
+                                     key='mp',
+                                     image_type='base')
+    self.foo_build = gspaths.Build(bucket='crt',
+                                   channel='foo-channel',
+                                   board='foo-board',
+                                   version='1.2.3')
+    self.images = self._GetBuildImages(self.foo_build)
+    (self.basic_image, self.premp_image,
+     self.npo_image, self.premp_npo_image) = self.images
+
+  def _GetBuildImages(self, build):
+    """Create basic_image, npo_image, premp_image, premp_npo_image.
+
+    Args:
+      build: gspaths.Build object describing the build to create fake images
+        for.
+
+    Returns:
+      A list of gspaths.Image objects in the order
+      [basic, premp, npo, premp_npo]; several tests depend on this order.
+    """
+    # NPOs should have image_version incremented, but it doesn't matter for our
+    # testing.
+    basic_image = gspaths.Image(image_type='base', key='mp-v2', **build)
+    npo_image = gspaths.Image(key='mp-v2',
+                              image_channel='nplusone-channel',
+                              image_version=build.version,
+                              **build)
+    premp_image = gspaths.Image(image_type='base', key='premp', **build)
+    premp_npo_image = gspaths.Image(key='premp',
+                                    image_channel='nplusone-channel',
+                                    image_version=build.version,
+                                    **build)
+
+    # Code in several places depends on the order.
+    return [basic_image, premp_image, npo_image, premp_npo_image]
+
+  def testImageTypeFilter(self):
+    """Test filtering based on image types."""
+    self.assertEqual(paygen_build_lib._FilterForImageType([], ''), [])
+    images = list(self.images) + [self.prev_image, self.prev2_image,
+                                  self.prev3_image]
+    # Only the images created with image_type='base' should survive.
+    self.assertEqual(paygen_build_lib._FilterForImageType(images, 'base'),
+                     [self.basic_image, self.premp_image, self.prev2_image,
+                      self.prev3_image])
+
+  def testDiscoverImages(self):
+    """Test _DiscoverImages."""
+    paygen = self._GetPaygenBuildInstance()
+    uri_base = 'gs://crt/foo-channel/foo-board/1.2.3'
+
+    uri_basic = os.path.join(
+        uri_base, 'chromeos_1.2.3_foo-board_base_foo-channel_mp-v3.bin')
+    uri_premp = os.path.join(
+        uri_base, 'chromeos_1.2.3_foo-board_base_foo-channel_premp.bin')
+    uri_npo = os.path.join(
+        uri_base,
+        'chromeos_1.2.4_foo-board_recovery_nplusone-channel_mp-v3.bin')
+    file_list_result = [uri_basic, uri_premp, uri_npo]
+
+    base_image_params = {'channel': 'foo-channel',
+                         'board': 'foo-board',
+                         'version': '1.2.3',
+                         'bucket': 'crt'}
+    expected_basic = gspaths.Image(key='mp-v3',
+                                   image_type='base',
+                                   uri=uri_basic,
+                                   **base_image_params)
+    expected_premp = gspaths.Image(key='premp',
+                                   image_type='base',
+                                   uri=uri_premp,
+                                   **base_image_params)
+    expected_npo = gspaths.Image(key='mp-v3',
+                                 image_channel='nplusone-channel',
+                                 image_version='1.2.4', uri=uri_npo,
+                                 **base_image_params)
+    expected_result = [expected_npo, expected_basic, expected_premp]
+
+    self._TestDiscoverArtifacts(
+        os.path.join(uri_base, 'chromeos_*_foo-board_*_*_*.bin'),
+        file_list_result,
+        paygen._DiscoverImages,
+        [self.foo_build],
+        True,
+        expected_result)
+
+  def testDiscoverRequiredNpoDeltas(self):
+    """Test _DiscoverRequiredNpoDeltas."""
+    paygen = self._GetPaygenBuildInstance()
+
+    # Empty or single-image inputs never produce NPO deltas.
+    self.assertEqual(paygen._DiscoverRequiredNpoDeltas([]), [])
+
+    self.assertEqual(paygen._DiscoverRequiredNpoDeltas([self.basic_image]), [])
+
+    self.assertEqual(paygen._DiscoverRequiredNpoDeltas([self.npo_image]), [])
+
+    # Basic and NPO images have different types, so there should be no delta
+    # payloads.
+    self.assertEqual(paygen._DiscoverRequiredNpoDeltas([self.basic_image,
+                                                        self.npo_image]),
+                     [])
+    self.assertEqual(paygen._DiscoverRequiredNpoDeltas([self.premp_image,
+                                                        self.premp_npo_image]),
+                     [])
+
+  def testDiscoverRequiredFromPreviousDeltas(self):
+    """Test _DiscoverRequiredFromPreviousDeltas."""
+    paygen = self._GetPaygenBuildInstance()
+
+    images = [self.basic_image]
+    prevs = [self.prev_image, self.prev2_image, self.prev3_image]
+
+    # Empty lists.
+    results = paygen._DiscoverRequiredFromPreviousDeltas([], [])
+    expected = []
+    self.assertEqual(results, expected)
+
+    # Empty previous list.
+    results = paygen._DiscoverRequiredFromPreviousDeltas(images, [])
+    expected = []
+    self.assertEqual(results, expected)
+
+    # Empty target list.
+    results = paygen._DiscoverRequiredFromPreviousDeltas([], prevs)
+    expected = []
+    self.assertEqual(results, expected)
+
+    # Basic list. Note prev_image (no image_type) yields no delta; only the
+    # 'base' typed prev2/prev3 match the 'base' typed target.
+    results = paygen._DiscoverRequiredFromPreviousDeltas(images, prevs)
+    expected = [gspaths.Payload(tgt_image=self.basic_image,
+                                src_image=self.prev2_image),
+                gspaths.Payload(tgt_image=self.basic_image,
+                                src_image=self.prev3_image)]
+    self.assertEqual(results, expected)
+
+    # Inverted order (should return nothing).
+    results = paygen._DiscoverRequiredFromPreviousDeltas(
+        [self.prev_image], images)
+    expected = []
+    self.assertEqual(results, expected)
+
+  def testDiscoverRequiredPayloads(self):
+    """Test _DiscoverRequiredPayloads."""
+
+    paygen = self._GetPaygenBuildInstance()
+
+    output_uri = 'gs://foo'
+
+    self.mox.StubOutWithMock(paygen, '_DiscoverImages')
+    self.mox.StubOutWithMock(paygen, '_DiscoverTestImageArchives')
+    self.mox.StubOutWithMock(paygen, '_DiscoverNmoBuild')
+    self.mox.StubOutWithMock(paygen, '_DiscoverActiveFsiBuilds')
+    self.mox.StubOutWithMock(paygen_payload_lib, 'DefaultPayloadUri')
+
+    nmo_build = gspaths.Build(bucket='crt',
+                              channel='foo-channel',
+                              board='foo-board',
+                              version='1.2.2')
+    fsi1_build = gspaths.Build(bucket='crt',
+                               channel='foo-channel',
+                               board='foo-board',
+                               version='1.0.0')
+    fsi2_build = gspaths.Build(bucket='crt',
+                               channel='foo-channel',
+                               board='foo-board',
+                               version='1.1.0')
+
+    nmo_images = self._GetBuildImages(nmo_build)
+    nmo_test_image = self._GetBuildTestImage(nmo_build)
+    fsi1_images = self._GetBuildImages(fsi1_build)
+    fsi1_test_image = self._GetBuildTestImage(fsi1_build)
+    fsi2_images = self._GetBuildImages(fsi2_build)
+    fsi2_test_image = self._GetBuildTestImage(fsi2_build)
+
+    paygen._DiscoverImages(paygen._build).AndReturn(self.images)
+    paygen._DiscoverTestImageArchives(paygen._build).AndReturn([])
+    paygen._DiscoverNmoBuild().AndReturn([nmo_build])
+    paygen._DiscoverActiveFsiBuilds().AndReturn([fsi1_build, fsi2_build])
+    paygen._DiscoverImages(nmo_build).AndReturn(nmo_images)
+    paygen._DiscoverTestImageArchives(nmo_build).AndReturn([nmo_test_image])
+    paygen._DiscoverImages(fsi1_build).AndReturn(fsi1_images)
+    paygen._DiscoverTestImageArchives(fsi1_build).AndReturn([fsi1_test_image])
+    paygen._DiscoverImages(fsi2_build).AndReturn(fsi2_images)
+    paygen._DiscoverTestImageArchives(fsi2_build).AndReturn([fsi2_test_image])
+
+    # Simplify the output URIs, so it's easy to check them below.
+    paygen_payload_lib.DefaultPayloadUri(
+        mox.IsA(gspaths.Payload), None).MultipleTimes().AndReturn(output_uri)
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    results = paygen._DiscoverRequiredPayloads()
+
+    expected = [
+        gspaths.Payload(tgt_image=self.basic_image, uri=output_uri),
+        gspaths.Payload(tgt_image=self.npo_image, uri=output_uri),
+        gspaths.Payload(tgt_image=self.premp_image, uri=output_uri),
+        gspaths.Payload(tgt_image=self.premp_npo_image, uri=output_uri),
+        # No NPO Deltas because the basic images have different image types.
+
+        # NMO deltas.
+        gspaths.Payload(tgt_image=self.basic_image,
+                        src_image=nmo_images[0],
+                        uri=output_uri),
+        gspaths.Payload(tgt_image=self.premp_image,
+                        src_image=nmo_images[1],
+                        uri=output_uri),
+
+        # FSI Deltas.
+        gspaths.Payload(tgt_image=self.basic_image,
+                        src_image=fsi1_images[0],
+                        uri=output_uri),
+        gspaths.Payload(tgt_image=self.premp_image,
+                        src_image=fsi1_images[1],
+                        uri=output_uri),
+        gspaths.Payload(tgt_image=self.basic_image,
+                        src_image=fsi2_images[0],
+                        uri=output_uri),
+        gspaths.Payload(tgt_image=self.premp_image,
+                        src_image=fsi2_images[1],
+                        uri=output_uri)]
+    # Each expected payload is paired with False (the payload_test flag).
+    expected = zip(expected, itertools.repeat(False))
+    self.assertItemsEqual(sorted(results), sorted(expected))
diff --git a/lib/paygen/paygen_payload_lib.py b/lib/paygen/paygen_payload_lib.py
new file mode 100644
index 0000000..7273115
--- /dev/null
+++ b/lib/paygen/paygen_payload_lib.py
@@ -0,0 +1,860 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Hold the functions that do the real work generating payloads."""
+
+from __future__ import print_function
+
+import base64
+import datetime
+import filecmp
+import json
+import os
+import shutil
+import sys
+import tempfile
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import path_util
+from chromite.lib.paygen import dryrun_lib
+from chromite.lib.paygen import filelib
+from chromite.lib.paygen import gspaths
+from chromite.lib.paygen import signer_payloads_client
+from chromite.lib.paygen import urilib
+from chromite.lib.paygen import utils
+
+
+# Needed for the dev.host.lib import below.
+sys.path.insert(0, os.path.join(constants.SOURCE_ROOT, 'src', 'platform'))
+
+
+# Schema version written into the payload description json files; bump it
+# whenever the map produced by _PaygenPayload._StorePayloadJson changes.
+DESCRIPTION_FILE_VERSION = 2
+
+
+class Error(Exception):
+  """Base class for all payload generation errors raised by this module."""
+
+
+class UnexpectedSignerResultsError(Error):
+  """This is raised when signer results don't match our expectations."""
+
+
+class PayloadVerificationError(Error):
+  """Raised when the generated payload fails to verify (integrity check)."""
+
+
+class _PaygenPayload(object):
+  """Class to manage the process of generating and signing a payload."""
+
+  # What keys do we sign payloads with, and what size are they?
+  PAYLOAD_SIGNATURE_KEYSETS = ('update_signer',)
+  PAYLOAD_SIGNATURE_SIZES_BYTES = (2048 / 8,)  # aka 2048 bits in bytes.
+
+  TEST_IMAGE_NAME = 'chromiumos_test_image.bin'
+  RECOVERY_IMAGE_NAME = 'chromiumos_recovery_image.bin'
+  BASE_IMAGE_NAME = 'chromiumos_base_image.bin'
+
+  # Default names used by cros_generate_update_payload for extracting old/new
+  # kernel/rootfs partitions.
+  _DEFAULT_OLD_KERN_PART = 'old_kern.dat'
+  _DEFAULT_OLD_ROOT_PART = 'old_root.dat'
+  _DEFAULT_NEW_KERN_PART = 'new_kern.dat'
+  _DEFAULT_NEW_ROOT_PART = 'new_root.dat'
+
+  def __init__(self, payload, cache, work_dir, sign, verify,
+               au_generator_uri_override, dry_run=False):
+    """Init for _PaygenPayload.
+
+    Args:
+      payload: An instance of gspaths.Payload describing the payload to
+               generate.
+      cache: An instance of DownloadCache for retrieving files.
+      work_dir: A working directory for output files. Can NOT be shared.
+      sign: Boolean saying if the payload should be signed (normally, you do).
+      verify: whether the payload should be verified after being generated
+      au_generator_uri_override: URI to override standard au_generator.zip
+          rules.
+      dry_run: do not do any actual work
+    """
+    self.payload = payload
+    self.cache = cache
+    self.work_dir = work_dir
+    self._verify = verify
+    self._au_generator_uri_override = au_generator_uri_override
+    self._drm = dryrun_lib.DryRunMgr(dry_run)
+
+    # Well-known locations inside work_dir for all intermediate artifacts.
+    self.generator_dir = os.path.join(work_dir, 'au-generator')
+    self.src_image_file = os.path.join(work_dir, 'src_image.bin')
+    self.tgt_image_file = os.path.join(work_dir, 'tgt_image.bin')
+
+    self.payload_file = os.path.join(work_dir, 'delta.bin')
+    self.delta_log_file = os.path.join(work_dir, 'delta.log')
+    self.description_file = os.path.join(work_dir, 'delta.json')
+
+    # Replaced with a real signer client below iff sign is True.
+    self.signer = None
+
+    # If we are a bootstrap environment, this import will fail, so don't
+    # perform it until we need it.
+    from dev.host.lib import update_payload
+
+    self._update_payload = update_payload
+
+    if sign:
+      self.signed_payload_file = self.payload_file + '.signed'
+      self.metadata_signature_file = self._MetadataUri(self.signed_payload_file)
+
+      self.signer = signer_payloads_client.SignerPayloadsClientGoogleStorage(
+          payload.tgt_image.channel,
+          payload.tgt_image.board,
+          payload.tgt_image.version,
+          payload.tgt_image.bucket)
+
+  def _MetadataUri(self, uri):
+    """Given a payload uri, find the uri for the metadata signature.
+
+    Args:
+      uri: The payload URI as a string.
+
+    Returns:
+      The URI of the corresponding '.metadata-signature' file.
+    """
+    return uri + '.metadata-signature'
+
+  def _DeltaLogsUri(self, uri):
+    """Given a payload uri, find the uri for the delta generator logs.
+
+    Args:
+      uri: The payload URI as a string.
+
+    Returns:
+      The URI of the corresponding '.log' file.
+    """
+    return uri + '.log'
+
+  def _JsonUri(self, uri):
+    """Given a payload uri, find the uri for the json payload description.
+
+    Args:
+      uri: The payload URI as a string.
+
+    Returns:
+      The URI of the corresponding '.json' description file.
+    """
+    return uri + '.json'
+
+  def _PrepareGenerator(self):
+    """Download, and extract au-generator.zip into self.generator_dir."""
+    # An explicit override URI takes precedence over the standard location.
+    if self._au_generator_uri_override:
+      generator_uri = self._au_generator_uri_override
+    else:
+      generator_uri = gspaths.ChromeosReleases.GeneratorUri(
+          self.payload.tgt_image.channel,
+          self.payload.tgt_image.board,
+          self.payload.tgt_image.version,
+          self.payload.tgt_image.bucket)
+
+    logging.info('Preparing au-generator.zip from %s.', generator_uri)
+
+    # Extract zipped delta generator files to the expected directory.
+    # '-o' overwrites existing files without prompting.
+    tmp_zip = self.cache.GetFileInTempFile(generator_uri)
+    utils.RunCommand(['unzip', '-o', '-d', self.generator_dir, tmp_zip.name],
+                     redirect_stdout=True, redirect_stderr=True)
+    tmp_zip.close()
+
+  def _RunGeneratorCmd(self, cmd):
+    """Wrapper for RunCommand for programs in self.generator_dir.
+
+    Adjusts the program name for the current self.au_generator directory, and
+    sets up the special requirements needed for these 'out of chroot'
+    programs. Will automatically log the command output if execution resulted
+    in a nonzero exit code. Note that the command's stdout and stderr are
+    combined into a single string. This also sets the TMPDIR variable
+    accordingly in the spawned process' environment.
+
+    Args:
+      cmd: Program and argument list in a list. ['delta_generator', '--help']
+        NOTE: the list is modified in place (cmd[0] is rewritten below).
+
+    Returns:
+      The output of the executed command.
+
+    Raises:
+      cros_build_lib.RunCommandError if the command exited with a nonzero code.
+    """
+    # Adjust the command name to match the directory it's in.
+    cmd[0] = os.path.join(self.generator_dir, cmd[0])
+
+    # Modify the PATH and TMPDIR when running the script.
+    extra_env = {
+        'PATH': utils.PathPrepend(self.generator_dir),
+        'TMPDIR': self.work_dir}
+
+    # Run the command. error_code_ok lets us dump output ourselves below
+    # before raising, instead of RunCommand raising immediately.
+    result = cros_build_lib.RunCommand(
+        cmd,
+        cwd=self.generator_dir,
+        redirect_stdout=True,
+        combine_stdout_stderr=True,
+        error_code_ok=True,
+        extra_env=extra_env)
+
+    # Dump error output and raise an exception if things went awry.
+    if result.returncode:
+      logging.error('Nonzero exit code (%d), dumping command output:\n%s',
+                    result.returncode, result.output)
+      raise cros_build_lib.RunCommandError(
+          'Command failed: %s (cwd=%s)' % (' '.join(cmd), self.generator_dir),
+          result)
+
+    return result.output
+
+  @staticmethod
+  def _BuildArg(flag, dict_obj, key, default=None):
+    """Returns a command-line argument iff its value is present in a dictionary.
+
+    Args:
+      flag: the flag name to use with the argument value, e.g. --foo; if None
+            or an empty string, no flag will be used
+      dict_obj: a dictionary mapping possible keys to values
+      key: the key of interest; e.g. 'foo'
+      default: a default value to use if key is not in dict_obj (optional)
+
+    Returns:
+      If dict_obj[key] contains a non-False value or default is non-False,
+      returns a list containing the flag and value arguments (e.g. ['--foo',
+      'bar']), unless flag is empty/None, in which case returns a list
+      containing only the value argument (e.g.  ['bar']). Otherwise, returns an
+      empty list.
+    """
+    arg_list = []
+    # Note: 'or' means a falsy dict value ('' / 0 / None) also falls back to
+    # the default, not just a missing key.
+    val = dict_obj.get(key) or default
+    if val:
+      arg_list = [str(val)]
+      if flag:
+        arg_list.insert(0, flag)
+
+    return arg_list
+
+  def _PrepareImage(self, image, image_file):
+    """Download and prepare an image for delta generation.
+
+    Preparation includes downloading, extracting and converting the image into
+    an on-disk format, as necessary.
+
+    Args:
+      image: an object representing the image we're processing, either
+             UnsignedImageArchive or Image type from gspaths module.
+      image_file: file into which the prepared image should be copied.
+
+    Raises:
+      Error: if the image object is of an unrecognized type.
+    """
+
+    logging.info('Preparing image from %s as %s', image.uri, image_file)
+
+    # Figure out what we're downloading and how to handle it:
+    # maps image_type -> (name of file to extract from archive, is_signed).
+    image_handling_by_type = {
+        'signed': (None, True),
+        'test': (self.TEST_IMAGE_NAME, False),
+        'recovery': (self.RECOVERY_IMAGE_NAME, True),
+        'base': (self.BASE_IMAGE_NAME, True),
+    }
+    if gspaths.IsImage(image):
+      # No need to extract.
+      extract_file = None
+    elif gspaths.IsUnsignedImageArchive(image):
+      extract_file, _ = image_handling_by_type[image.get('image_type',
+                                                         'signed')]
+    else:
+      raise Error('Unknown image type %s' % type(image))
+
+    # Are we downloading an archive that contains the image?
+    if extract_file:
+      # Archive will be downloaded to a temporary location. delete=False so
+      # the file survives the 'with' block; we remove it manually below.
+      with tempfile.NamedTemporaryFile(
+          prefix='image-archive-', suffix='.tar.xz', dir=self.work_dir,
+          delete=False) as temp_file:
+        download_file = temp_file.name
+    else:
+      download_file = image_file
+
+    # Download the image file or archive.
+    self.cache.GetFileCopy(image.uri, download_file)
+
+    # If we downloaded an archive, extract the image file from it.
+    if extract_file:
+      cmd = ['tar', '-xJf', download_file, extract_file]
+      cros_build_lib.RunCommand(cmd, cwd=self.work_dir)
+
+      # Rename it into the desired image name.
+      shutil.move(os.path.join(self.work_dir, extract_file), image_file)
+
+      # It's safe to delete the archive at this point.
+      os.remove(download_file)
+
+  def _GenerateUnsignedPayload(self):
+    """Generate the unsigned delta into self.payload_file."""
+    # Note that the command run here requires sudo access.
+
+    logging.info('Generating unsigned payload as %s', self.payload_file)
+
+    # A delta payload is produced iff src_image is set; otherwise this
+    # generates a full payload for tgt_image only.
+    tgt_image = self.payload.tgt_image
+    cmd = ['cros_generate_update_payload',
+           '--outside_chroot',
+           '--output', self.payload_file,
+           '--image', self.tgt_image_file,
+           '--channel', tgt_image.channel,
+           '--board', tgt_image.board,
+           '--version', tgt_image.version]
+    cmd += self._BuildArg('--key', tgt_image, 'key', default='test')
+    cmd += self._BuildArg('--build_channel', tgt_image, 'image_channel',
+                          default=tgt_image.channel)
+    cmd += self._BuildArg('--build_version', tgt_image, 'image_version',
+                          default=tgt_image.version)
+
+    if self.payload.src_image:
+      src_image = self.payload.src_image
+      cmd += ['--src_image', self.src_image_file,
+              '--src_channel', src_image.channel,
+              '--src_board', src_image.board,
+              '--src_version', src_image.version]
+      cmd += self._BuildArg('--src_key', src_image, 'key', default='test')
+      cmd += self._BuildArg('--src_build_channel', src_image, 'image_channel',
+                            default=src_image.channel)
+      cmd += self._BuildArg('--src_build_version', src_image, 'image_version',
+                            default=src_image.version)
+
+    # Keep the generator's combined stdout/stderr for later upload.
+    delta_log = self._RunGeneratorCmd(cmd)
+    self._StoreDeltaLog(delta_log)
+
+  def _GenPayloadHash(self):
+    """Generate the hash of the payload to be signed.
+
+    Works from an unsigned update payload.
+
+    Returns:
+      payload_hash as a string.
+    """
+    logging.info('Calculating payload hashes on %s.', self.payload_file)
+
+    # How big will the signatures be.
+    signature_sizes = [str(size) for size in self.PAYLOAD_SIGNATURE_SIZES_BYTES]
+
+    # delta_generator writes the hash to the temp file by name; we read it
+    # back through our still-open (position 0) handle.
+    with tempfile.NamedTemporaryFile('rb') as payload_hash_file:
+      cmd = ['delta_generator',
+             '-in_file=' + self.payload_file,
+             '-out_hash_file=' + payload_hash_file.name,
+             '-signature_size=' + ':'.join(signature_sizes)]
+
+      self._RunGeneratorCmd(cmd)
+      return payload_hash_file.read()
+
+  def _MetadataSize(self, payload_file):
+    """Discover the metadata size.
+
+    The payload generator should return this information when calculating the
+    metadata hash, but would require a lot of new plumbing. Instead we just
+    look it up ourselves.
+
+    Args:
+      payload_file: Which payload file to extract metadata size from.
+
+    Returns:
+      int value of the metadata size.
+    """
+    with open(payload_file) as payload_fd:
+      payload = self._update_payload.Payload(payload_fd)
+      payload.Init()
+      # data_offset is where the payload data begins, i.e. the number of
+      # bytes of metadata preceding it (per dev.host.lib.update_payload).
+      return payload.data_offset
+
+  def _GenMetadataHash(self):
+    """Generate the hash of the payload metadata to be signed.
+
+    Works from an unsigned update payload.
+
+    Returns:
+      metadata_hash as a string.
+    """
+    logging.info('Calculating payload hashes on %s.', self.payload_file)
+
+    # How big will the signatures be.
+    signature_sizes = [str(size) for size in self.PAYLOAD_SIGNATURE_SIZES_BYTES]
+
+    # delta_generator writes the hash to the temp file by name; we read it
+    # back through our still-open (position 0) handle.
+    with tempfile.NamedTemporaryFile('rb') as metadata_hash_file:
+      cmd = ['delta_generator',
+             '-in_file=' + self.payload_file,
+             '-out_metadata_hash_file=' + metadata_hash_file.name,
+             '-signature_size=' + ':'.join(signature_sizes)]
+
+      self._RunGeneratorCmd(cmd)
+      return metadata_hash_file.read()
+
+  def _GenerateSignerResultsError(self, format_str, *args):
+    """Helper for reporting errors with signer results.
+
+    Args:
+      format_str: A %-style format string for the error message.
+      *args: Values interpolated into format_str.
+
+    Raises:
+      UnexpectedSignerResultsError: always, after logging the message.
+    """
+    msg = format_str % args
+    logging.error(msg)
+    raise UnexpectedSignerResultsError(msg)
+
+  def _SignHashes(self, hashes):
+    """Get the signer to sign the hashes with the update payload key via GS.
+
+    May sign each hash with more than one key, based on how many keysets are
+    required.
+
+    Args:
+      hashes: List of hashes to be signed.
+
+    Returns:
+      List of lists which contain each signed hash.
+      [[hash_1_sig_1, hash_1_sig_2], [hash_2_sig_1, hash_2_sig_2]]
+
+    Raises:
+      UnexpectedSignerResultsError: via _GenerateSignerResultsError, if the
+        signer output has the wrong shape or signature sizes.
+    """
+    logging.info('Signing payload hashes with %s.',
+                 ', '.join(self.PAYLOAD_SIGNATURE_KEYSETS))
+
+    # Results look like:
+    #  [[hash_1_sig_1, hash_1_sig_2], [hash_2_sig_1, hash_2_sig_2]]
+    hashes_sigs = self.signer.GetHashSignatures(
+        hashes,
+        keysets=self.PAYLOAD_SIGNATURE_KEYSETS)
+
+    if hashes_sigs is None:
+      self._GenerateSignerResultsError('Signing of hashes failed')
+    if len(hashes_sigs) != len(hashes):
+      self._GenerateSignerResultsError(
+          'Count of hashes signed (%d) != Count of hashes (%d).',
+          len(hashes_sigs),
+          len(hashes))
+
+    # Make sure the results contain the expected number of signatures.
+    for hash_sigs in hashes_sigs:
+      # Make sure each hash has the right number of signatures.
+      if len(hash_sigs) != len(self.PAYLOAD_SIGNATURE_SIZES_BYTES):
+        self._GenerateSignerResultsError(
+            'Signature count (%d) != Expected signature count (%d)',
+            len(hash_sigs),
+            len(self.PAYLOAD_SIGNATURE_SIZES_BYTES))
+
+      # Make sure each hash signature is the expected size.
+      for sig, sig_size in zip(hash_sigs, self.PAYLOAD_SIGNATURE_SIZES_BYTES):
+        if len(sig) != sig_size:
+          self._GenerateSignerResultsError(
+              'Signature size (%d) != expected size(%d)',
+              len(sig),
+              sig_size)
+
+    return hashes_sigs
+
+  def _InsertPayloadSignatures(self, signatures):
+    """Put payload signatures into the payload they sign.
+
+    Writes the result to self.signed_payload_file; self.payload_file is left
+    untouched.
+
+    Args:
+      signatures: List of signatures for the payload.
+    """
+    logging.info('Inserting payload signatures into %s.',
+                 self.signed_payload_file)
+
+    signature_files = [utils.CreateTempFileWithContents(s) for s in signatures]
+    signature_file_names = [f.name for f in signature_files]
+
+    cmd = ['delta_generator',
+           '-in_file=' + self.payload_file,
+           '-signature_file=' + ':'.join(signature_file_names),
+           '-out_file=' + self.signed_payload_file]
+
+    self._RunGeneratorCmd(cmd)
+
+    # Closing the temp files releases them once the generator is done.
+    for f in signature_files:
+      f.close()
+
+  def _StoreMetadataSignatures(self, signatures):
+    """Store metadata signatures related to the payload.
+
+    Our current format for saving metadata signatures only supports a single
+    signature at this time.
+
+    Args:
+      signatures: A list of metadata signatures in binary string format.
+
+    Raises:
+      UnexpectedSignerResultsError: if more than one signature is given.
+    """
+    if len(signatures) != 1:
+      self._GenerateSignerResultsError(
+          'Received %d metadata signatures, only a single signature supported.',
+          len(signatures))
+
+    logging.info('Saving metadata signatures in %s.',
+                 self.metadata_signature_file)
+
+    # The signature is stored base64-encoded.
+    encoded_signature = base64.b64encode(signatures[0])
+
+    with open(self.metadata_signature_file, 'w+') as f:
+      f.write(encoded_signature)
+
+  def _StorePayloadJson(self, metadata_signatures):
+    """Generate the payload description json file.
+
+    The payload description contains a dictionary with the following
+    fields populated.
+
+    {
+      "version": 2,
+      "sha1_hex": <payload sha1 hash as a hex encoded string>,
+      "sha256_hex": <payload sha256 hash as a hex encoded string>,
+      "md5_hex": <payload md5 hash as a hex encoded string>,
+      "metadata_size": <integer of payload metadata covered by signature>,
+      "metadata_signature": <metadata signature as base64 encoded string or
+                             null>
+    }
+
+    Args:
+      metadata_signatures: A list of signatures in binary string format.
+    """
+    # Decide if we use the signed or unsigned payload file.
+    payload_file = self.payload_file
+    if self.signer:
+      payload_file = self.signed_payload_file
+
+    # Locate everything we put in the json.
+    sha1_hex, sha256_hex = filelib.ShaSums(payload_file)
+    md5_hex = filelib.MD5Sum(payload_file)
+
+    metadata_signature = None
+    if metadata_signatures:
+      if len(metadata_signatures) != 1:
+        self._GenerateSignerResultsError(
+            'Received %d metadata signatures, only one supported.',
+            len(metadata_signatures))
+      metadata_signature = base64.b64encode(metadata_signatures[0])
+
+    # Bundle it up in a map matching the Json format.
+    # Increment DESCRIPTION_FILE_VERSION, if changing this map.
+    payload_map = {
+        'version': DESCRIPTION_FILE_VERSION,
+        'sha1_hex': sha1_hex,
+        'sha256_hex': sha256_hex,
+        'md5_hex': md5_hex,
+        'metadata_size': self._MetadataSize(payload_file),
+        'metadata_signature': metadata_signature,
+    }
+
+    # Convert to Json.
+    payload_json = json.dumps(payload_map, sort_keys=True)
+
+    # Write out the results.
+    osutils.WriteFile(self.description_file, payload_json)
+
+  def _StoreDeltaLog(self, delta_log):
+    """Store delta log related to the payload.
+
+    Write out the delta log to a known file name. Mostly in its own function
+    to simplify unittest mocks.
+
+    Args:
+      delta_log: The delta logs as a single string.
+    """
+    with open(self.delta_log_file, 'w+') as f:
+      f.write(delta_log)
+
+  def _SignPayload(self):
+    """Wrap all the steps for signing an existing payload.
+
+    Returns:
+      List of payload signatures, List of metadata signatures.
+    """
+    # Create hashes to sign.
+    payload_hash = self._GenPayloadHash()
+    metadata_hash = self._GenMetadataHash()
+
+    # Sign them. Two hashes in, so _SignHashes returns exactly two
+    # per-hash signature lists, unpacked below.
+    # pylint: disable=unpacking-non-sequence
+    payload_signatures, metadata_signatures = self._SignHashes(
+        [payload_hash, metadata_hash])
+    # pylint: enable=unpacking-non-sequence
+
+    # Insert payload signature(s).
+    self._InsertPayloadSignatures(payload_signatures)
+
+    # Store Metadata signature(s).
+    self._StoreMetadataSignatures(metadata_signatures)
+
+    return (payload_signatures, metadata_signatures)
+
+  def _Create(self):
+    """Create a given payload, if it doesn't already exist.
+
+    NOTE(review): despite the summary above, no existence check is performed
+    in this method; the payload is (re)generated unconditionally here —
+    presumably any skip logic lives in the caller. TODO: confirm.
+    """
+
+    logging.info('Generating %s payload %s',
+                 'delta' if self.payload.src_image else 'full', self.payload)
+
+    # Fetch and extract the delta generator.
+    self._PrepareGenerator()
+
+    # Fetch and prepare the tgt image.
+    self._PrepareImage(self.payload.tgt_image, self.tgt_image_file)
+
+    # Fetch and prepare the src image.
+    if self.payload.src_image:
+      self._PrepareImage(self.payload.src_image, self.src_image_file)
+
+    # Generate the unsigned payload.
+    self._GenerateUnsignedPayload()
+
+    # Sign the payload, if needed.
+    metadata_signatures = None
+    if self.signer:
+      _, metadata_signatures = self._SignPayload()
+
+    # Store hash and signatures json.
+    self._StorePayloadJson(metadata_signatures)
+
+  def _CheckPayloadIntegrity(self, payload, is_delta, metadata_sig_file_name):
+    """Checks the integrity of a generated payload.
+
+    Args:
+      payload: a pre-initialized update_payload.Payload object.
+      is_delta: whether or not this is a delta payload (Boolean).
+      metadata_sig_file_name: metadata signature file.
+
+    Raises:
+      PayloadVerificationError: when an error is encountered.
+    """
+    logging.info('Checking payload integrity')
+    with utils.CheckedOpen(metadata_sig_file_name) as metadata_sig_file:
+      try:
+        # TODO(garnold)(chromium:243559) partition sizes should be embedded in
+        # the payload; ditch the default values once it's done.
+        # TODO(garnold)(chromium:261417) this disables the check for unmoved
+        # blocks in MOVE sequences, which is an inefficiency but not
+        # necessarily a problem.  It should be re-enabled once the delta
+        # generator can optimize away such cases.
+        payload.Check(metadata_sig_file=metadata_sig_file,
+                      assert_type=('delta' if is_delta else 'full'),
+                      disabled_tests=['move-same-src-dst-block'])
+      except self._update_payload.PayloadError as e:
+        raise PayloadVerificationError(
+            'Payload integrity check failed: %s' % e)
+
+  def _ApplyPayload(self, payload, is_delta):
+    """Applies a generated payload and verifies the result.
+
+    Args:
+      payload: a pre-initialized update_payload.Payload object.
+      is_delta: whether or not this is a delta payload (Boolean).
+
+    Raises:
+      PayloadVerificationError: when an error occurs.
+    """
+    # Extract the source/target kernel/rootfs partitions.
+    # TODO(garnold)(chromium:243561) this is a redundant operation as the
+    # partitions are already extracted (in some form) for the purpose of
+    # payload generation. We should only do this once.
+    cmd = ['cros_generate_update_payload',
+           '--outside_chroot',
+           '--extract',
+           '--image', self.tgt_image_file]
+    part_files = {}
+    part_files['new_kernel_part'] = self._DEFAULT_NEW_KERN_PART
+    part_files['new_rootfs_part'] = self._DEFAULT_NEW_ROOT_PART
+    if is_delta:
+      cmd += ['--src_image', self.src_image_file]
+      part_files['old_kernel_part'] = self._DEFAULT_OLD_KERN_PART
+      part_files['old_rootfs_part'] = self._DEFAULT_OLD_ROOT_PART
+
+    self._RunGeneratorCmd(cmd)
+
+    # Resolve the extracted partition names into absolute paths, and make
+    # sure each partition was actually produced by the extraction step.
+    for part_name, part_file in part_files.items():
+      part_file = os.path.join(self.generator_dir, part_file)
+      if not os.path.isfile(part_file):
+        raise PayloadVerificationError('Failed to extract partition (%s)' %
+                                       part_file)
+      part_files[part_name] = part_file
+
+    # Apply the payload and verify the result; make sure to pass in the
+    # explicit path to the bspatch binary in the au-generator directory (the
+    # one we need to be using), and not to depend on PATH resolution etc. Also
+    # note that we instruct the call to generate files with a .test suffix,
+    # which we can later compare to the actual target partition (as it was
+    # extracted from the target image above).
+    logging.info('Applying %s payload and verifying result',
+                 'delta' if is_delta else 'full')
+    ref_new_kern_part = part_files['new_kernel_part']
+    part_files['new_kernel_part'] += '.test'
+    ref_new_root_part = part_files['new_rootfs_part']
+    part_files['new_rootfs_part'] += '.test'
+    bspatch_path = os.path.join(self.generator_dir, 'bspatch')
+    try:
+      payload.Apply(bspatch_path=bspatch_path, **part_files)
+    except self._update_payload.PayloadError as e:
+      raise PayloadVerificationError('Payload failed to apply: %s' % e)
+
+    # Prior to comparing, remove unused space past the filesystem boundary
+    # in the extracted target partitions.
+    filelib.TruncateToSize(ref_new_kern_part,
+                           os.path.getsize(part_files['new_kernel_part']))
+    filelib.TruncateToSize(ref_new_root_part,
+                           os.path.getsize(part_files['new_rootfs_part']))
+
+    # Compare resulting partitions with the ones from the target image.
+    if not filecmp.cmp(ref_new_kern_part, part_files['new_kernel_part']):
+      raise PayloadVerificationError('Resulting kernel partition corrupted')
+    if not filecmp.cmp(ref_new_root_part, part_files['new_rootfs_part']):
+      raise PayloadVerificationError('Resulting rootfs partition corrupted')
+
+  def _VerifyPayload(self):
+    """Checks the integrity of the generated payload.
+
+    Raises:
+      PayloadVerificationError when the payload fails to verify.
+    """
+    # Verify the signed artifacts when signing is enabled; otherwise verify
+    # the unsigned payload, with no metadata signature file.
+    if self.signer:
+      payload_file_name = self.signed_payload_file
+      metadata_sig_file_name = self.metadata_signature_file
+    else:
+      payload_file_name = self.payload_file
+      metadata_sig_file_name = None
+
+    with open(payload_file_name) as payload_file:
+      payload = self._update_payload.Payload(payload_file)
+      is_delta = bool(self.payload.src_image)
+      try:
+        payload.Init()
+
+        # First, verify the payload's integrity.
+        self._CheckPayloadIntegrity(payload, is_delta, metadata_sig_file_name)
+
+        # Second, try to apply the payload and check the result.
+        self._ApplyPayload(payload, is_delta)
+
+      except self._update_payload.PayloadError as e:
+        raise PayloadVerificationError('Payload failed to verify: %s' % e)
+
+  def _UploadResults(self):
+    """Copy the payload generation results to the specified destination.
+
+    Uploads the (signed or unsigned) payload to self.payload.uri, plus the
+    metadata signature (signed case only), delta log, and json description.
+    """
+    logging.info('Uploading payload to %s.', self.payload.uri)
+
+    # Deliver the payload to the final location.
+    if self.signer:
+      urilib.Copy(self.signed_payload_file, self.payload.uri)
+      urilib.Copy(self.metadata_signature_file,
+                  self._MetadataUri(self.payload.uri))
+    else:
+      urilib.Copy(self.payload_file, self.payload.uri)
+
+    # Upload payload related artifacts.
+    urilib.Copy(self.delta_log_file, self._DeltaLogsUri(self.payload.uri))
+    urilib.Copy(self.description_file, self._JsonUri(self.payload.uri))
+
+  def Run(self):
+    """Create, verify and upload the results."""
+    # NOTE(review): _drm looks like a dry-run-aware invoker defined elsewhere
+    # in this class -- confirm against its definition.
+    self._drm(self._Create)
+    if self._verify:
+      self._drm(self._VerifyPayload)
+    self._drm(self._UploadResults)
+
+
+def DefaultPayloadUri(payload, random_str=None):
+  """Compute the default output URI for a payload.
+
+  For a glob that matches all potential URIs for this
+  payload, pass in a random_str of '*'.
+
+  Args:
+    payload: gspaths.Payload instance.
+    random_str: A hook to force a specific random_str. None means generate it.
+
+  Returns:
+    Default URI for the payload.
+
+  Raises:
+    Error: if the target image is neither a signed Image nor an
+      UnsignedImageArchive.
+  """
+  src_version = None
+  if payload.src_image:
+    src_version = payload.src_image['version']
+
+  if gspaths.IsImage(payload.tgt_image):
+    # Signed payload.
+    return gspaths.ChromeosReleases.PayloadUri(
+        channel=payload.tgt_image.channel,
+        board=payload.tgt_image.board,
+        version=payload.tgt_image.version,
+        random_str=random_str,
+        key=payload.tgt_image.key,
+        image_channel=payload.tgt_image.image_channel,
+        image_version=payload.tgt_image.image_version,
+        src_version=src_version,
+        bucket=payload.tgt_image.bucket)
+  elif gspaths.IsUnsignedImageArchive(payload.tgt_image):
+    # Unsigned test payload.
+    return gspaths.ChromeosReleases.PayloadUri(
+        channel=payload.tgt_image.channel,
+        board=payload.tgt_image.board,
+        version=payload.tgt_image.version,
+        random_str=random_str,
+        src_version=src_version,
+        bucket=payload.tgt_image.bucket)
+  else:
+    raise Error('Unknown image type %s' % type(payload.tgt_image))
+
+
+def SetPayloadUri(payload, uri):
+  """Sets (overrides) the URI in a payload object.
+
+  Mutates the passed-in payload in place; does not return anything.
+
+  Args:
+    payload: gspaths.Payload instance.
+    uri: A URI (string) to the payload file.
+  """
+  payload.uri = uri
+
+
+def FillInPayloadUri(payload, random_str=None):
+  """Fill in default output URI for a payload if missing.
+
+  A payload that already has a URI is left untouched.
+
+  Args:
+    payload: gspaths.Payload instance.
+    random_str: A hook to force a specific random_str. None means generate it.
+  """
+  if not payload.uri:
+    SetPayloadUri(payload, DefaultPayloadUri(payload, random_str))
+
+
+def _FilterNonPayloadUris(payload_uris):
+  """Filters out non-payloads from a list of GS URIs.
+
+  This essentially filters out known auxiliary artifacts whose names resemble /
+  derive from a respective payload name, such as files with .log and
+  .metadata-signature extensions.
+
+  Args:
+    payload_uris: a list of GS URIs (potentially) corresponding to payloads
+
+  Returns:
+    A filtered list of URIs.
+  """
+  return [uri for uri in payload_uris
+          if not (uri.endswith('.log') or uri.endswith('.metadata-signature'))]
+
+
+def FindExistingPayloads(payload):
+  """Look to see if any matching payloads already exist.
+
+  Since payload names contain a random component, there can be multiple
+  names for a given payload. This function lists all existing payloads
+  that match the default URI for the given payload.
+
+  Args:
+    payload: gspaths.Payload instance.
+
+  Returns:
+    List of URIs for existing payloads that match the default payload pattern.
+  """
+  # random_str='*' turns the default URI into a glob matching every
+  # randomized variant of this payload's name.
+  search_uri = DefaultPayloadUri(payload, random_str='*')
+  return _FilterNonPayloadUris(urilib.ListFiles(search_uri))
+
+
+def FindCacheDir():
+  """Helper for deciding what cache directory to use.
+
+  Returns:
+    Returns a directory suitable for use with a DownloadCache.
+  """
+  # Keep the paygen cache under the standard chromite cache root.
+  return os.path.join(path_util.GetCacheDir(), 'paygen_cache')
+
+
+def CreateAndUploadPayload(payload, cache, work_dir, sign=True, verify=True,
+                           dry_run=False, au_generator_uri=None):
+  """Helper to create a PaygenPayloadLib instance and use it.
+
+  Args:
+    payload: An instance of gspaths.Payload describing the payload to generate.
+    cache: An instance of DownloadCache for retrieving files.
+    work_dir: A working directory that can hold scratch files. Will be cleaned
+              up when done, and won't interfere with other users. None for /tmp.
+    sign: Boolean saying if the payload should be signed (normally, you do).
+    verify: whether the payload should be verified (default: True)
+    dry_run: don't perform actual work
+    au_generator_uri: URI to override standard au_generator.zip rules.
+  """
+  # Scratch files live in a unique temp dir that is removed on exit.
+  with osutils.TempDir(prefix='paygen_payload.', base_dir=work_dir) as gen_dir:
+    logging.info('* Starting payload generation')
+    start_time = datetime.datetime.now()
+
+    _PaygenPayload(payload, cache, gen_dir, sign, verify, au_generator_uri,
+                   dry_run=dry_run).Run()
+
+    end_time = datetime.datetime.now()
+    logging.info('* Finished payload generation in %s', end_time - start_time)
diff --git a/lib/paygen/paygen_payload_lib_unittest b/lib/paygen/paygen_payload_lib_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/lib/paygen/paygen_payload_lib_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/paygen/paygen_payload_lib_unittest.py b/lib/paygen/paygen_payload_lib_unittest.py
new file mode 100644
index 0000000..9526998
--- /dev/null
+++ b/lib/paygen/paygen_payload_lib_unittest.py
@@ -0,0 +1,779 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test paygen_payload_lib library."""
+
+from __future__ import print_function
+
+import mock
+import mox
+import os
+import shutil
+import tempfile
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+
+from chromite.lib.paygen import download_cache
+from chromite.lib.paygen import gspaths
+from chromite.lib.paygen import paygen_payload_lib
+from chromite.lib.paygen import signer_payloads_client
+from chromite.lib.paygen import urilib
+
+
+# We access a lot of protected members during testing.
+# pylint: disable=protected-access
+
+
+class PaygenPayloadLibTest(cros_test_lib.MoxTempDirTestCase):
+  """PaygenPayloadLib tests base class.
+
+  Provides a set of gspaths image/payload fixtures shared by subclasses,
+  plus a class-wide DownloadCache.
+  """
+
+  def setUp(self):
+    # Signed recovery images at two versions, used to build full and delta
+    # payload fixtures below.
+    self.old_image = gspaths.Image(
+        channel='dev-channel',
+        board='x86-alex',
+        version='1620.0.0',
+        key='mp-v3',
+        uri=('gs://chromeos-releases-test/dev-channel/x86-alex/1620.0.0/'
+             'chromeos_1620.0.0_x86-alex_recovery_dev-channel_mp-v3.bin'))
+
+    self.old_base_image = gspaths.Image(
+        channel='dev-channel',
+        board='x86-alex',
+        version='1620.0.0',
+        key='mp-v3',
+        image_type='base',
+        uri=('gs://chromeos-releases-test/dev-channel/x86-alex/1620.0.0/'
+             'chromeos_1620.0.0_x86-alex_base_dev-channel_mp-v3.bin'))
+
+    self.new_image = gspaths.Image(
+        channel='dev-channel',
+        board='x86-alex',
+        version='4171.0.0',
+        key='mp-v3',
+        uri=('gs://chromeos-releases-test/dev-channel/x86-alex/4171.0.0/'
+             'chromeos_4171.0.0_x86-alex_recovery_dev-channel_mp-v3.bin'))
+
+    # Image whose image_channel/image_version differ from the build's.
+    self.new_nplusone_image = gspaths.Image(
+        channel='dev-channel',
+        board='x86-alex',
+        version='4171.0.0',
+        key='mp-v3',
+        image_channel='nplusone-channel',
+        image_version='4171.0.1',
+        uri=('gs://chromeos-releases-test/dev-channel/x86-alex/4171.0.0/'
+             'chromeos_4171.0.1_x86-alex_recovery_nplusone-channel_mp-v3.bin'))
+
+    self.old_test_image = gspaths.UnsignedImageArchive(
+        channel='dev-channel',
+        board='x86-alex',
+        version='1620.0.0',
+        uri=('gs://chromeos-releases-test/dev-channel/x86-alex/1620.0.0/'
+             'chromeos_1620.0.0_x86-alex_recovery_dev-channel_test.bin'))
+
+    # NOTE(review): old_test_image is an UnsignedImageArchive while
+    # new_test_image is a plain Image -- possibly intentional, confirm.
+    self.new_test_image = gspaths.Image(
+        channel='dev-channel',
+        board='x86-alex',
+        version='4171.0.0',
+        uri=('gs://chromeos-releases-test/dev-channel/x86-alex/4171.0.0/'
+             'chromeos_4171.0.0_x86-alex_recovery_dev-channel_test.bin'))
+
+    # Payload fixtures derived from the images above.
+    self.full_payload = gspaths.Payload(tgt_image=self.old_base_image,
+                                        src_image=None,
+                                        uri='gs://full_old_foo/boo')
+
+    self.delta_payload = gspaths.Payload(tgt_image=self.new_image,
+                                         src_image=self.old_image,
+                                         uri='gs://delta_new_old/boo')
+
+    self.nplusone_payload = gspaths.Payload(tgt_image=self.new_nplusone_image,
+                                            src_image=self.new_image,
+                                            uri='gs://delta_npo_new/boo')
+
+    self.full_test_payload = gspaths.Payload(tgt_image=self.old_test_image,
+                                             src_image=None,
+                                             uri='gs://full_old_foo/boo-test')
+
+    self.delta_test_payload = gspaths.Payload(tgt_image=self.new_test_image,
+                                              src_image=self.old_test_image,
+                                              uri='gs://delta_new_old/boo-test')
+
+  @classmethod
+  def setUpClass(cls):
+    # Share one download cache across every test in the class; torn down in
+    # tearDownClass below.
+    cls.cache_dir = tempfile.mkdtemp(prefix='crostools-unittest-cache')
+    cls.cache = download_cache.DownloadCache(cls.cache_dir)
+
+  @classmethod
+  def tearDownClass(cls):
+    cls.cache = None
+    shutil.rmtree(cls.cache_dir)
+
+
+class PaygenPayloadLibBasicTest(PaygenPayloadLibTest):
+  """PaygenPayloadLib basic (and quick) testing."""
+
+  def _GetStdGenerator(self, work_dir=None, payload=None, sign=True,
+                       au_generator_uri_override=None):
+    """Helper function to create a standardized PayloadGenerator.
+
+    Args:
+      work_dir: scratch directory; defaults to the test's tempdir.
+      payload: gspaths.Payload to generate; defaults to self.full_payload.
+      sign: whether the payload should be signed.
+      au_generator_uri_override: URI of the au-generator archive; defaults
+        to the standard generator URI for the payload's target image.
+
+    Returns:
+      A paygen_payload_lib._PaygenPayload instance with verify disabled.
+    """
+    if payload is None:
+      payload = self.full_payload
+
+    if work_dir is None:
+      work_dir = self.tempdir
+
+    if not au_generator_uri_override:
+      au_generator_uri_override = gspaths.ChromeosReleases.GeneratorUri(
+          payload.tgt_image.channel, payload.tgt_image.board, '6351.0.0')
+
+    return paygen_payload_lib._PaygenPayload(
+        payload=payload,
+        cache=self.cache,
+        work_dir=work_dir,
+        sign=sign,
+        verify=False,
+        au_generator_uri_override=au_generator_uri_override)
+
+  def testWorkingDirNames(self):
+    """Make sure that some of the files we create have the expected names."""
+    gen = self._GetStdGenerator(work_dir='/foo')
+
+    self.assertEqual(gen.generator_dir, '/foo/au-generator')
+    self.assertEqual(gen.src_image_file, '/foo/src_image.bin')
+    self.assertEqual(gen.tgt_image_file, '/foo/tgt_image.bin')
+    self.assertEqual(gen.payload_file, '/foo/delta.bin')
+    self.assertEqual(gen.delta_log_file, '/foo/delta.log')
+
+    # Signed image specific values.
+    self.assertEqual(gen.signed_payload_file, '/foo/delta.bin.signed')
+    self.assertEqual(gen.metadata_signature_file,
+                     '/foo/delta.bin.signed.metadata-signature')
+                     '/foo/delta.bin.signed.metadata-signature')
+
+  def testUriManipulators(self):
+    """Validate _MetadataUri, _DeltaLogsUri and _JsonUri."""
+    gen = self._GetStdGenerator(work_dir='/foo')
+
+    # Each helper should work for local paths and GS URIs alike.
+    self.assertEqual(gen._MetadataUri('/foo/bar'),
+                     '/foo/bar.metadata-signature')
+    self.assertEqual(gen._MetadataUri('gs://foo/bar'),
+                     'gs://foo/bar.metadata-signature')
+
+    self.assertEqual(gen._DeltaLogsUri('/foo/bar'),
+                     '/foo/bar.log')
+    self.assertEqual(gen._DeltaLogsUri('gs://foo/bar'),
+                     'gs://foo/bar.log')
+
+    self.assertEqual(gen._JsonUri('/foo/bar'),
+                     '/foo/bar.json')
+    self.assertEqual(gen._JsonUri('gs://foo/bar'),
+                     'gs://foo/bar.json')
+
+  @cros_test_lib.NetworkTest()
+  def testPrepareGenerator(self):
+    """Validate that we can download an unzip a generator artifact."""
+    # Marked as a network test: _PrepareGenerator fetches the au-generator
+    # archive from Google Storage.
+    gen = self._GetStdGenerator()
+    gen._PrepareGenerator()
+
+    # Ensure that the expected executables in the au-generator are available.
+    expected = os.path.join(gen.generator_dir, 'cros_generate_update_payload')
+    self.assertTrue(os.path.exists(expected))
+
+    expected = os.path.join(gen.generator_dir, 'delta_generator')
+    self.assertTrue(os.path.exists(expected))
+
+  def testRunGeneratorCmd(self):
+    """Test the specialized command to run programs from au-generate.zip."""
+    # 'jo nes' contains a space to verify args are passed as a list, unsplit.
+    test_cmd = ['cmd', 'bar', 'jo nes']
+    expected_cmd = ['/foo/au-generator/cmd', 'bar', 'jo nes']
+    original_environ = os.environ.copy()
+    gen = self._GetStdGenerator(work_dir='/foo')
+
+    self.mox.StubOutWithMock(cros_build_lib, 'RunCommand')
+
+    mock_result = cros_build_lib.CommandResult()
+    mock_result.output = 'foo output'
+
+    # Set up the test replay script.
+    cros_build_lib.RunCommand(
+        expected_cmd, cwd='/foo/au-generator',
+        redirect_stdout=True,
+        combine_stdout_stderr=True,
+        error_code_ok=True,
+        extra_env=mox.IgnoreArg()).AndReturn(mock_result)
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+    self.assertEqual(gen._RunGeneratorCmd(test_cmd),
+                     'foo output')
+
+    # Demonstrate that the PATH was restored.
+    self.assertEqual(os.environ, original_environ)
+
+  def testBuildArg(self):
+    """Make sure the function semantics is satisfied."""
+    gen = self._GetStdGenerator(work_dir='/work')
+    test_dict = {'foo': 'bar'}
+
+    # Value present.  A None flag name yields a bare (positional) value.
+    self.assertEqual(gen._BuildArg('--foo', test_dict, 'foo'),
+                     ['--foo', 'bar'])
+    self.assertEqual(gen._BuildArg(None, test_dict, 'foo'),
+                     ['bar'])
+
+    # Value present, default has no impact.
+    self.assertEqual(gen._BuildArg('--foo', test_dict, 'foo', default='baz'),
+                     ['--foo', 'bar'])
+
+    # Value missing, default kicking in.
+    self.assertEqual(gen._BuildArg('--foo2', test_dict, 'foo2', default='baz'),
+                     ['--foo2', 'baz'])
+
+  def _DoPrepareImageTest(self, image_type):
+    """Test _PrepareImage via mox.
+
+    Args:
+      image_type: 'Image' for a plain signed image (downloaded as-is), or
+        'UnsignedImageArchive' for a test image that must be extracted
+        from a tar archive after download.
+
+    Raises:
+      ValueError: if image_type is not one of the two supported values.
+    """
+    download_uri = 'gs://bucket/foo/image.bin'
+    image_file = '/work/image.bin'
+    test_work_dir = tempfile.gettempdir()  # for testing purposes
+    gen = self._GetStdGenerator(work_dir=test_work_dir)
+
+    if image_type == 'Image':
+      image_obj = gspaths.Image(
+          channel='dev-channel',
+          board='x86-alex',
+          version='4171.0.0',
+          key='mp-v3',
+          uri=download_uri)
+      test_extract_file = None
+    elif image_type == 'UnsignedImageArchive':
+      image_obj = gspaths.UnsignedImageArchive(
+          channel='dev-channel',
+          board='x86-alex',
+          version='4171.0.0',
+          image_type='test',
+          uri=download_uri)
+      test_extract_file = paygen_payload_lib._PaygenPayload.TEST_IMAGE_NAME
+    else:
+      raise ValueError('invalid image type descriptor (%s)' % image_type)
+
+    # Stub out and record the expected function calls.  For archives the
+    # download target is a temp file (any str); plain images download
+    # straight to image_file.
+    self.mox.StubOutWithMock(download_cache.DownloadCache,
+                             'GetFileCopy')
+    if test_extract_file:
+      download_file = mox.IsA(str)
+    else:
+      download_file = image_file
+    self.cache.GetFileCopy(download_uri, download_file)
+
+    if test_extract_file:
+      self.mox.StubOutWithMock(cros_build_lib, 'RunCommand')
+      cros_build_lib.RunCommand(['tar', '-xJf', download_file,
+                                 test_extract_file], cwd=test_work_dir)
+      self.mox.StubOutWithMock(shutil, 'move')
+      shutil.move(os.path.join(test_work_dir, test_extract_file), image_file)
+
+    # Run the test.
+    self.mox.ReplayAll()
+    gen._PrepareImage(image_obj, image_file)
+
+  def testPrepareImageNormal(self):
+    """Test preparing a normal (signed) image; downloaded without extraction."""
+    self._DoPrepareImageTest('Image')
+
+  def testPrepareImageTest(self):
+    """Test preparing a test image; extracted from an unsigned archive."""
+    self._DoPrepareImageTest('UnsignedImageArchive')
+
+  def testGenerateUnsignedPayloadFull(self):
+    """Test _GenerateUnsignedPayload with full payload."""
+    gen = self._GetStdGenerator(payload=self.full_payload, work_dir='/work')
+
+    # Stub out the required functions.
+    self.mox.StubOutWithMock(gen, '_RunGeneratorCmd')
+    self.mox.StubOutWithMock(gen, '_StoreDeltaLog')
+
+    # Record the expected function calls.  The values mirror
+    # self.full_payload's target image (old_base_image in setUp).
+    cmd = ['cros_generate_update_payload',
+           '--outside_chroot',
+           '--output', gen.payload_file,
+           '--image', gen.tgt_image_file,
+           '--channel', 'dev-channel',
+           '--board', 'x86-alex',
+           '--version', '1620.0.0',
+           '--key', 'mp-v3',
+           '--build_channel', 'dev-channel',
+           '--build_version', '1620.0.0']
+    gen._RunGeneratorCmd(cmd).AndReturn('log contents')
+    gen._StoreDeltaLog('log contents')
+
+    # Run the test.
+    self.mox.ReplayAll()
+    gen._GenerateUnsignedPayload()
+
+  def testGenerateUnsignedPayloadDelta(self):
+    """Test _GenerateUnsignedPayload with delta payload."""
+    gen = self._GetStdGenerator(payload=self.delta_payload, work_dir='/work')
+
+    # Stub out the required functions.
+    self.mox.StubOutWithMock(gen, '_RunGeneratorCmd')
+    self.mox.StubOutWithMock(gen, '_StoreDeltaLog')
+
+    # Record the expected function calls.  Deltas add the --src_* flags,
+    # drawn from self.delta_payload's source image (old_image in setUp).
+    cmd = ['cros_generate_update_payload',
+           '--outside_chroot',
+           '--output', gen.payload_file,
+           '--image', gen.tgt_image_file,
+           '--channel', 'dev-channel',
+           '--board', 'x86-alex',
+           '--version', '4171.0.0',
+           '--key', 'mp-v3',
+           '--build_channel', 'dev-channel',
+           '--build_version', '4171.0.0',
+           '--src_image', gen.src_image_file,
+           '--src_channel', 'dev-channel',
+           '--src_board', 'x86-alex',
+           '--src_version', '1620.0.0',
+           '--src_key', 'mp-v3',
+           '--src_build_channel', 'dev-channel',
+           '--src_build_version', '1620.0.0']
+    gen._RunGeneratorCmd(cmd).AndReturn('log contents')
+    gen._StoreDeltaLog('log contents')
+
+    # Run the test.
+    self.mox.ReplayAll()
+    gen._GenerateUnsignedPayload()
+
+  def testGenerateUnsignedTestPayloadFull(self):
+    """Test _GenerateUnsignedPayload with full test payload."""
+    gen = self._GetStdGenerator(payload=self.full_test_payload,
+                                work_dir='/work')
+
+    # Stub out the required functions.
+    self.mox.StubOutWithMock(gen, '_RunGeneratorCmd')
+    self.mox.StubOutWithMock(gen, '_StoreDeltaLog')
+
+    # Record the expected function calls.  Test payloads use the 'test' key.
+    cmd = ['cros_generate_update_payload',
+           '--outside_chroot',
+           '--output', gen.payload_file,
+           '--image', gen.tgt_image_file,
+           '--channel', 'dev-channel',
+           '--board', 'x86-alex',
+           '--version', '1620.0.0',
+           '--key', 'test',
+           '--build_channel', 'dev-channel',
+           '--build_version', '1620.0.0']
+    gen._RunGeneratorCmd(cmd).AndReturn('log contents')
+    gen._StoreDeltaLog('log contents')
+
+    # Run the test.
+    self.mox.ReplayAll()
+    gen._GenerateUnsignedPayload()
+
+  def testGenerateUnsignedTestPayloadDelta(self):
+    """Test _GenerateUnsignedPayload with delta test payload."""
+    gen = self._GetStdGenerator(payload=self.delta_test_payload,
+                                work_dir='/work')
+
+    # Stub out the required functions.
+    self.mox.StubOutWithMock(gen, '_RunGeneratorCmd')
+    self.mox.StubOutWithMock(gen, '_StoreDeltaLog')
+
+    # Record the expected function calls.  Test payloads use the 'test' key;
+    # deltas add the --src_* flags.
+    cmd = ['cros_generate_update_payload',
+           '--outside_chroot',
+           '--output', gen.payload_file,
+           '--image', gen.tgt_image_file,
+           '--channel', 'dev-channel',
+           '--board', 'x86-alex',
+           '--version', '4171.0.0',
+           '--key', 'test',
+           '--build_channel', 'dev-channel',
+           '--build_version', '4171.0.0',
+           '--src_image', gen.src_image_file,
+           '--src_channel', 'dev-channel',
+           '--src_board', 'x86-alex',
+           '--src_version', '1620.0.0',
+           '--src_key', 'test',
+           '--src_build_channel', 'dev-channel',
+           '--src_build_version', '1620.0.0']
+    gen._RunGeneratorCmd(cmd).AndReturn('log contents')
+    gen._StoreDeltaLog('log contents')
+
+    # Run the test.
+    self.mox.ReplayAll()
+    gen._GenerateUnsignedPayload()
+
+  def testGenPayloadHashes(self):
+    """Test _GenPayloadHash via mox."""
+    gen = self._GetStdGenerator()
+
+    # Stub out the required functions.
+    self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
+                             '_RunGeneratorCmd')
+
+    # Record the expected function calls.  mox.IsA(str) matches the
+    # temp-file output flag, whose exact path is not predictable here.
+    cmd = ['delta_generator',
+           '-in_file=' + gen.payload_file,
+           mox.IsA(str),
+           '-signature_size=256']
+    gen._RunGeneratorCmd(cmd)
+
+    # Run the test; the stubbed command writes nothing, so the hash is ''.
+    self.mox.ReplayAll()
+    self.assertEqual(gen._GenPayloadHash(), '')
+
+  def testGenMetadataHashes(self):
+    """Test _GenMetadataHash via mox."""
+    gen = self._GetStdGenerator()
+
+    # Stub out the required functions.
+    self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
+                             '_RunGeneratorCmd')
+
+    # Record the expected function calls.
+    cmd = ['delta_generator',
+           '-in_file=' + gen.payload_file,
+           mox.IsA(str),
+           '-signature_size=256']
+    gen._RunGeneratorCmd(cmd)
+
+    # Run the test.
+    self.mox.ReplayAll()
+    self.assertEqual(gen._GenMetadataHash(), '')
+
+  def testSignHashes(self):
+    """Test _SignHashes via mox."""
+    hashes = ('foo', 'bar')
+    # One signature tuple per input hash.
+    signatures = (('0' * 256,), ('1' * 256,))
+
+    gen = self._GetStdGenerator(work_dir='/work')
+
+    # Stub out the required functions.
+    self.mox.StubOutWithMock(
+        signer_payloads_client.SignerPayloadsClientGoogleStorage,
+        'GetHashSignatures')
+
+    gen.signer.GetHashSignatures(
+        hashes,
+        keysets=gen.PAYLOAD_SIGNATURE_KEYSETS).AndReturn(signatures)
+
+    # Run the test.
+    self.mox.ReplayAll()
+    self.assertEqual(gen._SignHashes(hashes),
+                     signatures)
+
+  def testInsertPayloadSignatures(self):
+    """Test inserting payload signatures."""
+    gen = self._GetStdGenerator(payload=self.delta_payload)
+    payload_signatures = ('0' * 256,)
+
+    # Stub out the required functions.
+    self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
+                             '_RunGeneratorCmd')
+
+    # Record the expected function calls.  mox.IsA(str) matches the
+    # signature-file flag whose temp path is not predictable here.
+    cmd = ['delta_generator',
+           '-in_file=' + gen.payload_file,
+           mox.IsA(str),
+           '-out_file=' + gen.signed_payload_file]
+    gen._RunGeneratorCmd(cmd)
+
+    # Run the test.
+    self.mox.ReplayAll()
+    gen._InsertPayloadSignatures(payload_signatures)
+
+  def testStoreMetadataSignatures(self):
+    """Test how we store metadata signatures."""
+    gen = self._GetStdGenerator(payload=self.delta_payload)
+    metadata_signatures = ('1' * 256,)
+    encoded_metadata_signature = (
+        'MTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMT'
+        'ExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTEx'
+        'MTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMT'
+        'ExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTEx'
+        'MTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMTExMT'
+        'ExMTExMTExMQ==')
+
+    gen._StoreMetadataSignatures(metadata_signatures)
+
+    with file(gen.metadata_signature_file, 'rb') as f:
+      self.assertEqual(f.read(), encoded_metadata_signature)
+
+  def testPayloadJson(self):
+    """Test how we store the payload description in json (unsigned case)."""
+    gen = self._GetStdGenerator(payload=self.delta_payload, sign=False)
+    # Intentionally don't create signed file, to ensure it's never used.
+    osutils.WriteFile(gen.payload_file, 'Fake payload contents.')
+
+    metadata_signatures = ()
+
+    # Hashes below are those of 'Fake payload contents.'.
+    expected_json = (
+        '{"md5_hex": "75218643432e5f621386d4ffcbedf9ba",'
+        ' "metadata_signature": null,'
+        ' "metadata_size": 10,'
+        ' "sha1_hex": "FDwoNOUO+kNwrQJMSLnLDY7iZ/E=",'
+        ' "sha256_hex": "gkm9207E7xbqpNRBFjEPO43nxyp/MNGQfyH3IYrq2kE=",'
+        ' "version": 2}')
+
+    # To really look up the metadata size, we'd need a real payload for parsing.
+    with mock.patch.object(gen, '_MetadataSize', return_value=10) as mock_size:
+      gen._StorePayloadJson(metadata_signatures)
+
+    # Validate we fetched size from the right file.
+    mock_size.assert_called_once_with(gen.payload_file)
+
+    # Validate the results.
+    self.assertEqual(osutils.ReadFile(gen.description_file), expected_json)
+
+  def testPayloadJsonSigned(self):
+    """Test how we store the payload description in json (signed case)."""
+    gen = self._GetStdGenerator(payload=self.delta_payload, sign=True)
+    # Intentionally don't create unsigned file, to ensure it's never used.
+    osutils.WriteFile(gen.signed_payload_file, 'Fake signed payload contents.')
+
+    metadata_signatures = ('1',)
+
+    # Hashes below are those of 'Fake signed payload contents.'; 'MQ==' is
+    # base64('1').
+    expected_json = (
+        '{"md5_hex": "ad8f67319ca16e691108ca703636b3ad",'
+        ' "metadata_signature": "MQ==",'
+        ' "metadata_size": 10,'
+        ' "sha1_hex": "99zX3vZhTfwRJCi4zGK1A14AY3Y=",'
+        ' "sha256_hex": "yZjWgvsNdzclJzJOleQrTjVFBQy810ZlUAU5+i0okME=",'
+        ' "version": 2}')
+
+    # To really look up the metadata size, we'd need a real payload for parsing.
+    with mock.patch.object(gen, '_MetadataSize', return_value=10) as mock_size:
+      gen._StorePayloadJson(metadata_signatures)
+
+    # Validate we fetched size from the right file.
+    mock_size.assert_called_once_with(gen.signed_payload_file)
+
+    # Validate the results.
+    self.assertEqual(osutils.ReadFile(gen.description_file), expected_json)
+
+  def testSignPayload(self):
+    """Test the overall payload signature process via mox."""
+    payload_hash = 'payload_hash'
+    metadata_hash = 'metadata_hash'
+    payload_sigs = ('payload_sig',)
+    metadata_sigs = ('metadata_sig',)
+
+    gen = self._GetStdGenerator(payload=self.delta_payload, work_dir='/work')
+
+    # Set up stubs for every helper _SignPayload orchestrates.
+    self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
+                             '_GenPayloadHash')
+    self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
+                             '_GenMetadataHash')
+    self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
+                             '_SignHashes')
+    self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
+                             '_InsertPayloadSignatures')
+    self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
+                             '_StoreMetadataSignatures')
+
+    # Record expected calls: both hashes are signed in a single batch, then
+    # the payload sigs are inserted and the metadata sigs stored.
+    gen._GenPayloadHash().AndReturn(payload_hash)
+    gen._GenMetadataHash().AndReturn(metadata_hash)
+    gen._SignHashes([payload_hash, metadata_hash]).AndReturn(
+        (payload_sigs, metadata_sigs))
+    gen._InsertPayloadSignatures(payload_sigs)
+    gen._StoreMetadataSignatures(metadata_sigs)
+
+    # Run the test.
+    self.mox.ReplayAll()
+    result_payload_sigs, result_metadata_sigs = gen._SignPayload()
+
+    self.assertEqual(payload_sigs, result_payload_sigs)
+    self.assertEqual(metadata_sigs, result_metadata_sigs)
+
+  def testCreateSignedDelta(self):
+    """Test the overall signed payload creation flow via mox."""
+    payload = self.delta_payload
+    gen = self._GetStdGenerator(payload=payload, work_dir='/work')
+
+    # Set up stubs.
+    self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
+                             '_PrepareGenerator')
+    self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
+                             '_PrepareImage')
+    self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
+                             '_GenerateUnsignedPayload')
+    self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
+                             '_SignPayload')
+    self.mox.StubOutWithMock(paygen_payload_lib._PaygenPayload,
+                             '_StorePayloadJson')
+
+    # Record expected calls, in the order _Create should make them:
+    # prepare generator, prepare both images, generate, sign, store json.
+    gen._PrepareGenerator()
+    gen._PrepareImage(payload.tgt_image, gen.tgt_image_file)
+    gen._PrepareImage(payload.src_image, gen.src_image_file)
+    gen._GenerateUnsignedPayload()
+    gen._SignPayload().AndReturn((['payload_sigs'], ['metadata_sigs']))
+    gen._StorePayloadJson(['metadata_sigs'])
+
+    # Run the test.
+    self.mox.ReplayAll()
+    gen._Create()
+
+  def testUploadResults(self):
+    """Test uploading payload results, signed and unsigned, via mox."""
+    gen_sign = self._GetStdGenerator(work_dir='/work', sign=True)
+    gen_nosign = self._GetStdGenerator(work_dir='/work', sign=False)
+
+    # Set up stubs.
+    self.mox.StubOutWithMock(urilib, 'Copy')
+    self.mox.StubOutWithMock(urilib, 'ListFiles')
+
+    # Record signed calls: the signed payload, its metadata signature,
+    # the log, and the json description are all uploaded.
+    urilib.Copy('/work/delta.bin.signed',
+                'gs://full_old_foo/boo')
+    urilib.Copy('/work/delta.bin.signed.metadata-signature',
+                'gs://full_old_foo/boo.metadata-signature')
+    urilib.Copy('/work/delta.log',
+                'gs://full_old_foo/boo.log')
+    urilib.Copy('/work/delta.json',
+                'gs://full_old_foo/boo.json')
+
+    # Record unsigned calls: no metadata-signature upload in this case.
+    urilib.Copy('/work/delta.bin',
+                'gs://full_old_foo/boo')
+    urilib.Copy('/work/delta.log',
+                'gs://full_old_foo/boo.log')
+    urilib.Copy('/work/delta.json',
+                'gs://full_old_foo/boo.json')
+
+    # Run the test.
+    self.mox.ReplayAll()
+    gen_sign._UploadResults()
+    gen_nosign._UploadResults()
+
+  def testDefaultPayloadUri(self):
+    """Test paygen_payload_lib.DefaultPayloadUri."""
+
+    # Test a Full Payload: URI embeds only the target version.
+    result = paygen_payload_lib.DefaultPayloadUri(self.full_payload,
+                                                  random_str='abc123')
+    self.assertEqual(
+        result,
+        'gs://chromeos-releases/dev-channel/x86-alex/1620.0.0/payloads/'
+        'chromeos_1620.0.0_x86-alex_dev-channel_full_mp-v3.bin-abc123.signed')
+
+    # Test a Delta Payload: URI embeds source and target versions.
+    result = paygen_payload_lib.DefaultPayloadUri(self.delta_payload,
+                                                  random_str='abc123')
+    self.assertEqual(
+        result,
+        'gs://chromeos-releases/dev-channel/x86-alex/4171.0.0/payloads/'
+        'chromeos_1620.0.0-4171.0.0_x86-alex_dev-channel_delta_mp-v3.bin-'
+        'abc123.signed')
+
+    # Test an N Plus One Delta Payload
+    result = paygen_payload_lib.DefaultPayloadUri(self.nplusone_payload,
+                                                  random_str='abc123')
+    self.assertEqual(
+        result,
+        'gs://chromeos-releases/dev-channel/x86-alex/4171.0.0/payloads/'
+        'chromeos_4171.0.0-4171.0.1_x86-alex_nplusone-channel_delta_mp-v3.bin-'
+        'abc123.signed')
+
+    # Test changing channel, board, and keys: the URI should reflect the
+    # target image's channel/board/version and the target key.
+    src_image = gspaths.Image(
+        channel='dev-channel',
+        board='x86-alex',
+        version='3588.0.0',
+        key='premp')
+    tgt_image = gspaths.Image(
+        channel='stable-channel',
+        board='x86-alex-he',
+        version='3590.0.0',
+        key='mp-v3')
+    payload = gspaths.Payload(src_image=src_image, tgt_image=tgt_image)
+
+    result = paygen_payload_lib.DefaultPayloadUri(payload,
+                                                  random_str='abc123')
+    self.assertEqual(
+        result,
+        'gs://chromeos-releases/stable-channel/x86-alex-he/3590.0.0/payloads/'
+        'chromeos_3588.0.0-3590.0.0_x86-alex-he_stable-channel_delta_mp-v3.bin-'
+        'abc123.signed')
+
+  def testFillInPayloadUri(self):
+    """Test filling in the payload URI of a gspaths.Payload object."""
+    # Assert that it doesn't change if already present.
+    pre_uri = self.full_payload.uri
+    paygen_payload_lib.FillInPayloadUri(self.full_payload,
+                                        random_str='abc123')
+    self.assertEqual(self.full_payload.uri,
+                     pre_uri)
+
+    # Test that it does change if not present: a default URI is generated.
+    payload = gspaths.Payload(tgt_image=self.old_image)
+    paygen_payload_lib.FillInPayloadUri(payload,
+                                        random_str='abc123')
+    self.assertEqual(
+        payload.uri,
+        'gs://chromeos-releases/dev-channel/x86-alex/1620.0.0/payloads/'
+        'chromeos_1620.0.0_x86-alex_dev-channel_full_mp-v3.bin-abc123.signed')
+
+  def testFindExistingPayloads(self):
+    """Test finding already existing payloads."""
+    self.mox.StubOutWithMock(urilib, 'ListFiles')
+
+    # Set up the test replay script. Note the wildcard in place of the
+    # random string so any prior upload of this payload is matched.
+    urilib.ListFiles('gs://chromeos-releases/dev-channel/x86-alex/1620.0.0/'
+                     'payloads/chromeos_1620.0.0_x86-alex_dev-channel_full_'
+                     'mp-v3.bin-*.signed').AndReturn(['foo_result'])
+
+    # Run the test verification.
+    self.mox.ReplayAll()
+
+    self.assertEqual(
+        paygen_payload_lib.FindExistingPayloads(self.full_payload),
+        ['foo_result'])
+
+  def testFindCacheDir(self):
+    """Test calculating the location of the cache directory."""
+    result = paygen_payload_lib.FindCacheDir()
+
+    # The correct result is based on the system cache directory, which changes.
+    # Ensure it ends with the right directory name.
+    self.assertEqual(os.path.basename(result), 'paygen_cache')
+
+
+class PaygenPayloadLibEndToEndTest(PaygenPayloadLibTest):
+  """PaygenPayloadLib end-to-end testing."""
+
+  def _EndToEndIntegrationTest(self, tgt_image, src_image, sign):
+    """Helper test function for validating end to end payload generation.
+
+    Args:
+      tgt_image: Target image for the payload.
+      src_image: Source image for a delta payload, or None for a full payload.
+      sign: Whether to sign the generated payload.
+    """
+    output_uri = os.path.join(self.tempdir, 'expected_payload_out')
+    output_metadata_uri = output_uri + '.metadata-signature'
+    output_metadata_json = output_uri + '.json'
+
+    payload = gspaths.Payload(tgt_image=tgt_image,
+                              src_image=src_image,
+                              uri=output_uri)
+
+    paygen_payload_lib.CreateAndUploadPayload(
+        payload=payload,
+        cache=self.cache,
+        work_dir=self.tempdir,
+        au_generator_uri=gspaths.ChromeosReleases.GeneratorUri(
+            payload.tgt_image.channel, payload.tgt_image.board, '6351.0.0'),
+        sign=sign)
+
+    # The payload and json description always exist; the metadata signature
+    # file exists only when signing was requested.
+    self.assertTrue(os.path.exists(output_uri))
+    self.assertEqual(os.path.exists(output_metadata_uri), sign)
+    self.assertTrue(os.path.exists(output_metadata_json))
+
+  @cros_test_lib.NetworkTest()
+  def testEndToEndIntegrationFull(self):
+    """Integration test to generate a full payload for old_image."""
+    self._EndToEndIntegrationTest(self.old_image, None, sign=True)
+
+  @cros_test_lib.NetworkTest()
+  def testEndToEndIntegrationDelta(self):
+    """Integration test to generate a delta payload for new_image -> NPO."""
+    self._EndToEndIntegrationTest(self.new_nplusone_image,
+                                  self.new_image,
+                                  sign=False)
diff --git a/lib/paygen/signer_payloads_client.py b/lib/paygen/signer_payloads_client.py
new file mode 100644
index 0000000..21d92aa
--- /dev/null
+++ b/lib/paygen/signer_payloads_client.py
@@ -0,0 +1,387 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This library manages the interfaces to the signer for update payloads."""
+
+from __future__ import print_function
+
+import os
+import re
+import shutil
+import tempfile
+import time
+import threading
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib.paygen import gslock
+from chromite.lib.paygen import gspaths
+from chromite.lib.paygen import utils
+
+
+# How long to sleep between polling GS to see if signer results are present.
+DELAY_CHECKING_FOR_SIGNER_RESULTS_SECONDS = 10
+
+# Signer priority value. 45 gives these requests slightly higher priority
+# than the commonly used value of 50 (a lower number is a higher priority).
+SIGNER_PRIORITY = 45
+
+
+class SignerPayloadsClientGoogleStorage(object):
+  """This class implements the Google Storage signer interface for payloads."""
+
+  def __init__(self, channel, board, version, bucket=None, unique=None,
+               ctx=None):
+    """This initializer identifies the build and payload that need signatures.
+
+    Args:
+      channel: Channel of the build whose payload is being signed.
+      board: Board of the build whose payload is being signed.
+      version: Version of the build whose payload is being signed.
+      bucket: Bucket used to reach the signer. [defaults 'chromeos-releases']
+      unique: Force known 'unique' id. Mostly for unittests.
+      ctx: GS Context to use for GS operations.
+    """
+    self.channel = channel
+    self.board = board
+    self.version = version
+    self.bucket = bucket if bucket else gspaths.ChromeosReleases.BUCKET
+    self._ctx = ctx if ctx is not None else gs.GSContext()
+
+    # NOTE(review): the raw |bucket| argument (possibly None) is passed here
+    # rather than self.bucket; presumably BuildPayloadsSigningUri applies the
+    # same default -- confirm.
+    build_signing_uri = gspaths.ChromeosReleases.BuildPayloadsSigningUri(
+        channel,
+        board,
+        version,
+        bucket=bucket)
+
+    # Uniquify the directory using our pid/thread-id. This can't collide
+    # with other hosts because the build is locked to our host in
+    # paygen_build.
+    if unique is None:
+      unique = '%d-%d' % (os.getpid(), threading.current_thread().ident)
+
+    # This is a partial URI that is extended for a lot of other URIs we use.
+    self.signing_base_dir = os.path.join(build_signing_uri, unique)
+
+    self.archive_uri = os.path.join(self.signing_base_dir,
+                                    'payload.hash.tar.bz2')
+
+  def _CleanSignerFilesByKeyset(self, hashes, keyset, timeout=600):
+    """Helper method that cleans up GS files associated with a single keyset.
+
+    Args:
+      hashes: A list of hash values to be signed by the signer in string
+              format. They are all expected to be 32 bytes in length.
+      keyset: keyset to have the hashes signed with.
+      timeout: Timeout for acquiring the lock on the files to clean.
+
+    Raises:
+      gslock.LockNotAcquired if we can't get a lock on the data within timeout.
+    """
+    hash_names = self._CreateHashNames(len(hashes))
+
+    instructions_uri = self._CreateInstructionsURI(keyset)
+    request_uri = self._SignerRequestUri(instructions_uri)
+    signature_uris = self._CreateSignatureURIs(hash_names, keyset)
+
+    # All files to remove: the request, the instructions, the signature
+    # outputs, and their .md5 companions.
+    paths = [instructions_uri, request_uri]
+    paths += signature_uris
+    paths += [s + '.md5' for s in signature_uris]
+
+    end_time = time.time() + timeout
+
+    # Poll for the lock until acquired or until |timeout| expires.
+    while True:
+      try:
+        with gslock.Lock(request_uri + '.lock'):
+          for path in paths:
+            self._ctx.Remove(path, ignore_missing=True)
+
+          return
+      except gslock.LockNotAcquired:
+        # If we have timed out.
+        if time.time() > end_time:
+          raise
+
+        time.sleep(DELAY_CHECKING_FOR_SIGNER_RESULTS_SECONDS)
+
+  def _CleanSignerFiles(self, hashes, keysets):
+    """Helper method that cleans up all GS files associated with a signing.
+
+    Safe to call repeatedly.
+
+    Args:
+      hashes: A list of hash values to be signed by the signer in string
+              format. They are all expected to be 32 bytes in length.
+      keysets: list of keysets to have the hashes signed with.
+
+    Raises:
+      May raise GSLibError if there is an extraordinary GS problem.
+    """
+    for keyset in keysets:
+      self._CleanSignerFilesByKeyset(hashes, keyset)
+
+    # After all keysets have been cleaned up, clean up the archive.
+    self._ctx.Remove(self.signing_base_dir, recursive=True, ignore_missing=True)
+
+  def _CreateInstructionsURI(self, keyset):
+    """Construct the URI used to upload a set of instructions.
+
+    Args:
+      keyset: name of the keyset contained in this instruction set.
+
+    Returns:
+      URI for the given instruction set as a string.
+    """
+    return os.path.join(self.signing_base_dir,
+                        '%s.payload.signer.instructions' % keyset)
+
+  def _CreateHashNames(self, hash_count):
+    """Helper method that creates file names for each hash in GS.
+
+    These names are arbitrary, and only used when working with the signer.
+
+    Args:
+      hash_count: How many hash names are needed?
+
+    Returns:
+      List of names of the form '<i>.payload.hash', 1-indexed.
+    """
+    result = []
+    for i in xrange(1, hash_count + 1):
+      result.append('%d.payload.hash' % i)
+    return result
+
+  def _CreateSignatureURIs(self, hash_names, keyset):
+    """Helper method that creates URIs for the signature output files.
+
+    These names are the actual URIs the signer will populate with ".bin"
+    already included.
+
+    Args:
+      hash_names: The list of input_names passed to the signer.
+      keyset: Keyset name passed to the signer.
+
+    Returns:
+      List of URIs expected back from the signer.
+    """
+    result = []
+    for hash_name in hash_names:
+      # Based on the pattern defined in _CreateInstructions.
+      expanded_name = '%s.%s.signed.bin' % (hash_name, keyset)
+      result.append(os.path.join(self.signing_base_dir, expanded_name))
+    return result
+
+  def _CreateArchive(self, archive_file, hashes, hash_names):
+    """Take the hash strings and bundle them in the signer request format.
+
+    Take the contents of an array of strings, and put them into a specified
+    file in .tar.bz2 format. Each string is named with a specified name in
+    the tar file.
+
+    The number of hashes and number of hash_names must be equal. The
+    archive_file will be created or overridden as needed. It's up to
+    the caller to ensure it's cleaned up.
+
+    Args:
+      archive_file: Name of file to put the tar contents into.
+      hashes: List of hashes to sign, stored in strings.
+      hash_names: File names expected in the signer request.
+    """
+    try:
+      # NOTE(review): if mkdtemp() itself raises, tmp_dir is unbound and the
+      # finally clause below raises NameError; moving this call above the
+      # try would be safer.
+      tmp_dir = tempfile.mkdtemp()
+
+      # Copy hash files into tmp_dir with standard hash names.
+      for h, hash_name in zip(hashes, hash_names):
+        with open(os.path.join(tmp_dir, hash_name), 'wb') as f:
+          f.write(h)
+
+      cmd = ['tar', '-cjf', archive_file] + hash_names
+      utils.RunCommand(cmd,
+                       redirect_stdout=True,
+                       redirect_stderr=True,
+                       cwd=tmp_dir)
+    finally:
+      # Cleanup.
+      shutil.rmtree(tmp_dir)
+
+  def _CreateInstructions(self, hash_names, keyset):
+    """Create the signing instructions to send to the signer.
+
+    Args:
+      hash_names: The names of the hash files in the archive to sign.
+      keyset: Which keyset to sign the hashes with. Valid keysets are
+              defined on the signer. 'update_signer' is currently valid.
+
+    Returns:
+      A string that contains the contents of the instructions to send.
+    """
+
+    pattern = """
+# Auto-generated instruction file for signing payload hashes.
+
+[insns]
+generate_metadata = false
+keyset = %(keyset)s
+channel = %(channel)s
+
+input_files = %(input_files)s
+output_names = @BASENAME@.@KEYSET@.signed
+
+[general]
+archive = metadata-disable.instructions
+type = update_payload
+board = %(board)s
+
+archive = %(archive_name)s
+
+# We reuse version for version rev because we may not know the
+# correct versionrev "R24-1.2.3"
+version = %(version)s
+versionrev = %(version)s
+"""
+
+    # foo-channel -> foo
+    channel = self.channel.replace('-channel', '')
+
+    archive_name = os.path.basename(self.archive_uri)
+    input_files = ' '.join(hash_names)
+
+    return pattern % {
+        'channel': channel,
+        'board': self.board,
+        'version': self.version,
+        'archive_name': archive_name,
+        'input_files': input_files,
+        'keyset': keyset,
+    }
+
+  def _SignerRequestUri(self, instructions_uri):
+    """Find the URI of the empty file to create to ask the signer to sign."""
+
+    exp = r'^gs://%s/(?P<postbucket>.*)$' % self.bucket
+    m = re.match(exp, instructions_uri)
+    # NOTE(review): if instructions_uri is not under self.bucket, m is None
+    # and the next line raises AttributeError rather than a clearer error.
+    relative_uri = m.group('postbucket')
+
+    return 'gs://%s/tobesigned/%d,%s' % (
+        self.bucket,
+        SIGNER_PRIORITY,
+        relative_uri.replace('/', ','))
+
+  def _WaitForSignatures(self, signature_uris, timeout=1800):
+    """Wait until all uris exist, or timeout.
+
+    Args:
+      signature_uris: list of uris to check for.
+      timeout: time in seconds to wait for all uris to be created.
+
+    Returns:
+      True if the signatures all exist, or False.
+    """
+    end_time = time.time() + timeout
+
+    # Work on a copy so the caller's list is not mutated.
+    missing_signatures = signature_uris[:]
+
+    while missing_signatures and time.time() < end_time:
+      # Pop leading URIs as they appear; only the head of the list is
+      # polled on each pass.
+      while missing_signatures and self._ctx.Exists(missing_signatures[0]):
+        missing_signatures.pop(0)
+
+      if missing_signatures:
+        time.sleep(DELAY_CHECKING_FOR_SIGNER_RESULTS_SECONDS)
+
+    # If none are missing, we found them all.
+    return not missing_signatures
+
+  def _DownloadSignatures(self, signature_uris):
+    """Download the list of URIs to in-memory strings.
+
+    Args:
+      signature_uris: List of URIs to download.
+
+    Returns:
+      List of signatures in strings.
+    """
+
+    results = []
+    for uri in signature_uris:
+      # Create (and immediately close) a named temp file just to reserve a
+      # file name; delete=False keeps the name usable for the Copy below.
+      with tempfile.NamedTemporaryFile(delete=False) as sig_file:
+        sig_file_name = sig_file.name
+      try:
+        self._ctx.Copy(uri, sig_file_name)
+        with open(sig_file_name) as sig_file:
+          results.append(sig_file.read())
+      finally:
+        # Cleanup the temp file, in case it's still there.
+        if os.path.exists(sig_file_name):
+          os.remove(sig_file_name)
+
+    return results
+
+  def GetHashSignatures(self, hashes, keysets=('update_signer',)):
+    """Take an arbitrary list of hash files, and get them signed.
+
+    Args:
+      hashes: A list of hash values to be signed by the signer in string
+              format. They are all expected to be 32 bytes in length.
+      keysets: list of keysets to have the hashes signed with. The default
+               is almost certainly what you want. These names must match
+               valid keysets on the signer.
+
+    Returns:
+      A list of lists of signatures in string format, one entry per hash
+      (in input order). Each entry holds that hash's signatures, one per
+      keyset (in keyset order).
+
+      hashes, keysets=['update_signer', 'update_signer-v2'] ->
+
+      [[sig_update_signer, sig_update_signer-v2], ...]
+
+      Returns None if the process failed.
+
+    Raises:
+      Can raise a variety of GSLibError errors in extraordinary conditions.
+    """
+
+    try:
+      # Hash and signature names.
+      hash_names = self._CreateHashNames(len(hashes))
+
+      # Create and upload the archive of hashes to sign.
+      with tempfile.NamedTemporaryFile() as archive_file:
+        self._CreateArchive(archive_file.name, hashes, hash_names)
+        self._ctx.Copy(archive_file.name, self.archive_uri)
+
+      # [sig_uri, ...]
+      all_signature_uris = []
+
+      # { hash : [sig_uri, ...], ... }
+      hash_signature_uris = dict([(h, []) for h in hashes])
+
+      # Upload one signing instruction file and signing request for
+      # each keyset.
+      for keyset in keysets:
+        instructions_uri = self._CreateInstructionsURI(keyset)
+
+        self._ctx.CreateWithContents(
+            instructions_uri,
+            self._CreateInstructions(hash_names, keyset))
+
+        # Create signer request file with debug friendly contents.
+        self._ctx.CreateWithContents(
+            self._SignerRequestUri(instructions_uri),
+            cros_build_lib.MachineDetails())
+
+        # Remember which signatures we just requested.
+        uris = self._CreateSignatureURIs(hash_names, keyset)
+
+        all_signature_uris += uris
+        for h, sig_uri in zip(hashes, uris):
+          hash_signature_uris[h].append(sig_uri)
+
+      # Wait for the signer to finish all keysets.
+      if not self._WaitForSignatures(all_signature_uris):
+        logging.error('Signer request timed out.')
+        return None
+
+      # Download the results.
+      return [self._DownloadSignatures(hash_signature_uris[h]) for h in hashes]
+
+    finally:
+      # Clean up the signature related files from this run.
+      self._CleanSignerFiles(hashes, keysets)
diff --git a/lib/paygen/signer_payloads_client_unittest b/lib/paygen/signer_payloads_client_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/lib/paygen/signer_payloads_client_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/paygen/signer_payloads_client_unittest.py b/lib/paygen/signer_payloads_client_unittest.py
new file mode 100644
index 0000000..2d553d1
--- /dev/null
+++ b/lib/paygen/signer_payloads_client_unittest.py
@@ -0,0 +1,417 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test signer_payloads_client library."""
+
+from __future__ import print_function
+
+import mock
+import os
+import shutil
+import socket
+import tempfile
+
+from chromite.lib import cros_test_lib
+from chromite.lib import gs
+from chromite.lib import gs_unittest
+
+from chromite.lib.paygen import gslock
+from chromite.lib.paygen import signer_payloads_client
+from chromite.lib.paygen import utils
+
+
+# pylint: disable=protected-access
+
+
+class SignerPayloadsClientGoogleStorageTest(gs_unittest.AbstractGSContextTest):
+  """Test suite for the class SignerPayloadsClientGoogleStorage."""
+
+  orig_timeout = (
+      signer_payloads_client.DELAY_CHECKING_FOR_SIGNER_RESULTS_SECONDS)
+
+  def setUp(self):
+    """Setup for tests, and store off some standard expected values."""
+    self.hash_names = [
+        '1.payload.hash',
+        '2.payload.hash',
+        '3.payload.hash']
+
+    self.build_uri = ('gs://foo-bucket/foo-channel/foo-board/foo-version/'
+                      'payloads/signing/foo-unique')
+
+    # Some tests depend on this timeout. Make it smaller, then restore.
+    signer_payloads_client.DELAY_CHECKING_FOR_SIGNER_RESULTS_SECONDS = 0.01
+
+  def tearDown(self):
+    """Teardown after tests, and restore values test might adjust."""
+    # Some tests modify this timeout. Restore the original value.
+    signer_payloads_client.DELAY_CHECKING_FOR_SIGNER_RESULTS_SECONDS = (
+        self.orig_timeout)
+
+  def createStandardClient(self):
+    """Test helper method to create a client with standard arguments."""
+
+    client = signer_payloads_client.SignerPayloadsClientGoogleStorage(
+        'foo-channel',
+        'foo-board',
+        'foo-version',
+        bucket='foo-bucket',
+        unique='foo-unique',
+        ctx=self.ctx)
+    return client
+
+  def testUris(self):
+    """Test that the URIs on the client are correct."""
+
+    client = self.createStandardClient()
+
+    expected_build_uri = self.build_uri
+
+    self.assertEquals(
+        client.signing_base_dir,
+        expected_build_uri)
+
+    self.assertEquals(
+        client.archive_uri,
+        expected_build_uri + '/payload.hash.tar.bz2')
+
+  def testCleanSignerFilesByKeyset(self):
+    """Test the keyset specific cleanup works as expected."""
+
+    hashes = ('hash-1', 'hash-2')
+    keyset = 'foo-keys'
+
+    lock_uri = ('gs://foo-bucket/tobesigned/45,foo-channel,foo-board,'
+                'foo-version,payloads,signing,foo-unique,'
+                'foo-keys.payload.signer.instructions.lock')
+
+    signing_dir = ('gs://foo-bucket/foo-channel/foo-board/foo-version/'
+                   'payloads/signing/foo-unique')
+
+    expected_removals = (
+        # Signing Request
+        'gs://foo-bucket/tobesigned/45,foo-channel,foo-board,foo-version,'
+        'payloads,signing,foo-unique,'
+        'foo-keys.payload.signer.instructions',
+
+        # Signing Instructions
+        signing_dir + '/foo-keys.payload.signer.instructions',
+
+        # Signed Results`
+        signing_dir + '/1.payload.hash.foo-keys.signed.bin',
+        signing_dir + '/1.payload.hash.foo-keys.signed.bin.md5',
+        signing_dir + '/2.payload.hash.foo-keys.signed.bin',
+        signing_dir + '/2.payload.hash.foo-keys.signed.bin.md5',
+    )
+
+    client = self.createStandardClient()
+
+    # Fake lock failed then acquired.
+    lock = self.PatchObject(gslock, 'Lock', autospec=True,
+                            side_effect=[gslock.LockNotAcquired(),
+                                         mock.MagicMock()])
+
+    # Do the work.
+    client._CleanSignerFilesByKeyset(hashes, keyset)
+
+    # Assert locks created with expected lock_uri.
+    lock.assert_called_with(lock_uri)
+
+    # Verify all expected files were removed.
+    for uri in expected_removals:
+      self.gs_mock.assertCommandContains(['rm', uri])
+
+  def testCleanSignerFiles(self):
+    """Test that GS cleanup works as expected."""
+
+    hashes = ('hash-1', 'hash-2')
+    keysets = ('foo-keys-1', 'foo-keys-2')
+
+    lock_uri1 = ('gs://foo-bucket/tobesigned/45,foo-channel,foo-board,'
+                 'foo-version,payloads,signing,foo-unique,'
+                 'foo-keys-1.payload.signer.instructions.lock')
+
+    lock_uri2 = ('gs://foo-bucket/tobesigned/45,foo-channel,foo-board,'
+                 'foo-version,payloads,signing,foo-unique,'
+                 'foo-keys-2.payload.signer.instructions.lock')
+
+    signing_dir = ('gs://foo-bucket/foo-channel/foo-board/foo-version/'
+                   'payloads/signing/foo-unique')
+
+    expected_removals = (
+        # Signing Request
+        'gs://foo-bucket/tobesigned/45,foo-channel,foo-board,foo-version,'
+        'payloads,signing,foo-unique,'
+        'foo-keys-1.payload.signer.instructions',
+
+        'gs://foo-bucket/tobesigned/45,foo-channel,foo-board,foo-version,'
+        'payloads,signing,foo-unique,'
+        'foo-keys-2.payload.signer.instructions',
+
+        # Signing Instructions
+        signing_dir + '/foo-keys-1.payload.signer.instructions',
+        signing_dir + '/foo-keys-2.payload.signer.instructions',
+
+        # Signed Results
+        signing_dir + '/1.payload.hash.foo-keys-1.signed.bin',
+        signing_dir + '/1.payload.hash.foo-keys-1.signed.bin.md5',
+        signing_dir + '/2.payload.hash.foo-keys-1.signed.bin',
+        signing_dir + '/2.payload.hash.foo-keys-1.signed.bin.md5',
+        signing_dir + '/1.payload.hash.foo-keys-2.signed.bin',
+        signing_dir + '/1.payload.hash.foo-keys-2.signed.bin.md5',
+        signing_dir + '/2.payload.hash.foo-keys-2.signed.bin',
+        signing_dir + '/2.payload.hash.foo-keys-2.signed.bin.md5',
+    )
+
+    client = self.createStandardClient()
+
+    # Fake lock failed then acquired.
+    lock = self.PatchObject(gslock, 'Lock', autospec=True)
+
+    # Do the work.
+    client._CleanSignerFiles(hashes, keysets)
+
+    # Check created with lock_uri1, lock_uri2.
+    self.assertEqual(lock.call_args_list,
+                     [mock.call(lock_uri1), mock.call(lock_uri2)])
+
+    # Verify expected removals.
+    for uri in expected_removals:
+      self.gs_mock.assertCommandContains(['rm', uri])
+
+    self.gs_mock.assertCommandContains(['rm', signing_dir])
+
+  def testCreateInstructionsUri(self):
+    """Test that the expected instructions URI is correct."""
+
+    client = self.createStandardClient()
+
+    signature_uri = client._CreateInstructionsURI('keyset_foo')
+
+    expected_signature_uri = (
+        self.build_uri +
+        '/keyset_foo.payload.signer.instructions')
+
+    self.assertEqual(signature_uri, expected_signature_uri)
+
+  def testCreateHashNames(self):
+    """Test that the expected hash names are generated."""
+
+    client = self.createStandardClient()
+
+    hash_names = client._CreateHashNames(3)
+
+    expected_hash_names = self.hash_names
+
+    self.assertEquals(hash_names, expected_hash_names)
+
+  def testCreateSignatureURIs(self):
+    """Test that the expected signature URIs are generated."""
+
+    client = self.createStandardClient()
+
+    signature_uris = client._CreateSignatureURIs(self.hash_names,
+                                                 'keyset_foo')
+
+    expected_signature_uris = [
+        self.build_uri + '/1.payload.hash.keyset_foo.signed.bin',
+        self.build_uri + '/2.payload.hash.keyset_foo.signed.bin',
+        self.build_uri + '/3.payload.hash.keyset_foo.signed.bin',
+    ]
+
+    self.assertEquals(signature_uris, expected_signature_uris)
+
+  def testCreateArchive(self):
+    """Test that we can correctly archive up hash values for the signer."""
+
+    client = self.createStandardClient()
+
+    tmp_dir = None
+    hashes = ['Hash 1', 'Hash 2', 'Hash 3']
+
+    try:
+      with tempfile.NamedTemporaryFile() as archive_file:
+        client._CreateArchive(archive_file.name, hashes, self.hash_names)
+
+        # Make sure the archive file created exists
+        self.assertTrue(os.path.exists(archive_file.name))
+
+        tmp_dir = tempfile.mkdtemp()
+
+        cmd = ['tar', '-xjf', archive_file.name]
+        utils.RunCommand(cmd,
+                         redirect_stdout=True,
+                         redirect_stderr=True,
+                         cwd=tmp_dir)
+
+        # Check that the expected (and only the expected) contents are present
+        extracted_file_names = os.listdir(tmp_dir)
+        self.assertEquals(len(extracted_file_names), len(self.hash_names))
+        for name in self.hash_names:
+          self.assertTrue(name in extracted_file_names)
+
+        # Make sure each file has the expected contents
+        for h, hash_name in zip(hashes, self.hash_names):
+          with open(os.path.join(tmp_dir, hash_name), 'r') as f:
+            self.assertEqual([h], f.readlines())
+
+    finally:
+      # Clean up at the end of the test
+      if tmp_dir:
+        shutil.rmtree(tmp_dir)
+
+  def testCreateInstructions(self):
+    """Test that we can correctly create signer instructions."""
+
+    client = self.createStandardClient()
+
+    instructions = client._CreateInstructions(self.hash_names, 'keyset_foo')
+
+    expected_instructions = """
+# Auto-generated instruction file for signing payload hashes.
+
+[insns]
+generate_metadata = false
+keyset = keyset_foo
+channel = foo
+
+input_files = %s
+output_names = @BASENAME@.@KEYSET@.signed
+
+[general]
+archive = metadata-disable.instructions
+type = update_payload
+board = foo-board
+
+archive = payload.hash.tar.bz2
+
+# We reuse version for version rev because we may not know the
+# correct versionrev "R24-1.2.3"
+version = foo-version
+versionrev = foo-version
+""" % ' '.join(['1.payload.hash',
+                '2.payload.hash',
+                '3.payload.hash'])
+
+    self.assertEquals(instructions, expected_instructions)
+
+  def testSignerRequestUri(self):
+    """Test that we can create signer request URI."""
+
+    client = self.createStandardClient()
+
+    instructions_uri = client._CreateInstructionsURI('foo_keyset')
+    signer_request_uri = client._SignerRequestUri(instructions_uri)
+
+    expected = ('gs://foo-bucket/tobesigned/45,foo-channel,foo-board,'
+                'foo-version,payloads,signing,foo-unique,'
+                'foo_keyset.payload.signer.instructions')
+
+    self.assertEquals(signer_request_uri, expected)
+
+  def testWaitForSignaturesInstant(self):
+    """Test that we can correctly wait for a list of URIs to be created."""
+    uris = ['foo', 'bar', 'is']
+
+    # All Urls exist.
+    exists = self.PatchObject(self.ctx, 'Exists', returns=True)
+
+    client = self.createStandardClient()
+
+    self.assertTrue(client._WaitForSignatures(uris, timeout=0.02))
+
+    # Make sure it really looked for every URL listed.
+    self.assertEqual(exists.call_args_list,
+                     [mock.call(u) for u in uris])
+
+  def testWaitForSignaturesNever(self):
+    """Test that we can correctly timeout waiting for a list of URIs."""
+    uris = ['foo', 'bar', 'is']
+
+    # Default mock GSContext behavior is nothing Exists.
+    client = self.createStandardClient()
+    self.assertFalse(client._WaitForSignatures(uris, timeout=0.02))
+
+    # We don't care which URLs it checked, since it doesn't have to check
+    # them all in this case.
+
+
class SignerPayloadsClientIntegrationTest(cros_test_lib.TestCase):
  """Test suite integration with live signer servers."""

  def setUp(self):
    # This is in the real production chromeos-releases, but the listed
    # build has never, and will never exist.
    self.client = signer_payloads_client.SignerPayloadsClientGoogleStorage(
        'test-channel',
        'crostools-client',
        'Rxx-Ryy')

  @cros_test_lib.NetworkTest()
  def testDownloadSignatures(self):
    """Test that we can correctly download a list of URIs."""
    # NOTE(review): 'sigining-test' looks like a typo for 'signing-test',
    # but these paths must match the pre-staged objects in the test
    # bucket -- confirm before renaming.
    uris = ['gs://chromeos-releases-test/sigining-test/foo',
            'gs://chromeos-releases-test/sigining-test/bar']

    downloads = self.client._DownloadSignatures(uris)
    self.assertEquals(downloads, ['FooSig\r\n\r', 'BarSig'])

  @cros_test_lib.NetworkTest()
  def testGetHashSignatures(self):
    """Integration test that talks to the real signer with test hashes."""
    ctx = gs.GSContext()

    # Unique per host/process so parallel runs don't stomp on each other.
    unique_id = '%s.%d' % (socket.gethostname(), os.getpid())
    clean_uri = ('gs://chromeos-releases/test-channel/%s/'
                 'crostools-client/**') % unique_id

    # Cleanup before we start
    ctx.Remove(clean_uri, ignore_missing=True)

    try:
      hashes = ['0' * 32,
                '1' * 32,
                ('29834370e415b3124a926c903906f18b'
                 '3d52e955147f9e6accd67e9512185a63')]

      keysets = ['update_signer']

      # Signatures the production update_signer is expected to produce for
      # the test hashes above; one 1-tuple of hex per input hash.
      expected_sigs_hex = (
          ('ba4c7a86b786c609bf6e4c5fb9c47525608678caa532bea8acc457aa6dd32b43'
           '5f094b331182f2e167682916990c40ff7b6b0128de3fa45ad0fd98041ec36d6f'
           '63b867bcf219804200616590a41a727c2685b48340efb4b480f1ef448fc7bc3f'
           'b1c4b53209e950ecc721b07a52a41d9c025fd25602340c93d5295211308caa29'
           'a03ed18516cf61411c508097d5b47620d643ed357b05213b2b9fa3a3f938d6c4'
           'f52b85c3f9774edc376902458344d1c1cd72bc932f033c076c76fee2400716fe'
           '652306871ba923021ce245e0c778ad9e0e50e87a169b2aea338c4dc8b5c0c716'
           'aabfb6133482e8438b084a09503db27ca546e910f8938f7805a8a76a3b0d0241',),

          ('2d909ca5b33a7fb6f2323ca0bf9de2e4f2266c73da4b6948a517dffa96783e08'
           'ca36411d380f6e8a20011f599d8d73576b2a141a57c0873d089726e24f62c7e0'
           '346ba5fbde68414b0f874b627fb1557a6e9658c8fac96c54f458161ea770982b'
           'fa9fe514120635e5ccb32e8219b9069cb0bf8063fba48d60d649c5af203cccef'
           'ca5dbc2191f81f0215edbdee4ec8c1553e69b83036aca3e840227d317ff6cf8b'
           '968c973f698db1ce59f6871303dcdbe839400c5df4d2e6e505d68890010a4459'
           '6ca9fee77f4db6ea3448d98018437c319fc8c5f4603ef94b04e3a4eafa206b73'
           '91a2640d43128310285bc0f1c7e5060d37c433d663b1c6f01110b9a43f2a74f4',),

          ('23791c99ab937f1ae5d4988afc9ceca39c290ac90e3da9f243f9a0b1c86c3c32'
           'ab7241d43dfc233da412bab989cf02f15a01fe9ea4b2dc7dc9182117547836d6'
           '9310af3aa005ee3a6deb9602bc676dcc103bf3f7831d64ab844b4785c5c8b4b1'
           '4467e6b5ab6bf34c12f7534e0d5140151c8f28e8276e703dd6332c2bab9e7f4a'
           '495215998ff56e476b81bd6b8d765e1f87da50c22cd52c9afa8c43a6528ab898'
           '6d7a273d9136d5aff5c4d95985d16eeec7380539ef963e0784a0de42b42890df'
           'c83702179f69f5c6eca4630807fbc4ab6241017e0942b15feada0b240e9729bf'
           '33bf456bd419da63302477e147963550a45c6cf60925ff48ad7b309fa158dcb2',))

      # Python 2 str.decode('hex') turns the hex text into raw signature
      # bytes; result shape is [[sig] per keyset] per hash.
      expected_sigs = [[sig[0].decode('hex')] for sig in expected_sigs_hex]

      all_signatures = self.client.GetHashSignatures(hashes, keysets)

      self.assertEquals(all_signatures, expected_sigs)
      # The client is expected to have cleaned up its working URIs.
      self.assertRaises(gs.GSNoSuchKey, ctx.List, clean_uri)

    finally:
      # Cleanup when we are over
      ctx.Remove(clean_uri, ignore_missing=True)
diff --git a/lib/paygen/urilib.py b/lib/paygen/urilib.py
new file mode 100644
index 0000000..b4067fd
--- /dev/null
+++ b/lib/paygen/urilib.py
@@ -0,0 +1,481 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for standard operations on URIs of different kinds."""
+
+from __future__ import print_function
+
+import re
+import sys
+import urllib
+import urllib2
+
+from chromite.lib.paygen import filelib
+from chromite.lib.paygen import gslib
+
+
+# This module allows files from different storage types to be handled
+# in a common way, for supported operations.
+
+
# Protocol names understood by this module.  PROTOCOL_GS is taken from
# gslib so both modules always agree on the Google Storage scheme name.
PROTOCOL_GS = gslib.PROTOCOL
PROTOCOL_HTTP = 'http'
PROTOCOL_HTTPS = 'https'

PROTOCOLS = (PROTOCOL_GS,
             PROTOCOL_HTTP,
             PROTOCOL_HTTPS)

# Separator between a protocol name and the rest of a URI.
PROTOCOL_SEP = '://'

# Matches a leading protocol; group 1 is the protocol name.
EXTRACT_PROTOCOL_RE = re.compile(r'^(\w+)%s' % PROTOCOL_SEP)
# Splits a URI; group 1 is the protocol, group 2 the remaining path.
SPLIT_URI_RE = re.compile(r'^(\w+)%s(.*)$' % PROTOCOL_SEP)

# URI "types" as returned by GetUriType: the protocol names above, plus
# 'file' for plain local paths that carry no protocol prefix.
TYPE_GS = PROTOCOL_GS
TYPE_HTTP = PROTOCOL_HTTP
TYPE_HTTPS = PROTOCOL_HTTPS
TYPE_LOCAL = 'file'
+
+
class NotSupportedForType(RuntimeError):
  """Raised when operation is not supported for a particular file type"""

  def __init__(self, uri_type, extra_msg=None):
    # pylint: disable=protected-access
    # Name the calling function so the message says which operation was
    # unsupported for this URI type.
    caller = sys._getframe(1).f_code.co_name
    parts = ['Function %s not supported for %s URIs' % (caller, uri_type)]
    if extra_msg:
      parts.append(extra_msg)

    RuntimeError.__init__(self, ', '.join(parts))
+
+
class NotSupportedForTypes(RuntimeError):
  """Raised when operation is not supported for all particular file type"""

  def __init__(self, *uri_types, **kwargs):
    """Create the error.

    Args:
      *uri_types: The URI types involved in the unsupported operation.
      **kwargs: Only 'extra_msg' is recognized; it is appended to the
        error message when given.
    """
    # The previous signature, (self, extra_msg=None, *uri_types), made
    # call sites like NotSupportedForTypes(*uri_types) silently consume
    # the first URI type as extra_msg.  Accepting extra_msg only as a
    # keyword keeps every positional argument a URI type.
    extra_msg = kwargs.pop('extra_msg', None)
    if kwargs:
      raise TypeError('Unexpected keyword arguments: %r' % kwargs.keys())

    # pylint: disable=protected-access
    function = sys._getframe(1).f_code.co_name
    msg = ('Function %s not supported for set of URIs with types: %s' %
           (function, ', '.join(uri_types)))
    if extra_msg:
      msg += ', ' + extra_msg

    RuntimeError.__init__(self, msg)
+
+
class NotSupportedBetweenTypes(RuntimeError):
  """Raised when operation is not supported between particular file types"""

  def __init__(self, uri_type1, uri_type2, extra_msg=None):
    # pylint: disable=protected-access
    # Report the name of the function the caller invoked.
    caller = sys._getframe(1).f_code.co_name
    message = ('Function %s not supported between %s and %s URIs' %
               (caller, uri_type1, uri_type2))
    if extra_msg:
      message = '%s, %s' % (message, extra_msg)

    RuntimeError.__init__(self, message)
+
+
class MissingURLError(RuntimeError):
  """Raised when nothing exists at URL (e.g. download or size lookup)."""
+
+
def ExtractProtocol(uri):
  """Take a URI and return the protocol it is using, if any.

  Examples:
  'gs://some/path' ==> 'gs'
  'file:///some/path' ==> 'file'
  'git+ssh://host/repo' ==> 'git+ssh'
  '/some/path' ==> None
  '/cns/some/colossus/path' ==> None

  Args:
    uri: The URI to get protocol from.

  Returns:
    Protocol string that is found, or None.
  """
  # Scheme syntax per RFC 3986: a letter followed by letters, digits,
  # '+', '-' or '.'.  The previous r'^(\w+)://' pattern missed legal
  # schemes such as 'git+ssh' and wrongly accepted digit-led ones.
  match = re.match(r'([A-Za-z][A-Za-z0-9+.-]*)://', uri)
  if match:
    return match.group(1)

  return None
+
+
def GetUriType(uri):
  """Get the type of a URI.

  See the TYPE_* constants for examples.  This is mostly based
  on URI protocols, with plain local paths as the fallback.

  Args:
    uri: The URI to consider

  Returns:
    The URI type.
  """
  # A URI with no protocol prefix is treated as a local file path.
  return ExtractProtocol(uri) or TYPE_LOCAL
+
+
def SplitURI(uri):
  """Get the protocol and path from a URI.

  Examples:
  'gs://some/path' ==> ('gs', 'some/path')
  'file:///some/path' ==> ('file', '/some/path')
  '/some/path' ==> (None, '/some/path')
  '/cns/some/colossus/path' ==> (None, '/cns/some/colossus/path')

  Args:
    uri: The uri to get protocol and path from.

  Returns:
    Tuple (protocol, path); protocol is None for plain paths.
  """
  # Scheme syntax per RFC 3986 (letter, then letters/digits/'+'/'-'/'.'),
  # kept in step with ExtractProtocol.
  match = re.match(r'([A-Za-z][A-Za-z0-9+.-]*)://(.*)$', uri)
  if match:
    return (match.group(1), match.group(2))

  return (None, uri)
+
+
def IsGsURI(uri):
  """Return True if the given uri uses the Google Storage protocol."""
  return ExtractProtocol(uri) == PROTOCOL_GS
+
+
def IsFileURI(uri):
  """Return True if given uri is a file URI (or plain path).

  True when uri uses the file protocol or is a bare path with no
  protocol prefix.

  Args:
    uri: Any URI or path.

  Returns:
    True or False as described above.
  """
  return GetUriType(uri) == TYPE_LOCAL
+
+
def IsHttpURI(uri, https_ok=False):
  """Return True if given uri uses http, or optionally https, protocol.

  Args:
    uri: The URI to check.
    https_ok: If True, then accept https protocol as well.

  Returns:
    Boolean
  """
  uri_type = GetUriType(uri)
  if uri_type == TYPE_HTTP:
    return True
  return https_ok and uri_type == TYPE_HTTPS
+
+
def IsHttpsURI(uri):
  """Return True if the given uri uses the https protocol."""
  return GetUriType(uri) == TYPE_HTTPS
+
+
def MD5Sum(uri):
  """Compute or retrieve MD5 sum of uri.

  Supported for: local files, GS files.

  Args:
    uri: The /unix/path or gs:// uri to compute the md5sum on.

  Returns:
    A string representing the md5sum of the file/uri passed in.
    None if we do not understand the uri passed in or cannot compute
    the md5sum.
  """
  uri_type = GetUriType(uri)

  if uri_type == TYPE_LOCAL:
    return filelib.MD5Sum(uri)

  if uri_type == TYPE_GS:
    try:
      return gslib.MD5Sum(uri)
    except gslib.GSLibError:
      return None

  # Other URI types have no md5 command.  Copying to local disk to hash
  # would hide a real limitation, so fail loudly instead.
  raise NotSupportedForType(uri_type)
+
+
def Cmp(uri1, uri2):
  """Return True if paths hold identical files.

  If either file is missing then always return False.

  Args:
    uri1: URI to a file.
    uri2: URI to a file.

  Returns:
    True if files are the same, False otherwise.

  Raises:
    NotSupportedBetweenTypes if Cmp cannot be done between the two
      URIs provided.
  """
  uri_type1 = GetUriType(uri1)
  uri_type2 = GetUriType(uri2)
  uri_types = set((uri_type1, uri_type2))

  # gslib handles GS<->GS and GS<->local comparisons.
  if TYPE_GS in uri_types and uri_types <= set((TYPE_GS, TYPE_LOCAL)):
    return gslib.Cmp(uri1, uri2)

  # Purely local comparison.
  if uri_types == set((TYPE_LOCAL,)):
    return filelib.Cmp(uri1, uri2)

  raise NotSupportedBetweenTypes(uri_type1, uri_type2)
+
+
class URLopener(urllib.FancyURLopener):
  """URLopener that will actually complain when download fails."""
  # The urllib.urlretrieve function, which seems like a good fit for this,
  # does not give access to error code.

  def http_error_default(self, *args, **kwargs):
    # Deliberately call the plain URLopener handler (not FancyURLopener's),
    # so HTTP errors raise IOError instead of being silently "handled".
    urllib.URLopener.http_error_default(self, *args, **kwargs)
+
+
def URLRetrieve(src_url, dest_path):
  """Download file from given URL to given local file path.

  Args:
    src_url: URL to download from.
    dest_path: Path to download to.

  Raises:
    MissingURLError if URL cannot be downloaded.
  """
  opener = URLopener()

  try:
    opener.retrieve(src_url, dest_path)
  except IOError as e:
    # If the domain is valid but download failed errno shows up as None.
    if e.errno is None:
      raise MissingURLError('Unable to download %s' % src_url)

    # If the domain is invalid the errno shows up as 'socket error', weirdly.
    try:
      int(e.errno)

      # This means there was some normal error writing to the dest_path.
      raise
    except ValueError:
      # int() failed, so errno was the 'socket error' string: the URL's
      # domain could not be resolved at all.
      raise MissingURLError('Unable to download %s (bad domain?)' % src_url)
+
+
def Copy(src_uri, dest_uri):
  """Copy one uri to another.

  Args:
    src_uri: URI to copy from.
    dest_uri: Path to copy to.

  Raises:
    NotSupportedBetweenTypes if the copy is not supported between the
      two URI types provided.
  """
  src_type = GetUriType(src_uri)
  dest_type = GetUriType(dest_uri)
  uri_types = set((src_type, dest_type))

  # gslib handles GS<->GS and GS<->local copies.
  if TYPE_GS in uri_types and uri_types <= set((TYPE_GS, TYPE_LOCAL)):
    return gslib.Copy(src_uri, dest_uri)

  # Purely local copy.
  if uri_types == set((TYPE_LOCAL,)):
    return filelib.Copy(src_uri, dest_uri)

  # Download from a web URL to a local file.
  if src_type in (TYPE_HTTP, TYPE_HTTPS) and dest_type == TYPE_LOCAL:
    return URLRetrieve(src_uri, dest_uri)

  raise NotSupportedBetweenTypes(src_type, dest_type)
+
+
def Remove(*args, **kwargs):
  """Delete the file(s) at uris, or directory(s) with recurse set.

  Args:
    args: One or more URIs.
    ignore_no_match: If True, then do not complain if anything was not
      removed because no URI match was found.  Like rm -f.  Defaults to False.
    recurse: Remove recursively starting at path.  Same as rm -R.  Defaults
      to False.
  """
  uri_types = set(GetUriType(u) for u in args)

  # gslib can remove GS URIs, optionally mixed with local paths.
  if TYPE_GS in uri_types and uri_types <= set((TYPE_GS, TYPE_LOCAL)):
    return gslib.Remove(*args, **kwargs)

  # All-local removal.
  if uri_types == set((TYPE_LOCAL,)):
    return filelib.Remove(*args, **kwargs)

  raise NotSupportedForTypes(*list(uri_types))
+
+
def Size(uri):
  """Return size of file at URI in bytes.

  Args:
    uri: URI to consider

  Returns:
    Size of file at given URI in bytes.

  Raises:
    MissingURLError if uri is a URL and cannot be found.
  """
  uri_type = GetUriType(uri)

  if uri_type == TYPE_GS:
    return gslib.FileSize(uri)

  if uri_type == TYPE_LOCAL:
    return filelib.Size(uri)

  if uri_type in (TYPE_HTTP, TYPE_HTTPS):
    try:
      response = urllib2.urlopen(uri)
      if response.getcode() == 200:
        # Trust the server-reported Content-Length for the size.
        return int(response.headers.getheader('Content-Length'))

    except urllib2.HTTPError as e:
      # Interpret 4** errors as our own MissingURLError.
      if e.code < 400 or e.code >= 500:
        raise

    raise MissingURLError('No such file at URL %s' % uri)

  raise NotSupportedForType(uri_type)
+
+
def Exists(uri, as_dir=False):
  """Return True if file exists at given URI.

  If URI is a directory and as_dir is False then this will return False.

  Args:
    uri: URI to consider
    as_dir: If True then check URI as a directory, otherwise check as a file.

  Returns:
    True if file (or directory) exists at URI, False otherwise.
  """
  uri_type = GetUriType(uri)

  if uri_type == TYPE_GS:
    # Google Storage has no real directories, so a directory query is
    # always False.
    return False if as_dir else gslib.Exists(uri)

  if uri_type == TYPE_LOCAL:
    return filelib.Exists(uri, as_dir=as_dir)

  if uri_type in (TYPE_HTTP, TYPE_HTTPS):
    if as_dir:
      raise NotSupportedForType(uri_type, extra_msg='with as_dir=True')

    try:
      return urllib2.urlopen(uri).getcode() == 200
    except urllib2.HTTPError:
      return False

  raise NotSupportedForType(uri_type)
+
+
def ListFiles(root_path, recurse=False, filepattern=None, sort=False):
  """Return list of file paths under given root path.

  Directories are intentionally excluded from results.  The root_path
  argument can be a local directory path or a Google storage directory URI.

  Args:
    root_path: A local path or GS path to directory.
    recurse: Look for files in subdirectories, as well
    filepattern: glob pattern to match against basename of file
    sort: If True then do a default sort on paths

  Returns:
    List of paths to files that matched
  """
  uri_type = GetUriType(root_path)

  # gslib and filelib expose the same listing interface; pick the one
  # matching the URI type.
  if uri_type == TYPE_GS:
    lister = gslib.ListFiles
  elif uri_type == TYPE_LOCAL:
    lister = filelib.ListFiles
  else:
    raise NotSupportedForType(uri_type)

  return lister(root_path, recurse=recurse, filepattern=filepattern, sort=sort)
+
+
def CopyFiles(src_dir, dst_dir):
  """Recursively copy all files from src_dir into dst_dir.

  This leverages the Copy method, so the restrictions there for what
  copies are supported apply here.

  Args:
    src_dir: A local or GS directory to copy from.
    dst_dir: A local or GS directory to copy into.

  Returns:
    A list of absolute path files for all copied files.
  """
  dst_paths = []
  for src_path in ListFiles(src_dir, recurse=True):
    # Swap only the leading src_dir prefix for dst_dir.  The previous
    # src_path.replace(src_dir, dst_dir) rewrote EVERY occurrence of the
    # src_dir string, corrupting paths that repeat it (e.g. '/a/b/a/c').
    dst_path = dst_dir + src_path[len(src_dir):]
    Copy(src_path, dst_path)
    dst_paths.append(dst_path)

  return dst_paths
+
+
def RemoveDirContents(base_dir):
  """Remove all contents of a directory.

  Args:
    base_dir: directory to delete contents of.
  """
  handlers = {
      TYPE_GS: gslib.RemoveDirContents,
      TYPE_LOCAL: filelib.RemoveDirContents,
  }

  uri_type = GetUriType(base_dir)
  try:
    handler = handlers[uri_type]
  except KeyError:
    raise NotSupportedForType(uri_type)

  return handler(base_dir)
diff --git a/lib/paygen/urilib_unittest b/lib/paygen/urilib_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/lib/paygen/urilib_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/paygen/urilib_unittest.py b/lib/paygen/urilib_unittest.py
new file mode 100644
index 0000000..aa5c8ac
--- /dev/null
+++ b/lib/paygen/urilib_unittest.py
@@ -0,0 +1,515 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the urilib module."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+
+from chromite.lib.paygen import filelib
+from chromite.lib.paygen import gslib
+from chromite.lib.paygen import urilib
+
+
+# We access private members to test them.
+# pylint: disable=protected-access
+
+
class FakeHttpResponse(object):
  """For simulating http response objects."""

  class FakeHeaders(object):
    """Helper class for faking HTTP headers in a response."""

    def __init__(self, headers_dict):
      # Accept None so responses built without headers still answer
      # getheader() instead of crashing on None.get.
      self.headers_dict = headers_dict or {}

    def getheader(self, name):
      """Return the value for header name, or None if absent."""
      return self.headers_dict.get(name)

  def __init__(self, code, headers_dict=None):
    # code mimics the HTTP status returned by getcode().
    self.code = code
    self.headers = FakeHttpResponse.FakeHeaders(headers_dict)

  def getcode(self):
    """Return the fake HTTP status code."""
    return self.code
+
+
class TestFileManipulation(cros_test_lib.TempDirTestCase):
  """Test general urilib file methods together."""

  # pylint: disable=attribute-defined-outside-init

  # Fixture layout: two files at the top level plus one file in a subdir.
  FILE1 = 'file1'
  FILE2 = 'file2'
  SUBDIR = 'subdir'
  SUBFILE = '%s/file3' % SUBDIR

  FILE1_CONTENTS = 'Howdy doody there dandy'
  FILE2_CONTENTS = 'Once upon a time in a galaxy far far away.'
  SUBFILE_CONTENTS = 'Five little monkeys jumped on the bed.'

  # Scratch GS directory used by the network integration test.
  GS_DIR = 'gs://chromeos-releases-public/unittest'

  def setUp(self):
    # Use a subdir specifically for the cache so we can use the tempdir for
    # other things (including tempfiles by gsutil/etc...).
    self.filesdir = os.path.join(self.tempdir, 'unittest-cache')
    osutils.SafeMakedirs(self.filesdir)

  def _SetUpDirs(self):
    """Populate the local fixture tree and empty the remote GS directory."""
    self.file1_local = os.path.join(self.filesdir, self.FILE1)
    self.file2_local = os.path.join(self.filesdir, self.FILE2)
    self.subdir_local = os.path.join(self.filesdir, self.SUBDIR)
    self.subfile_local = os.path.join(self.filesdir, self.SUBFILE)

    self.file1_gs = os.path.join(self.GS_DIR, self.FILE1)
    self.file2_gs = os.path.join(self.GS_DIR, self.FILE2)
    self.subdir_gs = os.path.join(self.GS_DIR, self.SUBDIR)
    self.subfile_gs = os.path.join(self.GS_DIR, self.SUBFILE)

    # Pre-populate local dir with contents.
    with open(self.file1_local, 'w') as out1:
      out1.write(self.FILE1_CONTENTS)

    with open(self.file2_local, 'w') as out2:
      out2.write(self.FILE2_CONTENTS)

    os.makedirs(self.subdir_local)

    with open(self.subfile_local, 'w') as out3:
      out3.write(self.SUBFILE_CONTENTS)

    # Make sure gs:// directory is ready (empty).
    gslib.Remove(os.path.join(self.GS_DIR, '*'), recurse=True,
                 ignore_no_match=True)

  @cros_test_lib.NetworkTest()
  def testIntegration(self):
    """Exercise Exists/ListFiles/CopyFiles/Cmp/RemoveDirContents end-to-end."""
    self._SetUpDirs()

    self.assertTrue(urilib.Exists(self.filesdir, as_dir=True))
    self.assertTrue(urilib.Exists(self.file1_local))
    self.assertTrue(urilib.Exists(self.file2_local))
    self.assertTrue(urilib.Exists(self.subfile_local))
    self.assertTrue(urilib.Exists(self.subdir_local, as_dir=True))

    # Nothing has been uploaded yet.
    self.assertFalse(urilib.Exists(self.file1_gs))
    self.assertFalse(urilib.Exists(self.file2_gs))
    self.assertFalse(urilib.Exists(self.subfile_gs))

    shallow_local_files = [self.file1_local, self.file2_local]
    deep_local_files = shallow_local_files + [self.subfile_local]
    shallow_gs_files = [self.file1_gs, self.file2_gs]
    deep_gs_files = shallow_gs_files + [self.subfile_gs]

    # Test ListFiles, local version.
    self.assertEquals(set(shallow_local_files),
                      set(urilib.ListFiles(self.filesdir)))
    self.assertEquals(set(deep_local_files),
                      set(urilib.ListFiles(self.filesdir, recurse=True)))

    # Test CopyFiles, from local to GS.
    self.assertEquals(set(deep_gs_files),
                      set(urilib.CopyFiles(self.filesdir, self.GS_DIR)))

    # Test ListFiles, GS version.
    self.assertEquals(set(shallow_gs_files),
                      set(urilib.ListFiles(self.GS_DIR)))
    self.assertEquals(set(deep_gs_files),
                      set(urilib.ListFiles(self.GS_DIR, recurse=True)))

    # Test Cmp between some files.
    self.assertTrue(urilib.Cmp(self.file1_local, self.file1_gs))
    self.assertFalse(urilib.Cmp(self.file2_local, self.file1_gs))

    # Test RemoveDirContents, local version.
    urilib.RemoveDirContents(self.filesdir)
    self.assertFalse(urilib.ListFiles(self.filesdir))

    # Test CopyFiles, from GS to local.
    self.assertEquals(set(deep_local_files),
                      set(urilib.CopyFiles(self.GS_DIR, self.filesdir)))

    # Test RemoveDirContents, GS version.
    urilib.RemoveDirContents(self.GS_DIR)
    self.assertFalse(urilib.ListFiles(self.GS_DIR))
+
+
+class TestUrilib(cros_test_lib.MoxTempDirTestCase):
+  """Test urilib module."""
+
+  def testExtractProtocol(self):
+    tests = {'gs': ['gs://',
+                    'gs://foo',
+                    'gs://foo/bar'],
+             'abc': ['abc://',
+                     'abc://foo',
+                     'abc://foo/bar'],
+             None: ['foo/bar',
+                    '/foo/bar',
+                    '://garbage/path']}
+
+    for protocol in tests:
+      for uri in tests[protocol]:
+        self.assertEquals(protocol, urilib.ExtractProtocol(uri))
+
+  def testGetUriType(self):
+    tests = {'gs': ['gs://',
+                    'gs://foo',
+                    'gs://foo/bar'],
+             'abc': ['abc://',
+                     'abc://foo',
+                     'abc://foo/bar'],
+             'file': ['foo/bar',
+                      '/foo/bar',
+                      '://garbage/path',
+                      '/cnsfoo/bar']}
+
+    for uri_type in tests:
+      for uri in tests[uri_type]:
+        self.assertEquals(uri_type, urilib.GetUriType(uri))
+
+  def testSplitURI(self):
+    tests = [
+        ['gs', 'foo', 'gs://foo'],
+        ['gs', 'foo/bar', 'gs://foo/bar'],
+        ['file', '/foo/bar', 'file:///foo/bar'],
+        [None, '/foo/bar', '/foo/bar'],
+    ]
+
+    for test in tests:
+      uri = test[2]
+      protocol, path = urilib.SplitURI(uri)
+      self.assertEquals(test[0], protocol)
+      self.assertEquals(test[1], path)
+
+  def testIsGsURI(self):
+    tests_true = ('gs://',
+                  'gs://foo',
+                  'gs://foo/bar')
+    for test in tests_true:
+      self.assertTrue(urilib.IsGsURI(test))
+
+    tests_false = ('gsfoo/bar',
+                   'gs/foo/bar',
+                   'gs',
+                   '/foo/bar',
+                   '/gs',
+                   '/gs/foo/bar'
+                   'file://foo/bar',
+                   'http://foo/bar')
+    for test in tests_false:
+      self.assertFalse(urilib.IsGsURI(test))
+
+  def testIsFileURI(self):
+    tests_true = ('file://',
+                  'file://foo/bar',
+                  'file:///foo/bar',
+                  '/foo/bar',
+                  'foo/bar',
+                  'foo',
+                  '')
+    for test in tests_true:
+      self.assertTrue(urilib.IsFileURI(test))
+
+    tests_false = ('gs://',
+                   'foo://',
+                   'gs://foo/bar')
+    for test in tests_false:
+      self.assertFalse(urilib.IsFileURI(test))
+
+  def testIsHttpURI(self):
+    tests_true = ('http://',
+                  'http://foo',
+                  'http://foo/bar')
+    for test in tests_true:
+      self.assertTrue(urilib.IsHttpURI(test))
+
+    tests_https_true = ('https://',
+                        'https://foo',
+                        'https://foo/bar')
+    for test in tests_https_true:
+      self.assertTrue(urilib.IsHttpURI(test, https_ok=True))
+    for test in tests_https_true:
+      self.assertFalse(urilib.IsHttpURI(test))
+
+    tests_false = ('httpfoo/bar',
+                   'http/foo/bar',
+                   'http',
+                   '/foo/bar',
+                   '/http',
+                   '/http/foo/bar'
+                   'file:///foo/bar',
+                   'gs://foo/bar')
+    for test in tests_false:
+      self.assertFalse(urilib.IsHttpURI(test))
+
+  def testIsHttpsURI(self):
+    tests_true = ('https://',
+                  'https://foo',
+                  'https://foo/bar')
+    for test in tests_true:
+      self.assertTrue(urilib.IsHttpsURI(test))
+
+    tests_false = ('http://',
+                   'http://foo',
+                   'http://foo/bar',
+                   'httpfoo/bar',
+                   'http/foo/bar',
+                   'http',
+                   '/foo/bar',
+                   '/http',
+                   '/http/foo/bar'
+                   'file:///foo/bar',
+                   'gs://foo/bar')
+    for test in tests_false:
+      self.assertFalse(urilib.IsHttpsURI(test))
+
  def testMD5Sum(self):
    """MD5Sum dispatches to gslib for GS URIs, filelib for local paths."""
    gs_path = 'gs://bucket/some/path'
    local_path = '/some/local/path'
    http_path = 'http://host.domain/some/path'

    self.mox.StubOutWithMock(gslib, 'MD5Sum')
    self.mox.StubOutWithMock(filelib, 'MD5Sum')

    # Set up the test replay script.  mox verifies the stubbed calls occur
    # in exactly this recorded order.  (The "Run N" numbers appear to come
    # from a larger original test plan -- some runs raise instead of
    # calling a stub.)
    # Run 1, GS.
    gslib.MD5Sum(gs_path).AndReturn('TheResult')
    # Run 3, local file.
    filelib.MD5Sum(local_path).AndReturn('TheResult')
    self.mox.ReplayAll()

    # Run the test verification.
    self.assertEquals('TheResult', urilib.MD5Sum(gs_path))
    self.assertEquals('TheResult', urilib.MD5Sum(local_path))
    # http URIs have no MD5 support and must raise.
    self.assertRaises(urilib.NotSupportedForType, urilib.MD5Sum, http_path)
    self.mox.VerifyAll()
+
  def testCmp(self):
    """Cmp dispatches local<->local to filelib, anything-GS to gslib."""
    gs_path = 'gs://bucket/some/path'
    local_path = '/some/local/path'
    http_path = 'http://host.domain/some/path'

    result = 'TheResult'

    self.mox.StubOutWithMock(gslib, 'Cmp')
    self.mox.StubOutWithMock(filelib, 'Cmp')

    # Set up the test replay script.  mox verifies the stubbed calls occur
    # in exactly this recorded order.
    # Run 1, two local files.
    filelib.Cmp(local_path, local_path + '.1').AndReturn(result)
    # Run 2, local and GS.
    gslib.Cmp(local_path, gs_path).AndReturn(result)
    # Run 4, GS and GS
    gslib.Cmp(gs_path, gs_path + '.1').AndReturn(result)
    # Run 7, local and HTTP
    self.mox.ReplayAll()

    # Run the test verification.
    self.assertEquals(result, urilib.Cmp(local_path, local_path + '.1'))
    self.assertEquals(result, urilib.Cmp(local_path, gs_path))
    self.assertEquals(result, urilib.Cmp(gs_path, gs_path + '.1'))
    # local vs http is unsupported and must raise (no stub recorded).
    self.assertRaises(urilib.NotSupportedBetweenTypes, urilib.Cmp,
                      local_path, http_path)
    self.mox.VerifyAll()
+
  @cros_test_lib.NetworkTest()
  def testURLRetrieve(self):
    """Download behavior for a good URL, bad domain, bad path, bad dest."""
    good_url = 'https://codereview.chromium.org/download/issue11731004_1_2.diff'
    bad_domain_url = 'http://notarealdomainireallyhope.com/some/path'
    bad_path_url = 'https://dl.google.com/dl/edgedl/x/y/z/a/b/c/foobar'
    local_path = os.path.join(self.tempdir, 'downloaded_file')
    bad_local_path = '/tmp/a/b/c/d/x/y/z/foobar'

    # The pinned contents of the live review diff downloaded above.
    git_index1 = 'e6c0d72a5122171deb4c458991d1c7547f31a2f0'
    git_index2 = '3d0f7d3edfd8146031e66dc3f45926920d3ded78'
    expected_contents = """Index: LICENSE
diff --git a/LICENSE b/LICENSE
index %s..%s 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,4 +1,4 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
""" % (git_index1, git_index2)

    # Missing path or unresolvable domain both surface as MissingURLError.
    self.assertRaises(urilib.MissingURLError, urilib.URLRetrieve,
                      bad_path_url, local_path)
    self.assertRaises(urilib.MissingURLError, urilib.URLRetrieve,
                      bad_domain_url, local_path)

    urilib.URLRetrieve(good_url, local_path)
    with open(local_path, 'r') as f:
      actual_contents = f.read()
    self.assertEqual(expected_contents, actual_contents)

    # An unwritable destination is a plain IOError, not MissingURLError.
    self.assertRaises(IOError, urilib.URLRetrieve, good_url, bad_local_path)
+
  def testCopy(self):
    """Copy dispatches to filelib, gslib, or URLRetrieve by URI types."""
    gs_path = 'gs://bucket/some/path'
    local_path = '/some/local/path'
    http_path = 'http://host.domain/some/path'

    result = 'TheResult'

    self.mox.StubOutWithMock(gslib, 'Copy')
    self.mox.StubOutWithMock(filelib, 'Copy')
    self.mox.StubOutWithMock(urilib, 'URLRetrieve')

    # Set up the test replay script.  mox verifies the stubbed calls occur
    # in exactly this recorded order.
    # Run 1, two local files.
    filelib.Copy(local_path, local_path + '.1').AndReturn(result)
    # Run 2, local and GS.
    gslib.Copy(local_path, gs_path).AndReturn(result)
    # Run 4, GS and GS
    gslib.Copy(gs_path, gs_path + '.1').AndReturn(result)
    # Run 7, HTTP and local
    urilib.URLRetrieve(http_path, local_path).AndReturn(result)
    # Run 8, local and HTTP
    self.mox.ReplayAll()

    # Run the test verification.
    self.assertEquals(result, urilib.Copy(local_path, local_path + '.1'))
    self.assertEquals(result, urilib.Copy(local_path, gs_path))
    self.assertEquals(result, urilib.Copy(gs_path, gs_path + '.1'))
    self.assertEquals(result, urilib.Copy(http_path, local_path))
    # Uploading local->http is unsupported and must raise.
    self.assertRaises(urilib.NotSupportedBetweenTypes, urilib.Copy,
                      local_path, http_path)
    self.mox.VerifyAll()
+
+  def testRemove(self):
+    gs_path = 'gs://bucket/some/path'
+    local_path = '/some/local/path'
+    http_path = 'http://host.domain/some/path'
+
+    self.mox.StubOutWithMock(gslib, 'Remove')
+    self.mox.StubOutWithMock(filelib, 'Remove')
+
+    # Set up the test replay script.
+    # Run 1, two local files.
+    filelib.Remove(local_path, local_path + '.1')
+    # Run 2, local and GS.
+    gslib.Remove(local_path, gs_path, ignore_no_match=True)
+    # Run 4, GS and GS
+    gslib.Remove(gs_path, gs_path + '.1',
+                 ignore_no_match=True, recurse=True)
+    # Run 7, local and HTTP
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    urilib.Remove(local_path, local_path + '.1')
+    urilib.Remove(local_path, gs_path, ignore_no_match=True)
+    urilib.Remove(gs_path, gs_path + '.1', ignore_no_match=True, recurse=True)
+    self.assertRaises(urilib.NotSupportedForTypes, urilib.Remove,
+                      local_path, http_path)
+    self.mox.VerifyAll()
+
+  def testSize(self):
+    gs_path = 'gs://bucket/some/path'
+    local_path = '/some/local/path'
+    http_path = 'http://host.domain/some/path'
+    ftp_path = 'ftp://host.domain/some/path'
+
+    result = 100
+    http_response = FakeHttpResponse(200, {'Content-Length': str(result)})
+
+    self.mox.StubOutWithMock(gslib, 'FileSize')
+    self.mox.StubOutWithMock(filelib, 'Size')
+    self.mox.StubOutWithMock(urilib.urllib2, 'urlopen')
+
+    # Set up the test replay script.
+    # Run 1, local.
+    filelib.Size(local_path).AndReturn(result)
+    # Run 2, GS.
+    gslib.FileSize(gs_path).AndReturn(result)
+    # Run 4, HTTP.
+    urilib.urllib2.urlopen(http_path).AndReturn(http_response)
+    # Run 5, FTP.
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertEquals(result, urilib.Size(local_path))
+    self.assertEquals(result, urilib.Size(gs_path))
+    self.assertEquals(result, urilib.Size(http_path))
+    self.assertRaises(urilib.NotSupportedForType, urilib.Size, ftp_path)
+    self.mox.VerifyAll()
+
+  def testExists(self):
+    gs_path = 'gs://bucket/some/path'
+    local_path = '/some/local/path'
+    http_path = 'http://host.domain/some/path'
+    ftp_path = 'ftp://host.domain/some/path'
+
+    result = 'TheResult'
+
+    self.mox.StubOutWithMock(gslib, 'Exists')
+    self.mox.StubOutWithMock(filelib, 'Exists')
+    self.mox.StubOutWithMock(urilib.urllib2, 'urlopen')
+
+    # Set up the test replay script.
+    # Run 1, local, as_dir=False
+    filelib.Exists(local_path, as_dir=False).AndReturn(result)
+    # Run 2, GS, as_dir=False.
+    gslib.Exists(gs_path).AndReturn(result)
+    # Run 3, GS, as_dir=True.
+    # Run 6, HTTP, as_dir=False, code=200.
+    urilib.urllib2.urlopen(http_path).AndReturn(FakeHttpResponse(200))
+    # Run 7, HTTP, as_dir=False, code=404.
+    urilib.urllib2.urlopen(http_path).AndReturn(FakeHttpResponse(404))
+    # Run 8, HTTP, as_dir=False, HTTPError.
+    urilib.urllib2.urlopen(http_path).AndRaise(
+        urilib.urllib2.HTTPError('url', 404, 'msg', None, None))
+    # Run 9, HTTP, as_dir=True.
+    # Run 10, FTP, as_dir=False.
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertEquals(result, urilib.Exists(local_path))
+    self.assertEquals(result, urilib.Exists(gs_path))
+    self.assertEquals(False, urilib.Exists(gs_path, as_dir=True))
+    self.assertTrue(urilib.Exists(http_path))
+    self.assertFalse(urilib.Exists(http_path))
+    self.assertFalse(urilib.Exists(http_path))
+    self.assertRaises(urilib.NotSupportedForType,
+                      urilib.Exists, http_path, as_dir=True)
+    self.assertRaises(urilib.NotSupportedForType, urilib.Exists, ftp_path)
+    self.mox.VerifyAll()
+
+  def testListFiles(self):
+    gs_path = 'gs://bucket/some/path'
+    local_path = '/some/local/path'
+    http_path = 'http://host.domain/some/path'
+
+    result = 'TheResult'
+    patt = 'TheFilePattern'
+
+    self.mox.StubOutWithMock(gslib, 'ListFiles')
+    self.mox.StubOutWithMock(filelib, 'ListFiles')
+
+    # Set up the test replay script.
+    # Run 1, local.
+    filelib.ListFiles(
+        local_path, recurse=True, filepattern=None,
+        sort=False).AndReturn(result)
+    # Run 2, GS.
+    gslib.ListFiles(
+        gs_path, recurse=False, filepattern=patt, sort=True).AndReturn(result)
+    # Run 4, HTTP.
+    self.mox.ReplayAll()
+
+    # Run the test verification.
+    self.assertEquals(result, urilib.ListFiles(local_path, recurse=True))
+    self.assertEquals(result, urilib.ListFiles(gs_path, filepattern=patt,
+                                               sort=True))
+    self.assertRaises(urilib.NotSupportedForType, urilib.ListFiles, http_path)
+    self.mox.VerifyAll()
diff --git a/lib/paygen/utils.py b/lib/paygen/utils.py
new file mode 100644
index 0000000..93ffed9
--- /dev/null
+++ b/lib/paygen/utils.py
@@ -0,0 +1,532 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common python commands used by various internal build scripts."""
+
+from __future__ import print_function
+
+from contextlib import contextmanager
+import datetime
+import itertools
+import os
+import Queue
+import subprocess
+import tempfile
+import threading
+import time
+import traceback
+
+from chromite.lib import cros_logging as logging
+
+
+# Give preference to /usr/local/google/tmp for space reasons.
+TMPS = ('/usr/local/google/tmp', '/tmp')
+TMP = next(d for d in TMPS if os.path.exists(d))
+
+ONE_GIG = 2 ** 30      # One gig in bytes
+TWO_GIGS = ONE_GIG * 2  # Two gigs in bytes
+
+
+class CommandFailedException(Exception):
+  """Exception gets thrown for a command that fails to execute."""
+
+
+class UnableToCreateTmpDir(Exception):
+  """Raised if we are unable to find a suitable tmp area."""
+
+
+class Timer(object):
+  """Timer class to measure the time that is spent during a task."""
+  # pylint: disable=W0201
+
+  def __enter__(self):
+    self.__start = time.time()
+
+  def __exit__(self, exc_type, exc_value, exc_traceback):
+    self.__finish = time.time()
+
+  def ElapsedTime(self):
+    """Returns the time since the invocation of the object."""
+    return self.__finish - self.__start
+
+
+class CommandResult(object):
+  """An object to store various attributes of a child process."""
+
+  def __init__(self, cmd=None, error=None, output=None, returncode=None):
+    self.cmd = cmd
+    self.error = error
+    self.output = output
+    self.returncode = returncode
+
+  @property
+  def cmdstr(self):
+    """Return self.cmd as space-separated string."""
+    if self.cmd:
+      return ' '.join(self.cmd)
+
+
+def CreateTmpDir(prefix='cros-rel', tmps=TMPS, minimum_size=0):
+  """Return a unique tmp dir with enough free space (if specified).
+
+  Check if any tmp in tmps exists that also meets the minimum_size
+  free space requirement. If so, return a unique tmp dir in that path.
+
+  Args:
+    prefix: Prefix to use with tempfile.mkdtemp.
+    tmps: An iterable of directories to consider for tmp space.
+    minimum_size: The minimum size the tmp dir needs to have. Default: 0.
+
+  Raises:
+    UnableToCreateTmpDir: If we are unable to find a suitable tmp dir.
+  """
+  for entry in tmps:
+    if os.path.exists(entry):
+      if not minimum_size or GetFreeSpace(entry) > minimum_size:
+        return tempfile.mkdtemp(prefix=prefix, dir=entry)
+      else:
+        logging.warning('Not enough space in %s to create %s temp dir.',
+                        entry, prefix)
+
+  raise UnableToCreateTmpDir('Unable to find a suitable %s tmp dir.'
+                             '  Considered: %s', prefix, ', '.join(tmps))
+
+
+# TODO(mtennant): The RunCommand function in chromite/lib/cros_build_lib is
+# more mature and has more functionality than this function.  Try using it here.
+# In particular, note that this function handles redirect_stdout differently
+# from redirect_stderr.  For stdout the output is returned, but for stderr
+# the output is simply discarded (in both cases the output does not go to
+# the standard stdout/stderr handlers if "redirected").
+def RunCommand(cmd, error_ok=False, redirect_stdout=False,
+               redirect_stderr=False, cwd=None, input=None, dryrun=False,
+               shell=False, env=None, return_result=False):
+  """Runs the given command passed in 'cmd'.
+
+  Args:
+    cmd: list of command and arguments to run.
+    error_ok: ignore failures.
+    redirect_stdout: boolean. If true redirects stdout.
+    redirect_stderr: boolean. If true mutes stderr.  Returned
+      output will not include stderr output, just stdout.
+    cwd: Path to change to when running command. Default: None.
+    input: input handle to the child process.
+    dryrun: boolean. If True print the command and do not execute.
+    shell: boolean. If True run in a shell. Default: False.
+    env: Environ dict to pass to the command. Default: None.
+    return_result: If True, return CommandResult object instead of stdout.
+      This behavior should become standard someday.
+
+  Returns:
+    If return_result is True then a CommandResult object is returned.
+      Otherwise, return the stdout output of the child process.
+
+  Raises:
+    CommandFailedException: on child process failure and error_ok is False.
+  """
+  # pylint: disable=W0622
+  cmd_result = CommandResult()
+
+  # Prepare log_cmd for logging purposes only.
+  log_cmd = cmd
+  if type(cmd) is list:
+    log_cmd = ' '.join(cmd)
+  if cwd is not None:
+    log_cmd = 'cd %s && %s' % (cwd, log_cmd)
+
+  logging.debug('Begin: %s', log_cmd)
+
+  if dryrun:
+    return
+
+  timer = Timer()
+  with timer:
+    if redirect_stdout:
+      stdout = subprocess.PIPE
+    else:
+      stdout = None
+    if redirect_stderr:
+      stderr = subprocess.PIPE
+    else:
+      stderr = None
+    if input:
+      stdin = subprocess.PIPE
+    else:
+      stdin = None
+
+    cmd_result.cmd = cmd
+    proc = subprocess.Popen(cmd, cwd=cwd, stdin=stdin, stdout=stdout,
+                            stderr=stderr, shell=shell, env=env)
+    (cmd_result.output, cmd_result.error) = proc.communicate(input)
+    cmd_result.returncode = proc.returncode
+
+  logging.debug('Done : %s', log_cmd)
+  logging.debug('Time Taken: %s seconds, Return Code: %r',
+                timer.ElapsedTime(), cmd_result.returncode)
+
+  if not error_ok and cmd_result.returncode != 0:
+    raise CommandFailedException(cmd_result.error)
+
+  if return_result:
+    return cmd_result
+
+  return cmd_result.output
+
+
+def GetFreeSpace(path):
+  """Return the available free space in bytes.
+
+  Args:
+    path: The dir path to check. If this is a file it will be converted to a
+        path.
+
+  Returns:
+    The byte representation of available space.
+  """
+  if os.path.isfile(path):
+    path = os.path.dirname(path)
+
+  stats = os.statvfs(path)
+  return stats.f_bavail * stats.f_frsize
+
+
+def CreateTempFileWithContents(contents):
+  """Creates a temp file containing contents which self deletes when closed.
+
+  Args:
+    contents: The string to write into the temp file.
+
+  Returns:
+    tempfile.NamedTemporaryFile. A file object that will self delete
+    when closed.
+  """
+  message_file = tempfile.NamedTemporaryFile()
+  message_file.write(contents)
+  message_file.flush()
+  return message_file
+
+
+def ListdirFullpath(directory):
+  """Return all files in a directory with full pathnames.
+
+  Args:
+    directory: directory to find files for.
+
+  Returns:
+    Full paths to every file in that directory.
+  """
+  return [os.path.join(directory, f) for f in os.listdir(directory)]
+
+
+class RestrictedAttrDict(dict):
+  """Define a dictionary which is also a struct.
+
+  The keys will belong to a restricted list of values.
+  """
+
+  _slots = ()
+
+  def __init__(self, *args, **kwargs):
+    """Ensure that only the expected keys are added during initialization."""
+    dict.__init__(self, *args, **kwargs)
+
+    # Ensure all slots are at least populated with None.
+    for key in self._slots:
+      self.setdefault(key)
+
+    for key in self.keys():
+      assert key in self._slots, 'Unexpected key %s in %s' % (key, self._slots)
+
+  def __setattr__(self, name, val):
+    """Setting an attribute, actually sets a dictionary value."""
+    if name not in self._slots:
+      raise AttributeError("'%s' may not have attribute '%s'" %
+                           (self.__class__.__name__, name))
+    self[name] = val
+
+  def __getattr__(self, name):
+    """Fetching an attribute, actually fetches a dictionary value."""
+    if name not in self:
+      raise AttributeError("'%s' has no attribute '%s'" %
+                           (self.__class__.__name__, name))
+    return self[name]
+
+  def __setitem__(self, name, val):
+    """Restrict which keys can be stored in this dictionary."""
+    if name not in self._slots:
+      raise KeyError(name)
+    dict.__setitem__(self, name, val)
+
+  def __str__(self):
+    """Default stringification behavior."""
+    name = self._name if hasattr(self, '_name') else self.__class__.__name__
+    return '%s (%s)' % (name, self._GetAttrString())
+
+  def _GetAttrString(self, delim=', ', equal='='):
+    """Return string showing all non-None values of self._slots.
+
+    The ordering of attributes in self._slots is honored in string.
+
+    Args:
+      delim: String for separating key/value elements in result.
+      equal: String to put between key and associated value in result.
+
+    Returns:
+      A string like "a='foo', b=12".
+    """
+    slots = [s for s in self._slots if self[s] is not None]
+    elems = ['%s%s%r' % (s, equal, self[s]) for s in slots]
+    return delim.join(elems)
+
+  def _clear_if_default(self, key, default):
+    """Helper for constructors.
+
+    If the key value is set to the default value, set it to None.
+
+    Args:
+      key: Key value to check and possibly clear.
+      default: Default value to compare the key value against.
+    """
+    if self[key] == default:
+      self[key] = None
+
+
+def PathPrepend(new_dir, curr_path=None):
+  """Prepends a directory to a given path (or system path, if none provided)."""
+  if curr_path is None:
+    curr_path = os.environ.get('PATH')
+  return '%s:%s' % (new_dir, curr_path) if curr_path else new_dir
+
+
+@contextmanager
+def CheckedOpen(name, mode=None, buffering=None):
+  """A context for opening/closing a file iff an actual name is provided."""
+  # Open the file, as necessary.
+  f = None
+  if name:
+    dargs = {'name': name}
+    if mode is not None:
+      dargs['mode'] = mode
+    if buffering is not None:
+      dargs['buffering'] = buffering
+    f = open(**dargs)
+
+  try:
+    # Yield to the with-statement body.
+    yield f
+  finally:
+    # If an actual file was opened, close it.
+    if f:
+      f.close()
+
+
+class ThreadError(object):
+  """Thread execution error as returned by traceback.format_exc()."""
+
+  def __init__(self, exc):
+    self.exc = exc
+
+  def __str__(self):
+    return self.exc
+
+
+def _RunInThread(func, args, star, idx, result_queue):
+  """Runs a function, queuing its return value / exception.
+
+  If the function returns a value, this will push (idx, output) to the result
+  queue; otherwise, an exception will be caught and pushed to the queue.
+
+  Args:
+    func: A function to execute.
+    args: Arguments (possibly a tuple) to feed to the function.
+    star: Whether the arguments need to be expanded (starred) when calling.
+    idx: An index to attach to the result.
+    result_queue: A queue to push the result onto.
+  """
+  try:
+    ret = func(*args) if star else func(args)
+
+  # We want to catch all exceptions so we can queue them back to the caller.
+  # pylint: disable=W0703
+  except Exception:
+    ret = ThreadError(traceback.format_exc())
+
+  result_queue.put((idx, ret))
+
+
+class ThreadedMapError(Exception):
+  """An error during execution of a threaded map."""
+
+  def __init__(self, thread_errors, *args, **kwargs):
+    """Initializer for ThreadedMapError.
+
+    Args:
+      thread_errors: A list of pairs (idx, error), where idx is the thread
+        index in the map and error is an instance of ThreadError for it.
+    """
+    super(ThreadedMapError, self).__init__(*args, **kwargs)
+    self.thread_errors = thread_errors
+
+  def __str__(self):
+    return ('Some threads failed during threaded map:\n%s' %
+            '\n'.join([self._FormatThreadError(idx, error)
+                       for idx, error in self.thread_errors]))
+
+  @classmethod
+  def _FormatThreadError(cls, idx, error):
+    """Format the traceback occurring within a single thread."""
+    return 'Thread %d:\n%s' % (idx, cls._AddIndent(str(error), 2))
+
+  @staticmethod
+  def _AddIndent(text, indent):
+    """Prepend an indent (spaces) to all lines of a given text."""
+    return '\n'.join([' ' * indent + line for line in text.splitlines()])
+
+
+def ThreadedMap(func, args_list, star=False):
+  """Executes a function over a list of arguments in multiple threads.
+
+  This will apply the function to each of the given values in parallel,
+  spawning a new thread for each call. If star is True, each argument is
+  assumed to be a tuple and will be expanded (starred) when calling the
+  function.
+
+  Args:
+    func: A function to apply to each tuple of arguments.
+    args_list: Arguments to feed to the function.
+    star: Whether the argument to the function should be starred.
+
+  Returns:
+    A list of outputs corresponding to each input, in order.
+
+  Raises:
+    ThreadedMapError: If execution in one or more threads raised an exception.
+  """
+  result_queue = Queue.Queue()
+  num_results = len(args_list)
+  logging.debug('Threaded mapping over %d inputs', num_results)
+
+  for i, args in enumerate(args_list):
+    t = threading.Thread(target=_RunInThread,
+                         args=(func, args, star, i, result_queue))
+    t.daemon = True
+    t.start()
+
+  results = [None] * num_results
+  logging.debug('Collecting results from threaded runs')
+  thread_errors = []
+  while num_results > 0:
+    idx, result = result_queue.get()
+    if isinstance(result, ThreadError):
+      thread_errors.append((idx, result))
+    else:
+      results[idx] = result
+
+    num_results -= 1
+
+  if thread_errors:
+    raise ThreadedMapError(thread_errors)
+
+  return results
+
+
+def Group(items, key_func):
+  """Groups items based on their key.
+
+  Note that this only associates subsequences of consecutive items of the same
+  key. If a caller requires that all items bearing the same key be grouped
+  together, it is their responsibility to sort the input by key prior to
+  calling this function.
+
+  Args:
+    items: Iterable of values.
+    key_func: A function that returns the key of each item.
+
+  Returns:
+    A list of pairs (key, vals), where vals is a list of consecutive items
+    whose key_func() equals key.
+  """
+  return [(k, list(v)) for k, v in itertools.groupby(items, key_func)]
+
+
+def Linear(score, score_min, score_max, val_start, val_end):
+  """Computes a value as a linear function of a score within given bounds.
+
+  This computes the linear growth/decay of a value based on a given score.
+  Roughly speaking:
+
+    ret = val_start + C * (score - score_min)
+
+  where
+
+    C = (val_end - val_start) / (score_max - score_min)
+
+  Note that score_min/max are used as lower/upper thresholds, determining the
+  range of scores that actually have impact on the returned value. Also note
+  that val_start/end may be arbitrarily related, for example it may be that
+  val_start > val_end, in which case the result will be a linearly decaying
+  function.  The result is undefined (and may raise an exception) if
+  score_min >= score_max.  Provided all arguments are integers, this guarantees
+  that all arithmetic operations, intermediate values, and returned result are
+  integers as well.
+
+  Args:
+    score: A number that determines the linear factor.
+    score_min: The lowest score to consider.
+    score_max: The highest score to consider.
+    val_start: The return value when score <= score_min.
+    val_end: The return value when score >= score_max.
+
+  Returns:
+    An integer value ranging between val_start and val_end.
+  """
+  relative_score = max(min(score, score_max), score_min) - score_min
+  score_range = score_max - score_min
+  val_range = val_end - val_start
+  return val_start + ((val_range * relative_score) / score_range)
+
+
+def TimeDeltaToString(delta, force_seconds=False, subsecond_precision=0):
+  """Returns a readable string representation of a datetime.timedelta.
+
+  The returned value takes the form 5d2h37m5.347s. Normally, this will only
+  show seconds (and fraction thereof) if the delta is under 1 minute long,
+  unless force_seconds is True. Subsecond fractions are shown up to the
+  provided precision and without trailing zeros. Bear in mind that Python's
+  timedelta is only accurate to the microseconds, so specifying precision
+  beyond 6 will have no effect on the output.
+
+  Args:
+    delta: A datetime.timedelta object.
+    force_seconds: Whether to show seconds unconditionally.
+    subsecond_precision: Number of digits after the decimal point (0-6).
+
+  Returns:
+    A string representation of the time delta.
+  """
+  delta_str = ''
+
+  if delta.days > 0:
+    delta_str += '%dd' % delta.days
+    delta -= datetime.timedelta(days=delta.days)
+
+  delta_hours = delta.seconds / 3600
+  if delta_hours > 0:
+    delta_str += '%dh' % delta_hours
+    delta -= datetime.timedelta(hours=delta_hours)
+
+  delta_minutes = delta.seconds / 60
+  if delta_minutes > 0:
+    delta_str += '%dm' % delta_minutes
+    delta -= datetime.timedelta(minutes=delta_minutes)
+
+  if not delta_str or force_seconds:
+    delta_str += '%d' % delta.seconds
+    subseconds = delta.microseconds / (10 ** (6 - subsecond_precision))
+    subsecond_str = ('%0*d' % (subsecond_precision, subseconds)).rstrip('0')
+    if subsecond_str:
+      delta_str += '.%s' % subsecond_str
+    delta_str += 's'
+
+  return delta_str
diff --git a/lib/paygen/utils_unittest b/lib/paygen/utils_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/lib/paygen/utils_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/paygen/utils_unittest.py b/lib/paygen/utils_unittest.py
new file mode 100644
index 0000000..b79aba3
--- /dev/null
+++ b/lib/paygen/utils_unittest.py
@@ -0,0 +1,151 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test Utils library."""
+
+from __future__ import print_function
+
+import datetime
+import operator
+import os
+
+from chromite.lib import cros_test_lib
+from chromite.lib.paygen import utils
+
+
+class TestUtils(cros_test_lib.TempDirTestCase):
+  """Test utils methods."""
+
+  def testCreateTmpInvalidPath(self):
+    """Test that we create a tmp eventually even with invalid paths."""
+    tmps = ['/usr/local/nope', '/tmp']
+    tmp = utils.CreateTmpDir(tmps=tmps)
+    self.assertTrue(tmp.startswith('/tmp'))
+    os.rmdir(tmp)
+
+  def testCreateTmpRaiseException(self):
+    """Test that we raise an exception when we do not have enough space."""
+    self.assertRaises(utils.UnableToCreateTmpDir, utils.CreateTmpDir,
+                      minimum_size=2 ** 50)
+
+  def testCreateTempFileWithContents(self):
+    """Verify that we create a temp file with the right message in it."""
+
+    message = 'Test Message With Rocks In'
+
+    # Create the temp file.
+    with utils.CreateTempFileWithContents(message) as temp_file:
+      temp_name = temp_file.name
+
+      # Verify the name is valid.
+      self.assertTrue(os.path.exists(temp_name))
+
+      # Verify it has the right contents
+      with open(temp_name, 'r') as f:
+        contents = f.readlines()
+
+      self.assertEqual([message], contents)
+
+    # Verify the temp file goes away when we close it.
+    self.assertFalse(os.path.exists(temp_name))
+
+  # pylint: disable=E1101
+  def testListdirFullpath(self):
+    file_a = os.path.join(self.tempdir, 'a')
+    file_b = os.path.join(self.tempdir, 'b')
+
+    with file(file_a, 'w+'):
+      pass
+
+    with file(file_b, 'w+'):
+      pass
+
+    self.assertEqual(sorted(utils.ListdirFullpath(self.tempdir)),
+                     [file_a, file_b])
+
+  def testThreadedMapNormal(self):
+    args = [2, 5, 7]
+    results = utils.ThreadedMap((lambda x: x + 1), args)
+    self.assertEqual(results, [3, 6, 8])
+
+  def testThreadedMapStar(self):
+    args = [(2, 3), (2, 5), (7, 10)]
+    results = utils.ThreadedMap((lambda x, y: x * y), args, star=True)
+    self.assertEqual(results, [6, 10, 70])
+
+  def testThreadedMapException(self):
+    args = [(6, 2), (1, 0), (9, 3)]
+    self.assertRaises(utils.ThreadedMapError, utils.ThreadedMap,
+                      (lambda x, y: x / y), args, star=True)
+
+  def testGroup(self):
+    items = [(1, 'a'), (2, 'b'), (1, 'c')]
+    self.assertEquals(utils.Group(items, operator.itemgetter(0)),
+                      [(1, [(1, 'a')]), (2, [(2, 'b')]), (1, [(1, 'c')])])
+
+    items = [(1, 'c'), (2, 'b'), (1, 'a')]
+    self.assertEquals(utils.Group(items, operator.itemgetter(0)),
+                      [(1, [(1, 'c')]), (2, [(2, 'b')]), (1, [(1, 'a')])])
+
+    items = [(1, 'a'), (1, 'c'), (2, 'b')]
+    self.assertEquals(utils.Group(items, operator.itemgetter(0)),
+                      [(1, [(1, 'a'), (1, 'c')]), (2, [(2, 'b')])])
+
+    items = [(2, 'b'), (1, 'c'), (1, 'a')]
+    self.assertEquals(utils.Group(items, operator.itemgetter(0)),
+                      [(2, [(2, 'b')]), (1, [(1, 'c'), (1, 'a')])])
+
+    # Special case: an empty input.
+    self.assertEquals(utils.Group([], operator.itemgetter(0)), [])
+
+  def testLinear(self):
+    # Check basic linear growth.
+    self.assertEquals([utils.Linear(x, 0, 5, 10, 20) for x in range(0, 6)],
+                      range(10, 21, 2))
+
+    # Check linear decay.
+    self.assertEquals([utils.Linear(x, 0, 5, 20, 10) for x in range(0, 6)],
+                      range(20, 9, -2))
+
+    # Check threshold enforcement.
+    self.assertEquals(utils.Linear(-2, 0, 5, 10, 20), 10)
+    self.assertEquals(utils.Linear(7, 0, 5, 10, 20), 20)
+
+  def testTimeDeltaToString(self):
+    # Shorthand notation.
+    C = datetime.timedelta
+    c = C(days=5, hours=3, minutes=15, seconds=33, microseconds=12037)
+
+    # Test with default formatting.
+    self.assertEquals(utils.TimeDeltaToString(C(5)), '5d')
+    self.assertEquals(utils.TimeDeltaToString(C(hours=3)), '3h')
+    self.assertEquals(utils.TimeDeltaToString(C(minutes=15)), '15m')
+    self.assertEquals(utils.TimeDeltaToString(C(seconds=33)), '33s')
+    self.assertEquals(utils.TimeDeltaToString(C(microseconds=12037)), '0s')
+    self.assertEquals(utils.TimeDeltaToString(c), '5d3h15m')
+
+    # Test with forced seconds and altered precision.
+    self.assertEquals(
+        utils.TimeDeltaToString(c, force_seconds=True), '5d3h15m33s')
+    self.assertEquals(
+        utils.TimeDeltaToString(c, force_seconds=True, subsecond_precision=1),
+        '5d3h15m33s')
+    self.assertEquals(
+        utils.TimeDeltaToString(c, force_seconds=True, subsecond_precision=2),
+        '5d3h15m33.01s')
+    self.assertEquals(
+        utils.TimeDeltaToString(c, force_seconds=True, subsecond_precision=3),
+        '5d3h15m33.012s')
+    self.assertEquals(
+        utils.TimeDeltaToString(c, force_seconds=True, subsecond_precision=4),
+        '5d3h15m33.012s')
+    self.assertEquals(
+        utils.TimeDeltaToString(c, force_seconds=True, subsecond_precision=5),
+        '5d3h15m33.01203s')
+    self.assertEquals(
+        utils.TimeDeltaToString(c, force_seconds=True, subsecond_precision=6),
+        '5d3h15m33.012037s')
+    self.assertEquals(
+        utils.TimeDeltaToString(c, force_seconds=True, subsecond_precision=7),
+        '5d3h15m33.012037s')
diff --git a/lib/perf_dashboard_config.json b/lib/perf_dashboard_config.json
new file mode 100644
index 0000000..3a617c9
--- /dev/null
+++ b/lib/perf_dashboard_config.json
@@ -0,0 +1,10 @@
+[
+	{
+		"master_name": "ChromeOSPerf",
+		"test_name": "image_test.FileSystemMetaDataTest"
+	},
+	{
+		"master_name": "ChromeOSPerf",
+		"test_name": "sdk"
+	}
+]
diff --git a/lib/perf_dashboard_tool b/lib/perf_dashboard_tool
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/perf_dashboard_tool
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/perf_dashboard_tool.py b/lib/perf_dashboard_tool.py
new file mode 100644
index 0000000..be6c1df
--- /dev/null
+++ b/lib/perf_dashboard_tool.py
@@ -0,0 +1,141 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Developer tool for exploring perf website interaction.
+
+The performance dashboards can be found here (login w/your @google.com):
+  https://chromeperf.appspot.com/  (production)
+  https://chrome-perf.googleplex.com/  (staging)
+
+By default though, this tool will post data to a local instance running on your
+system.  See this page for details on running that:
+  https://sites.google.com/a/google.com/chromeperf/
+
+This guide should help familiarize yourself with the perf data format:
+  http://dev.chromium.org/developers/testing/sending-data-to-the-performance-dashboard
+This tool currently uses the version 0 data format.
+
+Some notes:
+ - --revision or --cros-version/--chrome-version should be independent
+ - do not mix rev/cros-ver/chrome-ver in the same data series/graph
+
+Examples:
+  # Create a data point at (20110701024650,361077106).  The test name is
+  # "sdk.size" and is in the "base" series in the "combined" graph.
+  $ ./perf_dashboard_tool -u bytes -t sdk.size -g combined -d base \\
+      --revision 20110701024650 361077106
+
+  # Create a data point at (6689.0.0,2000400100).  The test name is
+  # "disk.size" and is in the "data" series.
+  $ ./perf_dashboard_tool -u bytes -t disk.size --cros-version 6689.0.0 \\
+      --chrome-version 41.6689.0.0 2000400100
+"""
+
+from __future__ import print_function
+
+import getpass
+import os
+import tempfile
+import urllib
+
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import perf_uploader
+
+
+MASTER_NAME = 'ChromeOSPerfTest'
+TEST_NAME = 'perf_uploader_tool.%s' % getpass.getuser()
+
+
+def GetParser():
+  """Return a command line parser for the perf dashboard tool.
+
+  Returns:
+    A commandline.ArgumentParser whose --help text is this module's
+    docstring, with option groups for bot details, version (X-axis)
+    selection, and the data point itself.
+  """
+  parser = commandline.ArgumentParser(description=__doc__)
+  parser.add_argument('-n', '--dry-run', default=False, action='store_true',
+                      help='Show what would be uploaded')
+  # Default to the local dev-server instance so experimentation never
+  # pollutes the production dashboard.
+  parser.add_argument('--url', default=perf_uploader.LOCAL_DASHBOARD_URL,
+                      help='Dashboard to send results to')
+
+  group = parser.add_argument_group('Bot Details')
+  group.add_argument('-m', '--master', default=MASTER_NAME,
+                     help='The buildbot master field')
+  group.add_argument('-b', '--bot',
+                     default=cros_build_lib.GetHostName(fully_qualified=True),
+                     help='The bot name (e.g. buildbot config)')
+
+  # Exactly one X-axis scheme should be used per data series; see the
+  # module docstring notes about not mixing --revision with versions.
+  group = parser.add_argument_group('Version Options (X-axis)')
+  group.add_argument('--revision', default=None,
+                     help='Revision number')
+  group.add_argument('--cros-version', default=None,
+                     help='Chrome OS version (X.Y.Z)')
+  group.add_argument('--chrome-version', default=None,
+                     help='Chrome version (M.X.Y.Z)')
+
+  group = parser.add_argument_group('Data Options')
+  group.add_argument('-t', '--test', default=TEST_NAME,
+                     help='The test name field')
+  group.add_argument('--higher-is-better', default=False, action='store_true',
+                     help='Whether higher values are better than lower')
+  group.add_argument('-u', '--units', default='',
+                     help='Units for the perf data (e.g. percent, bytes)')
+  group.add_argument('-g', '--graph',
+                     help='Graph name (to group multiple tests)')
+  group.add_argument('-d', '--description', default='data',
+                     help='Name for this data series')
+  group.add_argument('--stdio-uri',
+                     help='Custom log page to link data point to')
+  group.add_argument('data',
+                     help='Data point (int or float)')
+
+  return parser
+
+
+def main(argv):
+  parser = GetParser()
+  opts = parser.parse_args(argv)
+  opts.Freeze()
+
+  logging.info('Uploading results to %s', opts.url)
+  logging.info('Master name: %s', opts.master)
+  logging.info('Test name: %s', opts.test)
+
+  with tempfile.NamedTemporaryFile() as output:
+    perf_uploader.OutputPerfValue(
+        output.name,
+        opts.description,
+        float(opts.data),
+        opts.units,
+        graph=opts.graph,
+        stdio_uri=opts.stdio_uri)
+    perf_values = perf_uploader.LoadPerfValues(output.name)
+
+  logging.debug('Uploading:')
+  for value in perf_values:
+    logging.debug('  %s', value)
+
+  perf_uploader.UploadPerfValues(
+      perf_values,
+      opts.bot,
+      opts.test,
+      revision=opts.revision,
+      cros_version=opts.cros_version,
+      chrome_version=opts.chrome_version,
+      dashboard=opts.url,
+      master_name=opts.master,
+      test_prefix='',
+      platform_prefix='',
+      dry_run=opts.dry_run)
+
+  data_name = opts.graph if opts.graph else opts.description
+  args = {
+      'masters': opts.master,
+      'tests': '%s/%s' % (opts.test, data_name),
+      'bots': opts.bot,
+  }
+  view_url = os.path.join(opts.url, 'report?%s' % urllib.urlencode(args))
+  logging.info('View results at %s', view_url)
+  logging.info('Note: To make tests public, visit %s',
+               os.path.join(opts.url, 'change_internal_only'))
+  logging.info('Note: To update the test list, visit %s',
+               os.path.join(opts.url, 'update_test_suites'))
diff --git a/lib/perf_uploader.py b/lib/perf_uploader.py
new file mode 100644
index 0000000..794367b
--- /dev/null
+++ b/lib/perf_uploader.py
@@ -0,0 +1,495 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Uploads performance data to the performance dashboard.
+
+The performance dashboard is owned by Chrome team and is available here:
+https://chromeperf.appspot.com/
+Users must be logged in with an @google.com account to view perf data there.
+
+For more information on sending data to the dashboard, see:
+http://dev.chromium.org/developers/testing/sending-data-to-the-performance-dashboard
+
+Note: This module started off from the autotest/tko/perf_uploader.py but has
+been extended significantly since.
+"""
+
+from __future__ import print_function
+
+import collections
+import httplib
+import json
+import math
+import os
+import re
+import string
+import urllib
+import urllib2
+
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import retry_util
+
+
+# Clearly mark perf values coming from chromite by default.
+_DEFAULT_TEST_PREFIX = 'cbuildbot.'
+_DEFAULT_PLATFORM_PREFIX = 'cros-'
+_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
+_PRESENTATION_CONFIG_FILE = os.path.join(_ROOT_DIR,
+                                         'perf_dashboard_config.json')
+
+LOCAL_DASHBOARD_URL = 'http://localhost:8080'
+STAGE_DASHBOARD_URL = 'https://chrome-perf.googleplex.com'
+DASHBOARD_URL = 'https://chromeperf.appspot.com'
+
+_MAX_DESCRIPTION_LENGTH = 256
+_MAX_UNIT_LENGTH = 32
+
+# Format for Chrome and Chrome OS version strings.
+_VERSION_REGEXP = r'^(\d+)\.(\d+)\.(\d+)\.(\d+)$'
+
+class PerfUploadingError(Exception):
+  """A class to wrap errors in this module.
+
+  This exception class has two attributes: value and orig_exc. "value" is what
+  was used to create this exception while "orig_exc" is the optional original
+  exception that is wrapped by this exception.
+  """
+
+  def __init__(self, value, orig_exc=None):
+    super(PerfUploadingError, self).__init__(value)
+    # Keep the wrapped exception so callers (e.g. _RetryIfServerError) can
+    # inspect the underlying HTTP failure.
+    self.orig_exc = orig_exc
+
+  def __str__(self):
+    # Append the wrapped exception's message, if any, for clearer logs.
+    r = super(PerfUploadingError, self).__str__()
+    if self.orig_exc:
+      r += '\ncaused by: %s' % str(self.orig_exc)
+    return r
+
+
+# One measured perf data point as recorded by OutputPerfValue: |description|
+# names the metric, |value| is the measurement, |units| its unit string,
+# |higher_is_better| the improvement direction, |graph| an optional grouping
+# graph name, and |stdio_uri| an optional log link.  Field names must match
+# the JSON keys written by OutputPerfValue (LoadPerfValues relies on this).
+PerformanceValue = collections.namedtuple(
+    'PerformanceValue',
+    'description value units higher_is_better graph stdio_uri')
+
+
+def OutputPerfValue(filename, description, value, units,
+                    higher_is_better=True, graph=None, stdio_uri=None):
+  """Record a measured performance value in an output file.
+
+  This is originally from autotest/files/client/common_lib/test.py.
+
+  The output file will subsequently be parsed by ImageTestStage to have the
+  information sent to chromeperf.appspot.com.
+
+  Args:
+    filename: A path to the output file. Data will be appended to this file.
+    description: A string describing the measured perf value. Must
+      be maximum length 256, and may only contain letters, numbers,
+      periods, dashes, and underscores.  For example:
+      "page_load_time", "scrolling-frame-rate".
+    value: A number representing the measured perf value, or a list of
+      measured values if a test takes multiple measurements. Measured perf
+      values can be either ints or floats.
+    units: A string describing the units associated with the measured perf
+      value(s). Must be maximum length 32, and may only contain letters,
+      numbers, periods, dashes, and uderscores. For example: "msec", "fps".
+    higher_is_better: A boolean indicating whether or not a higher measured
+      perf value is considered better. If False, it is assumed that a "lower"
+      measured value is better.
+    graph: A string indicating the name of the graph on which the perf value
+      will be subsequently displayed on the chrome perf dashboard. This
+      allows multiple metrics to be grouped together on the same graph.
+      Default to None, perf values should be graphed individually on separate
+      graphs.
+    stdio_uri: A URL relevant to this data point (e.g. the buildbot log).
+  """
+  def ValidateString(param_name, value, max_len):
+    if len(value) > max_len:
+      raise ValueError('%s must be at most %d characters.', param_name, max_len)
+
+    allowed_chars = string.ascii_letters + string.digits + '-._'
+    if not set(value).issubset(set(allowed_chars)):
+      raise ValueError(
+          '%s may only contain letters, digits, hyphens, periods, and '
+          'underscores. Its current value is %s.',
+          param_name, value
+      )
+
+  ValidateString('description', description, _MAX_DESCRIPTION_LENGTH)
+  ValidateString('units', units, _MAX_UNIT_LENGTH)
+
+  entry = {
+      'description': description,
+      'value': value,
+      'units': units,
+      'higher_is_better': higher_is_better,
+      'graph': graph,
+      'stdio_uri': stdio_uri,
+  }
+
+  data = (json.dumps(entry), '\n')
+  osutils.WriteFile(filename, data, 'a')
+
+
+def LoadPerfValues(filename):
+  """Return a list of PerformanceValue objects from |filename|.
+
+  Args:
+    filename: Path to a file written by OutputPerfValue (one JSON object
+      per line).
+
+  Returns:
+    A list of PerformanceValue namedtuples, one per line of |filename|.
+  """
+  lines = osutils.ReadFile(filename).splitlines()
+  entries = []
+  for line in lines:
+    # Each line's JSON keys must exactly match the PerformanceValue fields.
+    entry = json.loads(line)
+    entries.append(PerformanceValue(**entry))
+  return entries
+
+
+def _AggregateIterations(perf_values):
+  """Aggregate same measurements from multiple iterations.
+
+  Each perf measurement may exist multiple times across multiple iterations
+  of a test.  Here, the results for each unique measured perf metric are
+  aggregated across multiple iterations.
+
+  Args:
+    perf_values: A list of PerformanceValue objects.
+
+  Returns:
+    A dictionary mapping each unique measured perf value (keyed by tuple of
+      its description and graph name) to information about that perf value
+      (in particular, the value is a list of values for each iteration).
+  """
+  aggregated_data = {}
+  for perf_value in perf_values:
+    key = (perf_value.description, perf_value.graph)
+    try:
+      aggregated_entry = aggregated_data[key]
+    except KeyError:
+      aggregated_entry = {
+          'units': perf_value.units,
+          'higher_is_better': perf_value.higher_is_better,
+          'graph': perf_value.graph,
+          'value': [],
+      }
+      aggregated_data[key] = aggregated_entry
+    # Note: the stddev will be recomputed later when the results
+    # from each of the multiple iterations are averaged together.
+    aggregated_entry['value'].append(perf_value.value)
+  return aggregated_data
+
+
+def _MeanAndStddev(data, precision=4):
+  """Computes mean and standard deviation from a list of numbers.
+
+  Args:
+    data: A list of numeric values.
+    precision: The integer number of decimal places to which to
+      round the results.
+
+  Returns:
+    A 2-tuple (mean, standard_deviation), in which each value is
+      rounded to |precision| decimal places.
+
+  Raises:
+    ValueError: If |data| is empty.
+  """
+  n = len(data)
+  if n == 0:
+    raise ValueError('Cannot compute mean and stddev of an empty list.')
+  # A single sample has no spread; report a stddev of 0.
+  if n == 1:
+    return round(data[0], precision), 0
+
+  # math.fsum gives a numerically accurate float sum.
+  mean = math.fsum(data) / n
+  # Divide by n-1 to compute "sample standard deviation".
+  variance = math.fsum((elem - mean) ** 2 for elem in data) / (n - 1)
+  return round(mean, precision), round(math.sqrt(variance), precision)
+
+
+def _ComputeAvgStddev(perf_data):
+  """Compute average and standard deviations as needed for perf measurements.
+
+  For any perf measurement that exists in multiple iterations (has more than
+  one measured value), compute the average and standard deviation for it and
+  then store the updated information in the dictionary (in place).
+
+  Args:
+    perf_data: A dictionary of measured perf data as computed by
+      _AggregateIterations(), except each "value" is now a single value, not
+      a list of values.
+
+  Returns:
+    The same |perf_data| dictionary, for caller convenience (entries are
+      also updated in place).
+  """
+  for perf in perf_data.itervalues():
+    # Replace the per-iteration value list with its mean, recording spread.
+    perf['value'], perf['stddev'] = _MeanAndStddev(perf['value'])
+  return perf_data
+
+
+# Presentation settings for a perf test: the dashboard "master" it reports
+# under and the test's name.  Normally looked up from
+# perf_dashboard_config.json by _GetPresentationInfo.
+PresentationInfo = collections.namedtuple(
+    'PresentationInfo',
+    'master_name test_name')
+
+
+def _GetPresentationInfo(test_name):
+  """Get presentation info for |test_name| from config file.
+
+  Args:
+    test_name: The test name.
+
+  Returns:
+    A PresentationInfo object for this test.
+  """
+  infos = osutils.ReadFile(_PRESENTATION_CONFIG_FILE)
+  infos = json.loads(infos)
+  for info in infos:
+    if info['test_name'] == test_name:
+      try:
+        return PresentationInfo(**info)
+      except:
+        raise PerfUploadingError('No master found for %s' % test_name)
+
+  raise PerfUploadingError('No presentation config found for %s' % test_name)
+
+
+def _FormatForUpload(perf_data, platform_name, presentation_info, revision=None,
+                     cros_version=None, chrome_version=None, test_prefix=None,
+                     platform_prefix=None):
+  """Formats perf data suitably to upload to the perf dashboard.
+
+  The perf dashboard expects perf data to be uploaded as a
+  specially-formatted JSON string.  In particular, the JSON object must be a
+  dictionary with key "data", and value being a list of dictionaries where
+  each dictionary contains all the information associated with a single
+  measured perf value: master name, bot name, test name, perf value, units,
+  and build version numbers.
+
+  See also google3/googleclient/chrome/speed/dashboard/add_point.py for the
+  server side handler.
+
+  Args:
+    perf_data: A dictionary of measured perf data. This is keyed by
+      (description, graph name) tuple.
+    platform_name: The string name of the platform.
+    presentation_info: A PresentationInfo object of the given test.
+    revision: The raw X-axis value; normally it represents a VCS repo, but may
+      be any monotonic increasing value integer.
+    cros_version: A string identifying Chrome OS version e.g. '6052.0.0'.
+    chrome_version: A string identifying Chrome version e.g. '38.0.2091.2'.
+    test_prefix: Arbitrary string to automatically prefix to the test name.
+      If None, then 'cbuildbot.' is used to guarantee namespacing.
+    platform_prefix: Arbitrary string to automatically prefix to
+      |platform_name|. If None, then 'cros-' is used to guarantee namespacing.
+
+  Returns:
+    A dictionary containing the formatted information ready to upload
+      to the performance dashboard.
+  """
+  if test_prefix is None:
+    test_prefix = _DEFAULT_TEST_PREFIX
+  if platform_prefix is None:
+    platform_prefix = _DEFAULT_PLATFORM_PREFIX
+
+  dash_entries = []
+  for (desc, graph), data in perf_data.iteritems():
+    # Each perf metric is named by a path that encodes the test name,
+    # a graph name (if specified), and a description.  This must be defined
+    # according to rules set by the Chrome team, as implemented in:
+    # chromium/tools/build/scripts/slave/results_dashboard.py.
+    # '/' is the path separator, so it may not appear within a component.
+    desc = desc.replace('/', '_')
+    test_name = test_prefix + presentation_info.test_name
+    test_parts = [test_name, desc]
+    if graph:
+      test_parts.insert(1, graph)
+    test_path = '/'.join(test_parts)
+
+    # 'a_default_rev' tells the dashboard which r_* column is the default
+    # X-axis annotation; 'a_stdio_uri' links the point to its build log.
+    supp_cols = {'a_default_rev': 'r_cros_version'}
+    if data.get('stdio_uri'):
+      supp_cols['a_stdio_uri'] = data['stdio_uri']
+    if cros_version is not None:
+      supp_cols['r_cros_version'] = cros_version
+    if chrome_version is not None:
+      supp_cols['r_chrome_version'] = chrome_version
+
+    new_dash_entry = {
+        'master': presentation_info.master_name,
+        'bot': platform_prefix + platform_name,
+        'test': test_path,
+        'value': data['value'],
+        'error': data['stddev'],
+        'units': data['units'],
+        'higher_is_better': data['higher_is_better'],
+        'supplemental_columns': supp_cols,
+    }
+    if revision is not None:
+      new_dash_entry['revision'] = revision
+
+    dash_entries.append(new_dash_entry)
+
+  # The server expects the entry list JSON-encoded under the 'data' key.
+  json_string = json.dumps(dash_entries)
+  return {'data': json_string}
+
+
+def _SendToDashboard(data_obj, dashboard=DASHBOARD_URL):
+  """Sends formatted perf data to the perf dashboard.
+
+  Args:
+    data_obj: A formatted data object as returned by _FormatForUpload().
+    dashboard: The dashboard to upload data to.
+
+  Raises:
+    PerfUploadingError if an exception was raised when uploading.
+  """
+  # NOTE(review): os.path.join is used here for URL construction; it works
+  # for these separator-free base URLs but urlparse.urljoin would be the
+  # more robust choice — confirm before reusing this pattern.
+  upload_url = os.path.join(dashboard, 'add_point')
+  # The payload is sent as a form-encoded POST body ('data=<json>').
+  encoded = urllib.urlencode(data_obj)
+  req = urllib2.Request(upload_url, encoded)
+  try:
+    urllib2.urlopen(req)
+  except urllib2.HTTPError as e:
+    # Wrap each failure mode in PerfUploadingError, preserving the original
+    # exception so _RetryIfServerError can inspect the HTTP status code.
+    raise PerfUploadingError('HTTPError: %d %s for JSON %s\n' %
+                             (e.code, e.msg, data_obj['data']), e)
+  except urllib2.URLError as e:
+    raise PerfUploadingError('URLError: %s for JSON %s\n' %
+                             (str(e.reason), data_obj['data']), e)
+  except httplib.HTTPException as e:
+    raise PerfUploadingError(
+        'HTTPException for JSON %s\n' % data_obj['data'], e)
+
+
+def _ComputeRevisionFromVersions(chrome_version, cros_version):
+  """Computes the point ID to use, from Chrome and Chrome OS version numbers.
+
+  For ChromeOS row data, data values are associated with both a Chrome
+  version number and a ChromeOS version number (unlike for Chrome row data
+  that is associated with a single revision number).  This function takes
+  both version numbers as input, then computes a single, unique integer ID
+  from them, which serves as a 'fake' revision number that can uniquely
+  identify each ChromeOS data point, and which will allow ChromeOS data points
+  to be sorted by Chrome version number, with ties broken by ChromeOS version
+  number.
+
+  To compute the integer ID, we take the portions of each version number that
+  serve as the shortest unambiguous names for each (as described here:
+  http://www.chromium.org/developers/version-numbers).  We then force each
+  component of each portion to be a fixed width (padded by zeros if needed),
+  concatenate all digits together (with those coming from the Chrome version
+  number first), and convert the entire string of digits into an integer.
+  We ensure that the total number of digits does not exceed that which is
+  allowed by AppEngine NDB for an integer (64-bit signed value).
+
+  For example:
+    Chrome version: 27.0.1452.2 (shortest unambiguous name: 1452.2)
+    ChromeOS version: 27.3906.0.0 (shortest unambiguous name: 3906.0.0)
+    concatenated together with padding for fixed-width columns:
+        ('01452' + '002') + ('03906' + '000' + '00') = '014520020390600000'
+    Final integer ID: 14520020390600000
+
+  Args:
+    chrome_version: The Chrome version number as a string.
+    cros_version: The ChromeOS version number as a string.
+
+  Returns:
+    A unique integer ID associated with the two given version numbers, or
+      None if either version string does not match the M.X.Y.Z format or a
+      version component overflows its fixed-width column.
+  """
+  # Number of digits to use from each part of the version string for Chrome
+  # and Chrome OS versions when building a point ID out of these two versions.
+  # A width of 0 drops that component entirely (the "unambiguous name" rule).
+  chrome_version_col_widths = [0, 0, 5, 3]
+  cros_version_col_widths = [0, 5, 3, 2]
+
+  def get_digits_from_version(version_num, column_widths):
+    # Zero-pad each kept component to its fixed column width so that
+    # concatenated IDs sort the same way the versions do.
+    if re.match(_VERSION_REGEXP, version_num):
+      computed_string = ''
+      version_parts = version_num.split('.')
+      for i, version_part in enumerate(version_parts):
+        if column_widths[i]:
+          computed_string += version_part.zfill(column_widths[i])
+      return computed_string
+    else:
+      return None
+
+  chrome_digits = get_digits_from_version(
+      chrome_version, chrome_version_col_widths)
+  cros_digits = get_digits_from_version(
+      cros_version, cros_version_col_widths)
+  if not chrome_digits or not cros_digits:
+    return None
+  result_digits = chrome_digits + cros_digits
+  # 18 digits total, which fits in a 64-bit signed integer.  A component
+  # wider than its column makes the string longer than this budget.
+  max_digits = sum(chrome_version_col_widths + cros_version_col_widths)
+  if len(result_digits) > max_digits:
+    return None
+  return int(result_digits)
+
+
+def _RetryIfServerError(perf_exc):
+  """Exception handler to retry an upload if error code is 5xx.
+
+  Used as the retry predicate for retry_util.GenericRetry in
+  UploadPerfValues: only transient server-side failures are retried.
+
+  Args:
+    perf_exc: The exception from _SendToDashboard.
+
+  Returns:
+    True if the cause of |perf_exc| is HTTP 5xx error.
+  """
+  return (isinstance(perf_exc.orig_exc, urllib2.HTTPError) and
+          perf_exc.orig_exc.code >= 500)
+
+
+def UploadPerfValues(perf_values, platform_name, test_name, revision=None,
+                     cros_version=None, chrome_version=None,
+                     dashboard=DASHBOARD_URL, master_name=None,
+                     test_prefix=None, platform_prefix=None, dry_run=False):
+  """Uploads any perf data associated with a test to the perf dashboard.
+
+  Note: If |revision| is used, then |cros_version| & |chrome_version| are not
+  necessary.  Conversely, if |revision| is not used, then |cros_version| and
+  |chrome_version| must both be specified.
+
+  Args:
+    perf_values: List of PerformanceValue objects.
+    platform_name: A string identifying platform e.g. 'x86-release'. 'cros-'
+      will be prepended to |platform_name| internally, by _FormatForUpload.
+    test_name: A string identifying the test
+    revision: The raw X-axis value; normally it represents a VCS repo, but may
+      be any monotonic increasing value integer.
+    cros_version: A string identifying Chrome OS version e.g. '6052.0.0'.
+    chrome_version: A string identifying Chrome version e.g. '38.0.2091.2'.
+    dashboard: The dashboard to upload data to.
+    master_name: The "master" field to use; by default it is looked up in the
+      perf_dashboard_config.json database.
+    test_prefix: Arbitrary string to automatically prefix to the test name.
+      If None, then 'cbuildbot.' is used to guarantee namespacing.
+    platform_prefix: Arbitrary string to automatically prefix to
+      |platform_name|. If None, then 'cros-' is used to guarantee namespacing.
+    dry_run: Do everything but upload the data to the server.
+
+  Raises:
+    PerfUploadingError: If formatting or uploading the data fails.
+  """
+  if not perf_values:
+    return
+
+  # Aggregate values from multiple iterations together.
+  perf_data = _AggregateIterations(perf_values)
+
+  # Compute averages and standard deviations as needed for measured perf
+  # values that exist in multiple iterations.  Ultimately, we only upload a
+  # single measurement (with standard deviation) for every unique measured
+  # perf metric.
+  _ComputeAvgStddev(perf_data)
+
+  # Format the perf data for the upload, then upload it.
+  if revision is None:
+    # No "revision" field, calculate one. Chrome and CrOS fields must be given.
+    # Prepend the Chrome milestone ("M.") so the combined CrOS version matches
+    # the M.X.Y.Z shape _ComputeRevisionFromVersions expects.  NOTE(review):
+    # if either version is None here this line raises (AttributeError /
+    # TypeError) rather than reporting a friendly error — see docstring note.
+    cros_version = chrome_version[:chrome_version.find('.') + 1] + cros_version
+    revision = _ComputeRevisionFromVersions(chrome_version, cros_version)
+  try:
+    if master_name is None:
+      presentation_info = _GetPresentationInfo(test_name)
+    else:
+      # Explicit master: skip the perf_dashboard_config.json lookup.
+      presentation_info = PresentationInfo(master_name, test_name)
+    formatted_data = _FormatForUpload(perf_data, platform_name,
+                                      presentation_info,
+                                      revision=revision,
+                                      cros_version=cros_version,
+                                      chrome_version=chrome_version,
+                                      test_prefix=test_prefix,
+                                      platform_prefix=platform_prefix)
+    if dry_run:
+      logging.debug('UploadPerfValues: skipping upload due to dry-run')
+    else:
+      # Retry up to 3 times, but only on HTTP 5xx (transient server errors).
+      retry_util.GenericRetry(_RetryIfServerError, 3, _SendToDashboard,
+                              formatted_data, dashboard=dashboard)
+  except PerfUploadingError:
+    logging.exception('Error when uploading perf data to the perf '
+                      'dashboard for test %s.', test_name)
+    raise
+  else:
+    logging.info('Successfully uploaded perf data to the perf '
+                 'dashboard for test %s.', test_name)
diff --git a/lib/perf_uploader_unittest b/lib/perf_uploader_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/perf_uploader_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/perf_uploader_unittest.py b/lib/perf_uploader_unittest.py
new file mode 100644
index 0000000..333eca5
--- /dev/null
+++ b/lib/perf_uploader_unittest.py
@@ -0,0 +1,224 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for perf_uploader module."""
+
+from __future__ import print_function
+
+import json
+import os
+import tempfile
+import urllib2
+import urlparse
+
+from chromite.lib import cros_test_lib
+from chromite.lib import perf_uploader
+from chromite.lib import osutils
+
+
+class PerfUploadTestCase(cros_test_lib.MockTestCase):
+  """Base utility class to setup mock objects and temp file for tests."""
+
+  def setUp(self):
+    # Stub out the config lookup so tests need no perf_dashboard_config.json
+    # entry for 'TestName'.
+    presentation_info = perf_uploader.PresentationInfo(
+        master_name='ChromeOSPerf',
+        test_name='TestName',
+    )
+    self.PatchObject(perf_uploader, '_GetPresentationInfo',
+                     return_value=presentation_info)
+    # NOTE(review): the NamedTemporaryFile object is discarded immediately,
+    # so only its (soon-unlinked) pathname is kept as a scratch path; racy
+    # in principle — confirm acceptable for these tests.
+    self.file_name = tempfile.NamedTemporaryFile().name
+
+  def tearDown(self):
+    # Best-effort cleanup; the file may or may not have been created.
+    osutils.SafeUnlink(self.file_name)
+
+
+class OutputPerfValueTest(PerfUploadTestCase):
+  """Test function OutputPerfValue."""
+
+  def testInvalidDescription(self):
+    # 256 chars is the documented maximum and must be accepted; 257 raises.
+    perf_uploader.OutputPerfValue(self.file_name, 'a' * 256, 0, 'ignored')
+    self.assertRaises(ValueError, perf_uploader.OutputPerfValue,
+                      'ignored', 'a' * 257, 0, 'ignored')
+
+    # All allowed character classes pass; a NUL byte is rejected.
+    perf_uploader.OutputPerfValue(self.file_name, 'abcXYZ09-_.', 0, 'ignored')
+    self.assertRaises(ValueError, perf_uploader.OutputPerfValue,
+                      'ignored', 'a\x00c', 0, 'ignored')
+
+  def testInvalidUnits(self):
+    # NOTE(review): the units limit is 32, so 'a' * 33 would be the tighter
+    # boundary case; 'a' * 257 still exceeds the limit and raises.
+    self.assertRaises(ValueError, perf_uploader.OutputPerfValue,
+                      'ignored', 'ignored', 0, 'a' * 257)
+    self.assertRaises(ValueError, perf_uploader.OutputPerfValue,
+                      'ignored', 'ignored', 0, 'a\x00c')
+
+  def testValidJson(self):
+    # A single call writes exactly one parseable JSON object.
+    perf_uploader.OutputPerfValue(self.file_name, 'desc', 42, 'units')
+    data = osutils.ReadFile(self.file_name)
+    entry = json.loads(data)
+    self.assertTrue(isinstance(entry, dict))
+
+
+class LoadPerfValuesTest(PerfUploadTestCase):
+  """Test function LoadPerfValues."""
+
+  def testEmptyFile(self):
+    # An empty file round-trips to an empty list.
+    osutils.WriteFile(self.file_name, '')
+    entries = perf_uploader.LoadPerfValues(self.file_name)
+    self.assertEqual(0, len(entries))
+
+  def testLoadOneValue(self):
+    # Verify values, descriptions, and the higher_is_better default survive
+    # the write/read round trip.
+    perf_uploader.OutputPerfValue(self.file_name, 'desc', 41, 'units')
+    entries = perf_uploader.LoadPerfValues(self.file_name)
+    self.assertEqual(1, len(entries))
+    self.assertEqual(41, entries[0].value)
+    self.assertEqual('desc', entries[0].description)
+    self.assertEqual(True, entries[0].higher_is_better)
+
+  def testLoadTwoValues(self):
+    # Appended entries are preserved in order; graph defaults to None.
+    perf_uploader.OutputPerfValue(self.file_name, 'desc', 41, 'units')
+    perf_uploader.OutputPerfValue(self.file_name, 'desc2', 42, 'units2')
+    entries = perf_uploader.LoadPerfValues(self.file_name)
+    self.assertEqual(2, len(entries))
+    self.assertEqual(41, entries[0].value)
+    self.assertEqual(42, entries[1].value)
+    self.assertEqual('desc2', entries[1].description)
+    self.assertEqual(None, entries[1].graph)
+
+
+class SendToDashboardTest(PerfUploadTestCase):
+  """Ensure perf values are sent to chromeperf via HTTP."""
+
+  def setUp(self):
+    # Intercept the HTTP layer so no real network traffic occurs.
+    self.urlopen = self.PatchObject(urllib2, 'urlopen')
+
+  def testOneEntry(self):
+    perf_uploader.OutputPerfValue(self.file_name, 'desc1', 42, 'unit')
+    perf_values = perf_uploader.LoadPerfValues(self.file_name)
+    perf_uploader.UploadPerfValues(perf_values, 'platform', 'TestName',
+                                   cros_version='cros', chrome_version='chrome')
+    # Inspect the urllib2.Request object handed to urlopen.
+    request = self.urlopen.call_args[0][0]
+    self.assertEqual(os.path.join(perf_uploader.DASHBOARD_URL, 'add_point'),
+                     request.get_full_url())
+    data = request.get_data()
+    # POST body is form-encoded with the JSON payload under 'data'.
+    data = urlparse.parse_qs(data)['data']
+    entries = [json.loads(x) for x in data]
+    entry = entries[0][0]
+    self.assertEqual('cros', entry['supplemental_columns']['r_cros_version'])
+    self.assertEqual(42, entry['value'])
+    self.assertEqual('cbuildbot.TestName/desc1', entry['test'])
+    self.assertEqual('unit', entry['units'])
+
+  def testCustomDashboard(self):
+    """Verify we can set data to different dashboards."""
+    perf_uploader.OutputPerfValue(self.file_name, 'desc1', 42, 'unit')
+    perf_values = perf_uploader.LoadPerfValues(self.file_name)
+    perf_uploader.UploadPerfValues(perf_values, 'platform', 'TestName',
+                                   cros_version='cros', chrome_version='chrome',
+                                   dashboard='http://localhost')
+    request = self.urlopen.call_args[0][0]
+    self.assertEqual('http://localhost/add_point', request.get_full_url())
+
+
+class UploadPerfValuesTest(PerfUploadTestCase):
+  """Test UploadPerfValues function."""
+
+  def setUp(self):
+    # Stub the network-facing helper; tests inspect its first positional
+    # argument (the formatted payload dict).
+    self.send_func = self.PatchObject(perf_uploader, '_SendToDashboard')
+
+  def testOneEntry(self):
+    """Upload one perf value."""
+    perf_uploader.OutputPerfValue(self.file_name, 'desc1', 42, 'unit')
+    perf_values = perf_uploader.LoadPerfValues(self.file_name)
+    perf_uploader.UploadPerfValues(perf_values, 'platform', 'TestName',
+                                   cros_version='cros', chrome_version='chrome')
+    positional_args, _ = self.send_func.call_args
+    first_param = positional_args[0]
+    data = json.loads(first_param['data'])
+    self.assertEqual(1, len(data))
+    entry = data[0]
+    self.assertEqual('unit', entry['units'])
+    self.assertEqual('cros',
+                     entry['supplemental_columns']['r_cros_version'])
+    self.assertEqual('chrome',
+                     entry['supplemental_columns']['r_chrome_version'])
+    # Default platform prefix 'cros-' is applied to the bot name.
+    self.assertEqual('cros-platform', entry['bot'])
+    self.assertEqual(42, entry['value'])
+    self.assertEqual(0, entry['error'])
+
+  def testRevision(self):
+    """Verify revision is accepted over cros/chrome version."""
+    perf_uploader.OutputPerfValue(self.file_name, 'desc1', 42, 'unit')
+    perf_values = perf_uploader.LoadPerfValues(self.file_name)
+    perf_uploader.UploadPerfValues(perf_values, 'platform', 'TestName',
+                                   revision=12345)
+    positional_args, _ = self.send_func.call_args
+    first_param = positional_args[0]
+    data = json.loads(first_param['data'])
+    entry = data[0]
+    self.assertEqual(12345, entry['revision'])
+
+  def testTwoEntriesOfSameTest(self):
+    """Upload one test, two perf values."""
+    perf_uploader.OutputPerfValue(self.file_name, 'desc1', 40, 'unit')
+    perf_uploader.OutputPerfValue(self.file_name, 'desc1', 42, 'unit')
+    perf_values = perf_uploader.LoadPerfValues(self.file_name)
+    perf_uploader.UploadPerfValues(perf_values, 'platform', 'TestName',
+                                   cros_version='cros', chrome_version='chrome')
+    positional_args, _ = self.send_func.call_args
+    first_param = positional_args[0]
+    data = json.loads(first_param['data'])
+    # Same description => aggregated to a single uploaded entry.
+    self.assertEqual(1, len(data))
+    entry = data[0]
+    self.assertEqual('unit', entry['units'])
+    # Average of 40 and 42
+    self.assertEqual(41, entry['value'])
+    # Standard deviation sqrt(2)
+    self.assertEqual(1.4142, entry['error'])
+
+  def testTwoTests(self):
+    """Upload two tests, one perf value each."""
+    perf_uploader.OutputPerfValue(self.file_name, 'desc1', 40, 'unit')
+    perf_uploader.OutputPerfValue(self.file_name, 'desc2', 42, 'unit')
+    perf_values = perf_uploader.LoadPerfValues(self.file_name)
+    perf_uploader.UploadPerfValues(perf_values, 'platform', 'TestName',
+                                   cros_version='cros', chrome_version='chrome')
+    positional_args, _ = self.send_func.call_args
+    first_param = positional_args[0]
+    data = json.loads(first_param['data'])
+    self.assertEqual(2, len(data))
+    # Dict iteration order is unspecified; sort by test path for stable
+    # assertions.
+    data = sorted(data, key=lambda x: x['test'])
+    entry = data[0]
+    self.assertEqual(40, entry['value'])
+    self.assertEqual(0, entry['error'])
+    entry = data[1]
+    self.assertEqual(42, entry['value'])
+    self.assertEqual(0, entry['error'])
+
+  def testTwoTestsThreeEntries(self):
+    """Upload two tests, one perf value each."""
+    perf_uploader.OutputPerfValue(self.file_name, 'desc1', 40, 'unit')
+    perf_uploader.OutputPerfValue(self.file_name, 'desc1', 42, 'unit')
+    perf_uploader.OutputPerfValue(self.file_name, 'desc2', 42, 'unit')
+    perf_values = perf_uploader.LoadPerfValues(self.file_name)
+    perf_uploader.UploadPerfValues(perf_values, 'platform', 'TestName',
+                                   cros_version='cros', chrome_version='chrome')
+    positional_args, _ = self.send_func.call_args
+    first_param = positional_args[0]
+    data = json.loads(first_param['data'])
+    self.assertEqual(2, len(data))
+    data = sorted(data, key=lambda x: x['test'])
+    entry = data[0]
+    self.assertEqual(41, entry['value'])
+    self.assertEqual(1.4142, entry['error'])
+    entry = data[1]
+    self.assertEqual(42, entry['value'])
+    self.assertEqual(0, entry['error'])
+
+  def testDryRun(self):
+    """Make sure dryrun mode doesn't upload."""
+    # Any call to the stubbed sender fails the test outright.
+    self.send_func.side_effect = AssertionError('dryrun should not upload')
+    perf_uploader.OutputPerfValue(self.file_name, 'desc1', 40, 'unit')
+    perf_values = perf_uploader.LoadPerfValues(self.file_name)
+    perf_uploader.UploadPerfValues(perf_values, 'platform', 'TestName',
+                                   cros_version='cros', chrome_version='chrome',
+                                   dry_run=True)
diff --git a/lib/portage_util.py b/lib/portage_util.py
new file mode 100644
index 0000000..41db764
--- /dev/null
+++ b/lib/portage_util.py
@@ -0,0 +1,1656 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Routines and classes for working with Portage overlays and ebuilds."""
+
+from __future__ import print_function
+
+import collections
+import errno
+import filecmp
+import fileinput
+import glob
+import multiprocessing
+import os
+import re
+import shutil
+import sys
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gerrit
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import parallel
+
+
+# The parsed output of running `ebuild <ebuild path> info`.
+RepositoryInfoTuple = collections.namedtuple('RepositoryInfoTuple',
+                                             ('srcdir', 'project'))
+
+
+_PRIVATE_PREFIX = '%(buildroot)s/src/private-overlays'
+
+# Define datastructures for holding PV and CPV objects.
+_PV_FIELDS = ['pv', 'package', 'version', 'version_no_rev', 'rev']
+PV = collections.namedtuple('PV', _PV_FIELDS)
+CPV = collections.namedtuple('CPV', ['category'] + _PV_FIELDS)
+
+# Package matching regexp, as dictated by package manager specification:
+# http://www.gentoo.org/proj/en/qa/pms.xml
+_pkg = r'(?P<package>' + r'[\w+][\w+-]*)'
+_ver = (r'(?P<version>'
+        r'(?P<version_no_rev>(\d+)((\.\d+)*)([a-z]?)'
+        r'((_(pre|p|beta|alpha|rc)\d*)*))'
+        r'(-(?P<rev>r(\d+)))?)')
+_pvr_re = re.compile(r'^(?P<pv>%s-%s)$' % (_pkg, _ver), re.VERBOSE)
+
+# This regex matches a category name.
+_category_re = re.compile(r'^(?P<category>[\w\+\.][\w\+\.\-]*)$', re.VERBOSE)
+
+# This regex matches blank lines, commented lines, and the EAPI line.
+_blank_or_eapi_re = re.compile(r'^\s*(?:#|EAPI=|$)')
+
+WORKON_EBUILD_VERSION = '9999'
+WORKON_EBUILD_SUFFIX = '-%s.ebuild' % WORKON_EBUILD_VERSION
+
+UNITTEST_PACKAGE_BLACKLIST = set((
+    'sys-devel/binutils',
+))
+
+
+class MissingOverlayException(Exception):
+  """This exception indicates that a needed overlay is missing."""
+
+
+def GetOverlayRoot(path):
+  """Get the overlay root folder for |path|.
+
+  For traditional portage overlays, the root folder is |path|.
+  For bricks, the root folder is in the 'packages' sub-folder.
+  """
+  if os.path.exists(os.path.join(path, 'config.json')):
+    # A brick has its overlay root in the packages subdirectory.
+    return os.path.join(path, 'packages')
+  return path
+
+
+def _ListOverlays(board=None, buildroot=constants.SOURCE_ROOT):
+  """Return the list of overlays to use for a given buildbot.
+
+  Always returns all overlays in parent -> child order, and does not
+  perform any filtering.
+
+  Args:
+    board: Board to look at.
+    buildroot: Source root to find overlays.
+  """
+  # Load all the known overlays so we can extract the details below.
+  paths = (
+      'projects',
+      'src/overlays',
+      'src/private-overlays',
+      'src/third_party',
+  )
+  overlays = {}
+  for path in paths:
+    path = os.path.join(buildroot, path, '*')
+    for overlay in glob.glob(path):
+      name = GetOverlayName(overlay)
+      if name is None:
+        continue
+
+      # Sanity check the sets of repos.
+      if name in overlays:
+        raise RuntimeError('multiple repos with same name "%s": %s and %s' %
+                           (name, overlays[name]['path'], overlay))
+
+      try:
+        masters = cros_build_lib.LoadKeyValueFile(
+            os.path.join(GetOverlayRoot(overlay), 'metadata',
+                         'layout.conf'))['masters'].split()
+      except (KeyError, IOError):
+        masters = []
+      overlays[name] = {
+          'masters': masters,
+          'path': GetOverlayRoot(overlay),
+      }
+
+  # Easy enough -- dump them all.
+  if board is None:
+    return [x['path'] for x in overlays.values()]
+
+  # Build up the list of repos we need.
+  ret = []
+  seen = set()
+  def _AddRepo(repo, optional=False):
+    """Recursively add |repo|'s masters from |overlays| to |ret|.
+
+    Args:
+      repo: The repo name to look up.
+      optional: If |repo| does not exist, return False, else
+        raise a MissingOverlayException.
+
+    Returns:
+      True if |repo| was found.
+    """
+    if repo not in overlays:
+      if optional:
+        return False
+      else:
+        raise MissingOverlayException('%s was not found' % repo)
+
+    for master in overlays[repo]['masters'] + [repo]:
+      if master not in seen:
+        seen.add(master)
+        _AddRepo(master)
+        ret.append(overlays[master]['path'])
+        if not master.endswith('-private'):
+          _AddRepo('%s-private' % master, True)
+    return True
+
+  # Legacy: load the global configs.  In the future, this should be found
+  # via the overlay's masters.
+  _AddRepo('chromeos', optional=True)
+  path = os.path.join(buildroot, 'src', 'private-overlays',
+                      'chromeos-*-overlay')
+  ret += glob.glob(path)
+
+  # Locate the board repo by name.
+  # Load the public & private versions if available.
+  found_pub = _AddRepo(board, optional=True)
+  found_priv = _AddRepo('%s-private' % board, optional=True)
+
+  # If neither public nor private board was found, die.
+  if not found_pub and not found_priv:
+    raise MissingOverlayException('board overlay not found: %s' % board)
+
+  return ret
+
+
+def FindOverlays(overlay_type, board=None, buildroot=constants.SOURCE_ROOT):
+  """Return the list of overlays to use for a given buildbot.
+
+  The returned list of overlays will be in parent -> child order.
+
+  Args:
+    overlay_type: A string describing which overlays you want.
+      'private': Just the private overlays.
+      'public': Just the public overlays.
+      'both': Both the public and private overlays.
+    board: Board to look at.
+    buildroot: Source root to find overlays.
+  """
+  overlays = _ListOverlays(board=board, buildroot=buildroot)
+  private_prefix = _PRIVATE_PREFIX % dict(buildroot=buildroot)
+  if overlay_type == constants.PRIVATE_OVERLAYS:
+    return [x for x in overlays if x.startswith(private_prefix)]
+  elif overlay_type == constants.PUBLIC_OVERLAYS:
+    return [x for x in overlays if not x.startswith(private_prefix)]
+  elif overlay_type == constants.BOTH_OVERLAYS:
+    return overlays
+  else:
+    assert overlay_type is None
+    return []
+
+
+def FindOverlayFile(filename, overlay_type='both', board=None,
+                    buildroot=constants.SOURCE_ROOT):
+  """Attempt to find a file in the overlay directories.
+
+  Searches through this board's overlays for the specified file. The
+  overlays are searched in child -> parent order.
+
+  Args:
+    filename: Path to search for inside the overlay.
+    overlay_type: A string describing which overlays you want.
+      'private': Just the private overlays.
+      'public': Just the public overlays.
+      'both': Both the public and private overlays.
+    board: Board to look at.
+    buildroot: Source root to find overlays.
+
+  Returns:
+    Path to the first file found in the search. None if the file is not found.
+  """
+  for overlay in reversed(FindOverlays(overlay_type, board, buildroot)):
+    if os.path.isfile(os.path.join(overlay, filename)):
+      return os.path.join(overlay, filename)
+  return None
+
+
+def FindSysrootOverlays(sysroot):
+  """Ask portage for a list of overlays installed in a given sysroot.
+
+  Returns overlays in lowest to highest priority.  Note that this list
+  is only partially ordered.
+
+  Args:
+    sysroot: The root directory being inspected.
+
+  Returns:
+    list of overlays used in sysroot.
+  """
+  cmd = (cros_build_lib.GetSysrootToolPath(sysroot, 'portageq'),
+         'envvar', 'PORTDIR_OVERLAY')
+  return cros_build_lib.RunCommand(cmd, print_cmd=False,
+                                   capture_output=True).output.strip().split()
+
+
+def ReadOverlayFile(filename, overlay_type='both', board=None,
+                    buildroot=constants.SOURCE_ROOT):
+  """Attempt to open a file in the overlay directories.
+
+  Searches through this board's overlays for the specified file. The
+  overlays are searched in child -> parent order.
+
+  Args:
+    filename: Path to open inside the overlay.
+    overlay_type: A string describing which overlays you want.
+      'private': Just the private overlays.
+      'public': Just the public overlays.
+      'both': Both the public and private overlays.
+    board: Board to look at.
+    buildroot: Source root to find overlays.
+
+  Returns:
+    The contents of the file, or None if no files could be opened.
+  """
+  file_found = FindOverlayFile(filename, overlay_type, board, buildroot)
+  if file_found is None:
+    return None
+  return osutils.ReadFile(file_found)
+
+
+def FindPrimaryOverlay(overlay_type, board, buildroot=constants.SOURCE_ROOT):
+  """Return the primary overlay to use for a given buildbot.
+
+  An overlay is only considered a primary overlay if it has a make.conf and a
+  toolchain.conf. If multiple primary overlays are found, the first primary
+  overlay is returned.
+
+  Args:
+    overlay_type: A string describing which overlays you want.
+      'private': Just the private overlays.
+      'public': Just the public overlays.
+      'both': Both the public and private overlays.
+    board: Board to look at.
+    buildroot: Path to root of build directory.
+
+  Raises:
+    MissingOverlayException: No primary overlay found.
+  """
+  for overlay in FindOverlays(overlay_type, board, buildroot):
+    if (os.path.exists(os.path.join(overlay, 'make.conf')) and
+        os.path.exists(os.path.join(overlay, 'toolchain.conf'))):
+      return overlay
+  raise MissingOverlayException('No primary overlay found for board=%r' % board)
+
+
+def GetOverlayName(overlay):
+  """Get the self-declared repo name for the |overlay| path."""
+  try:
+    return cros_build_lib.LoadKeyValueFile(
+        os.path.join(GetOverlayRoot(overlay), 'metadata',
+                     'layout.conf'))['repo-name']
+  except (KeyError, IOError):
+    # Not all layout.conf files have a repo-name, so don't make a fuss.
+    try:
+      with open(os.path.join(overlay, 'profiles', 'repo_name')) as f:
+        return f.readline().rstrip()
+    except IOError:
+      # Not all overlays have a repo_name, so don't make a fuss.
+      return None
+
+
+class EBuildVersionFormatException(Exception):
+  """Exception for bad ebuild version string format."""
+
+  def __init__(self, filename):
+    self.filename = filename
+    message = ('Ebuild file name %s '
+               'does not match expected format.' % filename)
+    super(EBuildVersionFormatException, self).__init__(message)
+
+
+class EbuildFormatIncorrectException(Exception):
+  """Exception for bad ebuild format."""
+
+  def __init__(self, filename, message):
+    message = 'Ebuild %s has invalid format: %s ' % (filename, message)
+    super(EbuildFormatIncorrectException, self).__init__(message)
+
+
+class EBuild(object):
+  """Wrapper class for information about an ebuild."""
+
+  VERBOSE = False
+  _PACKAGE_VERSION_PATTERN = re.compile(
+      r'.*-(([0-9][0-9a-z_.]*)(-r[0-9]+)?)[.]ebuild')
+  _WORKON_COMMIT_PATTERN = re.compile(r'^CROS_WORKON_COMMIT="(.*)"$')
+
+  # A structure to hold computed values of CROS_WORKON_*.
+  CrosWorkonVars = collections.namedtuple(
+      'CrosWorkonVars',
+      ('localname', 'project', 'srcpath', 'subdir', 'always_live'))
+
+  @classmethod
+  def _Print(cls, message):
+    """Verbose print function."""
+    if cls.VERBOSE:
+      logging.info(message)
+
+  @classmethod
+  def _RunCommand(cls, command, **kwargs):
+    kwargs.setdefault('capture_output', True)
+    return cros_build_lib.RunCommand(
+        command, print_cmd=cls.VERBOSE, **kwargs).output
+
+  @classmethod
+  def _RunGit(cls, cwd, command, **kwargs):
+    result = git.RunGit(cwd, command, print_cmd=cls.VERBOSE, **kwargs)
+    return None if result is None else result.output
+
+  def IsSticky(self):
+    """Returns True if the ebuild is sticky."""
+    return self.is_stable and self.current_revision == 0
+
+  @classmethod
+  def UpdateEBuild(cls, ebuild_path, variables, redirect_file=None,
+                   make_stable=True):
+    """Static function that updates WORKON information in the ebuild.
+
+    This function takes an ebuild_path and updates WORKON information.
+
+    Note: If an exception is thrown, the |ebuild_path| is left in a corrupt
+    state.  You should try to avoid causing exceptions ;).
+
+    Args:
+      ebuild_path: The path of the ebuild.
+      variables: Dictionary of variables to update in ebuild.
+      redirect_file: Optionally redirect output of new ebuild somewhere else.
+      make_stable: Actually make the ebuild stable.
+    """
+    written = False
+    try:
+      for line in fileinput.input(ebuild_path, inplace=1):
+        # Has to be done here to get changes to sys.stdout from fileinput.input.
+        if not redirect_file:
+          redirect_file = sys.stdout
+
+        # Always add variables at the top of the ebuild, before the first
+        # nonblank line other than the EAPI line.
+        if not written and not _blank_or_eapi_re.match(line):
+          for key, value in sorted(variables.items()):
+            assert key is not None and value is not None
+            redirect_file.write('%s=%s\n' % (key, value))
+          written = True
+
+        # Mark KEYWORDS as stable by removing ~'s.
+        if line.startswith('KEYWORDS=') and make_stable:
+          line = line.replace('~', '')
+
+        varname, eq, _ = line.partition('=')
+        if not (eq == '=' and varname.strip() in variables):
+          # Don't write out the old value of the variable.
+          redirect_file.write(line)
+    finally:
+      fileinput.close()
+
+  @classmethod
+  def MarkAsStable(cls, unstable_ebuild_path, new_stable_ebuild_path,
+                   variables, redirect_file=None, make_stable=True):
+    """Static function that creates a revved stable ebuild.
+
+    This function assumes you have already figured out the name of the new
+    stable ebuild path and then creates that file from the given unstable
+    ebuild and marks it as stable.  If the commit_value is set, it also
+    sets the commit_keyword=commit_value pair in the ebuild.
+
+    Args:
+      unstable_ebuild_path: The path to the unstable ebuild.
+      new_stable_ebuild_path: The path you want to use for the new stable
+        ebuild.
+      variables: Dictionary of variables to update in ebuild.
+      redirect_file: Optionally redirect output of new ebuild somewhere else.
+      make_stable: Actually make the ebuild stable.
+    """
+    shutil.copyfile(unstable_ebuild_path, new_stable_ebuild_path)
+    EBuild.UpdateEBuild(new_stable_ebuild_path, variables, redirect_file,
+                        make_stable)
+
+  @classmethod
+  def CommitChange(cls, message, overlay):
+    """Commits current changes in git locally with given commit message.
+
+    Args:
+      message: the commit string to write when committing to git.
+      overlay: directory in which to commit the changes.
+
+    Raises:
+      RunCommandError: Error occurred while committing.
+    """
+    logging.info('Committing changes with commit message: %s', message)
+    git_commit_cmd = ['commit', '-a', '-m', message]
+    cls._RunGit(overlay, git_commit_cmd)
+
+  def __init__(self, path):
+    """Sets up data about an ebuild from its path.
+
+    Args:
+      path: Path to the ebuild.
+    """
+    self.overlay, self.category, self.pkgname, filename = path.rsplit('/', 3)
+    m = self._PACKAGE_VERSION_PATTERN.match(filename)
+    if not m:
+      raise EBuildVersionFormatException(filename)
+    self.version, self.version_no_rev, revision = m.groups()
+    if revision is not None:
+      self.current_revision = int(revision.replace('-r', ''))
+    else:
+      self.current_revision = 0
+    self.package = '%s/%s' % (self.category, self.pkgname)
+
+    self._ebuild_path_no_version = os.path.join(
+        os.path.dirname(path), self.pkgname)
+    self.ebuild_path_no_revision = '%s-%s' % (
+        self._ebuild_path_no_version, self.version_no_rev)
+    self._unstable_ebuild_path = '%s%s' % (
+        self._ebuild_path_no_version, WORKON_EBUILD_SUFFIX)
+    self.ebuild_path = path
+
+    self.is_workon = False
+    self.is_stable = False
+    self.is_blacklisted = False
+    self.has_test = False
+    self._ReadEBuild(path)
+
+  @staticmethod
+  def Classify(ebuild_path):
+    """Return whether this ebuild is workon, stable, blacklisted, and has tests.
+
+    workon is determined by whether the ebuild inherits from the
+    'cros-workon' eclass. stable is determined by whether there's a '~'
+    in the KEYWORDS setting in the ebuild. An ebuild is considered blacklisted
+    if a line in it starts with 'CROS_WORKON_BLACKLIST='
+    """
+    is_workon = False
+    is_stable = False
+    is_blacklisted = False
+    has_test = False
+    for line in fileinput.input(ebuild_path):
+      if line.startswith('inherit ') and 'cros-workon' in line:
+        is_workon = True
+      elif line.startswith('KEYWORDS='):
+        for keyword in line.split('=', 1)[1].strip("\"'").split():
+          if not keyword.startswith('~') and keyword != '-*':
+            is_stable = True
+      elif line.startswith('CROS_WORKON_BLACKLIST='):
+        is_blacklisted = True
+      elif (line.startswith('src_test()') or
+            line.startswith('platform_pkg_test()')):
+        has_test = True
+    fileinput.close()
+    return is_workon, is_stable, is_blacklisted, has_test
+
+  def _ReadEBuild(self, path):
+    """Determine `is_workon`, `is_stable`, `is_blacklisted` and `has_test`.
+
+    These are determined using the static Classify function.
+    """
+    (self.is_workon, self.is_stable,
+     self.is_blacklisted, self.has_test) = EBuild.Classify(path)
+
+  @staticmethod
+  def GetCrosWorkonVars(ebuild_path, pkg_name):
+    """Return computed (as sourced ebuild script) values of:
+
+      * CROS_WORKON_LOCALNAME
+      * CROS_WORKON_PROJECT
+      * CROS_WORKON_SUBDIR
+      * CROS_WORKON_SRCPATH
+      * CROS_WORKON_ALWAYS_LIVE
+
+    Args:
+      ebuild_path: Path to the ebuild file (e.g: platform2-9999.ebuild).
+      pkg_name: The package name (e.g.: platform2).
+
+    Returns:
+      A CrosWorkonVars tuple.
+    """
+    workon_vars = (
+        'CROS_WORKON_LOCALNAME',
+        'CROS_WORKON_PROJECT',
+        'CROS_WORKON_SRCPATH',
+        'CROS_WORKON_SUBDIR',
+        'CROS_WORKON_ALWAYS_LIVE',
+    )
+    env = {
+        'CROS_WORKON_LOCALNAME': pkg_name,
+        'CROS_WORKON_SUBDIR': '',
+        'CROS_WORKON_ALWAYS_LIVE': '',
+    }
+    settings = osutils.SourceEnvironment(ebuild_path, workon_vars, env=env)
+    # Try to detect problems extracting the variables by checking whether
+    # either CROS_WORKON_PROJECT or CROS_WORKON_SRCPATH is set. If it isn't,
+    # something went wrong, possibly because we're simplistically sourcing the
+    # ebuild without most of portage being available. That still breaks this
+    # script and needs to be flagged as an error. We won't catch problems
+    # setting CROS_WORKON_LOCALNAME or CROS_WORKON_SUBDIR, or if
+    # CROS_WORKON_{PROJECT,SRCPATH} is set to the wrong thing, but at least
+    # this covers some types of failures.
+    projects = []
+    srcpaths = []
+    if 'CROS_WORKON_PROJECT' in settings:
+      projects = settings['CROS_WORKON_PROJECT'].split(',')
+    if 'CROS_WORKON_SRCPATH' in settings:
+      srcpaths = settings['CROS_WORKON_SRCPATH'].split(',')
+
+    if not (projects or srcpaths):
+      raise EbuildFormatIncorrectException(
+          ebuild_path,
+          'Unable to determine CROS_WORKON_{PROJECT,SRCPATH} values.')
+
+    localnames = settings['CROS_WORKON_LOCALNAME'].split(',')
+    subdirs = settings['CROS_WORKON_SUBDIR'].split(',')
+    live = settings['CROS_WORKON_ALWAYS_LIVE']
+
+    return EBuild.CrosWorkonVars(localnames, projects, srcpaths, subdirs, live)
+
+  def GetSourcePath(self, srcroot, manifest):
+    """Get the project and path for this ebuild.
+
+    The path is guaranteed to exist, be a directory, and be absolute.
+    """
+
+    localnames, projects, srcpaths, subdirs, always_live = (
+        EBuild.GetCrosWorkonVars(self._unstable_ebuild_path, self.pkgname))
+
+    if always_live:
+      return [], []
+
+    # Sanity checks and completion.
+    num_projects = len(projects)
+    # Each project specification has to have the same amount of items.
+    if num_projects != len(localnames):
+      raise EbuildFormatIncorrectException(
+          self._unstable_ebuild_path,
+          'Number of _PROJECT and _LOCALNAME items don\'t match.')
+    # If both SRCPATH and PROJECT are defined, they must have the same number
+    # of items.
+    if len(srcpaths) > num_projects:
+      if num_projects > 0:
+        raise EbuildFormatIncorrectException(
+            self._unstable_ebuild_path,
+            '_PROJECT has fewer items than _SRCPATH.')
+      num_projects = len(srcpaths)
+      projects = [''] * num_projects
+      localnames = [''] * num_projects
+    elif len(srcpaths) < num_projects:
+      if len(srcpaths) > 0:
+        raise EbuildFormatIncorrectException(
+            self._unstable_ebuild_path,
+            '_SRCPATH has fewer items than _PROJECT.')
+      srcpaths = [''] * num_projects
+    # We better have at least one PROJECT or SRCPATH value at this point.
+    if num_projects == 0:
+      raise EbuildFormatIncorrectException(
+          self._unstable_ebuild_path, 'No _PROJECT or _SRCPATH value found.')
+    # The number of subdirs must be either 0, 1, or len(projects).
+    if num_projects != len(subdirs):
+      if len(subdirs) > 1:
+        raise EbuildFormatIncorrectException(
+            self._unstable_ebuild_path, 'Incorrect number of _SUBDIR items.')
+      # Multiply the single value if present, otherwise fill with empty strings.
+      subdirs = (subdirs or ['']) * num_projects
+
+    # Calculate srcdir (used for core packages).
+    if self.category in ('chromeos-base', 'brillo-base'):
+      dir_ = ''
+    else:
+      dir_ = 'third_party'
+
+    # Obtain brick source directory (used for non-core packages).
+    # TODO(garnold) This manipulates brick internal structure directly instead
+    # of referring to brick_lib; the latter could not be used because of a
+    # cyclic dependency, but should be used once its dependency on portage_util
+    # is eliminated.
+    srcbase = ''
+    if any(srcpaths):
+      brick_dir = os.path.dirname(os.path.dirname(os.path.dirname(
+          os.path.dirname(self._unstable_ebuild_path))))
+      srcbase = os.path.join(brick_dir, 'src')
+      if not os.path.isdir(srcbase):
+        cros_build_lib.Die('_SRCPATH used but brick source path not found.')
+
+    subdir_paths = []
+    rows = zip(localnames, subdirs, projects, srcpaths)
+    for local, sub, project, srcpath in rows:
+      if srcpath:
+        subdir_path = os.path.join(srcbase, srcpath)
+        if not os.path.isdir(subdir_path):
+          cros_build_lib.Die('Source for package %s not found in brick.' %
+                             self.pkgname)
+      else:
+        subdir_path = os.path.realpath(os.path.join(srcroot, dir_, local, sub))
+        if dir_ == '' and not os.path.isdir(subdir_path):
+          subdir_path = os.path.realpath(os.path.join(srcroot, 'platform',
+                                                      local, sub))
+
+        if not os.path.isdir(subdir_path):
+          cros_build_lib.Die('Source repository %s '
+                             'for project %s does not exist.' % (subdir_path,
+                                                                 self.pkgname))
+        # Verify that we're grabbing the commit id from the right project name.
+        real_project = manifest.FindCheckoutFromPath(subdir_path)['name']
+        if project != real_project:
+          cros_build_lib.Die('Project name mismatch for %s '
+                             '(found %s, expected %s)' % (subdir_path,
+                                                          real_project,
+                                                          project))
+
+      subdir_paths.append(subdir_path)
+
+    return projects, subdir_paths
+
+  def GetCommitId(self, srcdir):
+    """Get the commit id for this ebuild."""
+    output = self._RunGit(srcdir, ['rev-parse', 'HEAD'])
+    if not output:
+      cros_build_lib.Die('Cannot determine HEAD commit for %s' % srcdir)
+    return output.rstrip()
+
+  def GetTreeId(self, srcdir):
+    """Get the SHA1 of the source tree for this ebuild.
+
+    Unlike the commit hash, the SHA1 of the source tree is unaffected by the
+    history of the repository, or by commit messages.
+    """
+    output = self._RunGit(srcdir, ['log', '-1', '--format=%T'])
+    if not output:
+      cros_build_lib.Die('Cannot determine HEAD tree hash for %s' % srcdir)
+    return output.rstrip()
+
+  def GetVersion(self, srcroot, manifest, default):
+    """Get the base version number for this ebuild.
+
+    The version is provided by the ebuild through a specific script in
+    the $FILESDIR (chromeos-version.sh).
+    """
+    vers_script = os.path.join(os.path.dirname(self._ebuild_path_no_version),
+                               'files', 'chromeos-version.sh')
+
+    if not os.path.exists(vers_script):
+      return default
+
+    if not self.is_workon:
+      raise EbuildFormatIncorrectException(
+          self._ebuild_path_no_version,
+          'Package has a chromeos-version.sh script but is not workon-able.')
+
+    _, srcdirs = self.GetSourcePath(srcroot, manifest)
+
+    # The chromeos-version script will output a usable raw version number,
+    # or nothing in case of error or no available version
+    try:
+      output = self._RunCommand([vers_script] + srcdirs).strip()
+    except cros_build_lib.RunCommandError as e:
+      cros_build_lib.Die('Package %s chromeos-version.sh failed: %s' %
+                         (self.pkgname, e))
+
+    if not output:
+      cros_build_lib.Die('Package %s has a chromeos-version.sh script but '
+                         'it returned no valid version for "%s"' %
+                         (self.pkgname, ' '.join(srcdirs)))
+
+    # Sanity check: disallow versions that will be larger than the 9999 ebuild
+    # used by cros-workon.
+    main_pv = output.split('.', 1)[0]
+    try:
+      main_pv = int(main_pv)
+    except ValueError:
+      raise ValueError('PV returned is invalid: %s' % output)
+    if main_pv >= int(WORKON_EBUILD_VERSION):
+      raise ValueError('cros-workon packages must have a PV < %s; not %s'
+                       % (WORKON_EBUILD_VERSION, output))
+
+    return output
+
+  @staticmethod
+  def FormatBashArray(unformatted_list):
+    """Returns a python list in a bash array format.
+
+    If the list only has one item, format as simple quoted value.
+    That is both backwards-compatible and more readable.
+
+    Args:
+      unformatted_list: an iterable to format as a bash array. This variable
+        has to be sanitized first, as we don't do any safeties.
+
+    Returns:
+      A text string that can be used by bash as array declaration.
+    """
+    if len(unformatted_list) > 1:
+      return '("%s")' % '" "'.join(unformatted_list)
+    else:
+      return '"%s"' % unformatted_list[0]
+
+  def RevWorkOnEBuild(self, srcroot, manifest, redirect_file=None):
+    """Revs a workon ebuild given the git commit hash.
+
+    By default this class overwrites a new ebuild given the normal
+    ebuild rev'ing logic.  However, a user can specify a redirect_file
+    to redirect the new stable ebuild to another file.
+
+    Args:
+      srcroot: full path to the 'src' subdirectory in the source
+        repository.
+      manifest: git.ManifestCheckout object.
+      redirect_file: Optional file to write the new ebuild.  By default
+        it is written using the standard rev'ing logic.  This file must be
+        opened and closed by the caller.
+
+    Returns:
+      If the revved package is different than the old ebuild, return the full
+      revved package name, including the version number. Otherwise, return None.
+
+    Raises:
+      OSError: Error occurred while creating a new ebuild.
+      IOError: Error occurred while writing to the new revved ebuild file.
+    """
+
+    if self.is_stable:
+      stable_version_no_rev = self.GetVersion(srcroot, manifest,
+                                              self.version_no_rev)
+    else:
+      # If given unstable ebuild, use preferred version rather than 9999.
+      stable_version_no_rev = self.GetVersion(srcroot, manifest, '0.0.1')
+
+    new_version = '%s-r%d' % (
+        stable_version_no_rev, self.current_revision + 1)
+    new_stable_ebuild_path = '%s-%s.ebuild' % (
+        self._ebuild_path_no_version, new_version)
+
+    self._Print('Creating new stable ebuild %s' % new_stable_ebuild_path)
+    if not os.path.exists(self._unstable_ebuild_path):
+      cros_build_lib.Die('Missing unstable ebuild: %s' %
+                         self._unstable_ebuild_path)
+
+    _, srcdirs = self.GetSourcePath(srcroot, manifest)
+    commit_ids = map(self.GetCommitId, srcdirs)
+    tree_ids = map(self.GetTreeId, srcdirs)
+    variables = dict(CROS_WORKON_COMMIT=self.FormatBashArray(commit_ids),
+                     CROS_WORKON_TREE=self.FormatBashArray(tree_ids))
+    self.MarkAsStable(self._unstable_ebuild_path, new_stable_ebuild_path,
+                      variables, redirect_file)
+
+    old_ebuild_path = self.ebuild_path
+    if filecmp.cmp(old_ebuild_path, new_stable_ebuild_path, shallow=False):
+      os.unlink(new_stable_ebuild_path)
+      return None
+    else:
+      self._Print('Adding new stable ebuild to git')
+      self._RunGit(self.overlay, ['add', new_stable_ebuild_path])
+
+      if self.is_stable:
+        self._Print('Removing old ebuild from git')
+        self._RunGit(self.overlay, ['rm', '-f', old_ebuild_path])
+
+      return '%s-%s' % (self.package, new_version)
+
+  @classmethod
+  def GitRepoHasChanges(cls, directory):
+    """Returns True if there are changes in the given directory."""
+    # Refresh the index first. This squashes just metadata changes.
+    cls._RunGit(directory, ['update-index', '-q', '--refresh'])
+    output = cls._RunGit(directory, ['diff-index', '--name-only', 'HEAD'])
+    return output not in [None, '']
+
+  @staticmethod
+  def _GetSHA1ForPath(manifest, path):
+    """Get the latest SHA1 for a given project from Gerrit.
+
+    This function looks up the remote and branch for a given project in the
+    manifest, and uses this to lookup the SHA1 from Gerrit. This only makes
+    sense for unpinned manifests.
+
+    Args:
+      manifest: git.ManifestCheckout object.
+      path: Path of project.
+
+    Raises:
+      Exception if the manifest is pinned.
+    """
+    checkout = manifest.FindCheckoutFromPath(path)
+    project = checkout['name']
+    helper = gerrit.GetGerritHelper(checkout['remote'])
+    manifest_branch = checkout['revision']
+    branch = git.StripRefsHeads(manifest_branch)
+    return helper.GetLatestSHA1ForBranch(project, branch)
+
+  @staticmethod
+  def _GetEBuildPaths(buildroot, manifest, overlay_list, changes):
+    """Calculate ebuild->path map for changed ebuilds.
+
+    Args:
+      buildroot: Path to root of build directory.
+      manifest: git.ManifestCheckout object.
+      overlay_list: List of all overlays.
+      changes: Changes from Gerrit that are being pushed.
+
+    Returns:
+      A dictionary mapping changed ebuilds to lists of associated paths.
+    """
+    directory_src = os.path.join(buildroot, 'src')
+    overlay_dict = dict((o, []) for o in overlay_list)
+    BuildEBuildDictionary(overlay_dict, True, None)
+    changed_paths = set(c.GetCheckout(manifest).GetPath(absolute=True)
+                        for c in changes)
+    ebuild_projects = {}
+    for ebuilds in overlay_dict.itervalues():
+      for ebuild in ebuilds:
+        _, paths = ebuild.GetSourcePath(directory_src, manifest)
+        if changed_paths.intersection(paths):
+          ebuild_projects[ebuild] = paths
+
+    return ebuild_projects
+
+  @classmethod
+  def UpdateCommitHashesForChanges(cls, changes, buildroot, manifest):
+    """Updates the commit hashes for the EBuilds uprevved in changes.
+
+    Args:
+      changes: Changes from Gerrit that are being pushed.
+      buildroot: Path to root of build directory.
+      manifest: git.ManifestCheckout object.
+    """
+    path_sha1s = {}
+    overlay_list = FindOverlays(constants.BOTH_OVERLAYS, buildroot=buildroot)
+    ebuild_paths = cls._GetEBuildPaths(buildroot, manifest, overlay_list,
+                                       changes)
+    for ebuild, paths in ebuild_paths.iteritems():
+      # Calculate any SHA1s that are not already in path_sha1s.
+      for path in set(paths).difference(path_sha1s):
+        path_sha1s[path] = cls._GetSHA1ForPath(manifest, path)
+
+      sha1s = [path_sha1s[path] for path in paths]
+      logging.info('Updating ebuild for package %s with commit hashes %r',
+                   ebuild.package, sha1s)
+      updates = dict(CROS_WORKON_COMMIT=cls.FormatBashArray(sha1s))
+      EBuild.UpdateEBuild(ebuild.ebuild_path, updates)
+
+    # Commit any changes to all overlays.
+    for overlay in overlay_list:
+      if EBuild.GitRepoHasChanges(overlay):
+        EBuild.CommitChange('Updating commit hashes in ebuilds '
+                            'to match remote repository.', overlay=overlay)
+
+
class PortageDBException(Exception):
  """Base error raised for failures accessing the portage package database."""
+
+
class PortageDB(object):
  """Wrapper class to access the portage database located in var/db/pkg."""

  def __init__(self, root='/'):
    """Initialize the internal structure for the database in the given root.

    Args:
      root: The path to the root to inspect, for example "/build/foo".
    """
    self.root = root
    self.db_path = os.path.join(root, 'var/db/pkg')
    # Cache of InstalledPackage objects, keyed by "category/pf".
    self._ebuilds = {}

  def GetInstalledPackage(self, category, pv):
    """Get the InstalledPackage instance for the passed package.

    Args:
      category: The category of the package. For example "chromeos-base".
      pv: The package name with the version (and revision) of the
          installed package. For example "libchrome-271506-r5".

    Returns:
      An InstalledPackage instance for the requested package or None if the
      requested package is not found.
    """
    pkg_key = '%s/%s' % (category, pv)
    cached = self._ebuilds.get(pkg_key)
    if cached is not None:
      return cached

    # Construct a fresh InstalledPackage and cache it on success.
    try:
      pkg = InstalledPackage(self, os.path.join(self.db_path, category, pv),
                             category, pv)
    except PortageDBException:
      return None
    self._ebuilds[pkg_key] = pkg
    return pkg

  def InstalledPackages(self):
    """Lists all portage packages in the database.

    Returns:
      A list of InstalledPackage instances for each package in the database.
    """
    packages = []
    for ebuild_file in glob.glob(os.path.join(self.db_path, '*/*/*.ebuild')):
      category, pf, name = SplitEbuildPath(ebuild_file)
      # Only accept entries shaped like "<category>/<pf>/<pf>.ebuild".
      if not _category_re.match(category) or pf != name:
        continue
      pkg_key = '%s/%s' % (category, pf)
      if pkg_key not in self._ebuilds:
        self._ebuilds[pkg_key] = InstalledPackage(
            self, os.path.join(self.db_path, category, pf), category, pf)
      packages.append(self._ebuilds[pkg_key])
    return packages
+
+
class InstalledPackage(object):
  """Wrapper class for information about an installed package.

  This class accesses the information provided by var/db/pkg for an installed
  ebuild, such as the list of files installed by this package.
  """

  # "type" constants for the ListContents() return value.
  OBJ = 'obj'
  SYM = 'sym'
  DIR = 'dir'

  def __init__(self, portage_db, pkgdir, category=None, pf=None):
    """Initialize the installed ebuild wrapper.

    Args:
      portage_db: The PortageDB instance where the ebuild is installed. This
          is used to query the database about other installed ebuilds, for
          example, the ones listed in DEPEND, but otherwise it isn't used.
      pkgdir: The directory where the installed package resides. This could be
          for example a directory like "var/db/pkg/category/pf" or the
          "build-info" directory in the portage temporary directory where
          the package is being built.
      category: The category of the package. If omitted, it will be loaded from
          the package contents.
      pf: The package and version of the package. If omitted, it will be loaded
          from the package contents. This avoids unnecessary lookup when this
          value is known.

    Raises:
      PortageDBException if the pkgdir doesn't contain a valid package.
    """
    self._portage_db = portage_db
    self.pkgdir = pkgdir
    self._fields = {}
    # Prepopulate the field cache with the category and pf (if provided).
    if category is not None:
      self._fields['CATEGORY'] = category
    if pf is not None:
      self._fields['PF'] = pf

    if self.pf is None:
      raise PortageDBException("Package doesn't contain package-version value.")

    # Check that the ebuild is present.
    ebuild_path = os.path.join(self.pkgdir, '%s.ebuild' % self.pf)
    if not os.path.exists(ebuild_path):
      raise PortageDBException("Package doesn't contain an ebuild file.")

    split_pv = SplitPV(self.pf)
    if split_pv is None:
      raise PortageDBException('Package and version "%s" doesn\'t have a valid '
                               'format.' % self.pf)
    self.package = split_pv.package
    self.version = split_pv.version

  def _ReadField(self, field_name):
    """Reads the contents of the file in the installed package directory.

    Args:
      field_name: The name of the field to read, for example, 'SLOT' or
          'LICENSE'.

    Returns:
      A string with the contents of the file. The contents of the file are
      cached in _fields. If the file doesn't exists returns None.
    """
    if field_name not in self._fields:
      try:
        value = osutils.ReadFile(os.path.join(self.pkgdir, field_name))
      except IOError as e:
        # Only a missing field file maps to None; other errors propagate.
        if e.errno != errno.ENOENT:
          raise
        value = None
      self._fields[field_name] = value
    return self._fields[field_name]

  @property
  def category(self):
    return self._ReadField('CATEGORY')

  @property
  def pf(self):
    return self._ReadField('PF')

  def ListContents(self):
    """List of files and directories installed by this package.

    Returns:
      A list of tuples (file_type, path) where the file_type is a string
      determining the type of the installed file: InstalledPackage.OBJ (regular
      files), InstalledPackage.SYM (symlinks) or InstalledPackage.DIR
      (directory), and path is the relative path of the file to the root like
      'usr/bin/ls'.
    """
    path = os.path.join(self.pkgdir, 'CONTENTS')
    if not os.path.exists(path):
      return []

    result = []
    # Use a context manager so the file handle is closed deterministically
    # (the previous implementation leaked the handle).
    with open(path) as contents:
      for line in contents:
        line = line.strip()
        # Line format is: "type file_path [more space-separated fields]".
        # Discard any other line without at least the first two fields. The
        # remaining fields depend on the type.
        fields = line.split(' ', 1)
        if len(fields) != 2:
          # Blank or one-token line; previously this raised ValueError
          # instead of being discarded as documented.
          continue
        typ, data = fields
        try:
          if typ == self.OBJ:
            file_path, _file_hash, _mtime = data.rsplit(' ', 2)
          elif typ == self.DIR:
            file_path = data
          elif typ == self.SYM:
            file_path, _ = data.split(' -> ', 1)
          else:
            # Unknown type.
            continue
        except ValueError:
          # Entry is malformed for its declared type; discard it.
          continue
        result.append((typ, file_path.lstrip('/')))

    return result
+
+
def BestEBuild(ebuilds):
  """Returns the newest EBuild from a list of EBuild objects."""
  from portage.versions import vercmp  # pylint: disable=import-error
  newest = ebuilds[0]
  for contender in ebuilds[1:]:
    # vercmp() < 0 means the current leader is older than the contender.
    if vercmp(newest.version, contender.version) < 0:
      newest = contender
  return newest
+
+
def _FindUprevCandidates(files, allow_blacklisted=False):
  """Return the uprev candidate ebuild from a specified list of files.

  Usually an uprev candidate is the stable ebuild in a cros_workon
  directory.  However, if no such stable ebuild exists (someone just
  checked in the 9999 ebuild), this is the unstable ebuild.

  If the package isn't a cros_workon package, return None.

  Args:
    files: List of files in a package directory.
    allow_blacklisted: If False, discard blacklisted packages.

  Returns:
    The candidate EBuild, or None when |files| holds no cros_workon
    ebuilds.  Exits the process via cros_build_lib.Die() on inconsistent
    ebuild layouts.
  """
  stable_ebuilds = []
  unstable_ebuilds = []
  # Partition the workon ebuilds into stable/unstable buckets.  Symlinked
  # ebuilds are skipped; blacklisted ones only kept when explicitly allowed.
  for path in files:
    if not path.endswith('.ebuild') or os.path.islink(path):
      continue
    ebuild = EBuild(path)
    if not ebuild.is_workon or (ebuild.is_blacklisted and
                                not allow_blacklisted):
      continue
    if ebuild.is_stable:
      if ebuild.version == WORKON_EBUILD_VERSION:
        # The live (9999) ebuild must never carry stable KEYWORDS.
        cros_build_lib.Die('KEYWORDS in %s ebuild should not be stable %s'
                           % (WORKON_EBUILD_VERSION, path))
      stable_ebuilds.append(ebuild)
    else:
      unstable_ebuilds.append(ebuild)

  # If both ebuild lists are empty, the passed in file list was for
  # a non-workon package.
  if not unstable_ebuilds:
    if stable_ebuilds:
      # Stable workon ebuilds without a 9999 counterpart are a layout error.
      path = os.path.dirname(stable_ebuilds[0].ebuild_path)
      cros_build_lib.Die(
          'Missing %s ebuild in %s' % (WORKON_EBUILD_VERSION, path))
    return None

  path = os.path.dirname(unstable_ebuilds[0].ebuild_path)
  if len(unstable_ebuilds) > 1:
    cros_build_lib.Die('Found multiple unstable ebuilds in %s' % path)

  if not stable_ebuilds:
    # Fall back to the 9999 ebuild when no stable one exists yet.
    logging.warning('Missing stable ebuild in %s' % path)
    return unstable_ebuilds[0]

  if len(stable_ebuilds) == 1:
    return stable_ebuilds[0]

  # Multiple stable ebuilds are only tolerated when they are revisions of
  # the same upstream version.
  stable_versions = set(ebuild.version_no_rev for ebuild in stable_ebuilds)
  if len(stable_versions) > 1:
    package = stable_ebuilds[0].package
    message = 'Found multiple stable ebuild versions in %s:' % path
    for version in stable_versions:
      message += '\n    %s-%s' % (package, version)
    cros_build_lib.Die(message)

  # Pick the highest -rN revision; lower revisions are ignored with a warning.
  uprev_ebuild = max(stable_ebuilds, key=lambda eb: eb.current_revision)
  for ebuild in stable_ebuilds:
    if ebuild != uprev_ebuild:
      logging.warning('Ignoring stable ebuild revision %s in %s' %
                      (ebuild.version, path))
  return uprev_ebuild
+
+
def BuildEBuildDictionary(overlays, use_all, packages, allow_blacklisted=False):
  """Build a dictionary of the ebuilds in the specified overlays.

  Args:
    overlays: A map which maps overlay directories to arrays of stable EBuilds
      inside said directories.
    use_all: Whether to include all ebuilds in the specified directories.
      If true, then we gather all packages in the directories regardless
      of whether they are in our set of packages.
    packages: A set of the packages we want to gather.  If use_all is
      True, this argument is ignored, and should be None.
    allow_blacklisted: Whether or not to consider blacklisted ebuilds.
  """
  for overlay in overlays:
    for package_dir, _dirs, files in os.walk(overlay):
      # Unless every package was requested, restrict attention to directories
      # whose "<category>/<package>" is in the requested set.  This also
      # avoids tripping over badly formatted blacklisted ebuilds we are not
      # interested in.
      if not use_all:
        pkg_name = os.path.basename(package_dir)
        category = os.path.basename(os.path.dirname(package_dir))
        if os.path.join(category, pkg_name) not in packages:
          continue

      # Pick the uprev candidate (if any) among this directory's ebuilds.
      candidate = _FindUprevCandidates(
          [os.path.join(package_dir, f) for f in files], allow_blacklisted)

      # Non-workon directories yield no candidate.
      if candidate:
        overlays[overlay].append(candidate)
+
+
def RegenCache(overlay):
  """Regenerate the cache of the specified overlay.

  Args:
    overlay: The tree to regenerate the cache for.
  """
  repo_name = GetOverlayName(overlay)
  if not repo_name:
    return

  # Only overlays using the md5-dict cache format are handled here.
  layout_conf = os.path.join(GetOverlayRoot(overlay), 'metadata', 'layout.conf')
  layout = cros_build_lib.LoadKeyValueFile(layout_conf, ignore_missing=True)
  if layout.get('cache-format') != 'md5-dict':
    return

  # Regen for the whole repo.
  cros_build_lib.RunCommand(
      ['egencache', '--update', '--repo', repo_name, '--jobs',
       str(multiprocessing.cpu_count())],
      cwd=overlay, enter_chroot=True)
  # If there was nothing new generated, then let's just bail.
  if not git.RunGit(overlay, ['status', '-s', 'metadata/']).output:
    return
  # Explicitly add any new files to the index.
  git.RunGit(overlay, ['add', 'metadata/'])
  # Explicitly tell git to also include rm-ed files.
  git.RunGit(overlay, ['commit', '-m', 'regen cache', 'metadata/'])
+
+
def ParseBashArray(value):
  """Parse a valid bash array into python list."""
  # The syntax for bash arrays is nontrivial, so let's use bash to do the
  # heavy lifting for us.
  sep = ','
  # Because %s may contain bash comments (#), put a clever newline in the way.
  cmd = 'ARR=%s\nIFS=%s; echo -n "${ARR[*]}"' % (value, sep)
  result = cros_build_lib.RunCommand(cmd, print_cmd=False, shell=True,
                                     capture_output=True)
  return result.output.split(sep)
+
+
def WorkonEBuildGeneratorForDirectory(base_dir):
  """Yields cros_workon EBuilds in |base_dir|.

  Args:
    base_dir: Path to the base directory.

  Yields:
    A cros_workon EBuild instance.
  """
  for root, _, files in os.walk(base_dir):
    for name in files:
      # Only *-9999.ebuild files can be cros_workon ebuilds.
      if not name.endswith(WORKON_EBUILD_SUFFIX):
        continue
      candidate = EBuild(os.path.join(root, name))
      if candidate.is_workon:
        yield candidate
+
+
def WorkonEBuildGenerator(buildroot, overlay_type):
  """Scans all overlays and yields cros_workon EBuilds.

  Args:
    buildroot: Path to source root to find overlays.
    overlay_type: The type of overlay to use (one of
      constants.VALID_OVERLAYS).

  Yields:
    A cros_workon EBuild instance.
  """
  # Walk every overlay of the requested type and surface its workon ebuilds.
  for overlay in FindOverlays(overlay_type, buildroot=buildroot):
    for workon_ebuild in WorkonEBuildGeneratorForDirectory(overlay):
      yield workon_ebuild
+
+
def BuildFullWorkonPackageDictionary(buildroot, overlay_type, manifest):
  """Scans all cros_workon ebuilds and build a dictionary.

  Args:
    buildroot: Path to source root to find overlays.
    overlay_type: The type of overlay to use (one of
      constants.VALID_OVERLAYS).
    manifest: git.ManifestCheckout object.

  Returns:
    A dictionary mapping (project, branch) to a list of packages.
    E.g., {('chromiumos/third_party/kernel', 'chromeos-3.14'):
           ['sys-kernel/chromeos-kernel-3_14']}.
  """
  directory_src = os.path.join(buildroot, 'src')

  pkg_map = {}
  for ebuild in WorkonEBuildGenerator(buildroot, overlay_type):
    if ebuild.is_blacklisted:
      continue
    _, paths = ebuild.GetSourcePath(directory_src, manifest)
    for path in paths:
      checkout = manifest.FindCheckoutFromPath(path)
      branch = git.StripRefs(checkout['tracking_branch'])
      # Group packages under the (project, branch) pair that provides them.
      pkg_map.setdefault((checkout['name'], branch), []).append(ebuild.package)

  return pkg_map
+
+
def GetWorkonProjectMap(overlay, subdirectories):
  """Get a mapping of cros_workon ebuilds to projects and source paths.

  Args:
    overlay: Overlay to look at.
    subdirectories: List of subdirectories to look in on the overlay.

  Yields:
    Tuples containing (filename, projects, srcpaths) for cros-workon ebuilds in
    the given overlay under the given subdirectories.
  """
  # Search ebuilds for project names, ignoring non-existent directories.
  # Also filter out ebuilds which are not cros_workon.
  for subdir in subdirectories:
    for workon_ebuild in WorkonEBuildGeneratorForDirectory(
        os.path.join(overlay, subdir)):
      ebuild_file = workon_ebuild.ebuild_path
      _, projects, srcpaths, _, _ = EBuild.GetCrosWorkonVars(
          ebuild_file, workon_ebuild.pkgname)
      yield os.path.relpath(ebuild_file, start=overlay), projects, srcpaths
+
+
def EbuildToCP(path):
  """Return the category/path string from an ebuild path.

  Args:
    path: Path to an ebuild.

  Returns:
    '$CATEGORY/$PN' (e.g. 'sys-apps/dbus')
  """
  # Keep only the category and package-name components.
  parts = SplitEbuildPath(path)
  return os.path.join(*parts[0:2])
+
+
def SplitEbuildPath(path):
  """Split an ebuild path into its components.

  Given a specified ebuild filename, returns $CATEGORY, $PN, $P. It does not
  perform any check on ebuild name elements or their validity, merely splits
  a filename, absolute or relative, and returns the last 3 components.

  Example: For /any/path/chromeos-base/power_manager/power_manager-9999.ebuild,
  returns ('chromeos-base', 'power_manager', 'power_manager-9999').

  Args:
    path: Path to the ebuild.

  Returns:
    $CATEGORY, $PN, $P
  """
  # Drop the ".ebuild" extension, then keep the trailing three components.
  without_ext = os.path.splitext(path)[0]
  return without_ext.split('/')[-3:]
+
+
def SplitPV(pv, strict=True):
  """Takes a PV value and splits it into individual components.

  Args:
    pv: Package name and version.
    strict: If True, returns None if version or package name is missing.
      Otherwise, only package name is mandatory.

  Returns:
    A collection with named members:
      pv, package, version, version_no_rev, rev
  """
  match = _pvr_re.match(pv)
  if match is not None:
    return PV(**match.groupdict())
  if strict:
    return None
  # Non-strict fallback: treat the whole string as a bare package name.
  return PV(pv=None, package=pv, version=None, version_no_rev=None, rev=None)
+
+
def SplitCPV(cpv, strict=True):
  """Splits a CPV value into components.

  Args:
    cpv: Category, package name, and version of a package.
    strict: If True, returns None if any of the components is missing.
      Otherwise, only package name is mandatory.

  Returns:
    A collection with named members:
      category, pv, package, version, version_no_rev, rev
  """
  chunks = cpv.split('/')
  if len(chunks) > 2:
    raise ValueError('Unexpected package format %s' % cpv)
  # A category is only present when the atom has exactly two chunks.
  category = chunks[0] if len(chunks) == 2 else None

  pv_parts = SplitPV(chunks[-1], strict=strict)
  if strict and (category is None or pv_parts is None):
    return None
  return CPV(category=category, **pv_parts._asdict())
+
+
def FindWorkonProjects(packages):
  """Find the projects associated with the specified cros_workon packages.

  Args:
    packages: List of cros_workon packages.

  Returns:
    The set of projects associated with the specified cros_workon packages.
  """
  projects = set()
  overlays = FindOverlays(constants.BOTH_OVERLAYS,
                          buildroot=constants.SOURCE_ROOT)
  for overlay in overlays:
    for _, overlay_projects, _ in GetWorkonProjectMap(overlay, packages):
      projects.update(overlay_projects)
  return projects
+
+
def ListInstalledPackages(sysroot):
  """[DEPRECATED] Lists all portage packages in a given portage-managed root.

  Assumes the existence of a /var/db/pkg package database.

  This function is DEPRECATED, please use PortageDB.InstalledPackages instead.

  Args:
    sysroot: The root directory being inspected.

  Returns:
    A list of (cp,v) tuples in the given sysroot.
  """
  result = []
  for pkg in PortageDB(sysroot).InstalledPackages():
    result.append(('%s/%s' % (pkg.category, pkg.package), pkg.version))
  return result
+
+
def BestVisible(atom, board=None, pkg_type='ebuild',
                buildroot=constants.SOURCE_ROOT):
  """Get the best visible ebuild CPV for the given atom.

  Args:
    atom: Portage atom.
    board: Board to look at. By default, look in chroot.
    pkg_type: Package type (ebuild, binary, or installed).
    buildroot: Directory

  Returns:
    A CPV object.
  """
  # Use the board-suffixed portageq wrapper when a board is given.
  if board is None:
    portageq = 'portageq'
  else:
    portageq = 'portageq-%s' % board
  root = cros_build_lib.GetSysroot(board=board)
  result = cros_build_lib.RunCommand(
      [portageq, 'best_visible', root, pkg_type, atom],
      cwd=buildroot, enter_chroot=True, debug_level=logging.DEBUG,
      capture_output=True)
  return SplitCPV(result.output.strip())
+
+
def IsPackageInstalled(package, sysroot='/'):
  """Return whether a portage package is in a given portage-managed root.

  Args:
    package: The CP to look for.
    sysroot: The root being inspected.
  """
  # Any installed (cp, version) entry whose cp matches means it is installed.
  return any(cp == package
             for cp, _version in ListInstalledPackages(sysroot))
+
+
def FindPackageNameMatches(pkg_str, board=None):
  """Finds a list of installed packages matching |pkg_str|.

  Args:
    pkg_str: The package name with optional category, version, and slot.
    board: The board to inspect.

  Returns:
    A list of matched CPV objects.
  """
  # Use the board-suffixed equery wrapper when a board is given.
  equery = 'equery-%s' % board if board else 'equery'
  result = cros_build_lib.RunCommand(
      [equery, 'list', pkg_str],
      enter_chroot=True, capture_output=True, error_code_ok=True)

  if result.returncode != 0:
    return []
  return [SplitCPV(line) for line in result.output.splitlines()]
+
+
def FindEbuildForPackage(pkg_str, sysroot, include_masked=False,
                         extra_env=None):
  """Returns a path to an ebuild responsible for package matching |pkg_str|.

  Args:
    pkg_str: The package name with optional category, version, and slot.
    sysroot: The root directory being inspected.
    include_masked: True iff we should include masked ebuilds in our query.
    extra_env: optional dictionary of extra string/string pairs to use as the
      environment of equery command.

  Returns:
    Path to ebuild for this package.
  """
  cmd = [cros_build_lib.GetSysrootToolPath(sysroot, 'equery'), 'which']
  if include_masked:
    cmd.append('--include-masked')
  cmd.append(pkg_str)

  result = cros_build_lib.RunCommand(cmd, extra_env=extra_env, print_cmd=False,
                                     capture_output=True, error_code_ok=True)

  # A non-zero exit status means equery could not resolve the package.
  return None if result.returncode else result.output.strip()
+
+
def GetInstalledPackageUseFlags(pkg_str, board=None):
  """Gets the list of USE flags for installed packages matching |pkg_str|.

  Args:
    pkg_str: The package name with optional category, version, and slot.
    board: The board to inspect.

  Returns:
    A dictionary with the key being a package CP and the value being the list
    of USE flags for that package.
  """
  # Use the board-suffixed qlist wrapper when a board is given.
  qlist = 'qlist-%s' % board if board else 'qlist'
  result = cros_build_lib.RunCommand(
      [qlist, '-CqU', pkg_str],
      enter_chroot=True, capture_output=True, error_code_ok=True)

  use_flags = {}
  if result.returncode == 0:
    for line in result.output.splitlines():
      tokens = line.split()
      # First token is the package CP; the rest are its USE flags.
      use_flags[tokens[0]] = tokens[1:]

  return use_flags
+
+
def GetBinaryPackageDir(sysroot='/', packages_dir=None):
  """Returns the binary package directory of |sysroot|."""
  # Default to the conventional "packages" directory when none is given.
  return os.path.join(sysroot, packages_dir or 'packages')
+
+
def GetBinaryPackagePath(c, p, v, sysroot='/', packages_dir=None):
  """Returns the path to the binary package.

  Args:
    c: category.
    p: package.
    v: version.
    sysroot: The root being inspected.
    packages_dir: Name of the packages directory in |sysroot|.

  Returns:
    The path to the binary package.

  Raises:
    ValueError: If the binary package does not exist on disk.
  """
  path = os.path.join(
      GetBinaryPackageDir(sysroot=sysroot, packages_dir=packages_dir),
      c, '%s-%s.tbz2' % (p, v))
  if not os.path.exists(path):
    raise ValueError('Cannot find the binary package %s!' % path)
  return path
+
+
def GetRepositoryForEbuild(ebuild_path, sysroot):
  """Get parsed output of `ebuild <ebuild_path> info`

  ebuild ... info runs the pkg_info step of an ebuild.
  cros-workon.eclass defines that step and prints both variables.

  Args:
    ebuild_path: string full path to ebuild file.
    sysroot: The root directory being inspected.

  Returns:
    list of RepositoryInfoTuples.
  """
  result = cros_build_lib.RunCommand(
      (cros_build_lib.GetSysrootToolPath(sysroot, 'ebuild'),
       ebuild_path, 'info'),
      capture_output=True, print_cmd=False, error_code_ok=True)

  # This command should return output that looks a lot like:
  # CROS_WORKON_SRCDIR=("/mnt/host/source/src/platform2")
  # CROS_WORKON_PROJECT=("chromiumos/platform2")
  srcdir_match = re.search(r'^CROS_WORKON_SRCDIR=\((".*")\)$',
                           result.output, re.MULTILINE)
  project_match = re.search(r'^CROS_WORKON_PROJECT=\((".*")\)$',
                            result.output, re.MULTILINE)
  if srcdir_match is None or project_match is None:
    return None

  srcdirs = ParseBashArray(srcdir_match.group(1))
  projects = ParseBashArray(project_match.group(1))
  # Mismatched array lengths mean the ebuild output is unusable.
  if len(srcdirs) != len(projects):
    return None

  return [RepositoryInfoTuple(srcdir, project)
          for srcdir, project in zip(srcdirs, projects)]
+
+
def CleanOutdatedBinaryPackages(sysroot):
  """Cleans outdated binary packages from |sysroot|."""
  eclean = cros_build_lib.GetSysrootToolPath(sysroot, 'eclean')
  return cros_build_lib.RunCommand([eclean, '-d', 'packages'])
+
+
def _CheckHasTest(cp, sysroot):
  """Checks if the ebuild for |cp| has tests.

  Args:
    cp: A portage package in the form category/package_name.
    sysroot: Path to the sysroot.

  Returns:
    |cp| if the ebuild for |cp| defines a test stanza, None otherwise.
  """
  ebuild = EBuild(FindEbuildForPackage(cp, sysroot))
  if ebuild.has_test:
    return cp
  return None
+
+
def PackagesWithTest(sysroot, packages):
  """Returns the subset of |packages| that have unit tests.

  Args:
    sysroot: Path to the sysroot.
    packages: List of packages to filter.

  Returns:
    The subset of |packages| that defines unit tests.
  """
  task_args = [(cp, sysroot) for cp in packages]
  with_test = set(parallel.RunTasksInProcessPool(_CheckHasTest, task_args))

  # _CheckHasTest returns None for packages without tests; drop that marker.
  with_test.discard(None)
  return with_test
diff --git a/lib/portage_util_unittest b/lib/portage_util_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/portage_util_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/portage_util_unittest.py b/lib/portage_util_unittest.py
new file mode 100644
index 0000000..216c7f7
--- /dev/null
+++ b/lib/portage_util_unittest.py
@@ -0,0 +1,1100 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for portage_util.py."""
+
+from __future__ import print_function
+
+import cStringIO
+import mock
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import portage_util
+
+
# Manifest checkout for the source tree, resolved once at import time.
MANIFEST = git.ManifestCheckout.Cached(constants.SOURCE_ROOT)
+
+
+# pylint: disable=protected-access
+
+
class _Package(object):
  """Minimal stand-in exposing only a |package| attribute for tests."""

  def __init__(self, package):
    # Category/package string, e.g. 'sys-apps/dbus'.
    self.package = package
+
+
class _DummyCommandResult(object):
  """Create mock RunCommand results."""

  def __init__(self, output):
    # Members other than 'output' are expected to be unused, so
    # we omit them here.
    #
    # Shell output is always newline terminated, so append the
    # newline here for convenience.
    self.output = '%s\n' % output
+
+
class EBuildTest(cros_test_lib.MockTempDirTestCase):
  """Ebuild related tests."""

  # Ebuild body fixtures used to exercise EBuild.has_test detection.
  _MULTILINE_WITH_TEST = """
hello
src_test() {
}"""

  _MULTILINE_NO_TEST = """
hello
src_compile() {
}"""

  _MULTILINE_COMMENTED = """
#src_test() {
# notactive
# }"""

  _MULTILINE_PLATFORM = """
platform_pkg_test() {
}"""

  _SINGLE_LINE_TEST = 'src_test() { echo "foo" }'

  def _MakeFakeEbuild(self, fake_ebuild_path, fake_ebuild_content=''):
    """Write |fake_ebuild_content| to |fake_ebuild_path| and wrap it.

    Returns:
      A portage_util.EBuild for the newly written file.
    """
    osutils.WriteFile(fake_ebuild_path, fake_ebuild_content, makedirs=True)
    fake_ebuild = portage_util.EBuild(fake_ebuild_path)
    return fake_ebuild

  def testParseEBuildPath(self):
    """Test with ebuild with revision number."""
    basedir = os.path.join(self.tempdir, 'cat', 'test_package')
    fake_ebuild_path = os.path.join(basedir, 'test_package-0.0.1-r1.ebuild')
    fake_ebuild = self._MakeFakeEbuild(fake_ebuild_path)

    self.assertEquals(fake_ebuild.category, 'cat')
    self.assertEquals(fake_ebuild.pkgname, 'test_package')
    self.assertEquals(fake_ebuild.version_no_rev, '0.0.1')
    self.assertEquals(fake_ebuild.current_revision, 1)
    self.assertEquals(fake_ebuild.version, '0.0.1-r1')
    self.assertEquals(fake_ebuild.package, 'cat/test_package')
    self.assertEquals(fake_ebuild._ebuild_path_no_version,
                      os.path.join(basedir, 'test_package'))
    self.assertEquals(fake_ebuild.ebuild_path_no_revision,
                      os.path.join(basedir, 'test_package-0.0.1'))
    self.assertEquals(fake_ebuild._unstable_ebuild_path,
                      os.path.join(basedir, 'test_package-9999.ebuild'))
    self.assertEquals(fake_ebuild.ebuild_path, fake_ebuild_path)

  def testParseEBuildPathNoRevisionNumber(self):
    """Test with ebuild without revision number."""
    basedir = os.path.join(self.tempdir, 'cat', 'test_package')
    fake_ebuild_path = os.path.join(basedir, 'test_package-9999.ebuild')
    fake_ebuild = self._MakeFakeEbuild(fake_ebuild_path)

    self.assertEquals(fake_ebuild.category, 'cat')
    self.assertEquals(fake_ebuild.pkgname, 'test_package')
    self.assertEquals(fake_ebuild.version_no_rev, '9999')
    self.assertEquals(fake_ebuild.current_revision, 0)
    self.assertEquals(fake_ebuild.version, '9999')
    self.assertEquals(fake_ebuild.package, 'cat/test_package')
    self.assertEquals(fake_ebuild._ebuild_path_no_version,
                      os.path.join(basedir, 'test_package'))
    self.assertEquals(fake_ebuild.ebuild_path_no_revision,
                      os.path.join(basedir, 'test_package-9999'))
    self.assertEquals(fake_ebuild._unstable_ebuild_path,
                      os.path.join(basedir, 'test_package-9999.ebuild'))
    self.assertEquals(fake_ebuild.ebuild_path, fake_ebuild_path)

  def testGetCommitId(self):
    """GetCommitId should return the hash reported by git for the tree."""
    fake_hash = '24ab3c9f6d6b5c744382dba2ca8fb444b9808e9f'
    basedir = os.path.join(self.tempdir, 'cat', 'test_package')
    fake_ebuild_path = os.path.join(basedir, 'test_package-9999.ebuild')
    fake_ebuild = self._MakeFakeEbuild(fake_ebuild_path)

    # git rev-parse HEAD
    self.PatchObject(git, 'RunGit', return_value=_DummyCommandResult(fake_hash))
    test_hash = fake_ebuild.GetCommitId(self.tempdir)
    self.assertEquals(test_hash, fake_hash)

  def testEBuildStable(self):
    """Test ebuild w/keyword variations"""
    basedir = os.path.join(self.tempdir, 'cat', 'test_package')
    fake_ebuild_path = os.path.join(basedir, 'test_package-9999.ebuild')

    # (KEYWORDS value, expected is_stable) pairs.
    datasets = (
        ('~amd64', False),
        ('amd64', True),
        ('~amd64 ~arm ~x86', False),
        ('~amd64 arm ~x86', True),
        ('-* ~arm', False),
        ('-* x86', True),
    )
    for keywords, stable in datasets:
      # NOTE(review): fake_ebuild_content is a list here; osutils.WriteFile
      # presumably accepts an iterable of lines — confirm.
      fake_ebuild = self._MakeFakeEbuild(
          fake_ebuild_path, fake_ebuild_content=['KEYWORDS="%s"\n' % keywords])
      self.assertEquals(fake_ebuild.is_stable, stable)

  def testEBuildBlacklisted(self):
    """Test blacklisted ebuild"""
    basedir = os.path.join(self.tempdir, 'cat', 'test_package')
    fake_ebuild_path = os.path.join(basedir, 'test_package-9999.ebuild')

    fake_ebuild = self._MakeFakeEbuild(fake_ebuild_path)
    self.assertEquals(fake_ebuild.is_blacklisted, False)

    fake_ebuild = self._MakeFakeEbuild(
        fake_ebuild_path, fake_ebuild_content=['CROS_WORKON_BLACKLIST="1"\n'])
    self.assertEquals(fake_ebuild.is_blacklisted, True)

  def testHasTest(self):
    """Tests that we detect test stanzas correctly."""
    def run_case(content, expected):
      # Each case writes a throwaway ebuild and checks has_test on it.
      with osutils.TempDir() as temp:
        ebuild = os.path.join(temp, 'overlay', 'app-misc',
                              'foo-0.0.1-r1.ebuild')
        osutils.WriteFile(ebuild, content, makedirs=True)
        self.assertEqual(expected, portage_util.EBuild(ebuild).has_test)

    run_case(self._MULTILINE_WITH_TEST, True)
    run_case(self._MULTILINE_NO_TEST, False)
    run_case(self._MULTILINE_COMMENTED, False)
    run_case(self._MULTILINE_PLATFORM, True)
    run_case(self._SINGLE_LINE_TEST, True)
+
+
+class ProjectAndPathTest(cros_test_lib.MockTempDirTestCase):
+  """Project and Path related tests."""
+
+  def _MockParseWorkonVariables(self, fake_projects, fake_srcpaths,
+                                fake_localnames, fake_subdirs,
+                                fake_ebuild_contents):
+    """Mock the necessary calls, call GetSourcePath().
+
+    Args:
+      fake_projects: CROS_WORKON_PROJECT values to simulate (may be empty).
+      fake_srcpaths: CROS_WORKON_SRCPATH values to simulate (may be empty).
+      fake_localnames: CROS_WORKON_LOCALNAME values to simulate.
+      fake_subdirs: CROS_WORKON_SUBDIR values to simulate.
+      fake_ebuild_contents: Raw text written out as the 9999 ebuild.
+
+    Returns:
+      Whatever EBuild.GetSourcePath() returns: a (projects, paths) tuple.
+    """
+
+    def _isdir(path):
+      """Mock function for os.path.isdir"""
+      # The 'src' root directory only exists when srcpaths are in play.
+      if any(fake_srcpaths):
+        if path == os.path.join(self.tempdir, 'src'):
+          return True
+
+      for srcpath in fake_srcpaths:
+        if srcpath:
+          if path == os.path.join(self.tempdir, 'src', srcpath):
+            return True
+        else:
+          # Project checkouts only exist under the platform/ tree; the bare
+          # <localname>/<subdir> probe must fail so lookup falls through.
+          for localname, subdir in zip(fake_localnames, fake_subdirs):
+            if path == os.path.join(self.tempdir, localname, subdir):
+              return False
+            elif path == os.path.join(self.tempdir, 'platform', localname,
+                                      subdir):
+              return True
+
+      # Any probe not modeled above means the test setup is wrong.
+      raise Exception('unhandled path: %s' % path)
+
+    def _FindCheckoutFromPath(path):
+      """Mock function for manifest.FindCheckoutFromPath"""
+      for project, localname, subdir in zip(fake_projects, fake_localnames,
+                                            fake_subdirs):
+        if path == os.path.join(self.tempdir, 'platform', localname, subdir):
+          return {'name': project}
+      return {}
+
+    self.PatchObject(os.path, 'isdir', side_effect=_isdir)
+    self.PatchObject(MANIFEST, 'FindCheckoutFromPath',
+                     side_effect=_FindCheckoutFromPath)
+
+    # Pad out whichever of the two lists the caller left empty so they can
+    # be zipped against each other downstream.
+    if not fake_srcpaths:
+      fake_srcpaths = [''] * len(fake_projects)
+    if not fake_projects:
+      fake_projects = [''] * len(fake_srcpaths)
+
+    # We need 'chromeos-base' here because it controls default _SUBDIR values.
+    ebuild_path = os.path.join(self.tempdir, 'packages', 'chromeos-base',
+                               'package', 'package-9999.ebuild')
+    osutils.WriteFile(ebuild_path, fake_ebuild_contents, makedirs=True)
+
+    ebuild = portage_util.EBuild(ebuild_path)
+    return ebuild.GetSourcePath(self.tempdir, MANIFEST)
+
+  def testParseLegacyWorkonVariables(self):
+    """Tests if ebuilds in a single item format are correctly parsed."""
+    fake_project = 'my_project1'
+    fake_localname = 'foo'
+    fake_subdir = 'bar'
+    fake_ebuild_contents = """
+CROS_WORKON_PROJECT=%s
+CROS_WORKON_LOCALNAME=%s
+CROS_WORKON_SUBDIR=%s
+    """ % (fake_project, fake_localname, fake_subdir)
+    project, subdir = self._MockParseWorkonVariables(
+        [fake_project], [], [fake_localname], [fake_subdir],
+        fake_ebuild_contents)
+    self.assertEquals(project, [fake_project])
+    self.assertEquals(subdir, [os.path.join(
+        self.tempdir, 'platform', '%s/%s' % (fake_localname, fake_subdir))])
+
+  def testParseArrayWorkonVariables(self):
+    """Tests if ebuilds in an array format are correctly parsed."""
+    fake_projects = ['my_project1', 'my_project2', 'my_project3']
+    fake_localnames = ['foo', 'bar', 'bas']
+    fake_subdirs = ['sub1', 'sub2', 'sub3']
+    # The test content is formatted using the same function that
+    # formats ebuild output, ensuring that we can parse our own
+    # products.
+    fake_ebuild_contents = """
+CROS_WORKON_PROJECT=%s
+CROS_WORKON_LOCALNAME=%s
+CROS_WORKON_SUBDIR=%s
+    """ % (portage_util.EBuild.FormatBashArray(fake_projects),
+           portage_util.EBuild.FormatBashArray(fake_localnames),
+           portage_util.EBuild.FormatBashArray(fake_subdirs))
+    projects, subdirs = self._MockParseWorkonVariables(
+        fake_projects, [], fake_localnames, fake_subdirs, fake_ebuild_contents)
+    self.assertEquals(projects, fake_projects)
+    fake_paths = [
+        os.path.realpath(os.path.join(
+            self.tempdir, 'platform',
+            '%s/%s' % (fake_localnames[i], fake_subdirs[i])))
+        for i in range(0, len(fake_projects))
+    ]
+    self.assertEquals(subdirs, fake_paths)
+
+  def testParseArrayWorkonVariablesWithSrcpaths(self):
+    """Tests if ebuilds with CROS_WORKON_SRCPATH are handled correctly."""
+    # Mixed configuration: first entry is project-based, the other two are
+    # srcpath-based (empty project, non-empty srcpath).
+    fake_projects = ['my_project1', '', '']
+    fake_srcpaths = ['', 'path/to/src', 'path/to/other/src']
+    fake_localnames = ['foo', 'bar', 'bas']
+    fake_subdirs = ['sub1', 'sub2', 'sub3']
+    # The test content is formatted using the same function that
+    # formats ebuild output, ensuring that we can parse our own
+    # products.
+    fake_ebuild_contents = """
+CROS_WORKON_PROJECT=%s
+CROS_WORKON_SRCPATH=%s
+CROS_WORKON_LOCALNAME=%s
+CROS_WORKON_SUBDIR=%s
+    """ % (portage_util.EBuild.FormatBashArray(fake_projects),
+           portage_util.EBuild.FormatBashArray(fake_srcpaths),
+           portage_util.EBuild.FormatBashArray(fake_localnames),
+           portage_util.EBuild.FormatBashArray(fake_subdirs))
+    projects, subdirs = self._MockParseWorkonVariables(
+        fake_projects, fake_srcpaths, fake_localnames, fake_subdirs,
+        fake_ebuild_contents)
+    self.assertEquals(projects, fake_projects)
+    # Srcpath entries resolve under src/, project entries under platform/.
+    fake_paths = []
+    for srcpath, localname, subdir in zip(
+        fake_srcpaths, fake_localnames, fake_subdirs):
+      if srcpath:
+        path = os.path.realpath(os.path.join(
+            self.tempdir, 'src', srcpath))
+      else:
+        path = os.path.realpath(os.path.join(
+            self.tempdir, 'platform', '%s/%s' % (localname, subdir)))
+      fake_paths.append(path)
+
+    self.assertEquals(subdirs, fake_paths)
+
+
+class StubEBuild(portage_util.EBuild):
+  """Test helper to StubEBuild."""
+
+  def __init__(self, path):
+    super(StubEBuild, self).__init__(path)
+    self.is_workon = True
+    self.is_stable = True
+
+  def _ReadEBuild(self, path):
+    pass
+
+  def GetCommitId(self, srcpath):
+    id_map = {
+        'p1_path': 'my_id',
+        'p1_path1': 'my_id1',
+        'p1_path2': 'my_id2'
+    }
+    if srcpath in id_map:
+      return id_map[srcpath]
+    else:
+      return 'you_lose'
+
+
+class EBuildRevWorkonTest(cros_test_lib.MockTempDirTestCase):
+  """Tests for EBuildRevWorkon."""
+
+  # Lines that we will feed as fake ebuild contents to
+  # EBuild.MarkAsStable().  This is the minimum content needed
+  # to test the various branches in the function's main processing
+  # loop.
+  _mock_ebuild = ['EAPI=2\n',
+                  'CROS_WORKON_COMMIT=old_id\n',
+                  'KEYWORDS="~x86 ~arm ~amd64"\n',
+                  'src_unpack(){}\n']
+  _mock_ebuild_multi = ['EAPI=2\n',
+                        'CROS_WORKON_COMMIT=("old_id1","old_id2")\n',
+                        'KEYWORDS="~x86 ~arm ~amd64"\n',
+                        'src_unpack(){}\n']
+  # Expected output after a rev: commit/tree stamped in, keywords stabilized.
+  _revved_ebuild = ('EAPI=2\n'
+                    'CROS_WORKON_COMMIT="my_id"\n'
+                    'CROS_WORKON_TREE="treehash"\n'
+                    'KEYWORDS="x86 arm amd64"\n'
+                    'src_unpack(){}\n')
+  _revved_ebuild_multi = ('EAPI=2\n'
+                          'CROS_WORKON_COMMIT=("my_id1" "my_id2")\n'
+                          'CROS_WORKON_TREE=("treehash1" "treehash2")\n'
+                          'KEYWORDS="x86 arm amd64"\n'
+                          'src_unpack(){}\n')
+
+  def setUp(self):
+    self.overlay = os.path.join(self.tempdir, 'overlay')
+    package_name = os.path.join(self.overlay,
+                                'category/test_package/test_package-0.0.1')
+    ebuild_path = package_name + '-r1.ebuild'
+    self.m_ebuild = StubEBuild(ebuild_path)
+    self.revved_ebuild_path = package_name + '-r2.ebuild'
+    # NOTE(review): _m_file appears unused by the tests below -- they create
+    # their own StringIO in RevWorkOnEBuild().  Confirm before removing.
+    self._m_file = cStringIO.StringIO()
+
+  def createRevWorkOnMocks(self, ebuild_content, rev, multi=False):
+    """Creates a mock environment to run RevWorkOnEBuild.
+
+    Args:
+      ebuild_content: The content of the ebuild that will be revved.
+      rev: Tell _RunGit whether this is an attempt to rev an ebuild.
+      multi: Whether there are multiple projects to uprev.
+    """
+    def _GetTreeId(path):
+      """Mock function for portage_util.EBuild.GetTreeId"""
+      return {
+          'p1_path1': 'treehash1',
+          'p1_path2': 'treehash2',
+          'p1_path': 'treehash',
+      }.get(path)
+
+    def _RunGit(cwd, cmd):
+      """Mock function for portage_util.EBuild._RunGit"""
+      self.assertEqual(cwd, self.overlay)
+      self.assertTrue(rev, msg='git should not be run when not revving')
+      if cmd[0] == 'add':
+        # The revved ebuild gets `git add`ed ...
+        self.assertEqual(cmd, ['add', self.revved_ebuild_path])
+      else:
+        # ... and the old stable ebuild gets `git rm`ed.
+        self.assertTrue(self.m_ebuild.is_stable)
+        self.assertEqual(cmd, ['rm', '-f', self.m_ebuild.ebuild_path])
+
+    source_mock = self.PatchObject(portage_util.EBuild, 'GetSourcePath')
+    if multi:
+      source_mock.return_value = (['fake_project1', 'fake_project2'],
+                                  ['p1_path1', 'p1_path2'])
+    else:
+      source_mock.return_value = (['fake_project1'], ['p1_path'])
+
+    self.PatchObject(portage_util.EBuild, 'GetTreeId', side_effect=_GetTreeId)
+    self.PatchObject(portage_util.EBuild, '_RunGit', side_effect=_RunGit)
+
+    # Write both the 9999 ebuild and the current stable ebuild to disk.
+    osutils.WriteFile(self.m_ebuild._unstable_ebuild_path, ebuild_content,
+                      makedirs=True)
+    osutils.WriteFile(self.m_ebuild.ebuild_path, ebuild_content, makedirs=True)
+
+  def RevWorkOnEBuild(self, *args, **kwargs):
+    """Thin helper wrapper to call the function under test.
+
+    Returns:
+      (result, revved_ebuild) where result is the result from the called
+      function, and revved_ebuild is the content of the revved ebuild.
+    """
+    m_file = cStringIO.StringIO()
+    kwargs['redirect_file'] = m_file
+    result = self.m_ebuild.RevWorkOnEBuild(*args, **kwargs)
+    return result, m_file.getvalue()
+
+  def testRevWorkOnEBuild(self):
+    """Test Uprev of a single project ebuild."""
+    self.createRevWorkOnMocks(self._mock_ebuild, rev=True)
+    result, revved_ebuild = self.RevWorkOnEBuild(self.tempdir, MANIFEST)
+    self.assertEqual(result, 'category/test_package-0.0.1-r2')
+    self.assertEqual(self._revved_ebuild, revved_ebuild)
+    self.assertExists(self.revved_ebuild_path)
+
+  def testRevWorkOnMultiEBuild(self):
+    """Test Uprev of a multi-project (array) ebuild."""
+    self.createRevWorkOnMocks(self._mock_ebuild_multi, rev=True, multi=True)
+    result, revved_ebuild = self.RevWorkOnEBuild(self.tempdir, MANIFEST)
+    self.assertEqual(result, 'category/test_package-0.0.1-r2')
+    self.assertEqual(self._revved_ebuild_multi, revved_ebuild)
+    self.assertExists(self.revved_ebuild_path)
+
+  def testRevUnchangedEBuild(self):
+    """Test that an unchanged ebuild does not get revved."""
+    self.createRevWorkOnMocks(self._mock_ebuild, rev=False)
+
+    # Force the old/new comparison to report "identical".
+    self.PatchObject(portage_util.filecmp, 'cmp', return_value=True)
+    result, revved_ebuild = self.RevWorkOnEBuild(self.tempdir, MANIFEST)
+    self.assertEqual(result, None)
+    self.assertEqual(self._revved_ebuild, revved_ebuild)
+    self.assertNotExists(self.revved_ebuild_path)
+
+  def testRevMissingEBuild(self):
+    """Test revving an ebuild that has no stable version yet."""
+    # Make the "stable" ebuild be the 9999 one, starting at revision 0.
+    self.revved_ebuild_path = self.m_ebuild.ebuild_path
+    self.m_ebuild.ebuild_path = self.m_ebuild._unstable_ebuild_path
+    self.m_ebuild.current_revision = 0
+    self.m_ebuild.is_stable = False
+
+    # Drop the CROS_WORKON_COMMIT line to mimic an unstable-only ebuild.
+    self.createRevWorkOnMocks(self._mock_ebuild[0:1] + self._mock_ebuild[2:],
+                              rev=True)
+    result, revved_ebuild = self.RevWorkOnEBuild(self.tempdir, MANIFEST)
+
+    self.assertEqual(result, 'category/test_package-0.0.1-r1')
+    self.assertEqual(self._revved_ebuild, revved_ebuild)
+    self.assertExists(self.revved_ebuild_path)
+
+  def testCommitChange(self):
+    """Test that CommitChange issues the expected `git commit` command."""
+    m = self.PatchObject(portage_util.EBuild, '_RunGit', return_value='')
+    mock_message = 'Commitme'
+    self.m_ebuild.CommitChange(mock_message, '.')
+    m.assert_called_once_with('.', ['commit', '-a', '-m', 'Commitme'])
+
+  def testUpdateCommitHashesForChanges(self):
+    """Tests that we can update the commit hashes for changes correctly."""
+    build_root = 'fakebuildroot'
+    overlays = ['public_overlay']
+    changes = ['fake change']
+    paths = ['fake_path1', 'fake_path2']
+    sha1s = ['sha1', 'shaaaaaaaaaaaaaaaa2']
+    path_ebuilds = {self.m_ebuild: paths}
+
+    self.PatchObject(portage_util, 'FindOverlays', return_value=overlays)
+    self.PatchObject(portage_util.EBuild, '_GetEBuildPaths',
+                     return_value=path_ebuilds)
+    # Feed the sha1s back in reverse so the test proves the result is
+    # (re)ordered by path rather than by call order.
+    self.PatchObject(portage_util.EBuild, '_GetSHA1ForPath',
+                     side_effect=reversed(sha1s))
+    update_mock = self.PatchObject(portage_util.EBuild, 'UpdateEBuild')
+    self.PatchObject(portage_util.EBuild, 'GitRepoHasChanges',
+                     return_value=True)
+    commit_mock = self.PatchObject(portage_util.EBuild, 'CommitChange')
+
+    portage_util.EBuild.UpdateCommitHashesForChanges(changes, build_root,
+                                                     MANIFEST)
+
+    update_mock.assert_called_once_with(
+        self.m_ebuild.ebuild_path,
+        {'CROS_WORKON_COMMIT': '(%s)' % ' '.join('"%s"' % x for x in sha1s)})
+    commit_mock.assert_called_once_with(mock.ANY, overlay=overlays[0])
+
+  def testGitRepoHasChanges(self):
+    """Tests that GitRepoHasChanges works correctly."""
+    git.RunGit(self.tempdir,
+               ['clone', '--depth=1', constants.CHROMITE_DIR, self.tempdir])
+    # No changes yet as we just cloned the repo.
+    self.assertFalse(portage_util.EBuild.GitRepoHasChanges(self.tempdir))
+    # Update metadata but no real changes.
+    osutils.Touch(os.path.join(self.tempdir, 'LICENSE'))
+    self.assertFalse(portage_util.EBuild.GitRepoHasChanges(self.tempdir))
+    # A real change.
+    osutils.WriteFile(os.path.join(self.tempdir, 'LICENSE'), 'hi')
+    self.assertTrue(portage_util.EBuild.GitRepoHasChanges(self.tempdir))
+
+  def testNoVersionScript(self):
+    """Verify default behavior with no chromeos-version.sh script."""
+    self.assertEqual('1234', self.m_ebuild.GetVersion(None, None, '1234'))
+
+  def testValidVersionScript(self):
+    """Verify normal behavior with a chromeos-version.sh script."""
+    exists = self.PatchObject(os.path, 'exists', return_value=True)
+    self.PatchObject(portage_util.EBuild, 'GetSourcePath',
+                     return_value=(None, []))
+    self.PatchObject(portage_util.EBuild, '_RunCommand', return_value='1122')
+    self.assertEqual('1122', self.m_ebuild.GetVersion(None, None, '1234'))
+    # Sanity check.
+    self.assertEqual(exists.call_count, 1)
+
+  def testVersionScriptNoOutput(self):
+    """Reject scripts that output nothing."""
+    exists = self.PatchObject(os.path, 'exists', return_value=True)
+    self.PatchObject(portage_util.EBuild, 'GetSourcePath',
+                     return_value=(None, []))
+    run = self.PatchObject(portage_util.EBuild, '_RunCommand')
+
+    # Reject no output.
+    run.return_value = ''
+    self.assertRaises(SystemExit, self.m_ebuild.GetVersion, None, None, '1234')
+    # Sanity check.
+    self.assertEqual(exists.call_count, 1)
+    exists.reset_mock()
+
+    # Reject simple output.
+    run.return_value = '\n'
+    self.assertRaises(SystemExit, self.m_ebuild.GetVersion, None, None, '1234')
+    # Sanity check.
+    self.assertEqual(exists.call_count, 1)
+
+  def testVersionScriptTooHighVersion(self):
+    """Reject scripts that output high version numbers."""
+    exists = self.PatchObject(os.path, 'exists', return_value=True)
+    self.PatchObject(portage_util.EBuild, 'GetSourcePath',
+                     return_value=(None, []))
+    self.PatchObject(portage_util.EBuild, '_RunCommand', return_value='999999')
+    self.assertRaises(ValueError, self.m_ebuild.GetVersion, None, None, '1234')
+    # Sanity check.
+    self.assertEqual(exists.call_count, 1)
+
+  def testVersionScriptInvalidVersion(self):
+    """Reject scripts that output bad version numbers."""
+    exists = self.PatchObject(os.path, 'exists', return_value=True)
+    self.PatchObject(portage_util.EBuild, 'GetSourcePath',
+                     return_value=(None, []))
+    self.PatchObject(portage_util.EBuild, '_RunCommand', return_value='abcd')
+    self.assertRaises(ValueError, self.m_ebuild.GetVersion, None, None, '1234')
+    # Sanity check.
+    self.assertEqual(exists.call_count, 1)
+
+  def testUpdateEBuildRecovery(self):
+    """Make sure UpdateEBuild can be called more than once even w/failures."""
+    ebuild = os.path.join(self.tempdir, 'test.ebuild')
+    content = '# Some data\nVAR=val\n'
+    osutils.WriteFile(ebuild, content)
+
+    # First run: pass in an invalid redirect file to trigger an exception.
+    try:
+      portage_util.EBuild.UpdateEBuild(ebuild, {'VAR': 'a'}, redirect_file=1234)
+      assert False, 'this should have thrown an exception ...'
+    except Exception:
+      pass
+
+    # Second run: it should pass normally.
+    portage_util.EBuild.UpdateEBuild(ebuild, {'VAR': 'b'})
+
+
+class ListOverlaysTest(cros_test_lib.TempDirTestCase):
+  """Tests related to listing overlays."""
+
+  def testMissingOverlays(self):
+    """Tests that exceptions are raised when an overlay is missing."""
+    self.assertRaises(portage_util.MissingOverlayException,
+                      portage_util._ListOverlays,
+                      board='foo', buildroot=self.tempdir)
+
+
+class FindOverlaysTest(cros_test_lib.MockTempDirTestCase):
+  """Tests related to finding overlays."""
+
+  # Board name constants exercising every public/private combination.
+  FAKE, PUB_PRIV, PUB_PRIV_VARIANT, PUB_ONLY, PUB2_ONLY, PRIV_ONLY, BRICK = (
+      'fake!board', 'pub-priv-board', 'pub-priv-board_variant',
+      'pub-only-board', 'pub2-only-board', 'priv-only-board', 'brick',
+  )
+  PRIVATE = constants.PRIVATE_OVERLAYS
+  PUBLIC = constants.PUBLIC_OVERLAYS
+  BOTH = constants.BOTH_OVERLAYS
+
+  def setUp(self):
+    # Create an overlay tree to run tests against and isolate ourselves from
+    # changes in the main tree.
+    D = cros_test_lib.Directory
+    overlay_files = (D('metadata', ('layout.conf',)),)
+    board_overlay_files = overlay_files + (
+        'make.conf',
+        'toolchain.conf',
+    )
+    file_layout = (
+        D('src', (
+            D('overlays', (
+                D('overlay-%s' % self.PUB_ONLY, board_overlay_files),
+                D('overlay-%s' % self.PUB2_ONLY, board_overlay_files),
+                D('overlay-%s' % self.PUB_PRIV, board_overlay_files),
+                D('overlay-%s' % self.PUB_PRIV_VARIANT, board_overlay_files),
+            )),
+            D('private-overlays', (
+                D('overlay-%s' % self.PUB_PRIV, board_overlay_files),
+                D('overlay-%s' % self.PUB_PRIV_VARIANT, board_overlay_files),
+                D('overlay-%s' % self.PRIV_ONLY, board_overlay_files),
+            )),
+            D('third_party', (
+                D('chromiumos-overlay', overlay_files),
+                D('portage-stable', overlay_files),
+            )),
+        )),
+        D('projects', (
+            D(self.BRICK, (
+                D('packages', overlay_files),
+                'config.json',
+            )),
+        )),
+    )
+    cros_test_lib.CreateOnDiskHierarchy(self.tempdir, file_layout)
+
+    # Seed the board overlays.
+    conf_data = 'repo-name = %(repo-name)s\nmasters = %(masters)s'
+    conf_path = os.path.join(self.tempdir, 'src', '%(private)soverlays',
+                             'overlay-%(board)s', 'metadata', 'layout.conf')
+
+    # Public board overlays; a '_variant' board masters its parent board.
+    for board in (self.PUB_PRIV, self.PUB_PRIV_VARIANT, self.PUB_ONLY,
+                  self.PUB2_ONLY):
+      settings = {
+          'board': board,
+          'masters': 'portage-stable ',
+          'private': '',
+          'repo-name': board,
+      }
+      if '_' in board:
+        settings['masters'] += board.split('_')[0]
+      osutils.WriteFile(conf_path % settings,
+                        conf_data % settings)
+
+    # Seed the brick, with PUB_ONLY overlay as its primary overlay.
+    osutils.WriteFile(os.path.join(self.tempdir, 'projects', self.BRICK,
+                                   'packages', 'metadata', 'layout.conf'),
+                      'repo-name = %s\nmasters = %s' % (self.BRICK,
+                                                        self.PUB_ONLY))
+
+    # Private board overlays use a distinct '<board>-private' repo name.
+    for board in (self.PUB_PRIV, self.PUB_PRIV_VARIANT, self.PRIV_ONLY):
+      settings = {
+          'board': board,
+          'masters': 'portage-stable ',
+          'private': 'private-',
+          'repo-name': '%s-private' % board,
+      }
+      if '_' in board:
+        settings['masters'] += board.split('_')[0]
+      osutils.WriteFile(conf_path % settings,
+                        conf_data % settings)
+
+    # Seed the common overlays.
+    conf_path = os.path.join(self.tempdir, 'src', 'third_party', '%(overlay)s',
+                             'metadata', 'layout.conf')
+    osutils.WriteFile(conf_path % {'overlay': 'chromiumos-overlay'},
+                      conf_data % {'repo-name': 'chromiumos', 'masters': ''})
+    osutils.WriteFile(conf_path % {'overlay': 'portage-stable'},
+                      conf_data % {'repo-name': 'portage-stable',
+                                   'masters': ''})
+
+    # Now build up the list of overlays that we'll use in tests below.
+    # self.overlays[board][overlay_type] -> list of overlay paths.
+    self.overlays = {}
+    for b in (None, self.FAKE, self.PUB_PRIV, self.PUB_PRIV_VARIANT,
+              self.PUB_ONLY, self.PUB2_ONLY, self.PRIV_ONLY, self.BRICK):
+      self.overlays[b] = d = {}
+      for o in (self.PRIVATE, self.PUBLIC, self.BOTH, None):
+        try:
+          d[o] = portage_util.FindOverlays(o, b, self.tempdir)
+        except portage_util.MissingOverlayException:
+          d[o] = []
+    # NOTE(review): 'd' here is the dict from the *last* loop iteration only,
+    # and _no_overlays is not read by any test in this view -- confirm intent.
+    self._no_overlays = not bool(any(d.values()))
+
+  def testMissingPrimaryOverlay(self):
+    """Test what happens when a primary overlay is missing.
+
+    If the overlay doesn't exist, FindOverlays should throw a
+    MissingOverlayException.
+    """
+    self.assertRaises(portage_util.MissingOverlayException,
+                      portage_util.FindPrimaryOverlay, self.BOTH,
+                      self.FAKE, self.tempdir)
+
+  def testDuplicates(self):
+    """Verify that no duplicate overlays are returned."""
+    for d in self.overlays.itervalues():
+      for overlays in d.itervalues():
+        self.assertEqual(len(overlays), len(set(overlays)))
+
+  def testOverlaysExist(self):
+    """Verify that all overlays returned actually exist on disk."""
+    for d in self.overlays.itervalues():
+      for overlays in d.itervalues():
+        self.assertTrue(all(os.path.isdir(x) for x in overlays))
+
+  def testPrivatePublicOverlayTypes(self):
+    """Verify public/private filtering.
+
+    If we ask for results from 'both overlays', we should
+    find all public and all private overlays.
+    """
+    for b, d in self.overlays.items():
+      if b == self.FAKE or b == self.BRICK:
+        continue
+      self.assertGreaterEqual(set(d[self.BOTH]), set(d[self.PUBLIC]))
+      self.assertGreater(set(d[self.BOTH]), set(d[self.PRIVATE]))
+      self.assertTrue(set(d[self.PUBLIC]).isdisjoint(d[self.PRIVATE]))
+
+  def testNoOverlayType(self):
+    """If we specify overlay_type=None, no results should be returned."""
+    self.assertTrue(all(d[None] == [] for d in self.overlays.itervalues()))
+
+  def testNonExistentBoard(self):
+    """Test what happens when a non-existent board is supplied.
+
+    If we specify a non-existent board to FindOverlays, only generic
+    overlays should be returned.
+    """
+    for o in (self.PUBLIC, self.BOTH):
+      self.assertLess(set(self.overlays[self.FAKE][o]),
+                      set(self.overlays[self.PUB_PRIV][o]))
+
+  def testAllBoards(self):
+    """If we specify board=None, all overlays should be returned."""
+    for o in (self.PUBLIC, self.BOTH):
+      for b in (self.FAKE, self.PUB_PRIV):
+        self.assertLess(set(self.overlays[b][o]), set(self.overlays[None][o]))
+
+  def testPrimaryOverlays(self):
+    """Verify that boards have a primary overlay.
+
+    Further, the only difference between public boards are the primary overlay
+    which should be listed last.
+    """
+    primary = portage_util.FindPrimaryOverlay(
+        self.BOTH, self.PUB_ONLY, self.tempdir)
+    self.assertIn(primary, self.overlays[self.PUB_ONLY][self.BOTH])
+    self.assertNotIn(primary, self.overlays[self.PUB2_ONLY][self.BOTH])
+    self.assertEqual(primary, self.overlays[self.PUB_ONLY][self.PUBLIC][-1])
+    self.assertEqual(self.overlays[self.PUB_ONLY][self.PUBLIC][:-1],
+                     self.overlays[self.PUB2_ONLY][self.PUBLIC][:-1])
+    self.assertNotEqual(self.overlays[self.PUB_ONLY][self.PUBLIC][-1],
+                        self.overlays[self.PUB2_ONLY][self.PUBLIC][-1])
+
+  def testBrickPrimaryOverlay(self):
+    """Verify that a brick's stacking correctly picks up its primary overlay."""
+    primary = portage_util.FindPrimaryOverlay(
+        self.BOTH, self.BRICK, self.tempdir)
+    self.assertIn(primary, self.overlays[self.PUB_ONLY][self.BOTH])
+    self.assertEqual(primary, self.overlays[self.PUB_ONLY][self.PUBLIC][-1])
+
+  def testReadOverlayFileOrder(self):
+    """Verify that the boards are examined in the right order."""
+    m = self.PatchObject(os.path, 'isfile', return_value=False)
+    portage_util.ReadOverlayFile('test', self.PUBLIC, self.PUB_PRIV,
+                                 self.tempdir)
+    # [:-5] strips the trailing '/test' filename to recover the overlay dir.
+    read_overlays = [x[0][0][:-5] for x in m.call_args_list]
+    overlays = [x for x in reversed(self.overlays[self.PUB_PRIV][self.PUBLIC])]
+    self.assertEqual(read_overlays, overlays)
+
+  def testFindOverlayFile(self):
+    """Verify that the first file found is returned."""
+    file_to_find = 'something_special'
+    full_path = os.path.join(self.tempdir,
+                             'src', 'private-overlays',
+                             'overlay-%s' % self.PUB_PRIV,
+                             file_to_find)
+    osutils.Touch(full_path)
+    self.assertEqual(full_path,
+                     portage_util.FindOverlayFile(file_to_find, self.BOTH,
+                                                  self.PUB_PRIV_VARIANT,
+                                                  self.tempdir))
+
+  def testFoundPrivateOverlays(self):
+    """Verify that private boards had their overlays located."""
+    for b in (self.PUB_PRIV, self.PUB_PRIV_VARIANT, self.PRIV_ONLY):
+      self.assertNotEqual(self.overlays[b][self.PRIVATE], [])
+    self.assertNotEqual(self.overlays[self.PUB_PRIV][self.BOTH],
+                        self.overlays[self.PUB_PRIV][self.PRIVATE])
+    self.assertNotEqual(self.overlays[self.PUB_PRIV_VARIANT][self.BOTH],
+                        self.overlays[self.PUB_PRIV_VARIANT][self.PRIVATE])
+
+  def testFoundPublicOverlays(self):
+    """Verify that public boards had their overlays located."""
+    for b in (self.PUB_PRIV, self.PUB_PRIV_VARIANT, self.PUB_ONLY,
+              self.PUB2_ONLY):
+      self.assertNotEqual(self.overlays[b][self.PUBLIC], [])
+    self.assertNotEqual(self.overlays[self.PUB_PRIV][self.BOTH],
+                        self.overlays[self.PUB_PRIV][self.PUBLIC])
+    self.assertNotEqual(self.overlays[self.PUB_PRIV_VARIANT][self.BOTH],
+                        self.overlays[self.PUB_PRIV_VARIANT][self.PUBLIC])
+
+  def testFoundParentOverlays(self):
+    """Verify that the overlays for a parent board are found."""
+    for d in self.PUBLIC, self.PRIVATE:
+      self.assertLess(set(self.overlays[self.PUB_PRIV][d]),
+                      set(self.overlays[self.PUB_PRIV_VARIANT][d]))
+
+
+class UtilFuncsTest(cros_test_lib.TempDirTestCase):
+  """Basic tests for utility functions"""
+
+  def _CreateProfilesRepoName(self, name):
+    """Write |name| to profiles/repo_name"""
+    profiles = os.path.join(self.tempdir, 'profiles')
+    osutils.SafeMakedirs(profiles)
+    repo_name = os.path.join(profiles, 'repo_name')
+    osutils.WriteFile(repo_name, name)
+
+  def testGetOverlayNameNone(self):
+    """If the overlay has no name, it should be fine"""
+    self.assertEqual(portage_util.GetOverlayName(self.tempdir), None)
+
+  def testGetOverlayNameProfilesRepoName(self):
+    """Verify profiles/repo_name can be read"""
+    self._CreateProfilesRepoName('hi!')
+    self.assertEqual(portage_util.GetOverlayName(self.tempdir), 'hi!')
+
+  def testGetOverlayNameProfilesLayoutConf(self):
+    """Verify metadata/layout.conf is read before profiles/repo_name"""
+    self._CreateProfilesRepoName('hi!')
+    metadata = os.path.join(self.tempdir, 'metadata')
+    osutils.SafeMakedirs(metadata)
+    layout_conf = os.path.join(metadata, 'layout.conf')
+    osutils.WriteFile(layout_conf, 'repo-name = bye')
+    self.assertEqual(portage_util.GetOverlayName(self.tempdir), 'bye')
+
+  def testGetOverlayNameProfilesLayoutConfNoRepoName(self):
+    """Verify metadata/layout.conf w/out repo-name is ignored"""
+    self._CreateProfilesRepoName('hi!')
+    metadata = os.path.join(self.tempdir, 'metadata')
+    osutils.SafeMakedirs(metadata)
+    layout_conf = os.path.join(metadata, 'layout.conf')
+    osutils.WriteFile(layout_conf, 'here = we go')
+    self.assertEqual(portage_util.GetOverlayName(self.tempdir), 'hi!')
+
+
+class BuildEBuildDictionaryTest(cros_test_lib.MockTempDirTestCase):
+  """Tests of the EBuild Dictionary."""
+
+  def setUp(self):
+    # Treat the tempdir itself as a single overlay, and replace the real
+    # uprev-candidate scan with the simplified mock defined below.
+    self.overlay = self.tempdir
+    self.uprev_candidate_mock = self.PatchObject(
+        portage_util, '_FindUprevCandidates',
+        side_effect=BuildEBuildDictionaryTest._FindUprevCandidateMock)
+    # Maps overlay path -> packages found; filled in by the tests' calls
+    # to BuildEBuildDictionary().
+    self.overlays = {self.overlay: []}
+
+  def _CreatePackage(self, name, blacklisted=False):
+    """Helper that creates an ebuild."""
+    package_path = os.path.join(self.overlay, name,
+                                'test_package-0.0.1.ebuild')
+    content = 'CROS_WORKON_BLACKLIST=1' if blacklisted else ''
+    osutils.WriteFile(package_path, content, makedirs=True)
+
+  @staticmethod
+  def _FindUprevCandidateMock(files, allow_blacklisted=False):
+    """Mock for the FindUprevCandidateMock function.
+
+    Simplified implementation of FindUprevCandidate: consider an ebuild worthy
+    of uprev if |allow_blacklisted| is set or the ebuild is not blacklisted.
+    """
+    for f in files:
+      if (f.endswith('.ebuild') and
+          (not 'CROS_WORKON_BLACKLIST=1' in osutils.ReadFile(f) or
+           allow_blacklisted)):
+        pkgdir = os.path.dirname(f)
+        return _Package(os.path.join(os.path.basename(os.path.dirname(pkgdir)),
+                                     os.path.basename(pkgdir)))
+    return None
+
+  def _assertFoundPackages(self, packages):
+    """Succeeds iff the packages discovered were packages."""
+    self.assertEquals(len(self.overlays), 1)
+    self.assertEquals([p.package for p in self.overlays[self.overlay]],
+                      packages)
+
+  def testWantedPackage(self):
+    """Test that we can find a specific package."""
+    package_name = 'chromeos-base/mypackage'
+    self._CreatePackage(package_name)
+    portage_util.BuildEBuildDictionary(self.overlays, False, [package_name])
+    self._assertFoundPackages([package_name])
+
+  def testUnwantedPackage(self):
+    """Test that we find only the packages we want."""
+    portage_util.BuildEBuildDictionary(self.overlays, False, [])
+    self._assertFoundPackages([])
+
+  def testAnyPackage(self):
+    """Test that we return all packages available if use_all is set."""
+    package_name = 'chromeos-base/package_name'
+    self._CreatePackage(package_name)
+    portage_util.BuildEBuildDictionary(self.overlays, True, [])
+    self._assertFoundPackages([package_name])
+
+  def testUnknownPackage(self):
+    """Test that _FindUprevCandidates is only called if the CP matches."""
+    self._CreatePackage('chromeos-base/package_name')
+    portage_util.BuildEBuildDictionary(self.overlays, False,
+                                       ['chromeos-base/other_package'])
+    self.assertFalse(self.uprev_candidate_mock.called)
+    self._assertFoundPackages([])
+
+  def testBlacklistedPackagesIgnoredByDefault(self):
+    """Test that blacklisted packages are ignored by default."""
+    package_name = 'chromeos-base/blacklisted_package'
+    self._CreatePackage(package_name, blacklisted=True)
+    portage_util.BuildEBuildDictionary(self.overlays, False, [package_name])
+    self._assertFoundPackages([])
+
+  def testBlacklistedPackagesAllowed(self):
+    """Test that we can find blacklisted packages with |allow_blacklisted|."""
+    package_name = 'chromeos-base/blacklisted_package'
+    self._CreatePackage(package_name, blacklisted=True)
+    portage_util.BuildEBuildDictionary(self.overlays, False, [package_name],
+                                       allow_blacklisted=True)
+    self._assertFoundPackages([package_name])
+
+
class ProjectMappingTest(cros_test_lib.TestCase):
  """Tests related to project mapping."""

  def testSplitEbuildPath(self):
    """Test if we can split an ebuild path into its components."""
    expected = ['chromeos-base', 'platform2', 'platform2-9999']
    base = 'chromeos-base/platform2/platform2-9999.ebuild'
    for prefix in ('', './', 'foo.bar/'):
      self.assertEquals(expected, portage_util.SplitEbuildPath(prefix + base))

  def testSplitPV(self):
    """Test splitting PVs into package and version components."""
    pv = 'bar-1.2.3_rc1-r5'
    package, version_no_rev, rev = pv.split('-')
    result = portage_util.SplitPV(pv)
    self.assertEquals(result.pv, pv)
    self.assertEquals(result.package, package)
    self.assertEquals(result.version_no_rev, version_no_rev)
    self.assertEquals(result.rev, rev)
    self.assertEquals(result.version, '%s-%s' % (version_no_rev, rev))

  def testSplitCPV(self):
    """Test splitting CPV into components."""
    cpv = 'foo/bar-4.5.6_alpha-r6'
    category, pv = cpv.split('/', 1)
    split_cpv = portage_util.SplitCPV(cpv)
    self.assertEquals(split_cpv.category, category)
    # Every PV field must carry over unchanged into the CPV split.
    for field, value in portage_util.SplitPV(pv)._asdict().iteritems():
      self.assertEquals(getattr(split_cpv, field), value)

  def testFindWorkonProjects(self):
    """Test if we can find the list of workon projects."""
    ply_image = 'media-gfx/ply-image'
    ply_image_project = 'chromiumos/third_party/ply-image'
    this = 'chromeos-base/chromite'
    this_project = 'chromiumos/chromite'
    matches = [
        ([ply_image], set([ply_image_project])),
        ([this], set([this_project])),
        ([ply_image, this], set([ply_image_project, this_project]))
    ]
    # Only meaningful when overlays are present in this checkout.
    if portage_util.FindOverlays(constants.BOTH_OVERLAYS):
      for packages, projects in matches:
        self.assertEquals(projects,
                          portage_util.FindWorkonProjects(packages))
+
+
class PortageDBTest(cros_test_lib.TempDirTestCase):
  """Portage package Database related tests."""

  # Fake mapping of category -> installed packages. Several entries are
  # deliberately invalid ('invalid', '*-invalid', '-invalid') so the tests can
  # verify the scanner skips them.
  fake_pkgdb = {'category1': ['package-1', 'package-2'],
                'category2': ['package-3', 'package-4'],
                'category3': ['invalid', 'semi-invalid'],
                'with': ['files-1'],
                'dash-category': ['package-5'],
                '-invalid': ['package-6'],
                'invalid': []}
  # Filled in by setUp with (category/package, version) tuples.
  fake_packages = []
  build_root = None
  fake_chroot = None

  # Fake CONTENTS entries for with/files-1. The last entry ('foo') is a bare
  # string, i.e. deliberately malformed, and must be filtered out by
  # ListContents (see testListContents).
  fake_files = [
      ('dir', '/lib64'),
      ('obj', '/lib64/libext2fs.so.2.4', 'a6723f44cf82f1979e9731043f820d8c',
       '1390848093'),
      ('dir', '/dir with spaces'),
      ('obj', '/dir with spaces/file with spaces',
       'cd4865bbf122da11fca97a04dfcac258', '1390848093'),
      ('sym', '/lib64/libe2p.so.2', '->', 'libe2p.so.2.3', '1390850489'),
      ('foo'),
  ]

  def setUp(self):
    # Build a fake var/db/pkg tree under a fake chroot, mirroring fake_pkgdb.
    self.build_root = self.tempdir
    self.fake_packages = []
    # Prepare a fake chroot.
    self.fake_chroot = os.path.join(self.build_root, 'chroot/build/amd64-host')
    fake_pkgdb_path = os.path.join(self.fake_chroot, 'var/db/pkg')
    os.makedirs(fake_pkgdb_path)
    for cat, pkgs in self.fake_pkgdb.iteritems():
      catpath = os.path.join(fake_pkgdb_path, cat)
      if cat == 'invalid':
        # Invalid category is a file. Should not be delved into.
        osutils.Touch(catpath)
        continue
      os.makedirs(catpath)
      for pkg in pkgs:
        pkgpath = os.path.join(catpath, pkg)
        if pkg == 'invalid':
          # Invalid package is a file instead of a directory.
          osutils.Touch(pkgpath)
          continue
        os.makedirs(pkgpath)
        if pkg.endswith('-invalid'):
          # Invalid package lacks the expected "%s/%s.ebuild" file.
          osutils.Touch(os.path.join(pkgpath, 'whatever'))
          continue
        # Create the package.
        osutils.Touch(os.path.join(pkgpath, pkg + '.ebuild'))
        if cat.startswith('-'):
          # Invalid category.
          continue
        # Correct pkg: record what ListInstalledPackages should report.
        pv = portage_util.SplitPV(pkg)
        key = '%s/%s' % (cat, pv.package)
        self.fake_packages.append((key, pv.version))
    # Add contents to with/files-1.
    osutils.WriteFile(
        os.path.join(fake_pkgdb_path, 'with', 'files-1', 'CONTENTS'),
        ''.join(' '.join(entry) + '\n' for entry in self.fake_files))

  def testListInstalledPackages(self):
    """Test if listing packages installed into a root works."""
    packages = portage_util.ListInstalledPackages(self.fake_chroot)
    # Sort the lists, because the filesystem might reorder the entries for us.
    packages.sort()
    self.fake_packages.sort()
    self.assertEquals(self.fake_packages, packages)

  def testIsPackageInstalled(self):
    """Test if checking the existence of an installed package works."""
    self.assertTrue(portage_util.IsPackageInstalled(
        'category1/package',
        sysroot=self.fake_chroot))
    self.assertFalse(portage_util.IsPackageInstalled(
        'category1/foo',
        sysroot=self.fake_chroot))

  def testListContents(self):
    """Test if the list of installed files is properly parsed."""
    pdb = portage_util.PortageDB(self.fake_chroot)
    pkg = pdb.GetInstalledPackage('with', 'files-1')
    self.assertTrue(pkg)
    lst = pkg.ListContents()

    # Check ListContents filters out the garbage we added to the list of files.
    fake_files = [f for f in self.fake_files if f[0] in ('sym', 'obj', 'dir')]
    self.assertEquals(len(fake_files), len(lst))

    # Check the paths are all relative.
    self.assertTrue(all(not f[1].startswith('/') for f in lst))

    # Check all the files are present. We only consider file type and path, and
    # convert the path to a relative path.
    fake_files = [(f[0], f[1].lstrip('/')) for f in fake_files]
    self.assertEquals(fake_files, lst)
+
+
class InstalledPackageTest(cros_test_lib.TempDirTestCase):
  """InstalledPackage class tests outside a PortageDB."""

  def setUp(self):
    # Minimal on-disk layout for one installed package entry.
    for filename, contents in (('package-1.ebuild', 'EAPI=1'),
                               ('PF', 'package-1'),
                               ('CATEGORY', 'category-1')):
      osutils.WriteFile(os.path.join(self.tempdir, filename), contents)

  def testOutOfDBPackage(self):
    """Tests an InstalledPackage instance can be created without a PortageDB."""
    pkg = portage_util.InstalledPackage(None, self.tempdir)
    self.assertEquals('package-1', pkg.pf)
    self.assertEquals('category-1', pkg.category)

  def testIncompletePackage(self):
    """Tests an incomplete or otherwise invalid package raises an exception."""
    # With the package name file removed, construction must fail.
    os.unlink(os.path.join(self.tempdir, 'PF'))
    self.assertRaises(portage_util.PortageDBException,
                      portage_util.InstalledPackage, None, self.tempdir)

    # Passing the package name explicitly makes up for the missing file.
    pkg = portage_util.InstalledPackage(None, self.tempdir, pf='package-1')
    self.assertEquals('package-1', pkg.pf)
diff --git a/lib/process_util.py b/lib/process_util.py
new file mode 100644
index 0000000..ab256a8
--- /dev/null
+++ b/lib/process_util.py
@@ -0,0 +1,66 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Process related utilities."""
+
+from __future__ import print_function
+
+import errno
+import os
+import signal
+import sys
+import time
+
+
def GetExitStatus(status):
  """Get the exit status of a child from an os.waitpid call.

  Args:
    status: The return value of os.waitpid(pid, 0)[1]

  Returns:
    The exit status of the process. If the process exited with a signal,
    the return value will be 128 plus the signal number.
  """
  if not os.WIFSIGNALED(status):
    assert os.WIFEXITED(status), 'Unexpected exit status %r' % status
    return os.WEXITSTATUS(status)
  # Follow the shell convention of reporting signal N as exit code 128+N.
  return 128 + os.WTERMSIG(status)
+
+
def ExitAsStatus(status):
  """Exit the same way as |status|.

  If the status field says it was killed by a signal, then we'll do that to
  ourselves.  Otherwise we'll exit with the exit code.

  See http://www.cons.org/cracauer/sigint.html for more details.

  Args:
    status: A status as returned by os.wait type funcs.
  """
  exit_status = os.WEXITSTATUS(status)

  if os.WIFSIGNALED(status):
    # Kill ourselves with the same signal.
    sig_status = os.WTERMSIG(status)
    pid = os.getpid()
    os.kill(pid, sig_status)
    # Give the signal a moment to be delivered before falling through.
    time.sleep(0.1)

    # Still here?  Maybe the signal was masked.
    try:
      signal.signal(sig_status, signal.SIG_DFL)
    except RuntimeError as e:
      # NOTE(review): this expects a RuntimeError carrying an errno for bad
      # signal numbers; confirm, as signal.signal raises ValueError on some
      # Python versions.
      if e.args[0] != errno.EINVAL:
        raise
    os.kill(pid, sig_status)
    time.sleep(0.1)

    # Still here?  Just exit.
    exit_status = 127

  # Exit with the code we want.
  sys.exit(exit_status)
diff --git a/lib/process_util_unittest b/lib/process_util_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/process_util_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/process_util_unittest.py b/lib/process_util_unittest.py
new file mode 100644
index 0000000..499b287
--- /dev/null
+++ b/lib/process_util_unittest.py
@@ -0,0 +1,118 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the process_util.py module."""
+
+from __future__ import print_function
+
+import os
+import signal
+
+from chromite.lib import cros_test_lib
+from chromite.lib import process_util
+
+
+def _SpawnChild(exit_code=None, kill_signal=None):
+  """Create a child, have it exit/killed, and return its status."""
+  assert exit_code is not None or kill_signal is not None
+
+  pid = os.fork()
+  if pid == 0:
+    # Make sure this child never returns.
+    while True:
+      if exit_code is not None:
+        # pylint: disable=W0212
+        os._exit(exit_code)
+      else:
+        os.kill(os.getpid(), kill_signal)
+
+  return os.waitpid(pid, 0)[1]
+
+
class GetExitStatusTests(cros_test_lib.TestCase):
  """Tests for GetExitStatus()"""

  def testExitNormal(self):
    """Verify normal exits get decoded."""
    self.assertEqual(
        process_util.GetExitStatus(_SpawnChild(exit_code=0)), 0)

  def testExitError(self):
    """Verify error exits (>0 && <128) get decoded."""
    self.assertEqual(
        process_util.GetExitStatus(_SpawnChild(exit_code=10)), 10)

  def testExitWeird(self):
    """Verify weird exits (>=128) get decoded."""
    self.assertEqual(
        process_util.GetExitStatus(_SpawnChild(exit_code=150)), 150)

  def testSIGUSR1(self):
    """Verify normal kill signals get decoded."""
    self.assertEqual(
        process_util.GetExitStatus(_SpawnChild(kill_signal=signal.SIGUSR1)),
        128 + signal.SIGUSR1)

  def testSIGKILL(self):
    """Verify harsh signals get decoded."""
    self.assertEqual(
        process_util.GetExitStatus(_SpawnChild(kill_signal=signal.SIGKILL)),
        128 + signal.SIGKILL)
+
+
class ExitAsStatusTests(cros_test_lib.TestCase):
  """Tests for ExitAsStatus()"""

  def _Tester(self, exit_code=None, kill_signal=None):
    """Helper func for testing ExitAsStatus()

    Create a child to mimic the grandchild.
    Create a grandchild and have it exit/killed.
    Assert behavior based on exit/signal behavior.
    """
    pid = os.fork()
    if pid == 0:
      # Let the grandchild exit/kill itself.
      # The child should mimic the grandchild.
      status = _SpawnChild(exit_code=exit_code, kill_signal=kill_signal)
      try:
        process_util.ExitAsStatus(status)
      except SystemExit as e:
        # ExitAsStatus raises SystemExit for plain exit codes; propagate the
        # code via os._exit so the parent can inspect it through waitpid.
        # pylint: disable=W0212
        os._exit(e.code)
      # ExitAsStatus must terminate the process one way or another.
      raise AssertionError('ERROR: should have exited!')

    # The parent returns the child's status.
    status = os.waitpid(pid, 0)[1]
    if exit_code is not None:
      # Plain exit: the child must have exited, not been signaled.
      self.assertFalse(os.WIFSIGNALED(status))
      self.assertTrue(os.WIFEXITED(status))
      self.assertEqual(os.WEXITSTATUS(status), exit_code)
    else:
      # Signal death: the child must have re-killed itself with the signal.
      self.assertFalse(os.WIFEXITED(status))
      self.assertTrue(os.WIFSIGNALED(status))
      self.assertEqual(os.WTERMSIG(status), kill_signal)

  def testExitNormal(self):
    """Verify normal exits get decoded."""
    self._Tester(exit_code=0)

  def testExitError(self):
    """Verify error exits (>0 && <128) get decoded."""
    self._Tester(exit_code=10)

  def testExitWeird(self):
    """Verify weird exits (>=128) get decoded."""
    self._Tester(exit_code=150)

  def testSIGUSR1(self):
    """Verify normal kill signals get decoded."""
    self._Tester(kill_signal=signal.SIGUSR1)

  def testSIGKILL(self):
    """Verify harsh signals get decoded."""
    self._Tester(kill_signal=signal.SIGKILL)
diff --git a/lib/proctitle.py b/lib/proctitle.py
new file mode 100644
index 0000000..ea524a9
--- /dev/null
+++ b/lib/proctitle.py
@@ -0,0 +1,33 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper module for dealing with setting the process title (seen in `ps`)."""
+
+from __future__ import print_function
+
+import __main__ as main
+import os
+
+# Import the relevant funcs into our namespace for callers.
+try:
+  # pylint: disable=unused-import, no-name-in-module
+  from setproctitle import getproctitle, setproctitle
+except ImportError:
+  # Module not available -> can't do anything.
+  getproctitle = lambda: None
+  setproctitle = lambda _x: None
+
+
+# Used with the settitle helper below.
+_SCRIPT_NAME = os.path.basename(main.__file__)
+
+
+# Used to distinguish between different runs.
+_TITLE_PID = os.getpid()
+
+
def settitle(*args):
  """Set the process title to something useful to make `ps` output easy."""
  prefix = '%s/%s' % (_SCRIPT_NAME, _TITLE_PID)
  setproctitle(': '.join((prefix,) + args))
diff --git a/lib/project_sdk.py b/lib/project_sdk.py
new file mode 100644
index 0000000..e547a5f
--- /dev/null
+++ b/lib/project_sdk.py
@@ -0,0 +1,160 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common utilities for working with Project SDK."""
+
+from __future__ import print_function
+
+import os
+import re
+import stat
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import workspace_lib
+
+
def FindRepoRoot(sdk_dir=None):
  """Locate the SDK root directly by looking for .repo dir.

  This is very similar to constants.SOURCE_ROOT, except that it can operate
  against repo checkouts outside our current code base.

  CAUTION! Using SDKs from directories other than the default is likely to
  break assumptions that our tools are built upon.  As a rule of thumb, do not
  expose this argument externally unless you know what you're doing.

  Args:
    sdk_dir: Path of the SDK, or any dir inside it. None defaults to
      constants.SOURCE_ROOT.

  Returns:
    Root dir of SDK, or None.
  """
  if sdk_dir is None:
    return constants.SOURCE_ROOT

  if not os.path.isdir(sdk_dir):
    # A nonexistent path (or a plain file) cannot be inside a checkout.
    return None

  # Walk up from |sdk_dir| looking for a .repo directory.
  repo_dir = osutils.FindInPathParents('.repo', os.path.abspath(sdk_dir),
                                       test_func=os.path.isdir)
  if not repo_dir:
    return None
  return os.path.dirname(repo_dir)
+
+
def VersionFile(sdk_dir):
  """Return the path of the SDK_VERSION file inside |sdk_dir|."""
  return os.path.join(sdk_dir, 'SDK_VERSION')
+
+
def FindVersion(sdk_dir=None):
  """Find the version of a given SDK.

  If the SDK was fetched by any means other than "brillo sdk" then it will
  always appear to be 'non-official', even if an official manifest was used.

  Args:
    sdk_dir: path to the SDK, or any of its sub directories.

  Returns:
    The version of your SDK as a string. '6500.0.0'
    None if the directory doesn't appear to be an SDK.
  """
  sdk_root = FindRepoRoot(sdk_dir)
  if sdk_root is None:
    # Not inside a repo checkout at all.
    return None

  v_file = VersionFile(sdk_root)
  if not os.path.exists(v_file):
    return None
  return osutils.ReadFile(v_file)
+
+
def _GetExecutableVersion(cmd, version_arg='--version'):
  """Gets an executable version string using |version_arg|.

  Args:
    cmd: Executable to check (for example, '/bin/bash').
    version_arg: Argument to get |cmd| to print its version.

  Returns:
    Output string or None if the program doesn't exist or gave a
    non-zero exit code.
  """
  try:
    return cros_build_lib.RunCommand(
        [cmd, version_arg], print_cmd=False, capture_output=True).output
  except cros_build_lib.RunCommandError:
    # NOTE(review): assumes a missing executable also surfaces as
    # RunCommandError (not OSError) — confirm against cros_build_lib.
    return None
+
+
def VerifyEnvironment(workspace_path=None):
  """Verify the environment we are installed to.

  Disk permissions are only verified if a workspace path is provided.

  Args:
    workspace_path: Root directory of the workspace or None.

  Returns:
    boolean: True if the environment looks friendly.
  """
  result = True

  # Verify Python:
  #   We assume the python environment is acceptable, because we got here.
  #   However, we can add imports here to check for any required external
  #   packages.

  # Verify executables that just need to exist.
  for cmd in ('/bin/bash', 'curl'):
    if _GetExecutableVersion(cmd) is None:
      logging.error('%s is required to use the SDK.', cmd)
      result = False

  # Verify Git version.
  git_requirement_message = 'git 1.8 or greater is required to use the SDK.'
  git_version = _GetExecutableVersion('git')
  if git_version is None:
    # git is missing entirely. Previously this fell through and passed None
    # to re.match(), raising a TypeError instead of reporting failure.
    logging.error(git_requirement_message)
    result = False
  else:
    # Example version string: 'git version 2.2.0.rc0.207.ga3a616c'.
    m = re.match(r'git version (\d+)\.(\d+)', git_version)
    if not m:
      logging.error(git_requirement_message)
      logging.error("git version not recognized from: '%s'.", git_version)
      result = False
    else:
      # Compare [major, minor] lists lexicographically, e.g. [2, 3].
      gv_int_list = [int(d) for d in m.groups()]
      if gv_int_list < [1, 8]:
        logging.error(git_requirement_message)
        logging.error("Current version: '%s'.", git_version)
        result = False

  # If a workspace path is provided, validate chroot requirements.
  if workspace_path:
    chroot_dir = workspace_lib.ChrootPath(workspace_path)

    # Create a file with the suid bit set.
    suid_file = os.path.join(chroot_dir, 'suid_test')
    try:
      # Create a file with the SUID set for the owner.
      osutils.Touch(suid_file, makedirs=True, mode=stat.S_ISUID)

      # See if the SUID bit will be respected, or ignored.
      st = os.statvfs(suid_file)

      # The os.ST_NOSUID constant wasn't added until python-3.2.
      if st.f_flag & 0x2:
        logging.error(
            'Your current chroot directory (%s) does not support the SUID bit,'
            ' which is required. You can move the chroot to a new location'
            ' using "brillo chroot --move <new_dir>"', chroot_dir)
        result = False
    finally:
      osutils.SafeUnlink(suid_file)

  return result
diff --git a/lib/project_sdk_unittest b/lib/project_sdk_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/project_sdk_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/project_sdk_unittest.py b/lib/project_sdk_unittest.py
new file mode 100644
index 0000000..7859c02
--- /dev/null
+++ b/lib/project_sdk_unittest.py
@@ -0,0 +1,168 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for the project_sdk library."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import project_sdk
+
+# pylint: disable=protected-access
+
class ProjectSdkTest(cros_test_lib.TempDirTestCase):
  """Unittest for project_sdk.py"""

  def setUp(self):
    self.version = '1.2.3'

    # Assorted paths used by the tests below.
    self.bogus_dir = os.path.join(self.tempdir, 'bogus')
    self.repo_dir = os.path.join(self.tempdir, 'repo')
    self.nested_dir = os.path.join(self.repo_dir, 'foo', 'bar')

    # Build a fake repo checkout containing nested sub-dirs.
    for path in (os.path.join(self.repo_dir, '.repo'), self.nested_dir):
      osutils.SafeMakedirs(path)

    osutils.WriteFile(project_sdk.VersionFile(self.repo_dir), self.version)

  def testFindRepoRootCurrentRepo(self):
    """Test FindRepoRoot with default of CWD."""
    self.assertEqual(constants.SOURCE_ROOT, project_sdk.FindRepoRoot())

  def testFindRepoRootSpecifiedBogus(self):
    """Test FindRepoRoot with non-existent directory outside the repo."""
    self.assertIsNone(project_sdk.FindRepoRoot(self.bogus_dir))

  def testFindRepoRootSpecifiedRoot(self):
    """Test FindRepoRoot with top level of repo tree."""
    self.assertEqual(self.repo_dir, project_sdk.FindRepoRoot(self.repo_dir))

  def testFindRepoRootSpecifiedNested(self):
    """Test FindRepoRoot with nested inside repo tree."""
    self.assertEqual(self.repo_dir, project_sdk.FindRepoRoot(self.nested_dir))

  def testFindRepoRootSpecifiedNonexistent(self):
    """Test FindRepoRoot refuses to scan a nonexistent path."""
    missing = os.path.join(self.nested_dir, 'not_there')
    self.assertIsNone(project_sdk.FindRepoRoot(missing))

  def testVersionFile(self):
    """Test the SDK_VERSION path is derived from the SDK root."""
    self.assertEqual('/foo/SDK_VERSION', project_sdk.VersionFile('/foo'))

  def testFindVersionDefault(self):
    """Test FindVersion with default of CWD."""
    # Expected results are undefined, just ensure we don't crash.
    project_sdk.FindVersion()

  def testFindVersionBogus(self):
    """Test FindVersion with non-existent directory outside the repo."""
    self.assertIsNone(project_sdk.FindVersion(self.bogus_dir))

  def testFindVersionSpecifiedRoot(self):
    """Test FindVersion with top level of repo tree."""
    self.assertEqual(self.version, project_sdk.FindVersion(self.repo_dir))

  def testFindVersionSpecifiedNested(self):
    """Test FindVersion with nested inside repo tree."""
    self.assertEqual(self.version, project_sdk.FindVersion(self.nested_dir))
+
+
class ProjectSdkVerifyFake(cros_test_lib.MockTempDirTestCase):
  """Test VerifyEnvironment with mocks."""

  def setUp(self):
    # Intercept all RunCommand invocations made by VerifyEnvironment.
    self.rc_mock = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())

  def MockEnvironment(self, bash=True, git='2.2.0', curl=True, suid=True):
    """Mocks an environment for the specified checks.

    When adding a new environment check, add it to this function so that
    each test can isolate just the check it wants to test.

    Args:
      bash: True to mock a valid bash environment.
      git: If set, git version to mock.
      curl: True to mock a valid curl environment.
      suid: True to mock a valid suid environment.
    """
    if bash:
      self.rc_mock.AddCmdResult(['/bin/bash', '--version'])
    if git:
      # VerifyEnvironment parses this exact 'git version X.Y...' format.
      self.rc_mock.AddCmdResult(['git', '--version'],
                                output='git version %s' % git)
    if curl:
      self.rc_mock.AddCmdResult(['curl', '--version'])
    if suid:
      # f_flag of 0 means the ST_NOSUID bit (0x2) is clear, i.e. SUID works.
      self.PatchObject(os, 'statvfs', autospec=True).return_value.f_flag = 0x0

  def testMockEnvironment(self):
    """Test that MockEnvironment() by itself sets up a valid env."""
    self.MockEnvironment()

    self.assertTrue(project_sdk.VerifyEnvironment(self.tempdir))

  def testGTrusty(self):
    """Test with mock of 'gTrusty' distribution."""
    self.MockEnvironment(git='2.2.0.rc0.207.ga3a616c')

    self.assertTrue(project_sdk.VerifyEnvironment(self.tempdir))

  def testUbuntu14(self):
    """Test with mock of 'Ubuntu LTS 14' distribution."""
    self.MockEnvironment(git='2.1.0')

    self.assertTrue(project_sdk.VerifyEnvironment(self.tempdir))

  def testGitNewEnough(self):
    """Test with mock of git 1.8."""
    self.MockEnvironment(git='1.8.3.1')

    self.assertTrue(project_sdk.VerifyEnvironment(self.tempdir))

  def testFailNoBash(self):
    """Test with mock of no bash present."""
    self.MockEnvironment(bash=False)
    # Exit code 127 is the shell's 'command not found'.
    self.rc_mock.AddCmdResult(['/bin/bash', '--version'], returncode=127)

    self.assertFalse(project_sdk.VerifyEnvironment(self.tempdir))

  def testFailGitTooOld(self):
    """Test with mock of git too old to use."""
    self.MockEnvironment(git='1.7.10.4')

    self.assertFalse(project_sdk.VerifyEnvironment(self.tempdir))

  def testFailNoCurl(self):
    """Test with mock of no curl present."""
    self.MockEnvironment(curl=False)
    self.rc_mock.AddCmdResult(['curl', '--version'], returncode=127)

    self.assertFalse(project_sdk.VerifyEnvironment(self.tempdir))

  def testFailSuid(self):
    """Test with SUID is disabled."""
    self.MockEnvironment(suid=False)
    # The os.ST_NOSUID constant wasn't added until python-3.2.
    self.PatchObject(os, 'statvfs', autospec=True).return_value.f_flag = 0x2

    self.assertFalse(project_sdk.VerifyEnvironment(self.tempdir))
+
class ProjectSdkVerifyReal(cros_test_lib.TempDirTestCase):
  """Test VerifyEnvironment for real."""

  def testVerifyEnvironment(self):
    """Test, assuming production environment is valid."""
    self.assertTrue(project_sdk.VerifyEnvironment(self.tempdir))

  def testGetExecutableVersionNonExistent(self):
    """Tests _GetExecutableVersion() when the executable doesn't exist."""
    version = project_sdk._GetExecutableVersion('/not/a/real/program')
    self.assertIsNone(version)
diff --git a/lib/qemu.py b/lib/qemu.py
new file mode 100644
index 0000000..8869e70
--- /dev/null
+++ b/lib/qemu.py
@@ -0,0 +1,303 @@
+# Copyright 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Qemu is used to help with executing and debugging non-x86_64  binaries."""
+
+from __future__ import print_function
+
+import array
+import errno
+import os
+
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+
+
+class Qemu(object):
+  """Framework for running tests via qemu"""
+
+  # The binfmt register format looks like:
+  # :name:type:offset:magic:mask:interpreter:flags
+  _REGISTER_FORMAT = r':%(name)s:M::%(magic)s:%(mask)s:%(interp)s:%(flags)s'
+
+  # Require enough data to read the Ehdr of the ELF.
+  _MIN_ELF_LEN = 64
+
+  # Tuples of (magic, mask) for an arch.  Most only need to identify by the Ehdr
+  # fields: e_ident (16 bytes), e_type (2 bytes), e_machine (2 bytes).
+  _MAGIC_MASK = {
+      'aarch64':
+          (r'\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x02\x00\xb7\x00',
+           r'\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xfe\xff\xff\xff'),
+      'alpha':
+          (r'\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x02\x00\x26\x90',
+           r'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xfe\xff\xff\xff'),
+      'arm':
+          (r'\x7f\x45\x4c\x46\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x02\x00\x28\x00',
+           r'\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xfe\xff\xff\xff'),
+      'armeb':
+          (r'\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x02\x00\x28',
+           r'\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xff\xfe\xff\xff'),
+      'm68k':
+          (r'\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x02\x00\x04',
+           r'\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xff\xfe\xff\xff'),
+      # For mips targets, we need to scan e_flags.  But first we have to skip:
+      # e_version (4 bytes), e_entry/e_phoff/e_shoff (4 or 8 bytes).
+      'mips':
+          (r'\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x02\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x00\x00\x00\x00\x00\x10\x00',
+           r'\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xff\xfe\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x00\x00\x00\x00\x00\xf0\x20'),
+      'mipsel':
+          (r'\x7f\x45\x4c\x46\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x02\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x00\x00\x00\x00\x10\x00\x00',
+           r'\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x00\x00\x00\x20\xf0\x00\x00'),
+      'mipsn32':
+          (r'\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x02\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x00\x00\x00\x00\x00\x00\x20',
+           r'\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xff\xfe\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x00\x00\x00\x00\x00\xf0\x20'),
+      'mipsn32el':
+          (r'\x7f\x45\x4c\x46\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x02\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x00\x00\x00\x20\x00\x00\x00',
+           r'\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x00\x00\x00\x20\xf0\x00\x00'),
+      'mips64':
+          (r'\x7f\x45\x4c\x46\x02\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x02\x00\x08',
+           r'\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xff\xfe\xff\xff'),
+      'mips64el':
+          (r'\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x02\x00\x08\x00',
+           r'\xff\xff\xff\xff\xff\xff\xff\x00\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xfe\xff\xff\xff'),
+      'ppc':
+          (r'\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x02\x00\x14',
+           r'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xff\xfe\xff\xff'),
+      'sparc':
+          (r'\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x02\x00\x12',
+           r'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xff\xfe\xff\xff'),
+      'sparc64':
+          (r'\x7f\x45\x4c\x46\x02\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x02\x00\x2b',
+           r'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xff\xfe\xff\xff'),
+      's390x':
+          (r'\x7f\x45\x4c\x46\x02\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x02\x00\x16',
+           r'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xff\xfe\xff\xff'),
+      'sh4':
+          (r'\x7f\x45\x4c\x46\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x02\x00\x2a\x00',
+           r'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xfe\xff\xff\xff'),
+      'sh4eb':
+          (r'\x7f\x45\x4c\x46\x01\x02\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+           r'\x00\x02\x00\x2a',
+           r'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
+           r'\xff\xfe\xff\xff'),
+  }
+
+  _BINFMT_PATH = '/proc/sys/fs/binfmt_misc'
+  _BINFMT_REGISTER_PATH = os.path.join(_BINFMT_PATH, 'register')
+
+  def __init__(self, sysroot, arch=None):
+    if arch is None:
+      arch = self.DetectArch(None, sysroot)
+    self.arch = arch
+    self.sysroot = sysroot
+    self.name = 'qemu-%s' % self.arch
+    self.build_path = os.path.join('/build', 'bin', self.name)
+    self.binfmt_path = os.path.join(self._BINFMT_PATH, self.name)
+
  @classmethod
  def DetectArch(cls, prog, sysroot):
    """Figure out which qemu wrapper is best for this target

    Reads the ELF header of |prog| (or of a few well-known binaries under
    |sysroot|) and matches it against the magic/mask table in _MAGIC_MASK.

    Args:
      prog: Path (relative to |sysroot|) of a binary to probe first, or
        None to only probe the standard fallback binaries.
      sysroot: Root directory to resolve the probe paths against.

    Returns:
      The matching arch key from _MAGIC_MASK, or None if nothing matched.

    Raises:
      ValueError: If more than one table entry matched the same ELF.
    """
    def MaskMatches(bheader, bmagic, bmask):
      """Apply |bmask| to |bheader| and see if it matches |bmagic|

      The |bheader| array may be longer than the |bmask|; in which case we
      will only compare the number of bytes that |bmask| takes up.
      """
      # This algo is what the kernel uses.
      return all(((header_byte ^ magic_byte) & mask_byte) == 0x00
                 for header_byte, magic_byte, mask_byte in
                 zip(bheader[0:len(bmask)], bmagic, bmask))

    if prog is None:
      # Common when doing a global setup.
      prog = '/'

    # Symlinks are skipped so we read real file contents; the '/' placeholder
    # above simply fails the isfile() check and falls through.
    for path in (prog, '/sbin/ldconfig', '/bin/sh', '/bin/dash', '/bin/bash'):
      path = os.path.join(sysroot, path.lstrip('/'))
      if os.path.islink(path) or not os.path.isfile(path):
        continue

      # Read the header of the ELF first.
      matched_arch = None
      with open(path, 'rb') as f:
        header = f.read(cls._MIN_ELF_LEN)
        if len(header) == cls._MIN_ELF_LEN:
          bheader = array.array('B', header)

          # Walk all the magics and see if any of them match this ELF.
          # NOTE: str.decode('string_escape') is Python 2-only; this module
          # predates a Python 3 port.
          for arch, magic_mask in cls._MAGIC_MASK.items():
            magic = magic_mask[0].decode('string_escape')
            bmagic = array.array('B', magic)
            mask = magic_mask[1].decode('string_escape')
            bmask = array.array('B', mask)

            if MaskMatches(bheader, bmagic, bmask):
              # Make sure we do not have ambiguous magics as this will
              # also confuse the kernel when it tries to find a match.
              if not matched_arch is None:
                raise ValueError('internal error: multiple masks matched '
                                 '(%s & %s)' % (matched_arch, arch))
              matched_arch = arch

      if not matched_arch is None:
        return matched_arch
+
+  @staticmethod
+  def inode(path):
+    """Return the inode for |path| (or -1 if it doesn't exist)"""
+    try:
+      return os.stat(path).st_ino
+    except OSError as e:
+      if e.errno == errno.ENOENT:
+        return -1
+      raise
+
+  def Install(self, sysroot=None):
+    """Install qemu into |sysroot| safely"""
+    if sysroot is None:
+      sysroot = self.sysroot
+
+    # Copying strategy:
+    # Compare /usr/bin/qemu inode to /build/$board/build/bin/qemu; if
+    # different, hard link to a temporary file, then rename temp to target.
+    # This should ensure that once $QEMU_SYSROOT_PATH exists it will always
+    # exist, regardless of simultaneous test setups.
+    paths = (
+        ('/usr/bin/%s' % self.name,
+         sysroot + self.build_path),
+        ('/usr/bin/qemu-binfmt-wrapper',
+         sysroot + self.build_path + '-binfmt-wrapper'),
+    )
+
+    for src_path, sysroot_path in paths:
+      src_path = os.path.normpath(src_path)
+      sysroot_path = os.path.normpath(sysroot_path)
+      if self.inode(sysroot_path) != self.inode(src_path):
+        # Use hardlinks so that the process is atomic.
+        temp_path = '%s.%s' % (sysroot_path, os.getpid())
+        os.link(src_path, temp_path)
+        os.rename(temp_path, sysroot_path)
+        # Clear out the temp path in case it exists (another process already
+        # swooped in and created the target link for us).
+        try:
+          os.unlink(temp_path)
+        except OSError as e:
+          if e.errno != errno.ENOENT:
+            raise
+
+  @classmethod
+  def GetRegisterBinfmtStr(cls, arch, name, interp):
+    """Get the string used to pass to the kernel for registering the format
+
+    Args:
+      arch: The architecture to get the register string
+      name: The name to use for registering
+      interp: The name for the interpreter
+
+    Returns:
+      A string ready to pass to the register file
+    """
+    magic, mask = cls._MAGIC_MASK[arch]
+
+    # We need to decode the escape sequences as the kernel has a limit on
+    # the register string (256 bytes!).  However, we can't decode two chars:
+    # NUL bytes (since the kernel uses strchr and friends) and colon bytes
+    # (since we use that as the field separator).
+    # TODO: Once this lands, and we drop support for older kernels, we can
+    # probably drop this workaround too.  https://lkml.org/lkml/2014/9/1/181
+    magic = magic.decode('string_escape')
+    mask = mask.decode('string_escape')
+
+    # Further way of data packing: if the mask and magic use 0x00 for the same
+    # byte, then turn the magic into something else.  This way the magic can
+    # be written in raw form, but the mask will still cancel it out.
+    magic = ''.join([
+        '!' if (magic_byte == '\x00' and mask_byte == '\x00') else magic_byte
+        for magic_byte, mask_byte in zip(magic, mask)
+    ])
+
+    # New repack the bytes.
+    def _SemiEncode(s):
+      return s.replace('\x00', r'\x00').replace(':', '\x3a')
+    magic = _SemiEncode(magic)
+    mask = _SemiEncode(mask)
+
+    return cls._REGISTER_FORMAT % {
+        'name': name,
+        'magic': magic,
+        'mask': mask,
+        'interp': '%s-binfmt-wrapper' % interp,
+        'flags': 'POC',
+    }
+
+  def RegisterBinfmt(self):
+    """Make sure qemu has been registered as a format handler
+
+    Prep the binfmt handler. First mount if needed, then unregister any bad
+    mappings, and then register our mapping.
+
+    There may still be some race conditions here where one script
+    de-registers and another script starts executing before it gets
+    re-registered, however it should be rare.
+    """
+    if not os.path.exists(self._BINFMT_REGISTER_PATH):
+      osutils.Mount('binfmt_misc', self._BINFMT_PATH, 'binfmt_misc', 0)
+
+    if os.path.exists(self.binfmt_path):
+      interp = 'interpreter %s\n' % self.build_path
+      for line in osutils.ReadFile(self.binfmt_path):
+        if line == interp:
+          break
+      else:
+        osutils.WriteFile(self.binfmt_path, '-1')
+
+    if not os.path.exists(self.binfmt_path):
+      register = self.GetRegisterBinfmtStr(self.arch, self.name,
+                                           self.build_path)
+      try:
+        osutils.WriteFile(self._BINFMT_REGISTER_PATH, register)
+      except IOError:
+        logging.error('error: attempted to register: (len:%i) %s',
+                      len(register), register)
diff --git a/lib/qemu_unittest b/lib/qemu_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/qemu_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/qemu_unittest.py b/lib/qemu_unittest.py
new file mode 100644
index 0000000..0d9c22f
--- /dev/null
+++ b/lib/qemu_unittest.py
@@ -0,0 +1,48 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test for chromite qemu logic"""
+
+from __future__ import print_function
+
+import glob
+import os
+
+from chromite.lib import cros_test_lib
+from chromite.lib import qemu
+
+
class QemuTests(cros_test_lib.TestCase):
  """Verify Qemu logic works"""

  def testArchDetect(self):
    """Verify we correctly probe each arch"""
    # Each datafiles/arch.<name>.elf fixture encodes its expected arch in
    # its filename; DetectArch must recover it from the ELF header alone.
    test_dir = os.path.join(os.path.realpath(os.path.dirname(__file__)),
                            'datafiles')
    test_files = os.path.join(test_dir, 'arch.*.elf')

    for test in glob.glob(test_files):
      test_file = os.path.basename(test)
      exp_arch = test_file.split('.')[1]

      arch = qemu.Qemu.DetectArch(test_file, test_dir)
      if arch is None:
        # See if we have a mask for it.
        # pylint: disable=W0212
        self.assertNotIn(exp_arch, qemu.Qemu._MAGIC_MASK.keys(),
                         msg='ELF "%s" did not match "%s", but should have' %
                         (test, exp_arch))
      else:
        self.assertEqual(arch, exp_arch)

  def testRegisterStr(self):
    """Verify the binfmt register string doesn't exceed kernel limits"""
    # The kernel caps binfmt_misc register strings at 256 bytes (see the
    # comment in Qemu.GetRegisterBinfmtStr), so every arch must fit.
    # pylint: disable=W0212
    for arch in qemu.Qemu._MAGIC_MASK.keys():
      name = 'qemu-%s' % arch
      interp = '/build/bin/%s' % name
      register = qemu.Qemu.GetRegisterBinfmtStr(arch, name, interp)
      self.assertGreaterEqual(256, len(register),
                              msg='arch "%s" has too long of a register string:'
                                  ' %i: %r' % (arch, len(register), register))
diff --git a/lib/remote_access.py b/lib/remote_access.py
new file mode 100644
index 0000000..ccce218
--- /dev/null
+++ b/lib/remote_access.py
@@ -0,0 +1,1039 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Library containing functions to access a remote test device."""
+
+from __future__ import print_function
+
+import glob
+import os
+import shutil
+import socket
+import stat
+import tempfile
+import time
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import timeout_util
+
+
_path = os.path.dirname(os.path.realpath(__file__))
# Well-known private key checked into the source tree for test devices.
TEST_PRIVATE_KEY = os.path.normpath(
    os.path.join(_path, '../ssh_keys/testing_rsa'))
del _path

LOCALHOST = 'localhost'
LOCALHOST_IP = '127.0.0.1'
ROOT_ACCOUNT = 'root'

# Marker file created on the device just before rebooting; _CheckIfRebooted
# polls for its disappearance to detect that the reboot completed.
REBOOT_MARKER = '/tmp/awaiting_reboot'
REBOOT_MAX_WAIT = 120  # Seconds to wait for a reboot to complete.
REBOOT_SSH_CONNECT_TIMEOUT = 2  # Seconds, per ssh connection attempt.
REBOOT_SSH_CONNECT_ATTEMPTS = 2
CHECK_INTERVAL = 5  # Seconds between reboot-completion polls.
DEFAULT_SSH_PORT = 22
SSH_ERROR_CODE = 255  # Exit code ssh itself uses to report its own failure.

# SSH default known_hosts filepath.
KNOWN_HOSTS_PATH = os.path.expanduser('~/.ssh/known_hosts')

# Dev/test packages are installed in these paths.
DEV_BIN_PATHS = '/usr/local/bin:/usr/local/sbin'
+
+
# All errors raised by this module derive from RemoteAccessException so
# callers can catch the whole family with a single except clause.
class RemoteAccessException(Exception):
  """Base exception for this module."""


class SSHConnectionError(RemoteAccessException):
  """Raised when SSH connection has failed."""

  def IsKnownHostsMismatch(self):
    """Returns True if this error was caused by a known_hosts mismatch.

    Will only check for a mismatch, this will return False if the host
    didn't exist in known_hosts at all.
    """
    # Checking for string output is brittle, but there's no exit code that
    # indicates why SSH failed so this might be the best we can do.
    # RemoteAccess.RemoteSh() sets LC_MESSAGES=C so we only need to check for
    # the English error message.
    # Verified for OpenSSH_6.6.1p1.
    return 'REMOTE HOST IDENTIFICATION HAS CHANGED' in str(self)


class DeviceNotPingableError(RemoteAccessException):
  """Raised when device is not pingable."""


class DefaultDeviceError(RemoteAccessException):
  """Raised when a default ChromiumOSDevice can't be found."""


class CatFileError(RemoteAccessException):
  """Raised when error occurs while trying to cat a remote file."""


class RunningPidsError(RemoteAccessException):
  """Raised when unable to get running pids on the device."""
+
+
def NormalizePort(port, str_ok=True):
  """Validates |port| and returns it as an integer.

  Args:
    port: The port to normalize.
    str_ok: Accept |port| in string. If set False, only accepts
      an integer. Defaults to True.

  Returns:
    A port number (integer).

  Raises:
    ValueError: If |port| is not a valid port number (or is a string while
      str_ok is False).
  """
  err_msg = '%s is not a valid port number.' % port

  if not (str_ok or isinstance(port, int)):
    raise ValueError(err_msg)

  port = int(port)
  if not 0 < port < 65536:
    raise ValueError(err_msg)

  return port
+
+
def GetUnusedPort(ip=LOCALHOST, family=socket.AF_INET,
                  stype=socket.SOCK_STREAM):
  """Returns a currently unused port.

  Example:
    Note: Since this does not guarantee the port remains unused when you
    attempt to bind it, your code should retry in a loop like so:
    while True:
      try:
        port = remote_access.GetUnusedPort()
        <attempt to bind the port>
        break
      except socket.error as e:
        if e.errno == errno.EADDRINUSE:
          continue
        <fallback/raise>

  Args:
    ip: IP to use to bind the port.
    family: Address family.
    stype: Socket type.

  Returns:
    A port number (integer), or None if a socket could not be bound.
  """
  s = None
  try:
    s = socket.socket(family, stype)
    s.bind((ip, 0))
    return s.getsockname()[1]
  except (socket.error, OSError):
    # Preserve historical behavior: swallow the error and return None so
    # callers can fall back or retry.
    return None
  finally:
    # BUG FIX: the socket was previously left open on the success path,
    # leaking the fd and keeping the port bound (so the caller's own bind
    # could fail) until the garbage collector happened to close it.
    if s is not None:
      s.close()
+
+
def RunCommandFuncWrapper(func, msg, *args, **kwargs):
  """Wraps a function that invokes cros_build_lib.RunCommand.

  If the command failed, logs warning |msg| if error_code_ok is set;
  logs error |msg| if error_code_ok is not set.

  Args:
    func: The function to call.
    msg: The message to display if the command failed.
    *args: Arguments to pass to |func|.
    **kwargs: Keyword arguments to pass to |func|.

  Returns:
    The result of |func|.

  Raises:
    cros_build_lib.RunCommandError if the command failed and error_code_ok
    is not set.
  """
  error_code_ok = kwargs.pop('error_code_ok', False)
  # Always run with error_code_ok=True so we can decide here whether a
  # failure raises or is merely logged.
  result = func(*args, error_code_ok=True, **kwargs)
  if result.returncode != 0 and not error_code_ok:
    raise cros_build_lib.RunCommandError(msg, result)

  if result.returncode != 0:
    logging.warning(msg)

  # BUG FIX: the result was computed but never returned, contradicting the
  # docstring above and handing callers None.
  return result
+
+
def CompileSSHConnectSettings(**kwargs):
  """Builds the list of '-oKey=Value' SSH connection options.

  Any ssh_config option can be specified in |kwargs|; a handful of options
  get default values when not specified.  Passing None for an option removes
  it entirely so SSH falls back to its own default.

  Options are not validated here — a typo or invalid setting only surfaces
  when the resulting arguments reach an actual SSH invocation.

  Args:
    kwargs: A dictionary of ssh_config settings.

  Returns:
    A list of arguments to pass to SSH.
  """
  defaults = {
      'ConnectTimeout': 30,
      'ConnectionAttempts': 4,
      'NumberOfPasswordPrompts': 0,
      'Protocol': 2,
      'ServerAliveInterval': 10,
      'ServerAliveCountMax': 3,
      'StrictHostKeyChecking': 'no',
      'UserKnownHostsFile': '/dev/null',
  }
  defaults.update(kwargs)
  return ['-o%s=%s' % option for option in defaults.items()
          if option[1] is not None]
+
+
def RemoveKnownHost(host, known_hosts_path=KNOWN_HOSTS_PATH):
  """Removes |host| from a known_hosts file.

  `ssh-keygen -R` doesn't work on bind mounted files as they can only
  be updated in place.  Since we bind mount the default known_hosts file
  when entering the chroot, this function instead edits a scratch copy and
  copies the result back over the original.

  Args:
    host: The host name to remove from the known_hosts file.
    known_hosts_path: Path to the known_hosts file to change. Defaults
                      to the standard SSH known_hosts file path.

  Raises:
    cros_build_lib.RunCommandError if ssh-keygen fails.
  """
  # `ssh-keygen -R` creates a backup file to retain the old 'known_hosts'
  # content and never deletes it.  Doing all the work inside a TempDir makes
  # sure both our scratch copy and ssh-keygen's backup are cleaned up.
  with osutils.TempDir(prefix='remote-access-') as tempdir:
    scratch = os.path.join(tempdir, 'temp_known_hosts')
    try:
      # shutil.copy2 preserves the file ownership and permissions.
      shutil.copy2(known_hosts_path, scratch)
    except IOError:
      # No known_hosts file means there is no |host| entry to remove.
      return
    cros_build_lib.RunCommand(['ssh-keygen', '-R', host, '-f', scratch],
                              quiet=True)
    shutil.copy2(scratch, known_hosts_path)
+
+
+class RemoteAccess(object):
+  """Provides access to a remote test machine."""
+
+  DEFAULT_USERNAME = ROOT_ACCOUNT
+
  def __init__(self, remote_host, tempdir, port=None, username=None,
               private_key=None, debug_level=logging.DEBUG, interactive=True):
    """Construct the object.

    Args:
      remote_host: The ip or hostname of the remote test machine.  The test
                   machine should be running a ChromeOS test image.
      tempdir: A directory that RemoteAccess can use to store temporary files.
               It's the responsibility of the caller to remove it.
      port: The ssh port of the test machine to connect to.
      username: The ssh login username (default: root).
      private_key: The identify file to pass to `ssh -i` (default: testing_rsa).
      debug_level: Logging level to use for all RunCommand invocations.
      interactive: If set to False, pass /dev/null into stdin for the sh cmd.
    """
    self.tempdir = tempdir
    self.remote_host = remote_host
    self.port = port if port else DEFAULT_SSH_PORT
    self.username = username if username else self.DEFAULT_USERNAME
    self.debug_level = debug_level
    # Work on a private copy of the key so we can fix its permissions below
    # without touching the caller's (possibly read-only/shared) file.
    private_key_src = private_key if private_key else TEST_PRIVATE_KEY
    self.private_key = os.path.join(
        tempdir, os.path.basename(private_key_src))

    self.interactive = interactive
    shutil.copyfile(private_key_src, self.private_key)
    # ssh refuses identity files with permissive modes; 0400 is safe.
    os.chmod(self.private_key, stat.S_IRUSR)
+
  @property
  def target_ssh_url(self):
    """Returns the 'user@host' string used as the ssh/scp destination."""
    return '%s@%s' % (self.username, self.remote_host)
+
+  def _GetSSHCmd(self, connect_settings=None):
+    if connect_settings is None:
+      connect_settings = CompileSSHConnectSettings()
+
+    cmd = (['ssh', '-p', str(self.port)] +
+           connect_settings +
+           ['-i', self.private_key])
+    if not self.interactive:
+      cmd.append('-n')
+
+    return cmd
+
  def RemoteSh(self, cmd, connect_settings=None, error_code_ok=False,
               remote_sudo=False, ssh_error_ok=False, **kwargs):
    """Run a sh command on the remote device through ssh.

    Args:
      cmd: The command string or list to run. None or empty string/list will
           start an interactive session.
      connect_settings: The SSH connect settings to use.
      error_code_ok: Does not throw an exception when the command exits with a
                     non-zero returncode.  This does not cover the case where
                     the ssh command itself fails (return code 255).
                     See ssh_error_ok.
      ssh_error_ok: Does not throw an exception when the ssh command itself
                    fails (return code 255).
      remote_sudo: If set, run the command in remote shell with sudo.
      **kwargs: See cros_build_lib.RunCommand documentation.

    Returns:
      A CommandResult object.  The returncode is the returncode of the command,
      or 255 if ssh encountered an error (could not connect, connection
      interrupted, etc.)

    Raises:
      RunCommandError when error is not ignored through the error_code_ok flag.
      SSHConnectionError when ssh command error is not ignored through
      the ssh_error_ok flag.
    """
    kwargs.setdefault('capture_output', True)
    kwargs.setdefault('debug_level', self.debug_level)
    # Force English SSH messages. SSHConnectionError.IsKnownHostsMismatch()
    # requires English errors to detect a known_hosts key mismatch error.
    kwargs.setdefault('extra_env', {})['LC_MESSAGES'] = 'C'

    ssh_cmd = self._GetSSHCmd(connect_settings)
    ssh_cmd.append(self.target_ssh_url)

    if cmd:
      # '--' ends ssh's own option parsing; everything after it is the
      # remote command, so it can't be mistaken for ssh flags.
      ssh_cmd.append('--')

      if remote_sudo and self.username != ROOT_ACCOUNT:
        # Prepend sudo to cmd.
        ssh_cmd.append('sudo')

      # NOTE: basestring makes this Python 2-only.
      if isinstance(cmd, basestring):
        ssh_cmd += [cmd]
      else:
        ssh_cmd += cmd

    try:
      return cros_build_lib.RunCommand(ssh_cmd, **kwargs)
    except cros_build_lib.RunCommandError as e:
      # Map the failure onto the caller's tolerance flags:
      #   ssh-level failure (255) + ssh_error_ok      -> return the result
      #   command failure (non-255) + error_code_ok   -> return the result
      #   ssh-level failure otherwise                 -> SSHConnectionError
      #   anything else                               -> re-raise
      if ((e.result.returncode == SSH_ERROR_CODE and ssh_error_ok) or
          (e.result.returncode and e.result.returncode != SSH_ERROR_CODE
           and error_code_ok)):
        return e.result
      elif e.result.returncode == SSH_ERROR_CODE:
        raise SSHConnectionError(e.result.error)
      else:
        raise
+
+  def _CheckIfRebooted(self):
+    """Checks whether a remote device has rebooted successfully.
+
+    This uses a rapidly-retried SSH connection, which will wait for at most
+    about ten seconds. If the network returns an error (e.g. host unreachable)
+    the actual delay may be shorter.
+
+    Returns:
+      Whether the device has successfully rebooted.
+    """
+    # In tests SSH seems to be waiting rather longer than would be expected
+    # from these parameters. These values produce a ~5 second wait.
+    connect_settings = CompileSSHConnectSettings(
+        ConnectTimeout=REBOOT_SSH_CONNECT_TIMEOUT,
+        ConnectionAttempts=REBOOT_SSH_CONNECT_ATTEMPTS)
+    cmd = "[ ! -e '%s' ]" % REBOOT_MARKER
+    result = self.RemoteSh(cmd, connect_settings=connect_settings,
+                           error_code_ok=True, ssh_error_ok=True,
+                           capture_output=True)
+
+    errors = {0: 'Reboot complete.',
+              1: 'Device has not yet shutdown.',
+              255: 'Cannot connect to device; reboot in progress.'}
+    if result.returncode not in errors:
+      raise Exception('Unknown error code %s returned by %s.'
+                      % (result.returncode, cmd))
+
+    logging.info(errors[result.returncode])
+    return result.returncode == 0
+
  def RemoteReboot(self):
    """Reboot the remote device."""
    logging.info('Rebooting %s...', self.remote_host)
    # Drop the marker file right before rebooting; _CheckIfRebooted polls
    # for its disappearance (a completed reboot is expected to clear /tmp).
    if self.username != ROOT_ACCOUNT:
      self.RemoteSh('sudo sh -c "touch %s && sudo reboot"' % REBOOT_MARKER)
    else:
      self.RemoteSh('touch %s && reboot' % REBOOT_MARKER)

    # Give the device a head start before we begin polling.
    time.sleep(CHECK_INTERVAL)
    try:
      timeout_util.WaitForReturnTrue(self._CheckIfRebooted, REBOOT_MAX_WAIT,
                                     period=CHECK_INTERVAL)
    except timeout_util.TimeoutError:
      cros_build_lib.Die('Reboot has not completed after %s seconds; giving up.'
                         % (REBOOT_MAX_WAIT,))
+
  def Rsync(self, src, dest, to_local=False, follow_symlinks=False,
            recursive=True, inplace=False, verbose=False, sudo=False,
            remote_sudo=False, **kwargs):
    """Rsync a path to the remote device.

    Rsync a path to the remote device. If |to_local| is set True, it
    rsyncs the path from the remote device to the local machine.

    Args:
      src: The local src directory.
      dest: The remote dest directory.
      to_local: If set, rsync remote path to local path.
      follow_symlinks: If set, transform symlinks into referent
        path. Otherwise, copy symlinks as symlinks.
      recursive: Whether to recursively copy entire directories.
      inplace: If set, cause rsync to overwrite the dest files in place.  This
        conserves space, but has some side effects - see rsync man page.
      verbose: If set, print more verbose output during rsync file transfer.
      sudo: If set, invoke the command via sudo.
      remote_sudo: If set, run the command in remote shell with sudo.
      **kwargs: See cros_build_lib.RunCommand documentation.

    Returns:
      A CommandResult object from running rsync.
    """
    kwargs.setdefault('debug_level', self.debug_level)

    # rsync invokes the transport itself via --rsh, which takes a single
    # command string rather than an argv list.
    ssh_cmd = ' '.join(self._GetSSHCmd())
    rsync_cmd = ['rsync', '--perms', '--verbose', '--times', '--compress',
                 '--omit-dir-times', '--exclude', '.svn']
    rsync_cmd.append('--copy-links' if follow_symlinks else '--links')
    rsync_sudo = 'sudo' if (
        remote_sudo and self.username != ROOT_ACCOUNT) else ''
    # Make sure the remote rsync (installed under the dev/test paths) is
    # found, optionally running it through sudo.
    rsync_cmd += ['--rsync-path',
                  'PATH=%s:$PATH %s rsync' % (DEV_BIN_PATHS, rsync_sudo)]

    if verbose:
      rsync_cmd.append('--progress')
    if recursive:
      rsync_cmd.append('--recursive')
    if inplace:
      rsync_cmd.append('--inplace')

    # NOTE(review): the [...] wrapping of the remote spec looks like rsync's
    # IPv6-literal host syntax — confirm against the rsync man page.
    if to_local:
      rsync_cmd += ['--rsh', ssh_cmd,
                    '[%s]:%s' % (self.target_ssh_url, src), dest]
    else:
      rsync_cmd += ['--rsh', ssh_cmd, src,
                    '[%s]:%s' % (self.target_ssh_url, dest)]

    rc_func = cros_build_lib.RunCommand
    if sudo:
      rc_func = cros_build_lib.SudoRunCommand
    return rc_func(rsync_cmd, print_cmd=verbose, **kwargs)
+
+  def RsyncToLocal(self, *args, **kwargs):
+    """Rsync a path from the remote device to the local machine."""
+    return self.Rsync(*args, to_local=kwargs.pop('to_local', True), **kwargs)
+
  def Scp(self, src, dest, to_local=False, recursive=True, verbose=False,
          sudo=False, **kwargs):
    """Scp a file or directory to the remote device.

    Args:
      src: The local src file or directory.
      dest: The remote dest location.
      to_local: If set, scp remote path to local path.
      recursive: Whether to recursively copy entire directories.
      verbose: If set, print more verbose output during scp file transfer.
      sudo: If set, invoke the command via sudo.
      remote_sudo: If set, run the command in remote shell with sudo.
        (Currently unsupported for non-root logins — see below.)
      **kwargs: See cros_build_lib.RunCommand documentation.

    Returns:
      A CommandResult object containing the information and return code of
      the scp command.

    Raises:
      NotImplementedError: If remote_sudo is requested for a non-root user.
    """
    remote_sudo = kwargs.pop('remote_sudo', False)
    if remote_sudo and self.username != ROOT_ACCOUNT:
      # TODO: Implement scp with remote sudo.
      raise NotImplementedError('Cannot run scp with sudo!')

    kwargs.setdefault('debug_level', self.debug_level)
    # scp relies on 'scp' being in the $PATH of the non-interactive,
    # SSH login shell.
    scp_cmd = (['scp', '-P', str(self.port)] +
               CompileSSHConnectSettings(ConnectTimeout=60) +
               ['-i', self.private_key])

    if not self.interactive:
      scp_cmd.append('-n')

    if recursive:
      scp_cmd.append('-r')
    if verbose:
      scp_cmd.append('-v')

    # When pushing, expand local glob patterns ourselves since no shell is
    # involved to do it for us.
    if to_local:
      scp_cmd += ['%s:%s' % (self.target_ssh_url, src), dest]
    else:
      scp_cmd += glob.glob(src) + ['%s:%s' % (self.target_ssh_url, dest)]

    rc_func = cros_build_lib.RunCommand
    if sudo:
      rc_func = cros_build_lib.SudoRunCommand

    return rc_func(scp_cmd, print_cmd=verbose, **kwargs)
+
+  def ScpToLocal(self, *args, **kwargs):
+    """Scp a path from the remote device to the local machine."""
+    return self.Scp(*args, to_local=kwargs.pop('to_local', True), **kwargs)
+
+  def PipeToRemoteSh(self, producer_cmd, cmd, **kwargs):
+    """Run a local command and pipe it to a remote sh command over ssh.
+
+    Args:
+      producer_cmd: Command to run locally with its results piped to |cmd|.
+      cmd: Command to run on the remote device.
+      **kwargs: See RemoteSh for documentation.
+    """
+    result = cros_build_lib.RunCommand(producer_cmd, stdout_to_pipe=True,
+                                       print_cmd=False, capture_output=True)
+    return self.RemoteSh(cmd, input=kwargs.pop('input', result.output),
+                         **kwargs)
+
+
+class RemoteDeviceHandler(object):
+  """A wrapper of RemoteDevice."""
+
+  def __init__(self, *args, **kwargs):
+    """Creates a RemoteDevice object."""
+    self.device = RemoteDevice(*args, **kwargs)
+
+  def __enter__(self):
+    """Return the temporary directory."""
+    return self.device
+
+  def __exit__(self, _type, _value, _traceback):
+    """Cleans up the device."""
+    self.device.Cleanup()
+
+
+class ChromiumOSDeviceHandler(object):
+  """A wrapper of ChromiumOSDevice."""
+
+  def __init__(self, *args, **kwargs):
+    """Creates a RemoteDevice object."""
+    self.device = ChromiumOSDevice(*args, **kwargs)
+
+  def __enter__(self):
+    """Return the temporary directory."""
+    return self.device
+
+  def __exit__(self, _type, _value, _traceback):
+    """Cleans up the device."""
+    self.device.Cleanup()
+
+
+class RemoteDevice(object):
+  """Handling basic SSH communication with a remote device."""
+
+  DEFAULT_BASE_DIR = '/tmp/remote-access'
+
+  def __init__(self, hostname, port=None, username=None,
+               base_dir=DEFAULT_BASE_DIR, connect_settings=None,
+               private_key=None, debug_level=logging.DEBUG, ping=True,
+               connect=True):
+    """Initializes a RemoteDevice object.
+
+    Args:
+      hostname: The hostname of the device.
+      port: The ssh port of the device.
+      username: The ssh login username.
+      base_dir: The base work directory to create on the device, or
+        None. Required in order to use RunCommand(), but
+        BaseRunCommand() will be available in either case.
+      connect_settings: Default SSH connection settings.
+      private_key: The identity file to pass to `ssh -i`.
+      debug_level: Setting debug level for logging.
+      ping: Whether to ping the device before attempting to connect.
+      connect: True to set up the connection, otherwise set up will
+        be automatically deferred until device use.
+    """
+    self.hostname = hostname
+    self.port = port
+    self.username = username
+    # The tempdir is for storing the rsa key and/or some temp files.
+    self.tempdir = osutils.TempDir(prefix='ssh-tmp')
+    self.connect_settings = (connect_settings if connect_settings else
+                             CompileSSHConnectSettings())
+    self.private_key = private_key
+    self.debug_level = debug_level
+    # The temporary work directories on the device.
+    self._base_dir = base_dir
+    self._work_dir = None
+    # Use GetAgent() instead of accessing this directly for deferred connect.
+    self._agent = None
+    self.cleanup_cmds = []
+
+    if ping and not self.Pingable():
+      raise DeviceNotPingableError('Device %s is not pingable.' % self.hostname)
+
+    if connect:
+      self._Connect()
+
+  def Pingable(self, timeout=20):
+    """Returns True if the device is pingable.
+
+    Args:
+      timeout: Timeout in seconds (default: 20 seconds).
+
+    Returns:
+      True if the device responded to the ping before |timeout|.
+    """
+    result = cros_build_lib.RunCommand(
+        ['ping', '-c', '1', '-w', str(timeout), self.hostname],
+        error_code_ok=True,
+        capture_output=True)
+    return result.returncode == 0
+
+  def GetAgent(self):
+    """Agent accessor; connects the agent if necessary."""
+    if not self._agent:
+      self._Connect()
+    return self._agent
+
+  def _Connect(self):
+    """Sets up the SSH connection and internal state."""
+    self._agent = RemoteAccess(self.hostname, self.tempdir.tempdir,
+                               port=self.port, username=self.username,
+                               private_key=self.private_key)
+
+  @property
+  def work_dir(self):
+    """The work directory to create on the device.
+
+    This property exists so we can create the remote paths on demand.  For
+    some use cases, it'll never be needed, so skipping creation is faster.
+    """
+    if self._base_dir is None:
+      return None
+
+    if self._work_dir is None:
+      self._work_dir = self.BaseRunCommand(
+          ['mkdir', '-p', self._base_dir, '&&',
+           'mktemp', '-d', '--tmpdir=%s' % self._base_dir],
+          capture_output=True).output.strip()
+      logging.debug('The temporary working directory on the device is %s',
+                    self._work_dir)
+      self.RegisterCleanupCmd(['rm', '-rf', self._work_dir])
+
+    return self._work_dir
+
+  # Since this object is instantiated once per device, we can safely cache the
+  # result of the rsync test.  We assume the remote side doesn't go and delete
+  # or break rsync on us, but that's fine.
+  @cros_build_lib.MemoizedSingleCall
+  def HasRsync(self):
+    """Checks if rsync exists on the device."""
+    result = self.GetAgent().RemoteSh(['PATH=%s:$PATH rsync' % DEV_BIN_PATHS,
+                                       '--version'], error_code_ok=True)
+    return result.returncode == 0
+
+  def RegisterCleanupCmd(self, cmd, **kwargs):
+    """Register a cleanup command to be run on the device in Cleanup().
+
+    Args:
+      cmd: command to run. See RemoteAccess.RemoteSh documentation.
+      **kwargs: keyword arguments to pass along with cmd. See
+        RemoteAccess.RemoteSh documentation.
+    """
+    self.cleanup_cmds.append((cmd, kwargs))
+
+  def Cleanup(self):
+    """Remove work/temp directories and run all registered cleanup commands."""
+    for cmd, kwargs in self.cleanup_cmds:
+      # We want to run through all cleanup commands even if there are errors.
+      kwargs.setdefault('error_code_ok', True)
+      self.BaseRunCommand(cmd, **kwargs)
+
+    self.tempdir.Cleanup()
+
+  def CopyToDevice(self, src, dest, mode=None, **kwargs):
+    """Copy path to device."""
+    msg = 'Could not copy %s to device.' % src
+    if mode is None:
+      # Use rsync by default if it exists.
+      mode = 'rsync' if self.HasRsync() else 'scp'
+
+    if mode == 'scp':
+      # scp always follows symlinks
+      kwargs.pop('follow_symlinks', None)
+      func = self.GetAgent().Scp
+    else:
+      func = self.GetAgent().Rsync
+
+    return RunCommandFuncWrapper(func, msg, src, dest, **kwargs)
+
+  def CopyFromDevice(self, src, dest, mode=None, **kwargs):
+    """Copy path from device."""
+    msg = 'Could not copy %s from device.' % src
+    if mode is None:
+      # Use rsync by default if it exists.
+      mode = 'rsync' if self.HasRsync() else 'scp'
+
+    if mode == 'scp':
+      # scp always follows symlinks
+      kwargs.pop('follow_symlinks', None)
+      func = self.GetAgent().ScpToLocal
+    else:
+      func = self.GetAgent().RsyncToLocal
+
+    return RunCommandFuncWrapper(func, msg, src, dest, **kwargs)
+
+  def CopyFromWorkDir(self, src, dest, **kwargs):
+    """Copy path from working directory on the device."""
+    return self.CopyFromDevice(os.path.join(self.work_dir, src), dest, **kwargs)
+
+  def CopyToWorkDir(self, src, dest='', **kwargs):
+    """Copy path to working directory on the device."""
+    return self.CopyToDevice(src, os.path.join(self.work_dir, dest), **kwargs)
+
+  def IsDirWritable(self, path):
+    """Checks if the given directory is writable on the device.
+
+    Args:
+      path: Directory on the device to check.
+    """
+    tmp_file = os.path.join(path, '.tmp.remote_access.is.writable')
+    result = self.GetAgent().RemoteSh(
+        ['touch', tmp_file, '&&', 'rm', tmp_file],
+        error_code_ok=True, remote_sudo=True, capture_output=True)
+    return result.returncode == 0
+
+  def IsFileExecutable(self, path):
+    """Check if the given file is executable on the device.
+
+    Args:
+      path: full path to the file on the device to check.
+
+    Returns:
+      True if the file is executable, and False if the file does not exist or is
+      not executable.
+    """
+    cmd = ['test', '-f', path, '-a', '-x', path,]
+    result = self.GetAgent().RemoteSh(cmd, remote_sudo=True, error_code_ok=True,
+                                      capture_output=True)
+    return result.returncode == 0
+
+  def GetSize(self, path):
+    """Gets the size of the given file on the device.
+
+    Args:
+      path: full path to the file on the device.
+
+    Returns:
+      Size of the file in number of bytes.
+
+    Raises:
+      ValueError if failed to get file size from the remote output.
+      cros_build_lib.RunCommandError if |path| does not exist or the remote
+      command to get file size has failed.
+    """
+    cmd = ['du', '-Lb', '--max-depth=0', path]
+    result = self.BaseRunCommand(cmd, remote_sudo=True, capture_output=True)
+    return int(result.output.split()[0])
+
+  def CatFile(self, path, max_size=1000000):
+    """Reads the file on device to string if its size is less than |max_size|.
+
+    Args:
+      path: The full path to the file on the device to read.
+      max_size: Read the file only if its size is less than |max_size| in bytes.
+        If None, do not check its size and always cat the path.
+
+    Returns:
+      A string of the file content.
+
+    Raises:
+      CatFileError if failed to read the remote file or the file size is larger
+      than |max_size|.
+    """
+    if max_size is not None:
+      try:
+        file_size = self.GetSize(path)
+      except (ValueError, cros_build_lib.RunCommandError) as e:
+        raise CatFileError('Failed to get size of file "%s": %s' % (path, e))
+      if file_size > max_size:
+        raise CatFileError('File "%s" is larger than %d bytes' %
+                           (path, max_size))
+
+    result = self.BaseRunCommand(['cat', path], remote_sudo=True,
+                                 error_code_ok=True, capture_output=True)
+    if result.returncode:
+      raise CatFileError('Failed to read file "%s" on the device' % path)
+    return result.output
+
+  def PipeOverSSH(self, filepath, cmd, **kwargs):
+    """Cat a file and pipe over SSH."""
+    producer_cmd = ['cat', filepath]
+    return self.GetAgent().PipeToRemoteSh(producer_cmd, cmd, **kwargs)
+
+  def GetRunningPids(self, exe, full_path=True):
+    """Get all the running pids on the device with the executable path.
+
+    Args:
+      exe: The executable path to get pids for.
+      full_path: Whether |exe| is a full executable path.
+
+    Raises:
+      RunningPidsError when failing to parse out pids from command output.
+      SSHConnectionError when error occurs during SSH connection.
+    """
+    try:
+      cmd = ['pgrep', exe]
+      if full_path:
+        cmd.append('-f')
+      result = self.GetAgent().RemoteSh(cmd, error_code_ok=True,
+                                        capture_output=True)
+      try:
+        return [int(pid) for pid in result.output.splitlines()]
+      except ValueError:
+        logging.error('Parsing output failed:\n%s', result.output)
+        raise RunningPidsError('Unable to get running pids of %s' % exe)
+    except SSHConnectionError:
+      logging.error('Error connecting to device %s', self.hostname)
+      raise
+
+  def Reboot(self):
+    """Reboot the device."""
+    return self.GetAgent().RemoteReboot()
+
+  def BaseRunCommand(self, cmd, **kwargs):
+    """Executes a shell command on the device with output captured by default.
+
+    Args:
+      cmd: command to run. See RemoteAccess.RemoteSh documentation.
+      **kwargs: keyword arguments to pass along with cmd. See
+        RemoteAccess.RemoteSh documentation.
+    """
+    kwargs.setdefault('debug_level', self.debug_level)
+    kwargs.setdefault('connect_settings', self.connect_settings)
+    try:
+      return self.GetAgent().RemoteSh(cmd, **kwargs)
+    except SSHConnectionError:
+      logging.error('Error connecting to device %s', self.hostname)
+      raise
+
+  def RunCommand(self, cmd, **kwargs):
+    """Executes a shell command on the device with output captured by default.
+
+    Also sets environment variables using dictionary provided by
+    keyword argument |extra_env|.
+
+    Args:
+      cmd: command to run. See RemoteAccess.RemoteSh documentation.
+      **kwargs: keyword arguments to pass along with cmd. See
+        RemoteAccess.RemoteSh documentation.
+    """
+    # Handle setting environment variables on the device by copying
+    # and sourcing a temporary environment file.
+    extra_env = kwargs.pop('extra_env', None)
+    if extra_env:
+      remote_sudo = kwargs.pop('remote_sudo', False)
+      if remote_sudo and self.GetAgent().username == ROOT_ACCOUNT:
+        remote_sudo = False
+
+      new_cmd = []
+      flat_vars = ['%s=%s' % (k, cros_build_lib.ShellQuote(v))
+                   for k, v in extra_env.iteritems()]
+
+      # If the vars are too large for the command line, do it indirectly.
+      # We pick 32k somewhat arbitrarily -- the kernel should accept this
+      # and rarely should remote commands get near that size.
+      ARG_MAX = 32 * 1024
+
+      # What the command line would generally look like on the remote.
+      cmdline = ' '.join(flat_vars + cmd)
+      if len(cmdline) > ARG_MAX:
+        env_list = ['export %s' % x for x in flat_vars]
+        with tempfile.NamedTemporaryFile(dir=self.tempdir.tempdir,
+                                         prefix='env') as f:
+          logging.debug('Environment variables: %s', ' '.join(env_list))
+          osutils.WriteFile(f.name, '\n'.join(env_list))
+          self.CopyToWorkDir(f.name)
+          env_file = os.path.join(self.work_dir, os.path.basename(f.name))
+          new_cmd += ['.', '%s;' % env_file]
+          if remote_sudo:
+            new_cmd += ['sudo', '-E']
+      else:
+        if remote_sudo:
+          new_cmd += ['sudo']
+        new_cmd += flat_vars
+
+      cmd = new_cmd + cmd
+
+    return self.BaseRunCommand(cmd, **kwargs)
+
+
+class ChromiumOSDevice(RemoteDevice):
+  """Basic commands to interact with a ChromiumOS device over SSH connection."""
+
+  MAKE_DEV_SSD_BIN = '/usr/share/vboot/bin/make_dev_ssd.sh'
+  MOUNT_ROOTFS_RW_CMD = ['mount', '-o', 'remount,rw', '/']
+  LIST_MOUNTS_CMD = ['cat', '/proc/mounts']
+
+  def __init__(self, hostname, **kwargs):
+    """Initializes this object.
+
+    Args:
+      hostname: A network hostname.
+      kwargs: Args to pass to the parent constructor.
+    """
+    super(ChromiumOSDevice, self).__init__(hostname, **kwargs)
+    self._orig_path = None
+    self._path = None
+    self._lsb_release = {}
+
+  @property
+  def orig_path(self):
+    """The $PATH variable on the device."""
+    if not self._orig_path:
+      try:
+        result = self.BaseRunCommand(['echo', "${PATH}"])
+      except cros_build_lib.RunCommandError as e:
+        logging.error('Failed to get $PATH on the device: %s', e.result.error)
+        raise
+
+      self._orig_path = result.output.strip()
+
+    return self._orig_path
+
+  @property
+  def path(self):
+    """The $PATH variable on the device prepended with DEV_BIN_PATHS."""
+    if not self._path:
+      # If the remote path already has our dev paths (which is common), then
+      # there is no need for us to prepend.
+      orig_paths = self.orig_path.split(':')
+      for path in reversed(DEV_BIN_PATHS.split(':')):
+        if path not in orig_paths:
+          orig_paths.insert(0, path)
+
+      self._path = ':'.join(orig_paths)
+
+    return self._path
+
+  @property
+  def lsb_release(self):
+    """The /etc/lsb-release content on the device.
+
+    Returns a dict of entries in /etc/lsb-release file. If multiple entries
+    have the same key, only the first entry is recorded. Returns an empty dict
+    if the reading command failed or the file is corrupted (i.e., does not have
+    the format of <key>=<value> for every line).
+    """
+    if not self._lsb_release:
+      try:
+        content = self.CatFile(constants.LSB_RELEASE_PATH, max_size=None)
+      except CatFileError as e:
+        logging.debug(
+            'Failed to read "%s" on the device: %s',
+            constants.LSB_RELEASE_PATH, e)
+      else:
+        try:
+          self._lsb_release = dict(e.split('=', 1)
+                                   for e in reversed(content.splitlines()))
+        except ValueError:
+          logging.error('File "%s" on the device is mal-formatted.',
+                        constants.LSB_RELEASE_PATH)
+
+    return self._lsb_release
+
+  @property
+  def board(self):
+    """The board name of the device."""
+    return self.lsb_release.get('CHROMEOS_RELEASE_BOARD', '')
+
+  def _RemountRootfsAsWritable(self):
+    """Attempts to Remount the root partition."""
+    logging.info("Remounting '/' with rw...")
+    self.RunCommand(self.MOUNT_ROOTFS_RW_CMD, error_code_ok=True,
+                    remote_sudo=True)
+
+  def _RootfsIsReadOnly(self):
+    """Returns True if rootfs on is mounted as read-only."""
+    r = self.RunCommand(self.LIST_MOUNTS_CMD, capture_output=True)
+    for line in r.output.splitlines():
+      if not line:
+        continue
+
+      chunks = line.split()
+      if chunks[1] == '/' and 'ro' in chunks[3].split(','):
+        return True
+
+    return False
+
+  def DisableRootfsVerification(self):
+    """Disables device rootfs verification."""
+    logging.info('Disabling rootfs verification on device...')
+    self.RunCommand(
+        [self.MAKE_DEV_SSD_BIN, '--remove_rootfs_verification', '--force'],
+        error_code_ok=True, remote_sudo=True)
+    # TODO(yjhong): Make sure an update is not pending.
+    logging.info('Need to reboot to actually disable the verification.')
+    self.Reboot()
+
+  def MountRootfsReadWrite(self):
+    """Checks mount types and remounts them as read-write if needed.
+
+    Returns:
+      True if rootfs is mounted as read-write. False otherwise.
+    """
+    if not self._RootfsIsReadOnly():
+      return True
+
+    # If the image on the device is built with rootfs verification
+    # disabled, we can simply remount '/' as read-write.
+    self._RemountRootfsAsWritable()
+
+    if not self._RootfsIsReadOnly():
+      return True
+
+    logging.info('Unable to remount rootfs as rw (normal w/verified rootfs).')
+    # If the image is built with rootfs verification, turn off the
+    # rootfs verification. After reboot, the rootfs will be mounted as
+    # read-write (there is no need to remount).
+    self.DisableRootfsVerification()
+
+    return not self._RootfsIsReadOnly()
+
+  def RunCommand(self, cmd, **kwargs):
+    """Executes a shell command on the device with output captured by default.
+
+    Also makes sure $PATH is set correctly by adding DEV_BIN_PATHS to
+    'PATH' in |extra_env|.
+
+    Args:
+      cmd: command to run. See RemoteAccess.RemoteSh documentation.
+      **kwargs: keyword arguments to pass along with cmd. See
+        RemoteAccess.RemoteSh documentation.
+    """
+    extra_env = kwargs.pop('extra_env', {})
+    path_env = extra_env.get('PATH', None)
+    if path_env is None:
+      # Optimization: if the default path is already what we want, don't bother
+      # passing it through.
+      if self.orig_path != self.path:
+        path_env = self.path
+    if path_env is not None:
+      extra_env['PATH'] = path_env
+    kwargs['extra_env'] = extra_env
+    return super(ChromiumOSDevice, self).RunCommand(cmd, **kwargs)
diff --git a/lib/remote_access_unittest b/lib/remote_access_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/remote_access_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/remote_access_unittest.py b/lib/remote_access_unittest.py
new file mode 100644
index 0000000..4c15ff2
--- /dev/null
+++ b/lib/remote_access_unittest.py
@@ -0,0 +1,301 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test the remote_access module."""
+
+from __future__ import print_function
+
+import os
+import re
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import partial_mock
+from chromite.lib import remote_access
+
+
+# pylint: disable=protected-access
+
+
+class TestNormalizePort(cros_test_lib.TestCase):
+  """Verifies we normalize port."""
+
+  def testNormalizePortStrOK(self):
+    """Tests that string will be converted to integer."""
+    self.assertEqual(remote_access.NormalizePort('123'), 123)
+
+  def testNormalizePortStrNotOK(self):
+    """Tests that error is raised if port is string and str_ok=False."""
+    self.assertRaises(
+        ValueError, remote_access.NormalizePort, '123', str_ok=False)
+
+  def testNormalizePortOutOfRange(self):
+    """Tests that error is rasied when port is out of range."""
+    self.assertRaises(ValueError, remote_access.NormalizePort, '-1')
+    self.assertRaises(ValueError, remote_access.NormalizePort, 99999)
+
+
+class TestRemoveKnownHost(cros_test_lib.MockTempDirTestCase):
+  """Verifies RemoveKnownHost() functionality."""
+
+  # ssh-keygen doesn't check for a valid hostname so use something that won't
+  # be in the user's known_hosts to avoid changing their file contents.
+  _HOST = '0.0.0.0.0.0'
+
+  _HOST_KEY = (
+      _HOST + ' ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCjysPTaDAtRaxRaW1JjqzCHp2'
+      '88gvlUgtJxd2Jt/v63fkqZ5zzLLoeoAMwv0oYSRU82qhLimXpHxXRkrMC5nrpz5zJch+ktql'
+      '0rSRgo+dqc1GzmyOOAq5NkQsgBb3hefxMxCZRV8Dv0n7qaindZRxE8MnRJmVUoj8Wq8wryab'
+      'p+fUBkesBwaJhPXa4WBJeI5d+rO5tEBSNkvIp0USU6Ku3Ct0q2sZbOkY5g1VFAUYm4wyshCf'
+      'oWvU8ivMFp0pCezMISGstKpkIQApq2dLUb6EmeIgnhHzZXOn7doxIGD33JUfFmwNi0qfk3vV'
+      '6vKRVDEZD68+ix6gjKpicY5upA/9P\n')
+
+  def testRemoveKnownHostDefaultFile(self):
+    """Tests RemoveKnownHost() on the default known_hosts file.
+
+    `ssh-keygen -R` on its own fails when run from within the chroot
+    since the default known_hosts is bind mounted.
+    """
+    # It doesn't matter if known_hosts actually has this host in it or not,
+    # this test just makes sure the command doesn't fail. The default
+    # known_hosts file always exists in the chroot due to the bind mount.
+    remote_access.RemoveKnownHost(self._HOST)
+
+  def testRemoveKnownHostCustomFile(self):
+    """Tests RemoveKnownHost() on a custom known_hosts file."""
+    path = os.path.join(self.tempdir, 'known_hosts')
+    osutils.WriteFile(path, self._HOST_KEY)
+    remote_access.RemoveKnownHost(self._HOST, known_hosts_path=path)
+    self.assertEqual(osutils.ReadFile(path), '')
+
+  def testRemoveKnownHostNonexistentFile(self):
+    """Tests RemoveKnownHost() on a nonexistent known_hosts file."""
+    path = os.path.join(self.tempdir, 'known_hosts')
+    remote_access.RemoveKnownHost(self._HOST, known_hosts_path=path)
+
+
+class TestCompileSSHConnectSettings(cros_test_lib.TestCase):
+  """Verifies CompileSSHConnectSettings()."""
+
+  def testCustomSettingIncluded(self):
+    """Tests that a custom setting will be included in the output."""
+    self.assertIn(
+        '-oNumberOfPasswordPrompts=100',
+        remote_access.CompileSSHConnectSettings(NumberOfPasswordPrompts=100))
+
+  def testNoneSettingOmitted(self):
+    """Tests that a None value will omit a default setting from the output."""
+    self.assertIn('-oProtocol=2', remote_access.CompileSSHConnectSettings())
+    self.assertNotIn(
+        '-oProtocol=2',
+        remote_access.CompileSSHConnectSettings(Protocol=None))
+
+
+class RemoteShMock(partial_mock.PartialCmdMock):
+  """Mocks the RemoteSh function."""
+  TARGET = 'chromite.lib.remote_access.RemoteAccess'
+  ATTRS = ('RemoteSh',)
+  DEFAULT_ATTR = 'RemoteSh'
+
+  def RemoteSh(self, inst, cmd, *args, **kwargs):
+    """Simulates a RemoteSh invocation.
+
+    Returns:
+      A CommandResult object with an additional member |rc_mock| to
+      enable examination of the underlying RunCommand() function call.
+    """
+    result = self._results['RemoteSh'].LookupResult(
+        (cmd,), hook_args=(inst, cmd,) + args, hook_kwargs=kwargs)
+
+    # Run the real RemoteSh with RunCommand mocked out.
+    rc_mock = cros_build_lib_unittest.RunCommandMock()
+    rc_mock.AddCmdResult(
+        partial_mock.Ignore(), result.returncode, result.output, result.error)
+
+    with rc_mock:
+      result = self.backup['RemoteSh'](inst, cmd, *args, **kwargs)
+    result.rc_mock = rc_mock
+    return result
+
+
+class RemoteDeviceMock(partial_mock.PartialMock):
+  """Mocks the RemoteDevice function."""
+
+  TARGET = 'chromite.lib.remote_access.RemoteDevice'
+  ATTRS = ('Pingable',)
+
+  def Pingable(self, _):
+    return True
+
+
+class RemoteAccessTest(cros_test_lib.MockTempDirTestCase):
+  """Base class with RemoteSh mocked out for testing RemoteAccess."""
+  def setUp(self):
+    self.rsh_mock = self.StartPatcher(RemoteShMock())
+    self.host = remote_access.RemoteAccess('foon', self.tempdir)
+
+
+class RemoteShTest(RemoteAccessTest):
+  """Tests of basic RemoteSh functions"""
+  TEST_CMD = 'ls'
+  RETURN_CODE = 0
+  OUTPUT = 'witty'
+  ERROR = 'error'
+
+  def assertRemoteShRaises(self, **kwargs):
+    """Asserts that RunCommandError is raised when running TEST_CMD."""
+    self.assertRaises(cros_build_lib.RunCommandError, self.host.RemoteSh,
+                      self.TEST_CMD, **kwargs)
+
+  def assertRemoteShRaisesSSHConnectionError(self, **kwargs):
+    """Asserts that SSHConnectionError is raised when running TEST_CMD."""
+    self.assertRaises(remote_access.SSHConnectionError, self.host.RemoteSh,
+                      self.TEST_CMD, **kwargs)
+
+  def SetRemoteShResult(self, returncode=RETURN_CODE, output=OUTPUT,
+                        error=ERROR):
+    """Sets the RemoteSh command results."""
+    self.rsh_mock.AddCmdResult(self.TEST_CMD, returncode=returncode,
+                               output=output, error=error)
+
+  def testNormal(self):
+    """Test normal functionality."""
+    self.SetRemoteShResult()
+    result = self.host.RemoteSh(self.TEST_CMD)
+    self.assertEquals(result.returncode, self.RETURN_CODE)
+    self.assertEquals(result.output.strip(), self.OUTPUT)
+    self.assertEquals(result.error.strip(), self.ERROR)
+
+  def testRemoteCmdFailure(self):
+    """Test failure in remote cmd."""
+    self.SetRemoteShResult(returncode=1)
+    self.assertRemoteShRaises()
+    self.assertRemoteShRaises(ssh_error_ok=True)
+    self.host.RemoteSh(self.TEST_CMD, error_code_ok=True)
+    self.host.RemoteSh(self.TEST_CMD, ssh_error_ok=True, error_code_ok=True)
+
+  def testSshFailure(self):
+    """Test failure in ssh command."""
+    self.SetRemoteShResult(returncode=remote_access.SSH_ERROR_CODE)
+    self.assertRemoteShRaisesSSHConnectionError()
+    self.assertRemoteShRaisesSSHConnectionError(error_code_ok=True)
+    self.host.RemoteSh(self.TEST_CMD, ssh_error_ok=True)
+    self.host.RemoteSh(self.TEST_CMD, ssh_error_ok=True, error_code_ok=True)
+
+  def testEnvLcMessagesSet(self):
+    """Test that LC_MESSAGES is set to 'C' for an SSH command."""
+    self.SetRemoteShResult()
+    result = self.host.RemoteSh(self.TEST_CMD)
+    rc_kwargs = result.rc_mock.call_args_list[-1][1]
+    self.assertEqual(rc_kwargs['extra_env']['LC_MESSAGES'], 'C')
+
+  def testEnvLcMessagesOverride(self):
+    """Test that LC_MESSAGES is overridden to 'C' for an SSH command."""
+    self.SetRemoteShResult()
+    result = self.host.RemoteSh(self.TEST_CMD, extra_env={'LC_MESSAGES': 'fr'})
+    rc_kwargs = result.rc_mock.call_args_list[-1][1]
+    self.assertEqual(rc_kwargs['extra_env']['LC_MESSAGES'], 'C')
+
+
+class CheckIfRebootedTest(RemoteAccessTest):
+  """Tests of the _CheckIfRebooted function."""
+
+  def MockCheckReboot(self, returncode):
+    self.rsh_mock.AddCmdResult(
+        partial_mock.Regex('.*%s.*' % re.escape(remote_access.REBOOT_MARKER)),
+        returncode)
+
+  def testSuccess(self):
+    """Test the case of successful reboot."""
+    self.MockCheckReboot(0)
+    self.assertTrue(self.host._CheckIfRebooted())
+
+  def testRemoteFailure(self):
+    """Test case of reboot pending."""
+    self.MockCheckReboot(1)
+    self.assertFalse(self.host._CheckIfRebooted())
+
+  def testSshFailure(self):
+    """Test case of connection down."""
+    self.MockCheckReboot(remote_access.SSH_ERROR_CODE)
+    self.assertFalse(self.host._CheckIfRebooted())
+
+  def testInvalidErrorCode(self):
+    """Test case of bad error code returned."""
+    self.MockCheckReboot(2)
+    self.assertRaises(Exception, self.host._CheckIfRebooted)
+
+
+class RemoteDeviceTest(cros_test_lib.MockTestCase):
+  """Tests for RemoteDevice class."""
+
+  def setUp(self):
+    self.rsh_mock = self.StartPatcher(RemoteShMock())
+    self.pingable_mock = self.PatchObject(
+        remote_access.RemoteDevice, 'Pingable', return_value=True)
+
+  def _SetupRemoteTempDir(self):
+    """Mock out the calls needed for a remote tempdir."""
+    self.rsh_mock.AddCmdResult(partial_mock.In('mktemp'))
+    self.rsh_mock.AddCmdResult(partial_mock.In('rm'))
+
+  def testCommands(self):
+    """Tests simple RunCommand() and BaseRunCommand() usage."""
+    command = ['echo', 'foo']
+    expected_output = 'foo'
+    self.rsh_mock.AddCmdResult(command, output=expected_output)
+    self._SetupRemoteTempDir()
+
+    with remote_access.RemoteDeviceHandler('1.1.1.1') as device:
+      self.assertEqual(expected_output,
+                       device.RunCommand(['echo', 'foo']).output)
+      self.assertEqual(expected_output,
+                       device.BaseRunCommand(['echo', 'foo']).output)
+
+  def testRunCommandShortCmdline(self):
+    """Verify short command lines execute env settings directly."""
+    with remote_access.RemoteDeviceHandler('1.1.1.1') as device:
+      self.PatchObject(remote_access.RemoteDevice, 'CopyToWorkDir',
+                       side_effect=Exception('should not be copying files'))
+      self.rsh_mock.AddCmdResult(partial_mock.In('runit'))
+      device.RunCommand(['runit'], extra_env={'VAR': 'val'})
+
+  def testRunCommandLongCmdline(self):
+    """Verify long command lines execute env settings via script."""
+    with remote_access.RemoteDeviceHandler('1.1.1.1') as device:
+      self._SetupRemoteTempDir()
+      m = self.PatchObject(remote_access.RemoteDevice, 'CopyToWorkDir')
+      self.rsh_mock.AddCmdResult(partial_mock.In('runit'))
+      device.RunCommand(['runit'], extra_env={'VAR': 'v' * 1024 * 1024})
+      # We'll assume that the test passed when it tries to copy a file to the
+      # remote side (the shell script to run indirectly).
+      self.assertEqual(m.call_count, 1)
+
+  def testNoDeviceBaseDir(self):
+    """Tests base_dir=None."""
+    command = ['echo', 'foo']
+    expected_output = 'foo'
+    self.rsh_mock.AddCmdResult(command, output=expected_output)
+
+    with remote_access.RemoteDeviceHandler('1.1.1.1', base_dir=None) as device:
+      self.assertEqual(expected_output,
+                       device.BaseRunCommand(['echo', 'foo']).output)
+
+  def testDelayedRemoteDirs(self):
+    """Tests the delayed creation of base_dir/work_dir."""
+    with remote_access.RemoteDeviceHandler('1.1.1.1', base_dir='/f') as device:
+      # Make sure we didn't talk to the remote yet.
+      self.assertEqual(self.rsh_mock.call_count, 0)
+
+      # The work dir will get automatically created when we use it.
+      self.rsh_mock.AddCmdResult(partial_mock.In('mktemp'))
+      _ = device.work_dir
+      self.assertEqual(self.rsh_mock.call_count, 1)
+
+      # Add a mock for the clean up logic.
+      self.rsh_mock.AddCmdResult(partial_mock.In('rm'))
+
+    self.assertEqual(self.rsh_mock.call_count, 2)
diff --git a/lib/retry_stats.py b/lib/retry_stats.py
new file mode 100644
index 0000000..233fddf
--- /dev/null
+++ b/lib/retry_stats.py
@@ -0,0 +1,162 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Infrastructure for collecting statistics about retries."""
+
+from __future__ import print_function
+
+import collections
+import datetime
+
+from chromite.lib import parallel
+from chromite.lib import retry_util
+
+
+# Well known categories we gather stats for.
+CIDB = 'CIDB'
+GSUTIL = 'Google Storage'
+
+
class UnconfiguredStatsCategory(Exception):
  """Raised when a stats category is used without being configured first."""
+
+
# One StatEntry is recorded per retry call.
#   category: The category string the call was logged under.
#   attempts: A list of Attempt tuples, one per call of the wrapped functor.
StatEntry = collections.namedtuple('StatEntry', ['category', 'attempts'])

# One Attempt is recorded per call of the wrapped functor.
#   time: How long the attempt took, as a datetime.timedelta
#     (RetryWithStats stores end - start, not seconds).
#   exception: None for a successful attempt, or a string exception description.
Attempt = collections.namedtuple('Attempt', ['time', 'exception'])
+
+
+# After Setup, contains a multiprocess proxy array.
+# The array holds StatEntry values for each event seen.
+_STATS_COLLECTION = None
+
+
def SetupStats():
  """Prepare the module to collect retry statistics.

  Must be called BEFORE creating any new processes that might read or
  write stat values. Repeat calls are safe (though each one starts a
  fresh, empty collection); a single call is the most efficient.
  """
  # pylint: disable=W0603
  global _STATS_COLLECTION

  # Pylint thinks our manager has no members.
  # pylint: disable=E1101
  manager = parallel.Manager()

  # Replace any prior collection with a new multiprocess-usable list.
  _STATS_COLLECTION = manager.list()
+
+
def _SuccessFilter(entry):
  """Returns True if the StatEntry succeeded (perhaps after retries)."""
  # At least one attempt with no recorded exception means overall success.
  return any(not attempt.exception for attempt in entry.attempts)
+
+
def _RetryCount(entry):
  """Returns the number of retries in this StatEntry."""
  # The first attempt isn't a retry; guard against an empty attempt list.
  attempt_count = len(entry.attempts)
  return attempt_count - 1 if attempt_count else 0
+
+
def ReportCategoryStats(out, category):
  """Write a summary report for one stats category.

  Args:
    out: Output stream to write to (e.g. sys.stdout).
    category: A string that defines the 'namespace' for these stats.
  """
  # Snapshot the multiprocess proxy list, then keep only this category.
  stats = [entry for entry in list(_STATS_COLLECTION)
           if entry.category == category]

  successes = sum(1 for entry in stats if _SuccessFilter(entry))
  failures = len(stats) - successes
  retries = sum(_RetryCount(entry) for entry in stats)

  border = '*' * 60
  report_lines = [
      border,
      '** Performance Statistics for %s' % category,
      '**',
      '** Success: %d' % successes,
      '** Failure: %d' % failures,
      '** Retries: %d' % retries,
      '** Total: %d' % (successes + failures),
      border,
  ]
  out.write('\n'.join(report_lines) + '\n')
+
+
def ReportStats(out):
  """Dump a stats report for every category that has collected data.

  Args:
    out: Output stream to write to (e.g. sys.stdout).
  """
  # Categories only become known once an event is recorded for them.
  categories = sorted(set([e.category for e in _STATS_COLLECTION]))

  for category in categories:
    ReportCategoryStats(out, category)
+
+
def RetryWithStats(category, handler, max_retry, functor, *args, **kwargs):
  """Wrapper around retry_util.GenericRetry that collects stats.

  This wrapper collects statistics about each failure or retry. Each
  category is defined by a unique string. Each category should be setup
  before use (actually, before processes are forked).

  All other arguments are blindly passed to retry_util.GenericRetry.

  Args:
    category: A string that defines the 'namespace' for these stats.
    handler: See retry_util.GenericRetry.
    max_retry: See retry_util.GenericRetry.
    functor: See retry_util.GenericRetry.
    args: See retry_util.GenericRetry.
    kwargs: See retry_util.GenericRetry.

  Returns:
    Whatever retry_util.GenericRetry returns.

  Raises:
    See retry_util.GenericRetry.
  """
  stat_entry = StatEntry(category, attempts=[])

  # Wrap the work method, so we can time each attempt and record failures.
  def wrapper(*args, **kwargs):
    start = datetime.datetime.now()

    try:
      result = functor(*args, **kwargs)
    except Exception as e:
      end = datetime.datetime.now()
      e_description = '%s: %s' % (type(e).__name__, e)
      stat_entry.attempts.append(Attempt(end - start, e_description))
      raise

    end = datetime.datetime.now()
    stat_entry.attempts.append(Attempt(end - start, None))
    return result

  try:
    return retry_util.GenericRetry(handler, max_retry, wrapper,
                                   *args, **kwargs)
  finally:
    # Record the entry whether we succeeded or raised, but only if stats
    # collection was set up.
    if _STATS_COLLECTION is not None:
      _STATS_COLLECTION.append(stat_entry)
diff --git a/lib/retry_stats_unittest b/lib/retry_stats_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/retry_stats_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/retry_stats_unittest.py b/lib/retry_stats_unittest.py
new file mode 100644
index 0000000..766a4db
--- /dev/null
+++ b/lib/retry_stats_unittest.py
@@ -0,0 +1,226 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the retry_stats.py module."""
+
+from __future__ import print_function
+
+import StringIO
+
+from chromite.lib import cros_test_lib
+from chromite.lib import retry_stats
+
+
+# We access internal members to help with testing.
+# pylint: disable=W0212
+
+
class TestRetryException(Exception):
  """Exception raised on purpose by the failure-path test helpers."""
+
class TestRetryStats(cros_test_lib.TestCase):
  """This contains test cases for the retry_stats module."""

  CAT = 'Test Service A'
  CAT_B = 'Test Service B'

  SUCCESS_RESULT = 'success result'

  def setUp(self):
    """Reset the module-level collection so each test starts clean."""
    retry_stats._STATS_COLLECTION = None

  def handlerNoRetry(self, _e):
    """Retry handler that never allows a retry."""
    return False

  def handlerRetry(self, _e):
    """Retry handler that always allows a retry."""
    return True

  def callSuccess(self):
    """Work functor that always succeeds."""
    return self.SUCCESS_RESULT

  def callFailure(self):
    """Work functor that always fails."""
    raise TestRetryException()


  def _verifyStats(self, category, success=0, failure=0, retry=0):
    """Verify that the given category has the specified values collected."""
    stats = [e for e in retry_stats._STATS_COLLECTION if e.category == category]

    stats_success = len([e for e in stats if retry_stats._SuccessFilter(e)])
    stats_failure = len(stats) - stats_success
    stats_retry = sum([retry_stats._RetryCount(e) for e in stats])

    self.assertEqual(stats_success, success)
    self.assertEqual(stats_failure, failure)
    self.assertEqual(stats_retry, retry)

  def testSetupStats(self):
    """Verify that we do something when we setup a new stats category."""
    # Show that setup does something.
    self.assertEqual(retry_stats._STATS_COLLECTION, None)
    retry_stats.SetupStats()
    self.assertNotEqual(retry_stats._STATS_COLLECTION, None)

  def testReportCategoryStatsEmpty(self):
    """Verify the report layout for a category with no recorded events."""
    retry_stats.SetupStats()

    out = StringIO.StringIO()

    retry_stats.ReportCategoryStats(out, self.CAT)

    expected = """************************************************************
** Performance Statistics for Test Service A
**
** Success: 0
** Failure: 0
** Retries: 0
** Total: 0
************************************************************
"""

    self.assertEqual(out.getvalue(), expected)

  def testReportStatsEmpty(self):
    """Verify ReportStats writes nothing when no events were recorded."""
    retry_stats.SetupStats()

    out = StringIO.StringIO()
    retry_stats.ReportStats(out)

    # No data collected means no categories are known, nothing to report.
    self.assertEqual(out.getvalue(), '')

  def testReportStats(self):
    """Verify ReportStats emits one report per category, sorted by name."""
    retry_stats.SetupStats()

    # Insert some stats to report.
    retry_stats.RetryWithStats(
        self.CAT, self.handlerNoRetry, 3, self.callSuccess)
    retry_stats.RetryWithStats(
        self.CAT_B, self.handlerNoRetry, 3, self.callSuccess)
    self.assertRaises(TestRetryException,
                      retry_stats.RetryWithStats,
                      self.CAT, self.handlerRetry, 3, self.callFailure)

    out = StringIO.StringIO()
    retry_stats.ReportStats(out)

    # Expecting reports for both CAT and CAT_B used above.
    expected = """************************************************************
** Performance Statistics for Test Service A
**
** Success: 1
** Failure: 1
** Retries: 3
** Total: 2
************************************************************
************************************************************
** Performance Statistics for Test Service B
**
** Success: 1
** Failure: 0
** Retries: 0
** Total: 1
************************************************************
"""

    self.assertEqual(out.getvalue(), expected)

  def testSuccessNoSetup(self):
    """Verify that we can handle a successful call if we're never setup."""
    self.assertEqual(retry_stats._STATS_COLLECTION, None)

    result = retry_stats.RetryWithStats(
        self.CAT, self.handlerNoRetry, 3, self.callSuccess)
    self.assertEqual(result, self.SUCCESS_RESULT)

    result = retry_stats.RetryWithStats(
        self.CAT, self.handlerNoRetry, 3, self.callSuccess)
    self.assertEqual(result, self.SUCCESS_RESULT)

    self.assertEqual(retry_stats._STATS_COLLECTION, None)

  def testFailureNoRetryNoSetup(self):
    """Verify that we can handle a failure call if we're never setup."""
    self.assertEqual(retry_stats._STATS_COLLECTION, None)

    self.assertRaises(TestRetryException,
                      retry_stats.RetryWithStats,
                      self.CAT, self.handlerNoRetry, 3, self.callFailure)

    self.assertRaises(TestRetryException,
                      retry_stats.RetryWithStats,
                      self.CAT, self.handlerNoRetry, 3, self.callFailure)

    self.assertEqual(retry_stats._STATS_COLLECTION, None)

  def testSuccess(self):
    """Verify that we can handle a successful call."""
    retry_stats.SetupStats()
    self._verifyStats(self.CAT)

    # Succeed once.
    result = retry_stats.RetryWithStats(
        self.CAT, self.handlerNoRetry, 3, self.callSuccess)
    self.assertEqual(result, self.SUCCESS_RESULT)
    self._verifyStats(self.CAT, success=1)

    # Succeed twice.
    result = retry_stats.RetryWithStats(
        self.CAT, self.handlerNoRetry, 3, self.callSuccess)
    self.assertEqual(result, self.SUCCESS_RESULT)
    self._verifyStats(self.CAT, success=2)

  def testSuccessRetry(self):
    """Verify that we can handle a successful call after tries."""
    retry_stats.SetupStats()
    self._verifyStats(self.CAT)

    # Use this scoped list as a persistent counter.
    call_counter = ['fail 1', 'fail 2']

    def callRetrySuccess():
      if call_counter:
        raise TestRetryException(call_counter.pop())
      else:
        return self.SUCCESS_RESULT

    # Retry twice, then succeed.
    result = retry_stats.RetryWithStats(
        self.CAT, self.handlerRetry, 3, callRetrySuccess)
    self.assertEqual(result, self.SUCCESS_RESULT)
    self._verifyStats(self.CAT, success=1, retry=2)

  def testFailureNoRetry(self):
    """Verify that we can handle a failure if the handler doesn't retry."""
    retry_stats.SetupStats()
    self._verifyStats(self.CAT)

    # Fail once without retries.
    self.assertRaises(TestRetryException,
                      retry_stats.RetryWithStats,
                      self.CAT, self.handlerNoRetry, 3, self.callFailure)
    self._verifyStats(self.CAT, failure=1)

    # Fail twice without retries.
    self.assertRaises(TestRetryException,
                      retry_stats.RetryWithStats,
                      self.CAT, self.handlerNoRetry, 3, self.callFailure)
    self._verifyStats(self.CAT, failure=2)

  def testFailureRetry(self):
    """Verify that we can handle a failure if we use all retries."""
    retry_stats.SetupStats()
    self._verifyStats(self.CAT)

    # Fail once with exhausted retries.
    self.assertRaises(TestRetryException,
                      retry_stats.RetryWithStats,
                      self.CAT, self.handlerRetry, 3, self.callFailure)
    self._verifyStats(self.CAT, failure=1, retry=3) # 3 retries = 4 attempts.

    # Fail twice with exhausted retries.
    self.assertRaises(TestRetryException,
                      retry_stats.RetryWithStats,
                      self.CAT, self.handlerRetry, 3, self.callFailure)
    self._verifyStats(self.CAT, failure=2, retry=6)
diff --git a/lib/retry_util.py b/lib/retry_util.py
new file mode 100644
index 0000000..25e5830
--- /dev/null
+++ b/lib/retry_util.py
@@ -0,0 +1,280 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Basic infrastructure for implementing retries."""
+
+from __future__ import print_function
+
+import sys
+import time
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+
+
def GenericRetry(handler, max_retry, functor, *args, **kwargs):
  """Generic retry loop w/ optional break out depending on exceptions.

  To retry based on the return value of |functor| see the timeout_util module.

  Keep in mind that the total sleep time will be the triangular value of
  max_retry multiplied by the sleep value.  e.g. max_retry=5 and sleep=10
  will be T5 (i.e. 5+4+3+2+1) times 10, or 150 seconds total.  Rather than
  use a large sleep value, you should lean more towards large retries and
  lower sleep intervals, or by utilizing backoff_factor.

  Args:
    handler: A functor invoked w/ the exception instance that
      functor(*args, **kwargs) threw.  If it returns True, then a
      retry is attempted.  If False, the exception is re-raised.
    max_retry: A positive integer representing how many times to retry
      the command before giving up.  Worst case, the command is invoked
      (max_retry + 1) times before failing.
    functor: A callable to pass args and kwargs to.
    args: Positional args passed to functor.
    kwargs: Optional args passed to functor.
    sleep: Optional keyword.  Multiplier for how long to sleep between
      retries; will delay (1*sleep) the first time, then (2*sleep),
      continuing via attempt * sleep.
    backoff_factor: Optional keyword. If supplied and > 1, subsequent sleeps
                    will be of length (backoff_factor ^ (attempt - 1)) * sleep,
                    rather than the default behavior of attempt * sleep.
    success_functor: Optional functor that accepts 1 argument. Will be called
                     after successful call to |functor|, with the argument
                     being the number of attempts (1 = |functor| succeeded on
                     first try).
    raise_first_exception_on_failure: Optional boolean which determines which
                                      exception is raised upon failure after
                                      retries. If True, the first exception
                                      that was encountered. If False, the
                                      final one. Default: True.

  Returns:
    Whatever functor(*args, **kwargs) returns.

  Raises:
    Exception: Whatever exceptions functor(*args, **kwargs) throws and
      isn't suppressed is raised.  Note that the first exception encountered
      is what's thrown.
  """

  # Pop our own keywords before they reach |functor| via **kwargs.
  sleep = kwargs.pop('sleep', 0)
  if max_retry < 0:
    raise ValueError('max_retry needs to be zero or more: %s' % max_retry)

  backoff_factor = kwargs.pop('backoff_factor', 1)
  if backoff_factor < 1:
    raise ValueError('backoff_factor must be 1 or greater: %s'
                     % backoff_factor)

  success_functor = kwargs.pop('success_functor', lambda x: None)
  ret, success = (None, False)

  raise_first_exception_on_failure = kwargs.pop(
      'raise_first_exception_on_failure', True)

  # Defensive default; the loop below always runs at least once (max_retry
  # is validated to be >= 0 above).
  attempt = 0

  exc_info = None
  for attempt in xrange(max_retry + 1):
    # No sleep before the very first attempt (attempt == 0).
    if attempt and sleep:
      if backoff_factor > 1:
        sleep_time = sleep * backoff_factor ** (attempt - 1)
      else:
        sleep_time = sleep * attempt
      time.sleep(sleep_time)
    try:
      ret = functor(*args, **kwargs)
      success = True
      break
    except Exception as e:
      # Note we're not snagging BaseException, so MemoryError/KeyboardInterrupt
      # and friends don't enter this except block.
      if not handler(e):
        raise
      # If raise_first_exception_on_failure, we intentionally ignore
      # any failures in later attempts since we'll throw the original
      # failure if all retries fail.
      if exc_info is None or not raise_first_exception_on_failure:
        exc_info = sys.exc_info()

  if success:
    success_functor(attempt + 1)
    return ret

  # All attempts failed; re-raise the saved exception with its original
  # traceback (Python 2 three-argument raise form).
  raise exc_info[0], exc_info[1], exc_info[2]
+
+
def RetryException(exc_retry, max_retry, functor, *args, **kwargs):
  """Convenience wrapper for GenericRetry based on exceptions.

  Args:
    exc_retry: A class (or tuple of classes).  If the raised exception
      is the given class(es), a retry will be attempted.  Otherwise,
      the exception is raised.
    max_retry: See GenericRetry.
    functor: See GenericRetry.
    *args: See GenericRetry.
    **kwargs: See GenericRetry.
  """
  if not isinstance(exc_retry, (tuple, type)):
    raise TypeError('exc_retry should be an exception (or tuple), not %r' %
                    exc_retry)

  # A distinctly-named predicate (instead of shadowing |exc_retry| and
  # silencing pylint E0102 as before) that retries iff the raised exception
  # is an instance of the requested class(es).
  def _ShouldRetry(exc):
    return isinstance(exc, exc_retry)

  return GenericRetry(_ShouldRetry, max_retry, functor, *args, **kwargs)
+
+
def RetryCommand(functor, max_retry, *args, **kwargs):
  """Wrapper for RunCommand that will retry a command.

  Args:
    functor: RunCommand function to run; retries will only occur on
      RunCommandError exceptions being thrown.
    max_retry: A positive integer representing how many times to retry
      the command before giving up.  Worst case, the command is invoked
      (max_retry + 1) times before failing.
    sleep: Optional keyword.  Multiplier for how long to sleep between
      retries; will delay (1*sleep) the first time, then (2*sleep),
      continuing via attempt * sleep.
    retry_on: If provided, we will retry on any exit codes in the given list.
      Note: A process will exit with a negative exit code if it is killed by a
      signal. By default, we retry on all non-negative exit codes.
    error_check: Optional callback to check the error output.  Return None to
      fall back to |retry_on|, or True/False to set the retry directly.
    log_retries: Whether to log a warning when retriable errors occur.
    args: Positional args passed to RunCommand; see RunCommand for specifics.
    kwargs: Optional args passed to RunCommand; see RunCommand for specifics.

  Returns:
    A CommandResult object.

  Raises:
    Exception:  Raises RunCommandError on error with optional error_message.
  """
  retry_on = kwargs.pop('retry_on', None)
  error_check = kwargs.pop('error_check', lambda x: None)
  log_retries = kwargs.pop('log_retries', True)

  def ShouldRetry(exc):
    """Return whether we should retry on a given exception."""
    if not ShouldRetryCommandCommon(exc):
      return False

    returncode = exc.result.returncode
    # With no explicit retry_on list, never retry a signal death.
    if retry_on is None and returncode < 0:
      logging.info('Child process received signal %d; not retrying.',
                   -returncode)
      return False

    # The caller's output-based check gets the first word.
    verdict = error_check(exc)
    if verdict is not None:
      return verdict

    if retry_on is not None and returncode not in retry_on:
      return False
    if log_retries:
      logging.warning('Command failed with retriable error.\n%s', exc)
    return True

  return GenericRetry(ShouldRetry, max_retry, functor, *args, **kwargs)
+
+
def ShouldRetryCommandCommon(exc):
  """Returns whether any RunCommand should retry on a given exception."""
  # Only RunCommand failures are ever retriable.
  retriable = isinstance(exc, cros_build_lib.RunCommandError)
  # A None returncode means the child never launched; retrying won't help.
  if retriable and exc.result.returncode is None:
    logging.error('Child process failed to launch; not retrying:\n'
                  'command: %s', exc.result.cmdstr)
    retriable = False
  return retriable
+
+
def RunCommandWithRetries(max_retry, *args, **kwargs):
  """Run a command via cros_build_lib.RunCommand, retrying on failure.

  Args:
    max_retry: See RetryCommand and RunCommand.
    *args: See RetryCommand and RunCommand.
    **kwargs: See RetryCommand and RunCommand.

  Returns:
    A CommandResult object.

  Raises:
    Exception:  Raises RunCommandError on error with optional error_message.
  """
  # Thin shim: bind RunCommand as the functor and delegate everything else.
  return RetryCommand(cros_build_lib.RunCommand, max_retry, *args, **kwargs)
+
+
class DownloadError(Exception):
  """Raised when fetching a file via curl failed."""
+
+
def RunCurl(args, fail=True, **kwargs):
  """Runs curl and wraps around all necessary hacks.

  Args:
    args: Command line to pass to curl.
    fail: Whether to use --fail w/curl.
    **kwargs: See RunCommandWithRetries and RunCommand.

  Returns:
    A CommandResult object.

  Raises:
    DownloadError: Whenever curl fails for any reason.
  """
  cmd = ['curl']
  if fail:
    cmd.append('--fail')
  cmd.extend(args)

  # These values were discerned via scraping the curl manpage; they're all
  # retry related (dns failed, timeout occurred, etc, see  the manpage for
  # exact specifics of each).
  # Note we allow 22 to deal w/ 500's- they're thrown by google storage
  # occasionally.  This is also thrown when getting 4xx, but curl doesn't
  # make it easy to differentiate between them.
  # Note we allow 35 to deal w/ Unknown SSL Protocol error, thrown by
  # google storage occasionally.
  # Finally, we do not use curl's --retry option since it generally doesn't
  # actually retry anything; code 18 for example, it will not retry on.
  retriable_exits = frozenset([5, 6, 7, 15, 18, 22, 26, 28, 35, 52, 56])

  def _CheckExit(exc):
    """Filter out specific error codes when getting exit 22

    Curl will exit(22) for a wide range of HTTP codes -- both the 4xx and 5xx
    set.  For the 4xx, we don't want to retry.  We have to look at the output.
    """
    if exc.result.returncode == 22:
      return '404 Not Found' not in exc.result.error
    else:
      # We'll let the common exit code filter do the right thing.
      return None

  # Merge our retry defaults with the caller's options.  Use a distinct name;
  # previously this dict was bound to |args|, shadowing the curl command-line
  # parameter above.
  run_kwargs = {
      'retry_on': retriable_exits,
      'error_check': _CheckExit,
      'capture_output': True,
  }
  run_kwargs.update(kwargs)
  try:
    return RunCommandWithRetries(5, cmd, sleep=3, **run_kwargs)
  except cros_build_lib.RunCommandError as e:
    code = e.result.returncode
    if code in (51, 58, 60):
      # These are the return codes of failing certs as per 'man curl'.
      raise DownloadError(
          'Download failed with certificate error? Try "sudo c_rehash".')
    else:
      try:
        # Last-ditch retry pass with a much longer sleep.  Use the same merged
        # options as the first pass; the previous code passed raw **kwargs
        # here, silently dropping retry_on/error_check/capture_output.
        return RunCommandWithRetries(5, cmd, sleep=60, **run_kwargs)
      except cros_build_lib.RunCommandError as e:
        raise DownloadError('Curl failed w/ exit code %i: %s' %
                            (e.result.returncode, e.result.error))
diff --git a/lib/rewrite_git_alternates.py b/lib/rewrite_git_alternates.py
new file mode 100755
index 0000000..09dd4c5
--- /dev/null
+++ b/lib/rewrite_git_alternates.py
@@ -0,0 +1,268 @@
+#!/usr/bin/python2
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functionality for mangling repository checkouts that are shared
+
+In particular, this in combination w/ enter_chroot's mount binding, allows
+us to access the same repo from inside and outside a chroot at the same time
+"""
+
+from __future__ import print_function
+
+__all__ = ('RebuildRepoCheckout',)
+
+import sys
+import os
+import shutil
+import errno
+
+_path = os.path.realpath(__file__)
+_path = os.path.normpath(os.path.join(os.path.dirname(_path), '..', '..'))
+sys.path.insert(0, _path)
+del _path
+
+from chromite.lib import cros_build_lib
+from chromite.lib import git
+from chromite.lib import osutils
+
+
# File (stored under the .repo root) caching the discovered project list.
_CACHE_NAME = '.cros_projects.list'
+
+
def _FilterNonExistentProjects(project_dir, projects):
  """Yield each project from |projects| that exists under |project_dir|."""
  for candidate in projects:
    if os.path.exists(os.path.join(project_dir, candidate)):
      yield candidate
+
+
def _CleanAlternates(projects, alt_root):
  """Prune alternates entries under |alt_root| not matching |projects|.

  Args:
    projects: Iterable of project file names to keep (e.g. 'chromite.git').
    alt_root: Root of the alternates tree to clean.
  """
  alt_root = os.path.normpath(alt_root)

  projects = set(projects)
  # Ignore our cache.
  projects.add(_CACHE_NAME)
  required_directories = set(os.path.dirname(x) for x in projects)

  for abs_root, dirs, files in os.walk(alt_root):
    rel_root = abs_root[len(alt_root):].strip('/')

    if rel_root not in required_directories:
      # No surviving project lives in this subtree; drop it entirely and
      # clear |dirs| so os.walk does not descend into the removed tree.
      shutil.rmtree(abs_root)
      dirs[:] = []
      continue

    if rel_root:
      # Inside a kept directory, remove only the stale individual files.
      for filename in files:
        if os.path.join(rel_root, filename) not in projects:
          os.unlink(os.path.join(abs_root, filename))
+
+
def _UpdateAlternatesDir(alternates_root, reference_maps, projects):
  """Write one alternates file per project under |alternates_root|.

  Args:
    alternates_root: Directory to populate with per-project alternates files.
    reference_maps: Dict mapping a reference checkout root (as found on disk)
      to the path it should be written as in the alternates file.
    projects: Iterable of project file names (e.g. 'chromite.git').
  """
  for project in projects:
    alt_path = os.path.join(alternates_root, project)
    paths = []
    for k, v in reference_maps.iteritems():
      # Only reference checkouts that actually contain this project.
      suffix = os.path.join('.repo', 'project-objects', project, 'objects')
      if os.path.exists(os.path.join(k, suffix)):
        paths.append(os.path.join(v, suffix))

    osutils.SafeMakedirs(os.path.dirname(alt_path))
    # Atomic write so readers never see a partially-written alternates file.
    osutils.WriteFile(alt_path, '%s\n' % ('\n'.join(paths),), atomic=True)
+
+
def _UpdateGitAlternates(proj_root, projects):
  """Point each project's git alternates at our managed alternates tree.

  Args:
    proj_root: Root of the .repo/project-objects directory.
    projects: Iterable of project file names (e.g. 'chromite.git').
  """
  for project in projects:

    alt_path = os.path.join(proj_root, project, 'objects', 'info',
                            'alternates')
    tmp_path = '%s.tmp' % alt_path

    # Clean out any tmp files that may have existed prior.
    osutils.SafeUnlink(tmp_path)

    # The pathway is written relative to the alternates files absolute path;
    # literally, .repo/projects/chromite.git/objects/info/alternates.
    relpath = '../' * (project.count('/') + 4)
    relpath = os.path.join(relpath, 'alternates', project)

    osutils.SafeMakedirs(os.path.dirname(tmp_path))
    # Create the symlink under a temp name, then rename over the real file so
    # the swap is atomic.
    os.symlink(relpath, tmp_path)
    os.rename(tmp_path, alt_path)
+
+
def _GetProjects(repo_root):
  """Return project names found in the repo's project-objects store.

  Results are cached in |_CACHE_NAME| under |repo_root|, keyed off the
  manifest mtime, so repeat calls avoid the filesystem scan.

  Args:
    repo_root: Path to the .repo directory of a checkout.

  Returns:
    List of project names, without the trailing '.git' suffix.
  """
  # Note that we cannot rely upon projects.list, nor repo list, nor repo forall
  # here to be authoritative.
  # if we rely on the manifest contents, the local tree may not yet be
  # updated- thus if we drop the alternate for that project, that project is no
  # longer usable (which can tick off repo sync).
  # Thus, we just iterate over the raw underlying projects store, and generate
  # alternates for that; we regenerate based on either the manifest changing,
  # local_manifest having changed, or projects.list having changed (which
  # occurs during partial local repo syncs; aka repo sync chromite for
  # example).
  # Finally, note we have to truncate our mtime awareness to just integers;
  # this is required since utime isn't guaranteed to set floats, despite our
  # being able to get a float back from stat'ing.

  manifest_xml = os.path.join(repo_root, 'manifest.xml')
  # lstat catches changes to the manifest symlink itself; stat its target.
  times = [os.lstat(manifest_xml).st_mtime,
           os.stat(manifest_xml).st_mtime]
  for path in ('local_manifest.xml', 'project.list'):
    path = os.path.join(repo_root, path)
    if os.path.exists(path):
      times.append(os.stat(path).st_mtime)

  manifest_time = long(max(times))

  cache_path = os.path.join(repo_root, _CACHE_NAME)

  try:
    # Cache hit only when the cache's mtime matches the manifest time we
    # stamped onto it (see os.utime below).
    if long(os.stat(cache_path).st_mtime) == manifest_time:
      return osutils.ReadFile(cache_path).split()
  except EnvironmentError as e:
    if e.errno != errno.ENOENT:
      raise
  # ENOENT: no cache yet; fall through and do the full scan.

  # The -a ! section of this find invocation is to block descent
  # into the actual git repository; for IO constrained systems,
  # this avoids a fairly large amount of inode/dentry load up.
  # TLDR; It's faster, don't remove it ;)
  data = cros_build_lib.RunCommand(
      ['find', './', '-type', 'd', '-name', '*.git', '-a',
       '!', '-wholename', '*/*.git/*', '-prune'],
      cwd=os.path.join(repo_root, 'project-objects'), capture_output=True)

  # Drop the leading ./ and the trailing .git
  data = [x[2:-4] for x in data.output.splitlines() if x]

  with open(cache_path, 'w') as f:
    f.write('\n'.join(sorted(data)))

  # Finally, mark the cache with the time of the manifest.xml we examined.
  os.utime(cache_path, (manifest_time, manifest_time))
  return data
+
+
class Failed(Exception):
  """Raised when the environment prevents rebuilding the checkout."""
+
+
def _RebuildRepoCheckout(target_root, reference_map,
                         alternates_dir):
  """Regenerate the alternates tree for a single repo checkout.

  Args:
    target_root: Root of the repo checkout to rewrite.
    reference_map: Dict mapping reference checkout roots to the paths they
      should be referenced by in the alternates files.
    alternates_dir: Directory that holds the generated alternates files.

  Raises:
    Failed: If |target_root| is not a repo checkout.
  """
  repo_root = os.path.join(target_root, '.repo')
  proj_root = os.path.join(repo_root, 'project-objects')

  manifest_path = os.path.join(repo_root, 'manifest.xml')
  if not os.path.exists(manifest_path):
    raise Failed('%r does not exist, thus cannot be a repo checkout' %
                 manifest_path)

  projects = ['%s.git' % x for x in _GetProjects(repo_root)]
  projects = _FilterNonExistentProjects(proj_root, projects)
  projects = list(sorted(projects))

  if not osutils.SafeMakedirs(alternates_dir, 0o775):
    # We know the directory exists; thus cleanse out
    # dead alternates.
    _CleanAlternates(projects, alternates_dir)

  _UpdateAlternatesDir(alternates_dir, reference_map, projects)
  _UpdateGitAlternates(proj_root, projects)
+
+
def WalkReferences(repo_root, max_depth=5, suppress=()):
  """Given a repo checkout root, find the repo's it references up to max_depth.

  Args:
    repo_root: The root of a repo checkout to start from
    max_depth: Git internally limits the max alternates depth to 5;
      this option exists to adjust how deep we're willing to look.
    suppress: List of repos already seen (and so to ignore).

  Yields:
    Each repository root required for this repo_root, starting with
    repo_root itself.

  Raises:
    Failed: If the reference chain exceeds |max_depth|, or a git config
      query fails unexpectedly.
  """

  original_root = repo_root
  seen = set(os.path.abspath(x) for x in suppress)

  for _x in xrange(0, max_depth):
    repo_root = os.path.abspath(repo_root)

    if repo_root in seen:
      # Cyclic reference graph; break out of it, if someone induced this the
      # necessary objects should be in place.  If they aren't, really isn't
      # much that can be done.
      return

    yield repo_root
    seen.add(repo_root)
    base = os.path.join(repo_root, '.repo', 'manifests.git')
    # repo.reference names the checkout this one borrows objects from.
    result = git.RunGit(
        base, ['config', 'repo.reference'], error_code_ok=True)

    # git config exits 1 when the key simply isn't set; anything else is an
    # actual failure.
    if result.returncode not in (0, 1):
      raise Failed('Unexpected returncode %i from examining %s git '
                   'repo.reference configuration' %
                   (result.returncode, base))

    repo_root = result.output.strip()
    if not repo_root:
      # End of the chain; this checkout references nothing further.
      break

  else:
    # Loop exhausted without hitting a chain end: too deep.
    raise Failed('While tracing out the references of %s, we recursed more '
                 'than the allowed %i times ending at %s'
                 % (original_root, max_depth, repo_root))
+
+
def RebuildRepoCheckout(repo_root, initial_reference,
                        chroot_reference_root=None):
  """Rebuild a repo checkout's 'alternate tree' rewriting the repo to use it

  Args:
    repo_root: absolute path to the root of a repository checkout
    initial_reference: absolute path to the root of the repository that is
      shared
    chroot_reference_root: if given, repo_root will have its chroot
      alternates tree configured with this pathway, enabling repo access to
      work from within the chroot.

  Returns:
    The list of reference repository roots (see WalkReferences).
  """

  reference_roots = list(WalkReferences(initial_reference,
                                        suppress=[repo_root]))

  # Always rebuild the external alternates for any operation; 1) we don't want
  # external out of sync from chroot, 2) if this is the first conversion, if
  # we only update chroot it'll break external access to the repo.
  reference_map = dict((x, x) for x in reference_roots)
  rebuilds = [('alternates', reference_map)]
  if chroot_reference_root:
    # Inside the chroot, references appear as numbered mount points under
    # .repo/chroot/external*.  (The dead local assignment of alternates_dir
    # that used to sit here was removed; the loop below binds it.)
    base = os.path.join(chroot_reference_root, '.repo', 'chroot', 'external')
    reference_map = dict((x, '%s%i' % (base, idx + 1))
                         for idx, x in enumerate(reference_roots))
    rebuilds += [('chroot/alternates', reference_map)]

  for alternates_dir, reference_map in rebuilds:
    alternates_dir = os.path.join(repo_root, '.repo', alternates_dir)
    _RebuildRepoCheckout(repo_root,
                         reference_map,
                         alternates_dir)
  return reference_roots
+
+
if __name__ == '__main__':
  # CLI entry point:
  #   rewrite_git_alternates.py <repository_root> <referenced_repository>
  #                             [path_from_within_the_chroot]
  chroot_root = None
  if len(sys.argv) not in (3, 4):
    sys.stderr.write('Usage: %s <repository_root> <referenced_repository> '
                     '[path_from_within_the_chroot]\n' % (sys.argv[0],))
    sys.exit(1)
  if len(sys.argv) == 4:
    chroot_root = sys.argv[3]
  ret = RebuildRepoCheckout(sys.argv[1], sys.argv[2],
                            chroot_reference_root=chroot_root)
  # Print the reference roots, one per line, for callers to consume.
  print('\n'.join(ret))
diff --git a/lib/sandbox_spec_generator.py b/lib/sandbox_spec_generator.py
new file mode 100644
index 0000000..bc9649c
--- /dev/null
+++ b/lib/sandbox_spec_generator.py
@@ -0,0 +1,408 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Logic to generate a SandboxSpec from an appc pod manifest.
+
+https://github.com/appc/spec/blob/master/SPEC.md
+"""
+
+from __future__ import print_function
+
+import collections
+import copy
+import json
+import os
+import re
+
+from chromite.lib import cros_logging as logging
+from chromite.lib import json_lib
+from chromite.lib import osutils
+from chromite.lib import remote_access
+from chromite.lib import user_db
+
# Keys of interest in the "annotations" section of an appc pod manifest.
KEY_ANNOTATIONS_LIST = 'annotations'
KEY_ANNOTATION_NAME = 'name'
KEY_ANNOTATION_VALUE = 'value'
# Annotations whose name starts with this prefix declare endpoint names.
ENDPOINT_NAME_ANNOTATION_PREFIX = 'bruteus-endpoint-'

# Keys of interest in the "apps" section of an appc pod manifest.
KEY_APPS_LIST = 'apps'
KEY_APP_NAME = 'name'
KEY_APP_IMAGE = 'image'
KEY_APP_IMAGE_NAME = 'name'
KEY_APP_ISOLATORS = 'isolators'

# Keys of interest in the per-app "app" sub-dictionary.
KEY_APP_SUB_APP = 'app'
KEY_SUB_APP_USER = 'user'
KEY_SUB_APP_GROUP = 'group'
KEY_SUB_APP_EXEC = 'exec'
KEY_SUB_APP_PORTS = 'ports'

# Keys of interest in a single port specification dictionary.
PORT_SPEC_COUNT = 'count'
PORT_SPEC_NAME = 'name'
PORT_SPEC_PORT = 'port'
PORT_SPEC_PROTOCOL = 'protocol'
PORT_SPEC_SOCKET_ACTIVATED = 'socketActivated'

# Transport protocols a port specification may name.
PROTOCOL_TCP = 'tcp'
PROTOCOL_UDP = 'udp'
VALID_PROTOCOLS = (PROTOCOL_TCP, PROTOCOL_UDP)

# Keys of interest in a single isolator dictionary.
ISOLATOR_KEY_NAME = 'name'
ISOLATOR_KEY_VALUE = 'value'
ISOLATOR_KEY_VALUE_SET = 'set'

# Isolators with this name prefix configure Linux capability sets; only the
# retain-set variant is accepted (see _ExtractLinuxCapNames).
ISOLATOR_NAME_PREFIX = 'os/linux/capabilities-'
ISOLATOR_NAME_RETAIN_SET = 'os/linux/capabilities-retain-set'

# allow_all: bool; True means every port is open for the protocol.
# port_list: list of integer ports open for the protocol.
PortSpec = collections.namedtuple('PortSpec', ('allow_all', 'port_list'))
+
+
def IsValidAcName(name):
  """Check |name| against appc's AC Name Type rules.

  An AC Name looks like a protocol-less URL: lowercase alphanumeric
  segments joined by single '-', '.', or '/' characters
  (e.g. foo-foo/bar/bar).

  https://github.com/appc/spec/blob/master/SPEC.md#ac-name-type

  Args:
    name: string to validate.

  Returns:
    True iff |name| is a valid AC Name.
  """
  return re.match(r'^[a-z0-9]+([-./][a-z0-9]+)*$', name) is not None
+
+
class SandboxSpecWrapper(object):
  """Thin wrapper around the SandboxSpec protocol buffer.

  Keeping all protobuf manipulation behind this interface makes it simple to
  mock out in unittests.
  """

  def __init__(self):
    # In the context of unittests run from outside the chroot, this import
    # will fail.  Tests will mock out this entire class.
    # pylint: disable=import-error
    from generated import soma_sandbox_spec_pb2
    self.sandbox_spec = soma_sandbox_spec_pb2.SandboxSpec()

  def SetName(self, name):
    """Set the name of the runnable brick."""
    self.sandbox_spec.name = name
    self.sandbox_spec.overlay_path = '/bricks/%s' % name

  def AddExecutable(self, uid, gid, command_line, tcp_ports, udp_ports,
                    linux_caps):
    """Add an executable to the wrapped SandboxSpec.

    Args:
      uid: integer UID of the user to run this executable.
      gid: integer GID of the group to run this executable.
      command_line: list of strings to run.
      tcp_ports: PortSpec tuple describing open TCP ports.
      udp_ports: PortSpec tuple describing open UDP ports.
      linux_caps: list of string names of capabilities (e.g. 'CAP_CHOWN').
    """
    executable = self.sandbox_spec.executables.add()
    executable.uid = uid
    executable.gid = gid
    executable.command_line.extend(command_line)
    port_pairs = ((executable.tcp_listen_ports, tcp_ports),
                  (executable.udp_listen_ports, udp_ports))
    for listen_ports, spec in port_pairs:
      listen_ports.allow_all = spec.allow_all
      # Individual ports only matter when the wildcard is not set.
      if not spec.allow_all:
        listen_ports.ports.extend(spec.port_list)

    # Map the names of caps to the appropriate protobuffer values.
    executable.capabilities.extend(
        self.sandbox_spec.LinuxCaps.Value('LINUX_' + cap_name)
        for cap_name in linux_caps)

  def AddEndpointName(self, endpoint_name):
    """Record the name of an endpoint that'll run inside this sandbox."""
    self.sandbox_spec.endpoint_names.append(endpoint_name)
+
+
def _GetPortList(desired_protocol, appc_port_list):
  """Get the list of ports opened for |desired_protocol| from |appc_port_list|.

  Args:
    desired_protocol: one of VALID_PROTOCOLS.
    appc_port_list: list of port specifications from an appc pod manifest.

  Returns:
    Instance of PortSpec.

  Raises:
    ValueError: on invalid or unsupported port specifications.
  """
  # The port specification is optional.
  if appc_port_list is None:
    return PortSpec(False, [])

  json_lib.AssertIsInstance(appc_port_list, list, 'port specification list')

  allow_all = False
  port_list = []
  for port_dict in appc_port_list:
    json_lib.AssertIsInstance(port_dict, dict, 'port specification')
    # Work on a copy so popping keys (to detect unknown leftovers below)
    # doesn't mutate the caller's manifest.
    port_dict = copy.deepcopy(port_dict)

    # By default, we open a single specified port.
    port_dict.setdefault(PORT_SPEC_COUNT, 1)
    # By default, don't set socket activated.
    port_dict.setdefault(PORT_SPEC_SOCKET_ACTIVATED, False)

    # We don't actually use the port name, but it's handy for documentation
    # and standard adherence to enforce its existence.
    port_name = json_lib.PopValueOfType(
        port_dict, PORT_SPEC_NAME, unicode, 'port name')
    logging.debug('Validating appc specification of "%s"', port_name)
    port = json_lib.PopValueOfType(port_dict, PORT_SPEC_PORT, int, 'port')
    protocol = json_lib.PopValueOfType(
        port_dict, PORT_SPEC_PROTOCOL, unicode, 'protocol')

    count = json_lib.PopValueOfType(
        port_dict, PORT_SPEC_COUNT, int, 'port range count')

    # We also don't use the socketActivated flag, but we should tolerate safe
    # values.
    socket_activated = json_lib.PopValueOfType(
        port_dict, PORT_SPEC_SOCKET_ACTIVATED, bool, 'socket activated flag')

    # Validate everything before acting on it.
    if protocol not in VALID_PROTOCOLS:
      raise ValueError('Port protocol must be in %r, not "%s"' %
                       (VALID_PROTOCOLS, protocol))
    if protocol != desired_protocol:
      continue

    if socket_activated:
      raise ValueError('No support for socketActivated==True in %s' % port_name)

    if port_dict:
      raise ValueError('Unknown keys found in port spec %s: %r' %
                       (port_name, port_dict.keys()))

    if port == -1:
      # Remember that we're going to return that all ports are opened, but
      # continue validating all the remaining specifications.
      allow_all = True
      continue

    # Now we know it's not the wildcard port, and that we've never declared
    # a wildcard for this protocol.
    port = remote_access.NormalizePort(port)

    if count < 1:
      raise ValueError('May only specify positive port ranges for %s' %
                       port_name)
    # The highest port in the range is port + count - 1, so the range only
    # extends past the maximum valid port (65535) when port + count > 65536.
    # (The previous ">=" check wrongly rejected ranges ending exactly at
    # 65535, e.g. port=65535 with count=1.)
    if port + count > 65536:
      raise ValueError('Port range extends past max port number for %s' %
                       port_name)

    port_list.extend(xrange(port, port + count))

  return PortSpec(allow_all, port_list)
+
+
def _ExtractLinuxCapNames(app_dict):
  """Parses the set of Linux capabilities for an executable.

  Args:
    app_dict: dictionary defining an executable.

  Returns:
    List of names of Linux capabilities (e.g. ['CAP_CHOWN']).
  """
  if KEY_APP_ISOLATORS not in app_dict:
    return []

  isolator_list = json_lib.GetValueOfType(
      app_dict, KEY_APP_ISOLATORS, list,
      'list of isolators for application')

  # Collect the isolators that configure Linux capability sets; only the
  # retain-set flavor is supported.
  cap_isolators = []
  for isolator in isolator_list:
    json_lib.AssertIsInstance(isolator, dict, 'isolator instance')
    name = json_lib.GetValueOfType(
        isolator, ISOLATOR_KEY_NAME, unicode, 'isolator name')
    if not name.startswith(ISOLATOR_NAME_PREFIX):
      continue
    if name != ISOLATOR_NAME_RETAIN_SET:
      raise ValueError('Capabilities may only be specified as %s' %
                       ISOLATOR_NAME_RETAIN_SET)
    cap_isolators.append(isolator)

  # We may have only a single isolator.
  if len(cap_isolators) > 1:
    raise ValueError('Found two lists of Linux caps for an executable')
  if not cap_isolators:
    return []

  value = json_lib.GetValueOfType(
      cap_isolators[0], ISOLATOR_KEY_VALUE, dict, 'Linux cap isolator value')
  caps = json_lib.GetValueOfType(
      value, ISOLATOR_KEY_VALUE_SET, list, 'Linux cap isolator set')
  for cap in caps:
    json_lib.AssertIsInstance(cap, unicode, 'Linux capability in set.')

  return caps
+
+
class SandboxSpecGenerator(object):
  """Delegate that knows how to read appc manifests and write SandboxSpecs."""

  def __init__(self, sysroot):
    # |sysroot| is used to resolve user/group names against the target's
    # account database.
    self._sysroot = sysroot
    self._user_db = user_db.UserDB(sysroot)

  def _CheckAbsPathToExecutable(self, path_to_binary):
    """Raises ValueError unless |path_to_binary| is an absolute path.

    Note: this only validates the form of the path; it does not check that
    an executable actually exists at that location.
    """
    if not os.path.isabs(path_to_binary):
      raise ValueError(
          'Brick executables must be specified by absolute path, not "%s".' %
          path_to_binary)
    return True

  def _FillInEndpointNamesFromAnnotations(self, wrapper, annotations):
    """Fill in the SandboxSpec endpoint_names field from |annotations|.

    An appc pod specification can contain a list of (mostly) arbitrary
    annotations that projects can use to add their own metadata fields.
    |annotations| is a list of dicts that each contain a name and value field,
    and this method looks for 'name' fields that are prefixed with
    ENDPOINT_NAME_ANNOTATION_PREFIX and treats the associated 'value' as the
    name of an endpoint that psyched will expect to be registered from within
    this sandbox.

    Args:
      wrapper: instance of SandboxSpecWrapper.
      annotations: list of dicts, each with a name and value field.

    Raises:
      ValueError: if an annotation name or endpoint name is not a valid
        AC Name.
    """
    for annotation in annotations:
      json_lib.AssertIsInstance(annotation, dict, 'a single annotation')
      name = json_lib.GetValueOfType(
          annotation, KEY_ANNOTATION_NAME, unicode, 'annotation name')
      if not IsValidAcName(name):
        raise ValueError('Annotation name "%s" contains illegal characters.' %
                         name)
      if name.startswith(ENDPOINT_NAME_ANNOTATION_PREFIX):
        endpoint_name = json_lib.GetValueOfType(
            annotation, KEY_ANNOTATION_VALUE, unicode, 'endpoint name value')
        # Fix: validate |endpoint_name| itself.  This previously re-checked
        # |name| (already validated above), so invalid endpoint names
        # slipped through while the error message blamed them.
        if not IsValidAcName(endpoint_name):
          raise ValueError('Endpoint name "%s" contains illegal characters.' %
                           endpoint_name)
        wrapper.AddEndpointName(endpoint_name)

  def _FillInExecutableFromApp(self, wrapper, app):
    """Fill in the fields of a SandboxSpec.Executable object from |app|.

    Args:
      wrapper: instance of SandboxSpecWrapper.
      app: dictionary of information taken from the appc pod manifest.

    Raises:
      ValueError: on unknown users/groups or malformed command lines.
    """
    sub_app = json_lib.GetValueOfType(
        app, KEY_APP_SUB_APP, dict, 'per app app dict')
    user = json_lib.GetValueOfType(
        sub_app, KEY_SUB_APP_USER, unicode, 'app dict user')
    group = json_lib.GetValueOfType(
        sub_app, KEY_SUB_APP_GROUP, unicode, 'app dict group')

    if not self._user_db.UserExists(user):
      raise ValueError('Found invalid username "%s"' % user)
    if not self._user_db.GroupExists(group):
      raise ValueError('Found invalid groupname "%s"' % group)

    cmd = json_lib.GetValueOfType(
        sub_app, KEY_SUB_APP_EXEC, list, 'app command line')
    if not cmd:
      raise ValueError('App command line must give the executable to run.')
    self._CheckAbsPathToExecutable(cmd[0])
    for cmd_piece in cmd:
      json_lib.AssertIsInstance(cmd_piece, unicode, 'app.exec fragment')

    # The port specification is optional.
    port_list = sub_app.get(KEY_SUB_APP_PORTS, None)
    wrapper.AddExecutable(self._user_db.ResolveUsername(user),
                          self._user_db.ResolveGroupname(group),
                          cmd,
                          _GetPortList(PROTOCOL_TCP, port_list),
                          _GetPortList(PROTOCOL_UDP, port_list),
                          _ExtractLinuxCapNames(sub_app))

  def GetSandboxSpec(self, appc_contents, sandbox_spec_name):
    """Create a SandboxSpec encoding the information in an appc pod manifest.

    Args:
      appc_contents: dict parsed from an appc pod manifest's JSON.
      sandbox_spec_name: string unique name of this sandbox.

    Returns:
      an instance of SandboxSpec.

    Raises:
      ValueError: on malformed manifests.
    """
    wrapper = SandboxSpecWrapper()
    overlay_name = None

    app_list = json_lib.GetValueOfType(
        appc_contents, KEY_APPS_LIST, list, 'app list')
    for app in app_list:
      json_lib.AssertIsInstance(app, dict, 'app')

      # Aid debugging of problems in specific apps.
      app_name = json_lib.GetValueOfType(
          app, KEY_APP_NAME, unicode, 'app name')
      if not IsValidAcName(app_name):
        raise ValueError('Application name "%s" contains illegal characters.' %
                         app_name)
      logging.debug('Processing application "%s".', app_name)

      # Get the name of the image, check that it's consistent with other
      # image names.
      image = json_lib.GetValueOfType(
          app, KEY_APP_IMAGE, dict, 'image specification for app')
      image_name = json_lib.GetValueOfType(
          image, KEY_APP_IMAGE_NAME, unicode, 'image name')
      if not IsValidAcName(image_name):
        raise ValueError('Image name "%s" contains illegal characters.' %
                         image_name)

      if overlay_name and overlay_name != image_name:
        raise ValueError(
            'All elements of "apps" must have the same image.name.')
      overlay_name = image_name

      # Add the executable corresponding to this app to our SandboxSpec.
      self._FillInExecutableFromApp(wrapper, app)

    if not overlay_name:
      raise ValueError('Overlays must declare at least one app')

    annotation_list = json_lib.GetValueOfType(
        appc_contents, KEY_ANNOTATIONS_LIST, list, 'list of all annotations')
    self._FillInEndpointNamesFromAnnotations(wrapper, annotation_list)

    wrapper.SetName(sandbox_spec_name)
    return wrapper.sandbox_spec

  def WriteSandboxSpec(self, appc_pod_manifest_path, output_path):
    """Write a SandboxSpec corresponding to |appc_pod_manifest_path| to disk.

    Args:
      appc_pod_manifest_path: path to an appc pod manifest file.
      output_path: path to file to write serialized SandboxSpec. The
          containing directory must exist, but the file may not.  This is not
          checked atomically.

    Raises:
      ValueError: if |output_path| already exists.
    """
    if os.path.isfile(output_path):
      raise ValueError(
          'Refusing to write SandboxSpec to file %s which already exists!' %
          output_path)

    appc_contents = json.loads(osutils.ReadFile(appc_pod_manifest_path))
    # Use the file name without extension as the name of the sandbox spec.
    sandbox_name = os.path.basename(appc_pod_manifest_path).rsplit('.', 1)[0]
    spec = self.GetSandboxSpec(appc_contents, sandbox_name)
    osutils.WriteFile(output_path, spec.SerializeToString())
diff --git a/lib/sandbox_spec_generator_unittest b/lib/sandbox_spec_generator_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/sandbox_spec_generator_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/sandbox_spec_generator_unittest.py b/lib/sandbox_spec_generator_unittest.py
new file mode 100644
index 0000000..8babb75
--- /dev/null
+++ b/lib/sandbox_spec_generator_unittest.py
@@ -0,0 +1,142 @@
+# -*- coding: utf-8 -*-
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for sandbox_spec_generator."""
+
+from __future__ import print_function
+
+import copy
+import mock
+
+from chromite.lib import cros_test_lib
+from chromite.lib import sandbox_spec_generator
+from chromite.lib import user_db
+
# Fixed uid/gid returned by the mocked user/group resolution in the tests.
MOCK_UID = 1000
MOCK_GID = 1001

SANDBOX_NAME = 'sandbox-name'

# Values referenced both from SIMPLE_MANIFEST and from the expected wrapper
# calls below, so the manifest and the expectations stay in sync.
SIMPLE_MANIFEST_IMAGE_NAME = u'image-name'
SIMPLE_MANIFEST_TCP_PORT = 4000
SIMPLE_MANIFEST_PORT_RANGE_BASE = 5000
SIMPLE_MANIFEST_PORT_RANGE_LEN = 10
SIMPLE_MANIFEST_EXEC_VALUE = [u'/bin/true', u'--quiet']
SIMPLE_MANIFEST_ENDPOINT_NAME1 = u'com.foo.yay'
SIMPLE_MANIFEST_ENDPOINT_NAME2 = u'com.foo.boo'
SIMPLE_MANIFEST_CAPS = [u'CAP_NET_BIND_SERVICE', u'CAP_NET_ADMIN']

# Calls to set information in the wrapper that corresponds to SIMPLE_MANIFEST.
SIMPLE_MANIFEST_MOCK_WRAPPER_CALLS = [
    mock.call.AddExecutable(
        MOCK_UID, MOCK_GID,
        SIMPLE_MANIFEST_EXEC_VALUE,
        sandbox_spec_generator.PortSpec(
            False,
            [SIMPLE_MANIFEST_TCP_PORT] +
            range(SIMPLE_MANIFEST_PORT_RANGE_BASE,
                  SIMPLE_MANIFEST_PORT_RANGE_BASE +
                  SIMPLE_MANIFEST_PORT_RANGE_LEN)),
        sandbox_spec_generator.PortSpec(False, []),
        SIMPLE_MANIFEST_CAPS),
    mock.call.AddEndpointName(SIMPLE_MANIFEST_ENDPOINT_NAME1),
    mock.call.AddEndpointName(SIMPLE_MANIFEST_ENDPOINT_NAME2),
    mock.call.SetName(SANDBOX_NAME),
]

# A minimal but representative appc pod manifest: one app with an exec line,
# a single TCP port plus a TCP port range, an unrelated isolator, a Linux
# capability retain-set isolator, and two endpoint annotations.
SIMPLE_MANIFEST = {
    'apps': [
        {
            'name': u'app-name',
            'image': {
                'name': SIMPLE_MANIFEST_IMAGE_NAME,
            },
            'app': {
                'exec': SIMPLE_MANIFEST_EXEC_VALUE,
                'user': u'chronos',
                'group': u'chronos',
                'workingDirectory': u'/opt/work',
                'ports': [
                    {
                        'name': u'health',
                        'port': SIMPLE_MANIFEST_TCP_PORT,
                        'protocol': u'tcp',
                        'socketActivated': False
                    },
                    {
                        'name': u'port_range_test_port',
                        'port': SIMPLE_MANIFEST_PORT_RANGE_BASE,
                        'count': SIMPLE_MANIFEST_PORT_RANGE_LEN,
                        'protocol': u'tcp',
                        'socketActivated': False
                    }
                ],
                'isolators': [
                    {
                        'name': u'unrelated/isolator',
                        'value': {
                            'i_am': 'a little tea cup',
                        }
                    },
                    {
                        'name': u'os/linux/capabilities-retain-set',
                        'value': {
                            'set': SIMPLE_MANIFEST_CAPS,
                        }
                    },
                ],
            },
        },
    ],
    'annotations': [
        {
            'name': u'bruteus-endpoint-0',
            'value': SIMPLE_MANIFEST_ENDPOINT_NAME1
        },
        {
            'name': u'bruteus-endpoint-1',
            'value': SIMPLE_MANIFEST_ENDPOINT_NAME2
        }
    ]
}
+
+
class SandboxSpecGeneratorTest(cros_test_lib.MockTempDirTestCase):
  """Tests for chromite.lib.sandbox_spec_generator."""

  def setUp(self):
    """Set up a test environment."""
    self._mock_wrapper = mock.Mock()
    # Stub out account lookups so manifests don't depend on the contents of
    # a real sysroot, and capture all wrapper calls on a plain mock.
    self.PatchObject(user_db.UserDB, 'UserExists', return_value=True)
    self.PatchObject(user_db.UserDB, 'GroupExists', return_value=True)
    self.PatchObject(user_db.UserDB, 'ResolveUsername', return_value=MOCK_UID)
    self.PatchObject(user_db.UserDB, 'ResolveGroupname', return_value=MOCK_GID)
    self.PatchObject(sandbox_spec_generator, 'SandboxSpecWrapper',
                     return_value=self._mock_wrapper)
    self._generator = sandbox_spec_generator.SandboxSpecGenerator(self.tempdir)

  def testChecksForAtLeastOneExecutable(self):
    """Check that we'll throw up on manifests that don't name an executable."""
    self.assertRaises(ValueError, self._generator.GetSandboxSpec,
                      {u'apps': []}, SANDBOX_NAME)

  def testParsesSimpleManifest(self):
    """Test that we can correctly parse a simple manifest."""
    self._generator.GetSandboxSpec(SIMPLE_MANIFEST, SANDBOX_NAME)
    self.assertEqual(SIMPLE_MANIFEST_MOCK_WRAPPER_CALLS,
                     self._mock_wrapper.mock_calls)

  def testAcNameTypeChecking(self):
    """Test that we validate name fields as being Ac Name Types."""
    # Non-ASCII application names must be rejected.
    manifest = copy.deepcopy(SIMPLE_MANIFEST)
    manifest['apps'][0]['name'] = u'我喜欢乌龟'
    self.assertRaises(ValueError, self._generator.GetSandboxSpec,
                      manifest, SANDBOX_NAME)
    # Non-ASCII image names must be rejected as well.
    manifest = copy.deepcopy(SIMPLE_MANIFEST)
    manifest['apps'][0]['image']['name'] = u'我是一只猫'
    self.assertRaises(ValueError, self._generator.GetSandboxSpec,
                      manifest, SANDBOX_NAME)
diff --git a/lib/signals.py b/lib/signals.py
new file mode 100644
index 0000000..70c6bde
--- /dev/null
+++ b/lib/signals.py
@@ -0,0 +1,138 @@
+# Copyright (c) 2011-2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Signal related functionality."""
+
+from __future__ import print_function
+
+import signal
+import contextlib
+
+
def RelaySignal(handler, signum, frame):
  """Forward a received signal to |handler|, a signal.getsignal() result.

  Args:
    handler: previous handler, as returned by signal.getsignal().
    signum: signal number being relayed.
    frame: interrupted stack frame, passed through to |handler|.

  Returns:
    True if it was relayed to the target, False otherwise.
    False in particular occurs if the target isn't relayable.
  """
  if handler == signal.SIG_DFL:
    # Re-raising the default disposition correctly is fairly painful, so we
    # report failure and leave it to client code.
    return False
  if handler is not None and handler != signal.SIG_IGN:
    handler(signum, frame)
  return True
+
+
def SignalModuleUsable(_signal=signal.signal, _SIGUSR1=signal.SIGUSR1):
  """Check whether the signal module can still be used safely.

  See http://bugs.python.org/issue14173.  During the final stages of
  interpreter shutdown the signal module can segfault when accessed; this
  function probes for that state so callers can avoid touching signal.*.

  This shouldn't be used by anything other than functionality that is
  known and unavoidably invoked by finalizer code during python shutdown.

  The default arguments deliberately capture the needed pieces of the
  signal module at definition time; callers must not pass arguments, and
  the defaults must never be removed.

  Note that this functionality is intended to be removed just as soon
  as all consuming code installs their own SIGTERM handlers.
  """
  # Track any signals we receive while doing the check.
  received, original = [], None
  def _Record(signum, frame):
    received.append([signum, frame])
  try:
    # Play with SIGUSR1, since it's not particularly used: install our
    # recorder, then immediately restore what was there before.
    original = _signal(_SIGUSR1, _Record)
    _signal(_SIGUSR1, original)
    return True
  except (TypeError, AttributeError, SystemError, ValueError):
    # The first three exceptions can be thrown depending on the state of the
    # signal module internal Handlers array; we catch all, and interpret it
    # as if we were invoked during sys.exit cleanup.
    # The last exception can be thrown if we're trying to be used in a thread
    # which is not the main one.  This can come up with standard python modules
    # such as BaseHTTPServer.HTTPServer.
    return False
  finally:
    # And now relay those signals to the original handler.  Not all may
    # be delivered- the first may throw an exception for example.  Not our
    # problem however.
    for signum, frame in received:
      original(signum, frame)
+
+
@contextlib.contextmanager
def DeferSignals(*args):
  """Context manager to defer signals during a critical block.

  If a signal comes in for the masked signals, the original handler
  is run after the critical block has exited.

  Args:
    args: Which signals to ignore.  If none are given, defaults to
      SIGINT, SIGTERM, and SIGALRM.
  """
  signals = args
  if not signals:
    signals = [signal.SIGINT, signal.SIGTERM, signal.SIGALRM]

  # Rather than directly setting the handler, we first pull the handlers, then
  # set the new handler.  The ordering has to be done this way to ensure that
  # if someone passes in a bad signum (or a signal lands prior to starting the
  # critical block), we can restore things to pristine state.
  handlers = dict((signum, signal.getsignal(signum)) for signum in signals)

  # Signals delivered during the critical block accumulate here.
  received = []
  def handler(signum, frame):
    received.append((signum, frame))

  try:
    for signum in signals:
      signal.signal(signum, handler)

    yield

  finally:
    # Use items() rather than the Python 2-only iteritems() so this keeps
    # working under Python 3 as well (items() behaves identically here).
    for signum, original in handlers.items():
      signal.signal(signum, original)

    # Replay any deferred signals to their original handlers.
    for signum, frame in received:
      RelaySignal(handlers[signum], signum, frame)
+
+
def StrSignal(sig_num):
  """Convert a signal number to the symbolic name.

  Note: Some signal numbers have multiple names, so you might get
  back a confusing result like "SIGIOT|SIGABRT".  Since they have
  the same signal number, it's impossible to say which one is right.

  Args:
    sig_num: The numeric signal you wish to convert.

  Returns:
    A string of the signal name(s).
  """
  # Handle realtime signals first since they are unnamed.
  if sig_num >= signal.SIGRTMIN and sig_num < signal.SIGRTMAX:
    return 'SIGRT_%i' % sig_num

  # Probe the module looking for matching signal constants.  Use items()
  # rather than the Python 2-only iteritems() so this also works under
  # Python 3 (items() behaves identically here).
  sig_names = [name for name, num in signal.__dict__.items()
               if name.startswith('SIG') and num == sig_num]
  if sig_names:
    return '|'.join(sig_names)
  return 'SIG_%i' % sig_num
diff --git a/lib/signing.py b/lib/signing.py
new file mode 100644
index 0000000..9a2c4ae
--- /dev/null
+++ b/lib/signing.py
@@ -0,0 +1,17 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""All things Chrome OS signing related"""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import constants
+
+
# Directory holding chromite's signing support files.
SIGNING_DIR = os.path.join(constants.CHROMITE_DIR, 'signing')
# Signer instructions from the crostools checkout in the source tree.
INPUT_INSN_DIR = os.path.join(constants.SOURCE_ROOT, 'crostools',
                              'signer_instructions')
# Signer instructions bundled with chromite itself; the TEST_ prefix
# suggests these are meant for tests -- confirm with consumers.
TEST_INPUT_INSN_DIR = os.path.join(SIGNING_DIR, 'signer_instructions')
diff --git a/lib/stats.py b/lib/stats.py
new file mode 100644
index 0000000..e339e75
--- /dev/null
+++ b/lib/stats.py
@@ -0,0 +1,208 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Library for uploading command stats to AppEngine."""
+
+from __future__ import print_function
+
+import contextlib
+import os
+import urllib
+import urllib2
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import timeout_util
+
+
class Stats(object):
  """Entity object for a stats entry."""

  # These attributes correspond to the fields of a stats record.
  __slots__ = (
      'board',
      'cmd_args',
      'cmd_base',
      'cmd_line',
      'cpu_count',
      'cpu_type',
      'host',
      'package_count',
      'run_time',
      'username',
  )

  def __init__(self, **kwargs):
    """Initialize the record from keyword arguments.

    **kwargs keys need to correspond to elements in __slots__.  The
    cmd_args, cmd_base, and cmd_line arguments may be given as lists.

    If unset, the |username| and |host| attributes will be determined
    automatically.
    """
    if kwargs.get('username') is None:
      kwargs['username'] = git.GetProjectUserEmail(os.path.dirname(__file__))

    if kwargs.get('host') is None:
      kwargs['host'] = cros_build_lib.GetHostName(fully_qualified=True)

    # Flatten command fields that arrived as sequences into a single
    # space-separated string of repr()s.
    for field in ('cmd_args', 'cmd_base', 'cmd_line'):
      value = kwargs.get(field)
      if isinstance(value, (list, tuple,)):
        kwargs[field] = ' '.join(repr(piece) for piece in value)

    for field in self.__slots__:
      setattr(self, field, kwargs.pop(field, None))
    if kwargs:
      raise TypeError('Unknown options specified %r:' % kwargs)

  @property
  def data(self):
    """Retrieves a dictionary representing the fields that are set."""
    return dict((field, getattr(self, field)) for field in self.__slots__
                if getattr(self, field) is not None)

  @classmethod
  def SafeInit(cls, **kwargs):
    """Construct a Stats object, catching any exceptions.

    See Stats.__init__() for argument list.

    Returns:
      A Stats() instance if things went smoothly, and None if exceptions were
      caught in the process.
    """
    try:
      return cls(**kwargs)
    except Exception:
      logging.error('Exception during stats upload.', exc_info=True)
      return None
+
+
class StatsUploader(object):
  """Functionality to upload the stats to the AppEngine server."""

  # To test with an app engine instance on localhost, set envvar
  # export CROS_BUILD_STATS_SITE="http://localhost:8080"
  _PAGE = 'upload_command_stats'
  _DEFAULT_SITE = 'https://chromiumos-build-stats.appspot.com'
  _SITE = os.environ.get('CROS_BUILD_STATS_SITE', _DEFAULT_SITE)
  URL = '%s/%s' % (_SITE, _PAGE)
  UPLOAD_TIMEOUT = 5

  # Touch this file in your home directory to opt out of uploads.
  _DISABLE_FILE = '~/.disable_build_stats_upload'

  _DOMAIN_WHITELIST = (constants.CORP_DOMAIN, constants.GOLO_DOMAIN)
  _EMAIL_WHITELIST = (constants.GOOGLE_EMAIL, constants.CHROMIUM_EMAIL)

  TIMEOUT_ERROR = 'Timed out during command stat upload - waited %s seconds'
  ENVIRONMENT_ERROR = 'Exception during command stat upload.'
  HTTPURL_ERROR = 'Exception during command stat upload to %s.'

  @classmethod
  def _UploadConditionsMet(cls, stats):
    """Return True if upload conditions are met."""
    def EndsWithAny(value, suffixes):
      return any(value.endswith(suffix) for suffix in suffixes)

    upload = False
    # Verify that host domain is in golo.chromium.org or corp.google.com.
    if not stats.host or not EndsWithAny(stats.host, cls._DOMAIN_WHITELIST):
      logging.debug('Host %s is not a Google machine.', stats.host)
    elif not stats.username:
      logging.debug('Unable to determine current "git id".')
    elif not EndsWithAny(stats.username, cls._EMAIL_WHITELIST):
      logging.debug('%s is not a Google or Chromium user.', stats.username)
    elif os.path.exists(osutils.ExpandPath(cls._DISABLE_FILE)):
      logging.debug('Found %s', cls._DISABLE_FILE)
    else:
      upload = True

    if not upload:
      logging.debug('Skipping stats upload.')

    return upload

  @classmethod
  def Upload(cls, stats, url=None, timeout=None):
    """Upload |stats| to |url|.

    Does nothing if upload conditions aren't met.

    Args:
      stats: A Stats object to upload.
      url: The url to send the request to.
      timeout: A timeout value to set, in seconds.
    """
    url = cls.URL if url is None else url
    timeout = cls.UPLOAD_TIMEOUT if timeout is None else timeout

    if not cls._UploadConditionsMet(stats):
      return

    with timeout_util.Timeout(timeout):
      try:
        cls._Upload(stats, url)
      # Stats upload errors are silenced, for the sake of user experience.
      except timeout_util.TimeoutError:
        logging.debug(cls.TIMEOUT_ERROR, timeout)
      except urllib2.HTTPError as e:
        # HTTPError has a geturl() method, but it relies on self.url, which
        # is not always set.  In looking at source, self.filename equals url.
        logging.debug(cls.HTTPURL_ERROR, e.filename, exc_info=True)
      except EnvironmentError:
        logging.debug(cls.ENVIRONMENT_ERROR, exc_info=True)

  @classmethod
  def _Upload(cls, stats, url):
    """POST the url-encoded |stats| data to |url|."""
    logging.debug('Uploading command stats to %r', url)
    encoded = urllib.urlencode(stats.data)
    urllib2.urlopen(urllib2.Request(url), encoded)
+
+
UNCAUGHT_UPLOAD_ERROR = 'Uncaught command stats exception'


@contextlib.contextmanager
def UploadContext():
  """Provides a context where stats are uploaded in the background.

  Yields:
    A queue that accepts an arg-list of the format [stats, url, timeout].
  """
  try:
    # We need to use parallel.BackgroundTaskRunner, and not
    # parallel.RunParallelTasks, because with RunParallelTasks, both the
    # uploader and the subcommand are treated as background tasks, and the
    # subcommand will lose responsiveness, since its output will be buffered.
    with parallel.BackgroundTaskRunner(
        StatsUploader.Upload, processes=1) as queue:
      yield queue
  except parallel.BackgroundFailure as e:
    # Display unexpected errors, but don't propagate the error.
    # KeyboardInterrupts are OK to skip since the user initiated it.
    if (e.exc_infos and
        all(exc_info.type == KeyboardInterrupt for exc_info in e.exc_infos)):
      return
    # Use the module-level constant (it was previously defined but unused,
    # with the literal duplicated here) so the message stays in one place.
    logging.error(UNCAUGHT_UPLOAD_ERROR, exc_info=True)
diff --git a/lib/stats_unittest b/lib/stats_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/stats_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/stats_unittest.py b/lib/stats_unittest.py
new file mode 100644
index 0000000..bc82ac0
--- /dev/null
+++ b/lib/stats_unittest.py
@@ -0,0 +1,246 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for stats."""
+
+from __future__ import print_function
+
+import time
+import urllib2
+
+from chromite.lib import cros_test_lib
+from chromite.lib import parallel
+from chromite.lib import parallel_unittest
+from chromite.lib import partial_mock
+from chromite.lib import stats
+from chromite.lib import timeout_util
+
+
+# pylint: disable=W0212
+
+
+class StatsUploaderMock(partial_mock.PartialMock):
+  """Mocks out stats.StatsUploader."""
+
+  TARGET = 'chromite.lib.stats.StatsUploader'
+  ATTRS = ('URL', 'UPLOAD_TIMEOUT', '_Upload')
+
+  # A deliberately invalid URL so nothing is ever reachable even if a code
+  # path slips past the _Upload stub below.
+  URL = 'Invalid Url'
+  # Increased timeout so that we don't get errors when the machine is loaded.
+  UPLOAD_TIMEOUT = 30
+
+  def _Upload(self, _inst, *_args, **_kwargs):
+    """Disable actual uploading."""
+    return
+
+
+class StatsMock(partial_mock.PartialMock):
+  """Mocks out stats.Stats."""
+
+  TARGET = 'chromite.lib.stats.Stats'
+  ATTRS = ('__init__',)
+
+  def __init__(self):
+    partial_mock.PartialMock.__init__(self)
+    # When True, the patched constructor raises, letting tests exercise the
+    # Stats.SafeInit error path.
+    self.init_exception = False
+
+  def _target__init__(self, _inst, **kwargs):
+    """Fill in good values for username and host."""
+    if self.init_exception:
+      raise Exception('abc')
+
+    # Default to a google.com user on a corp host so upload conditions are
+    # met unless a test overrides them.
+    kwargs.setdefault('username', 'monkey@google.com')
+    kwargs.setdefault('host', 'typewriter.mtv.corp.google.com')
+    return self.backup['__init__'](_inst, **kwargs)
+
+
+class StatsModuleMock(partial_mock.PartialMock):
+  """Mock out everything needed to use this module."""
+
+  def __init__(self):
+    partial_mock.PartialMock.__init__(self)
+    self.uploader_mock = StatsUploaderMock()
+    self.stats_mock = StatsMock()
+    self.parallel_mock = parallel_unittest.ParallelMock()
+
+  def PreStart(self):
+    # Start the child patchers so their lifecycle is tied to this mock's.
+    self.StartPatcher(self.uploader_mock)
+    self.StartPatcher(self.stats_mock)
+    self.StartPatcher(self.parallel_mock)
+
+
+class StatsCreationTest(cros_test_lib.MockLoggingTestCase):
+  """Test the stats creation functionality."""
+
+  def VerifyStats(self, cmd_stat):
+    """Sanity-check that |cmd_stat| was populated with host/username."""
+    self.assertNotEquals(cmd_stat.host, None)
+    self.assertNotEquals(cmd_stat.username, None)
+
+  def testIt(self):
+    """Test normal stats creation, exercising default functionality."""
+    cmd_stat = stats.Stats()
+    self.VerifyStats(cmd_stat)
+
+  def testSafeInitNormal(self):
+    """Test normal safe stats creation."""
+    cmd_stat = stats.Stats.SafeInit()
+    self.VerifyStats(cmd_stat)
+
+  def testSafeInitException(self):
+    """Safe stats creation handles exceptions properly."""
+    with cros_test_lib.LoggingCapturer() as logs:
+      # An unsupported kwarg makes the underlying constructor raise;
+      # SafeInit should swallow it, log it, and return None.
+      cmd_stat = stats.Stats.SafeInit(monkey='foon')
+      self.assertEquals(cmd_stat, None)
+      self.AssertLogsContain(logs, 'Exception')
+
+
+class ConditionsTest(cros_test_lib.MockTestCase):
+  """Test UploadConditionsMet."""
+
+  def testConditionsMet(self):
+    """A chromium.org bot user on a golo host may upload."""
+    stat = stats.Stats(
+        username='chrome-bot@chromium.org', host='build42-m2.golo.chromium.org')
+    self.assertTrue(stats.StatsUploader._UploadConditionsMet(stat))
+
+  def testConditionsMet2(self):
+    """A google.com user on a corp host may upload."""
+    stat = stats.Stats(
+        username='monkey@google.com', host='typewriter.mtv.corp.google.com')
+    self.assertTrue(stats.StatsUploader._UploadConditionsMet(stat))
+
+  def testConditionsNotMet(self):
+    """A non-google/chromium username disallows upload."""
+    stat = stats.Stats(
+        username='monkey@home.com', host='typewriter.mtv.corp.google.com')
+    self.assertFalse(stats.StatsUploader._UploadConditionsMet(stat))
+
+  def testConditionsNotMet2(self):
+    """A non-corp host disallows upload."""
+    stat = stats.Stats(
+        username='monkey@google.com', host='typewriter.noname.com')
+    self.assertFalse(stats.StatsUploader._UploadConditionsMet(stat))
+
+
+class UploadTest(cros_test_lib.MockLoggingTestCase):
+  """Test the upload functionality.
+
+  For the tests that validate debug log messages are printed, note that unit
+  tests are run with debug level logging.DEBUG, so logging.debug() messages in
+  the code under test will be displayed.
+  """
+
+  def setUp(self):
+    self.module_mock = StatsModuleMock()
+    self.StartPatcher(self.module_mock)
+
+    self.cmd_stats = stats.Stats(
+        host='test.golo.chromium.org', username='chrome-bot@chromium.org')
+
+  def testNormalRun(self):
+    """Going for code coverage."""
+    self.module_mock.uploader_mock.UnMockAttr('_Upload')
+    self.PatchObject(urllib2, 'urlopen', autospec=True)
+    stats.StatsUploader.Upload(self.cmd_stats)
+    with cros_test_lib.LoggingCapturer() as logs:
+      # pylint: disable=E1101
+      self.assertEquals(urllib2.urlopen.call_count, 1)
+      # Make sure no error messages are output in the normal case.
+      self.AssertLogsContain(logs, stats.StatsUploader.ENVIRONMENT_ERROR,
+                             inverted=True)
+      timeout_regex = stats.StatsUploader.TIMEOUT_ERROR % r'\d+'
+      self.AssertLogsMatch(logs, timeout_regex, inverted=True)
+
+  def CheckSuppressException(self, e, msg):
+    """Verifies we don't propagate a given exception during upload."""
+    with cros_test_lib.LoggingCapturer() as logs:
+      stats.StatsUploader._Upload.side_effect = e
+      # Verify the exception is suppressed when error_ok=True
+      stats.StatsUploader.Upload(self.cmd_stats)
+      # Note: the default log level for unit tests is logging.DEBUG
+      self.AssertLogsContain(logs, msg)
+
+  def testUploadTimeoutIgnore(self):
+    """We don't propagate timeouts during upload."""
+    self.CheckSuppressException(
+        timeout_util.TimeoutError(),
+        stats.StatsUploader.TIMEOUT_ERROR
+        % (stats.StatsUploader.UPLOAD_TIMEOUT,))
+
+  def testEnvironmentErrorIgnore(self):
+    """We don't propagate any environment errors during upload."""
+    url = 'http://somedomainhere.com/foo/bar/uploader'
+    env_msg = stats.StatsUploader.ENVIRONMENT_ERROR
+    url_msg = stats.StatsUploader.HTTPURL_ERROR % url
+    self.CheckSuppressException(EnvironmentError(), env_msg)
+    self.CheckSuppressException(urllib2.HTTPError(url, None, None, None, None),
+                                url_msg)
+    self.CheckSuppressException(urllib2.URLError(""), env_msg)
+
+  def testKeyboardInterruptError(self):
+    """We propagate KeyboardInterrupts."""
+    stats.StatsUploader._Upload.side_effect = KeyboardInterrupt()
+    # Verify the exception is suppressed when error_ok=True
+    self.assertRaises(KeyboardInterrupt, stats.StatsUploader.Upload,
+                      self.cmd_stats)
+
+  def testUploadTimeout(self):
+    """We timeout when the upload takes too long."""
+    def Sleep(*_args, **_kwargs):
+      time.sleep(stats.StatsUploader.UPLOAD_TIMEOUT)
+
+    stats.StatsUploader._Upload.side_effect = Sleep
+    with cros_test_lib.LoggingCapturer() as logs:
+      stats.StatsUploader.Upload(self.cmd_stats, timeout=1)
+      self.AssertLogsContain(logs, stats.StatsUploader.TIMEOUT_ERROR % ('1',))
+
+
+class UploadContextTest(cros_test_lib.MockLoggingTestCase):
+  """Test the suppression behavior of the upload context."""
+
+  def setUp(self):
+    self.StartPatcher(StatsModuleMock())
+
+  def testNoErrors(self):
+    """Test that we don't print anything when there are no errors."""
+    with cros_test_lib.LoggingCapturer() as logs:
+      with stats.UploadContext() as queue:
+        queue.put([stats.Stats()])
+      self.AssertLogsContain(logs, stats.UNCAUGHT_UPLOAD_ERROR, inverted=True)
+      self.assertEquals(stats.StatsUploader._Upload.call_count, 1)
+
+  def testErrorSupression(self):
+    """Test exception suppression."""
+    for e in [parallel.BackgroundFailure]:
+      with cros_test_lib.LoggingCapturer() as logs:
+        # UploadContext suppresses BackgroundFailure, so the raise does not
+        # escape the inner with-block and the assertion below still runs.
+        with stats.UploadContext():
+          raise e()
+        self.AssertLogsContain(logs, stats.UNCAUGHT_UPLOAD_ERROR)
+
+  def testErrorPropagation(self):
+    """Test we propagate some exceptions."""
+    def RaiseContext(e):
+      with stats.UploadContext():
+        raise e()
+
+    for e in [KeyboardInterrupt, RuntimeError, Exception, BaseException,
+              SyntaxError,]:
+      self.assertRaises(e, RaiseContext, e)
+
+
+class UploadContextParallelTest(cros_test_lib.MockLoggingTestCase):
+  """Test UploadContext using the real parallel library."""
+
+  def testKeyboardInterruptHandling(self):
+    """Test that KeyboardInterrupts during upload aren't logged.
+
+    This must use the parallel library so that exceptions are converted into
+    BackgroundFailures as they are in a real run.
+    """
+    # Note: unlike the other test classes, StatsModuleMock is deliberately
+    # NOT started here; only _Upload is patched.
+    self.PatchObject(stats.StatsUploader, '_Upload',
+                     side_effect=KeyboardInterrupt())
+    with cros_test_lib.LoggingCapturer() as logs:
+      with stats.UploadContext() as queue:
+        queue.put([stats.Stats()])
+      self.AssertLogsContain(logs, stats.UNCAUGHT_UPLOAD_ERROR, inverted=True)
+
+
+def main(_argv):
+  """Entry point for running this test module directly."""
+  # Level 'debug' makes logging.debug() output visible to the log assertions.
+  cros_test_lib.main(level='debug', module=__name__)
diff --git a/lib/sudo.py b/lib/sudo.py
new file mode 100644
index 0000000..34fe389
--- /dev/null
+++ b/lib/sudo.py
@@ -0,0 +1,153 @@
+# Copyright (c) 2011-2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper methods and classes related to managing sudo."""
+
+from __future__ import print_function
+
+import errno
+import os
+import signal
+import subprocess
+import sys
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+
+
+class SudoKeepAlive(cros_build_lib.MasterPidContextManager):
+  """Keep sudo auth cookie fresh.
+
+  This refreshes the sudo auth cookie; this is implemented this
+  way to ensure that sudo has access to both invoking tty, and
+  will update the user's tty-less cookie.
+  see crosbug/18393.
+  """
+
+  def __init__(self, ttyless_sudo=True, repeat_interval=4):
+    """Run sudo with a noop, to reset the sudo timestamp.
+
+    Args:
+      ttyless_sudo: Whether to update the tty-less cookie.
+      repeat_interval: In minutes, the frequency to run the update.
+    """
+    cros_build_lib.MasterPidContextManager.__init__(self)
+    self._ttyless_sudo = ttyless_sudo
+    self._repeat_interval = repeat_interval
+    # Background bash loop refreshing the cookie; started in _enter.
+    self._proc = None
+    # Original CROS_SUDO_KEEP_ALIVE value, restored by _exit.
+    self._existing_keepalive_value = None
+
+  @staticmethod
+  def _IdentifyTTY():
+    """Returns the tty name of the first std stream attached to a tty.
+
+    Returns:
+      The tty device path, or 'unknown' when stdin/stdout/stderr are all
+      tty-less (e.g. fully redirected).
+    """
+    for source in (sys.stdin, sys.stdout, sys.stderr):
+      try:
+        return os.ttyname(source.fileno())
+      except EnvironmentError as e:
+        # EINVAL/ENOTTY just mean this stream isn't a tty; try the next one.
+        if e.errno not in (errno.EINVAL, errno.ENOTTY):
+          raise
+
+    return 'unknown'
+
+  def _DaemonNeeded(self):
+    """Discern which TTYs require sudo keep alive code.
+
+    Returns:
+      A string representing the set of ttys we need daemons for.
+      This will be the empty string if no daemon is needed.
+    """
+    existing = os.environ.get('CROS_SUDO_KEEP_ALIVE')
+    needed = set([self._IdentifyTTY()])
+    if self._ttyless_sudo:
+      needed.add('unknown')
+    if existing is not None:
+      # Subtract ttys an outer SudoKeepAlive instance already covers.
+      needed -= set(existing.split(':'))
+    return ':'.join(needed)
+
+  def _enter(self):
+    # Refresh sudo credentials (prompting if necessary) and spawn the
+    # background keep-alive daemon for any uncovered ttys.
+    if os.getuid() == 0:
+      cros_build_lib.Die('This script cannot be run as root.')
+
+    start_for_tty = self._DaemonNeeded()
+    if not start_for_tty:
+      # Daemon is already started.
+      return
+
+    # Note despite the impulse to use 'sudo -v' instead of 'sudo true', the
+    # builder's sudoers configuration is slightly whacked resulting in it
+    # asking for password everytime.  As such use 'sudo true' instead.
+    cmds = ['sudo -n true 2>/dev/null',
+            'sudo -n true < /dev/null > /dev/null 2>&1']
+
+    # First check to see if we're already authed.  If so, then we don't
+    # need to prompt the user for their password.
+    for idx, cmd in enumerate(cmds):
+      ret = cros_build_lib.RunCommand(
+          cmd, print_cmd=False, shell=True, error_code_ok=True)
+
+      if ret.returncode != 0:
+        tty_msg = 'Please disable tty_tickets using these instructions: %s'
+        if os.path.exists("/etc/goobuntu"):
+          url = 'https://goto.google.com/chromeos-sudoers'
+        else:
+          url = 'https://goo.gl/fz9YW'
+
+        # If ttyless sudo is not strictly required for this script, don't
+        # prompt for a password a second time. Instead, just complain.
+        if idx > 0:
+          logging.error(tty_msg, url)
+          if not self._ttyless_sudo:
+            break
+
+        # We need to go interactive and allow sudo to ask for credentials.
+        interactive_cmd = cmd.replace(' -n', '')
+        cros_build_lib.RunCommand(interactive_cmd, shell=True, print_cmd=False)
+
+        # Verify that sudo access is set up properly.
+        try:
+          cros_build_lib.RunCommand(cmd, shell=True, print_cmd=False)
+        except cros_build_lib.RunCommandError:
+          if idx == 0:
+            raise
+          cros_build_lib.Die('tty_tickets must be disabled. ' + tty_msg, url)
+
+    # Anything other than a timeout results in us shutting down.
+    repeat_interval = self._repeat_interval * 60
+    cmd = ('while :; do read -t %i; [ $? -le 128 ] && exit; %s; done' %
+           (repeat_interval, '; '.join(cmds)))
+
+    def ignore_sigint():
+      # We don't want our sudo process shutdown till we shut it down;
+      # since it's part of the session group it however gets SIGINT.
+      # Thus suppress it (which bash then inherits).
+      signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+    # stdin is a pipe: if this process dies without cleanup, the daemon's
+    # 'read' fails (status <= 128) and the loop exits on its own.
+    self._proc = subprocess.Popen(['bash', '-c', cmd], shell=False,
+                                  close_fds=True, preexec_fn=ignore_sigint,
+                                  stdin=subprocess.PIPE)
+
+    self._existing_keepalive_value = os.environ.get('CROS_SUDO_KEEP_ALIVE')
+    os.environ['CROS_SUDO_KEEP_ALIVE'] = start_for_tty
+
+  # pylint: disable=W0613
+  def _exit(self, exc_type, exc_value, traceback):
+    # Terminate the keep-alive daemon and restore the environment marker.
+    if self._proc is None:
+      return
+
+    try:
+      self._proc.terminate()
+      self._proc.wait()
+    except EnvironmentError as e:
+      # ESRCH means the daemon already exited on its own; that's fine.
+      if e.errno != errno.ESRCH:
+        raise
+
+    if self._existing_keepalive_value is not None:
+      os.environ['CROS_SUDO_KEEP_ALIVE'] = self._existing_keepalive_value
+    else:
+      os.environ.pop('CROS_SUDO_KEEP_ALIVE', None)
+
+
+def SetFileContents(path, value, cwd=None):
+  """Set a given filepath contents w/ the passed in value.
+
+  Uses 'sudo tee' so the write succeeds even when |path| is root-owned.
+
+  Args:
+    path: File to write.
+    value: Contents to write.
+    cwd: Optional working directory for the command.
+  """
+  cros_build_lib.SudoRunCommand(['tee', path], redirect_stdout=True,
+                                print_cmd=False, input=value, cwd=cwd)
diff --git a/lib/sysroot_lib.py b/lib/sysroot_lib.py
new file mode 100644
index 0000000..b04d4bf
--- /dev/null
+++ b/lib/sysroot_lib.py
@@ -0,0 +1,629 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities to create sysroots."""
+
+from __future__ import print_function
+
+import glob
+import multiprocessing
+import os
+
+from chromite.cbuildbot import binhost
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import locking
+from chromite.lib import osutils
+from chromite.lib import portage_util
+from chromite.lib import toolchain
+
+
+class ConfigurationError(Exception):
+  """Raised when an invalid configuration is found."""
+
+
+# Field names available in the standard configuration file
+# (etc/make.conf.board_setup); see Sysroot.GetStandardField.
+STANDARD_FIELD_PORTDIR_OVERLAY = 'PORTDIR_OVERLAY'
+STANDARD_FIELD_CHOST = 'CHOST'
+STANDARD_FIELD_BOARD_OVERLAY = 'BOARD_OVERLAY'
+STANDARD_FIELD_BOARD_USE = 'BOARD_USE'
+STANDARD_FIELD_ARCH = 'ARCH'
+
+# Shell template for portage tool wrappers: exports the sysroot's portage
+# environment and re-execs the wrapped command under sudo.
+_PORTAGE_WRAPPER_TEMPLATE = """#!/bin/sh
+
+# If we try to use sudo when the sandbox is active, we get ugly warnings that
+# just confuse developers.  Disable the sandbox in this case by rexecing.
+if [ "${{SANDBOX_ON}}" = "1" ]; then
+  SANDBOX_ON=0 exec "$0" "$@"
+else
+  unset LD_PRELOAD
+fi
+
+export CHOST="{chost}"
+export PORTAGE_CONFIGROOT="{sysroot}"
+export SYSROOT="{sysroot}"
+if [ -z "$PORTAGE_USERNAME" ]; then
+  export PORTAGE_USERNAME=$(basename "${{HOME}}")
+fi
+export ROOT="{sysroot}"
+exec sudo -E {command} "$@"
+"""
+
+# Shell template for wrappers that simply append --board=<board>.
+_BOARD_WRAPPER_TEMPLATE = """#!/bin/sh
+exec {command} --board="{board}" "$@"
+"""
+
+# Shell template for a pkg-config wrapper scoped to the sysroot.
+_PKGCONFIG_WRAPPER_TEMPLATE = """#!/bin/bash
+
+PKG_CONFIG_LIBDIR=$(printf '%s:' "{sysroot}"/usr/*/pkgconfig)
+export PKG_CONFIG_LIBDIR
+
+export PKG_CONFIG_SYSROOT_DIR="{sysroot}"
+
+# Portage will get confused and try to "help" us by exporting this.
+# Undo that logic.
+unset PKG_CONFIG_PATH
+
+exec pkg-config "$@"
+"""
+
+# Directory where board-suffixed "friendly" wrappers are installed
+# (see Sysroot._WrapperPath).
+_wrapper_dir = '/usr/local/bin'
+
+_IMPLICIT_SYSROOT_DEPS = 'IMPLICIT_SYSROOT_DEPS'
+
+# Paths, relative to the sysroot, of the standard configuration file and the
+# key/value cache used by Get/SetCachedField.
+_CONFIGURATION_PATH = 'etc/make.conf.board_setup'
+
+_CACHE_PATH = 'var/cache/edb/chromeos'
+
+_CHROMIUMOS_OVERLAY = '/usr/local/portage/chromiumos'
+_ECLASS_OVERLAY = '/usr/local/portage/eclass-overlay'
+
+_CHROME_BINHOST_SUFFIX = '-LATEST_RELEASE_CHROME_BINHOST.conf'
+
+# Directories holding per-target binhost conf files (internal vs public).
+_INTERNAL_BINHOST_DIR = os.path.join(
+    constants.SOURCE_ROOT, 'src/private-overlays/chromeos-partner-overlay/'
+    'chromeos/binhost/target')
+_EXTERNAL_BINHOST_DIR = os.path.join(
+    constants.SOURCE_ROOT, constants.CHROMIUMOS_OVERLAY_DIR,
+    'chromeos/binhost/target')
+
+_CHROMEOS_INTERNAL_BOTO_PATH = os.path.join(
+    constants.SOURCE_ROOT, 'src', 'private-overlays', 'chromeos-overlay',
+    'googlestorage_account.boto')
+
+# Fallback generic board per architecture, used when looking up preflight
+# binhosts (see Sysroot._PreflightBinhosts).
+_ARCH_MAPPING = {
+    'amd64': 'amd64-generic',
+    'x86': 'x86-generic',
+    'arm': 'arm-generic',
+    'mips': 'mipsel-o32-generic',
+}
+
+
+def _CreateWrapper(wrapper_path, template, **kwargs):
+  """Creates a wrapper from a given template.
+
+  Args:
+    wrapper_path: path to the wrapper.
+    template: wrapper template.
+    kwargs: fields to be set in the template.
+  """
+  # Write and chmod via sudo since wrapper locations are typically
+  # root-owned.
+  osutils.WriteFile(wrapper_path, template.format(**kwargs), makedirs=True,
+                    sudo=True)
+  cros_build_lib.SudoRunCommand(['chmod', '+x', wrapper_path], print_cmd=False,
+                                redirect_stderr=True)
+
+
+def _NotEmpty(filepath):
+  """Returns True if |filepath| is not empty.
+
+  Args:
+    filepath: path to a file.
+
+  Returns:
+    Truthy when the file exists and has non-whitespace content.  Note the
+    value is actually the stripped content (or False/''), not a strict bool;
+    callers rely only on truthiness.
+  """
+  return os.path.exists(filepath) and osutils.ReadFile(filepath).strip()
+
+
+def _DictToKeyValue(dictionary):
+  """Formats dictionary in to a key=value string.
+
+  Args:
+    dictionary: a python dictionary.
+
+  Returns:
+    Newline-separated key="value" lines, sorted by key for deterministic
+    output.
+  """
+  output = []
+  for key in sorted(dictionary.keys()):
+    output.append('%s="%s"' % (key, dictionary[key]))
+
+  return '\n'.join(output)
+
+
+class Sysroot(object):
+  """Class that encapsulate the interaction with sysroots."""
+
+  def __init__(self, path):
+    """Initializes a Sysroot wrapper for |path|.
+
+    Args:
+      path: Path to the sysroot directory.
+    """
+    self.path = path
+    # Standard board_setup config read by GetStandardField.
+    self._config_file = os.path.join(path, _CONFIGURATION_PATH)
+    # Key/value cache used by Get/SetCachedField, guarded by a flock file.
+    self._cache_file = os.path.join(path, _CACHE_PATH)
+    self._cache_file_lock = self._cache_file + '.lock'
+
+  def GetStandardField(self, field):
+    """Returns the value of a standard field.
+
+    Args:
+      field: Field from the standard configuration file to get.
+        One of STANDARD_FIELD_* from above.
+
+    Returns:
+      The field's value as a string, or None when the field is not set.
+    """
+    return osutils.SourceEnvironment(self._config_file,
+                                     [field], multiline=True).get(field)
+
+  def GetCachedField(self, field):
+    """Returns the value of |field| in the sysroot cache file.
+
+    Access to the cache is thread-safe as long as we access it through this
+    methods or the bash helper in common.sh.
+
+    Args:
+      field: name of the field.
+
+    Returns:
+      The cached value, or None if the cache file or the field is missing.
+    """
+    if not os.path.exists(self._cache_file):
+      return None
+
+    # Shared (read) flock: concurrent readers are fine, but we must not
+    # observe a writer rewriting the file.
+    with locking.FileLock(
+        self._cache_file_lock, locktype=locking.FLOCK,
+        world_writable=True).read_lock():
+      return osutils.SourceEnvironment(self._cache_file, [field]).get(field)
+
+  def SetCachedField(self, field, value):
+    """Sets |field| to |value| in the sysroot cache file.
+
+    Access to the cache is thread-safe as long as we access it through this
+    methods or the bash helper in common.sh.
+
+    Args:
+      field: name of the field.
+      value: value to set. If |value| is None, the field is unset.
+
+    Raises:
+      ValueError: |value| contains a character the cache format cannot
+        represent safely.
+    """
+    # TODO(bsimonnet): add support for values with quotes and newlines.
+    # crbug.com/476764.
+    for symbol in '\n`$"\\':
+      if value and symbol in value:
+        raise ValueError('Cannot use \\n, `, $, \\ or " in cached value.')
+
+    # Exclusive (write) flock while the whole file is rewritten in place.
+    with locking.FileLock(
+        self._cache_file_lock, locktype=locking.FLOCK,
+        world_writable=True).write_lock():
+      lines = []
+      if os.path.exists(self._cache_file):
+        lines = osutils.ReadFile(self._cache_file).splitlines()
+
+        # Remove the old value for field if it exists.
+        lines = [l for l in lines if not l.startswith(field + '=')]
+
+      if value is not None:
+        lines.append('%s="%s"' % (field, value))
+      osutils.WriteFile(self._cache_file, '\n'.join(lines), sudo=True)
+
+  def _WrapperPath(self, command, friendly_name=None):
+    """Returns the path to the wrapper for |command|.
+
+    Args:
+      command: command to wrap.
+      friendly_name: suffix to add to the command name. If None, the wrapper
+        will be created in the sysroot.
+
+    Returns:
+      <_wrapper_dir>/<command>-<friendly_name> when a friendly name is given,
+      else <sysroot>/build/bin/<command>.
+    """
+    if friendly_name:
+      return os.path.join(_wrapper_dir, '%s-%s' % (command, friendly_name))
+    return os.path.join(self.path, 'build', 'bin', command)
+
+  def CreateAllWrappers(self, friendly_name=None):
+    """Creates all the wrappers.
+
+    Creates all portage tools wrappers, plus wrappers for gdb, cros_workon and
+    pkg-config.
+
+    Args:
+      friendly_name: if not None, create friendly wrappers with |friendly_name|
+        added to the command.
+    """
+    chost = self.GetStandardField(STANDARD_FIELD_CHOST)
+    for cmd in ('ebuild', 'eclean', 'emaint', 'equery', 'portageq', 'qcheck',
+                'qdepends', 'qfile', 'qlist', 'qmerge', 'qsize'):
+      args = {'sysroot': self.path, 'chost': chost, 'command': cmd}
+      # The in-sysroot wrapper is always created; the friendly one only when
+      # a name was provided.
+      if friendly_name:
+        _CreateWrapper(self._WrapperPath(cmd, friendly_name),
+                       _PORTAGE_WRAPPER_TEMPLATE, **args)
+      _CreateWrapper(self._WrapperPath(cmd),
+                     _PORTAGE_WRAPPER_TEMPLATE, **args)
+
+    if friendly_name:
+      _CreateWrapper(self._WrapperPath('emerge', friendly_name),
+                     _PORTAGE_WRAPPER_TEMPLATE, sysroot=self.path, chost=chost,
+                     command='emerge --root-deps',
+                     source_root=constants.SOURCE_ROOT)
+
+      _CreateWrapper(self._WrapperPath('cros_workon', friendly_name),
+                     _BOARD_WRAPPER_TEMPLATE, board=friendly_name,
+                     command='cros_workon')
+      _CreateWrapper(self._WrapperPath('gdb', friendly_name),
+                     _BOARD_WRAPPER_TEMPLATE, board=friendly_name,
+                     command='cros_gdb')
+      _CreateWrapper(self._WrapperPath('pkg-config', friendly_name),
+                     _PKGCONFIG_WRAPPER_TEMPLATE, sysroot=self.path)
+
+    _CreateWrapper(self._WrapperPath('pkg-config'),
+                   _PKGCONFIG_WRAPPER_TEMPLATE, sysroot=self.path)
+    _CreateWrapper(self._WrapperPath('emerge'), _PORTAGE_WRAPPER_TEMPLATE,
+                   sysroot=self.path, chost=chost, command='emerge --root-deps',
+                   source_root=constants.SOURCE_ROOT)
+
+    # Create a link to the debug symbols in the chroot so that gdb can detect
+    # them.
+    debug_symlink = os.path.join('/usr/lib/debug', self.path.lstrip('/'))
+    sysroot_debug = os.path.join(self.path, 'usr/lib/debug')
+    osutils.SafeMakedirs(os.path.dirname(debug_symlink), sudo=True)
+    osutils.SafeMakedirs(sysroot_debug, sudo=True)
+
+    osutils.SafeSymlink(sysroot_debug, debug_symlink, sudo=True)
+
+  def _GenerateConfig(self, toolchains, board_overlays, portdir_overlays,
+                      header, **kwargs):
+    """Create common config settings for boards and bricks.
+
+    Args:
+      toolchains: ToolchainList object to use.
+      board_overlays: List of board overlays.
+      portdir_overlays: List of portage overlays.
+      header: Header comment string; must start with #.
+      kwargs: Additional configuration values to set.
+
+    Returns:
+      Configuration string.
+
+    Raises:
+      ConfigurationError: Could not generate a valid configuration.
+    """
+    config = {}
+
+    default_toolchains = toolchain.FilterToolchains(toolchains, 'default', True)
+    if not default_toolchains:
+      raise ConfigurationError('No default toolchain could be found.')
+    # NOTE(review): dict.keys()[0] is Python 2 only; Python 3 would need
+    # list(default_toolchains)[0].
+    config['CHOST'] = default_toolchains.keys()[0]
+    config['ARCH'] = toolchain.GetArchForTarget(config['CHOST'])
+
+    config['BOARD_OVERLAY'] = '\n'.join(board_overlays)
+    config['PORTDIR_OVERLAY'] = '\n'.join(portdir_overlays)
+
+    # Default to one make job per available core.
+    config['MAKEOPTS'] = '-j%s' % str(multiprocessing.cpu_count())
+    config['ROOT'] = self.path + '/'
+    config['PKG_CONFIG'] = self._WrapperPath('pkg-config')
+
+    # Caller-provided values override the defaults computed above.
+    config.update(kwargs)
+
+    return '\n'.join((header, _DictToKeyValue(config)))
+
+  def GenerateBoardConfig(self, board):
+    """Generates the configuration for a given board.
+
+    Args:
+      board: board name to use to generate the configuration.
+
+    Returns:
+      The board_setup configuration as a string.
+    """
+    toolchains = toolchain.GetToolchainsForBoard(board)
+
+    # Compute the overlay list.
+    portdir_overlays = portage_util.FindOverlays(constants.BOTH_OVERLAYS, board)
+    prefix = os.path.join(constants.SOURCE_ROOT, 'src', 'third_party')
+    # BOARD_OVERLAY excludes the generic overlays under src/third_party.
+    board_overlays = [o for o in portdir_overlays if not o.startswith(prefix)]
+
+    header = "# Created by cros_sysroot_utils from --board=%s." % board
+    return self._GenerateConfig(toolchains, board_overlays, portdir_overlays,
+                                header, BOARD_USE=board)
+
+  def GenerateBrickConfig(self, bricks, bsp=None):
+    """Generates the configuration for a given brick stack and bsp.
+
+    Args:
+      bricks: The brick stack, expanded, excluding the bsp.
+      bsp: BSP to use.
+
+    Returns:
+      The brick configuration as a string.
+    """
+    brick_list = bricks
+    if bsp:
+      # The BSP's own brick stack sits below the explicitly listed bricks.
+      brick_list = bsp.BrickStack() + brick_list
+
+    board_overlays = [b.OverlayDir() for b in brick_list]
+    portdir_overlays = [_CHROMIUMOS_OVERLAY, _ECLASS_OVERLAY] + board_overlays
+
+    # If the bsp is not set use the highest priority brick. This is meant to
+    # preserve support for building with --brick.
+    # TODO(bsimonnet): remove this once we remove support for --brick
+    # (brbug.com/916).
+    bsp = bsp or bricks[-1]
+    toolchains = toolchain.GetToolchainsForBrick(bsp.brick_locator)
+
+    header = '# Autogenerated by chromite.lib.sysroot_lib.'
+    return self._GenerateConfig(toolchains, board_overlays, portdir_overlays,
+                                header)
+
+  def WriteConfig(self, config):
+    """Writes the configuration.
+
+    Args:
+      config: configuration to use.
+    """
+    path = os.path.join(self.path, _CONFIGURATION_PATH)
+    # sudo: the sysroot is typically root-owned.
+    osutils.WriteFile(path, config, makedirs=True, sudo=True)
+
+  def GenerateMakeConf(self, accepted_licenses=None):
+    """Generates the board specific make.conf.
+
+    Args:
+      accepted_licenses: Licenses accepted by portage as a string.
+
+    Returns:
+      The make.conf file as a python string.
+    """
+    config = ["""# AUTO-GENERATED FILE. DO NOT EDIT.
+
+  # Source make.conf from each overlay."""]
+
+    overlay_list = self.GetStandardField(STANDARD_FIELD_BOARD_OVERLAY)
+    boto_config = ''
+    for overlay in overlay_list.splitlines():
+      make_conf = os.path.join(overlay, 'make.conf')
+      boto_file = os.path.join(overlay, 'googlestorage_account.boto')
+      if os.path.isfile(make_conf):
+        config.append('source %s' % make_conf)
+
+      # Later overlays win: keep the last boto file found.
+      if os.path.isfile(boto_file):
+        boto_config = boto_file
+
+    # If there is a boto file in the chromeos internal overlay, use it as it
+    # will have access to the most stuff.
+    if os.path.isfile(_CHROMEOS_INTERNAL_BOTO_PATH):
+      boto_config = _CHROMEOS_INTERNAL_BOTO_PATH
+
+    gs_fetch_binpkg = os.path.join(constants.SOURCE_ROOT, 'chromite', 'bin',
+                                   'gs_fetch_binpkg')
+    gsutil_cmd = '%s \\"${URI}\\" \\"${DISTDIR}/${FILE}\\"' % gs_fetch_binpkg
+    config.append('BOTO_CONFIG="%s"' % boto_config)
+    config.append('FETCHCOMMAND_GS="bash -c \'BOTO_CONFIG=%s %s\'"'
+                  % (boto_config, gsutil_cmd))
+    config.append('RESUMECOMMAND_GS="$FETCHCOMMAND_GS"')
+
+    if accepted_licenses:
+      config.append('ACCEPT_LICENSE="%s"' % accepted_licenses)
+
+    return '\n'.join(config)
+
+  def GenerateBinhostConf(self, chrome_only=False, local_only=False):
+    """Returns the binhost configuration.
+
+    Args:
+      chrome_only: If True, generate only the binhost for chrome.
+      local_only: If True, use binary packages from local boards only.
+
+    Returns:
+      The binhost configuration file contents as a string (possibly empty).
+    """
+    board = self.GetStandardField(STANDARD_FIELD_BOARD_USE)
+    if local_only:
+      if not board:
+        return ''
+      # TODO(bsimonnet): Refactor cros_generate_local_binhosts into a function
+      # here and remove the following call.
+      local_binhosts = cros_build_lib.RunCommand(
+          [os.path.join(constants.CHROMITE_BIN_DIR,
+                        'cros_generate_local_binhosts'), '--board=%s' % board],
+          print_cmd=False, capture_output=True).output
+      return '\n'.join([local_binhosts,
+                        'PORTAGE_BINHOST="$LOCAL_BINHOST"'])
+
+    config = []
+    # A chrome binhost can only be looked up when a board name is set.
+    chrome_binhost = board and self._ChromeBinhost(board)
+    preflight_binhost, preflight_binhost_internal = self._PreflightBinhosts(
+        board)
+
+    if chrome_only:
+      if chrome_binhost:
+        return '\n'.join(['source %s' % chrome_binhost,
+                          'PORTAGE_BINHOST="$LATEST_RELEASE_CHROME_BINHOST"'])
+      else:
+        return ''
+
+    config.append("""
+# FULL_BINHOST is populated by the full builders. It is listed first because it
+# is the lowest priority binhost. It is better to download packages from the
+# preflight binhost because they are fresher packages.
+PORTAGE_BINHOST="$FULL_BINHOST"
+""")
+
+    if preflight_binhost:
+      config.append("""
+# PREFLIGHT_BINHOST is populated by the preflight builders. If the same
+# package is provided by both the preflight and full binhosts, the package is
+# downloaded from the preflight binhost.
+source %s
+PORTAGE_BINHOST="$PORTAGE_BINHOST $PREFLIGHT_BINHOST"
+""" % preflight_binhost)
+
+    if preflight_binhost_internal:
+      config.append("""
+# The internal PREFLIGHT_BINHOST is populated by the internal preflight
+# builders. It takes priority over the public preflight binhost.
+source %s
+PORTAGE_BINHOST="$PORTAGE_BINHOST $PREFLIGHT_BINHOST"
+""" % preflight_binhost_internal)
+
+    if chrome_binhost:
+      config.append("""
+# LATEST_RELEASE_CHROME_BINHOST provides prebuilts for chromeos-chrome only.
+source %s
+PORTAGE_BINHOST="$PORTAGE_BINHOST $LATEST_RELEASE_CHROME_BINHOST"
+""" % chrome_binhost)
+
+    return '\n'.join(config)
+
+  def _ChromeBinhost(self, board):
+    """Gets the latest chrome binhost for |board|.
+
+    Args:
+      board: The board to use.
+
+    Returns:
+      Path to a non-empty binhost conf file, or None when no prebuilt
+      mapping or candidate file is found.
+    """
+    extra_useflags = os.environ.get('USE', '').split()
+    compat_id = binhost.CalculateCompatId(board, extra_useflags)
+    internal_config = binhost.PrebuiltMapping.GetFilename(
+        constants.SOURCE_ROOT, 'chrome')
+    external_config = binhost.PrebuiltMapping.GetFilename(
+        constants.SOURCE_ROOT, 'chromium', internal=False)
+    binhost_dirs = (_INTERNAL_BINHOST_DIR, _EXTERNAL_BINHOST_DIR)
+
+    # Prefer the internal mapping when both exist.
+    if os.path.exists(internal_config):
+      pfq_configs = binhost.PrebuiltMapping.Load(internal_config)
+    elif os.path.exists(external_config):
+      pfq_configs = binhost.PrebuiltMapping.Load(external_config)
+    else:
+      return None
+
+    for key in pfq_configs.GetPrebuilts(compat_id):
+      for binhost_dir in binhost_dirs:
+        binhost_file = os.path.join(binhost_dir,
+                                    key.board + _CHROME_BINHOST_SUFFIX)
+        # Make sure the binhost file is not empty. We sometimes empty the file
+        # to force clients to use another binhost.
+        if _NotEmpty(binhost_file):
+          return binhost_file
+
+    return None
+
+  def _PreflightBinhosts(self, board=None):
+    """Returns the preflight binhost to use.
+
+    Args:
+      board: Board name.
+    """
+    prefixes = []
+    arch = self.GetStandardField(STANDARD_FIELD_ARCH)
+    if arch in _ARCH_MAPPING:
+      prefixes.append(_ARCH_MAPPING[arch])
+
+    if board:
+      prefixes = [board, board.split('_')[0]] + prefixes
+
+    filenames = ['%s-PREFLIGHT_BINHOST.conf' % p for p in prefixes]
+
+    external = internal = None
+    for filename in filenames:
+      # The binhost file must exist and not be empty, both for internal and
+      # external binhosts.
+      # When a builder is deleted and no longer publishes prebuilts, we need
+      # developers to pick up the next set of prebuilts. Clearing the binhost
+      # files triggers this.
+      candidate = os.path.join(_INTERNAL_BINHOST_DIR, filename)
+      if not internal and _NotEmpty(candidate):
+        internal = candidate
+
+      candidate = os.path.join(_EXTERNAL_BINHOST_DIR, filename)
+      if not external and _NotEmpty(candidate):
+        external = candidate
+
+    return external, internal
+
+  def CreateSkeleton(self):
+    """Creates a sysroot skeleton."""
+    needed_dirs = [
+        os.path.join(self.path, 'etc', 'portage', 'hooks'),
+        os.path.join(self.path, 'etc'),
+        os.path.join(self.path, 'etc', 'portage', 'profile'),
+        os.path.join('/', 'usr', 'local', 'bin'),
+    ]
+    for d in needed_dirs:
+      osutils.SafeMakedirs(d, sudo=True)
+
+    make_user = os.path.join('/', 'etc', 'make.conf.user')
+    link = os.path.join(self.path, 'etc', 'make.conf.user')
+    if not os.path.exists(make_user):
+      osutils.WriteFile(make_user, '', sudo=True)
+    osutils.SafeSymlink(make_user, link, sudo=True)
+
+    # Create links for portage hooks.
+    hook_glob = os.path.join(constants.CROSUTILS_DIR, 'hooks', '*')
+    for filename in glob.glob(hook_glob):
+      linkpath = os.path.join(self.path, 'etc', 'portage', 'hooks',
+                              os.path.basename(filename))
+      osutils.SafeSymlink(filename, linkpath, sudo=True)
+
+  def _SelectDefaultMakeConf(self):
+    """Selects the best make.conf file possible.
+
+    The best make.conf possible is the ARCH-specific make.conf. If it does not
+    exist, we use the generic make.conf.
+    """
+    make_conf = os.path.join(
+        constants.SOURCE_ROOT, constants.CHROMIUMOS_OVERLAY_DIR, 'chromeos',
+        'config', 'make.conf.generic-target')
+    link = os.path.join(self.path, 'etc', 'make.conf')
+    osutils.SafeSymlink(make_conf, link, sudo=True)
+
+  def _GenerateProfile(self):
+    """Generates the portage profile for this sysroot.
+
+    The generated portage profile depends on the profiles of all used bricks in
+    order as well as the general brillo profile for this architecture.
+    """
+    overlays = self.GetStandardField(STANDARD_FIELD_BOARD_OVERLAY).splitlines()
+    profile_list = [os.path.join(o, 'profiles', 'base') for o in overlays]
+
+    # Keep only the profiles that exist.
+    profile_list = [p for p in profile_list if os.path.exists(p)]
+
+    # Add the arch specific profile.
+    # The profile list is ordered from the lowest to the highest priority. This
+    # profile has to go first so that other profiles can override it.
+    arch = self.GetStandardField(STANDARD_FIELD_ARCH)
+    profile_list.insert(0, 'chromiumos:default/linux/%s/brillo' % arch)
+
+    generated_parent = os.path.join(self.path, 'build', 'generated_profile',
+                                    'parent')
+    osutils.WriteFile(
+        generated_parent, '\n'.join(profile_list), sudo=True, makedirs=True)
+    profile_link = os.path.join(self.path, 'etc', 'portage', 'make.profile')
+    osutils.SafeMakedirs(os.path.dirname(profile_link), sudo=True)
+    osutils.SafeSymlink(os.path.dirname(generated_parent), profile_link,
+                        sudo=True)
+
+  def GeneratePortageConfig(self):
+    """Generates the portage config.
+
+    This step will:
+    * create the portage wrappers.
+    * create the symlink to the architecture-specific make.conf
+    * generate make.conf.board (binhost, gsutil setup and various portage
+      configuration)
+    * choose the best portage profile possible.
+    """
+    self.CreateAllWrappers()
+    self._SelectDefaultMakeConf()
+    self._GenerateProfile()
+
+    make_conf = self.GenerateMakeConf()
+    make_conf_path = os.path.join(self.path, 'etc', 'make.conf.board')
+    osutils.WriteFile(make_conf_path, make_conf, sudo=True)
+
+    # Once make.conf.board has been generated, generate the binhost config.
+    # We need to do this in two steps as the binhost generation step needs
+    # portageq to be available.
+    osutils.WriteFile(make_conf_path,
+                      '\n'.join([make_conf, self.GenerateBinhostConf()]),
+                      sudo=True)
+
+  def UpdateToolchain(self):
+    """Updates the toolchain packages.
+
+    This will install both the toolchains and the packages that are implicitly
+    needed (gcc-libs, linux-headers).
+    """
+    cros_build_lib.RunCommand(
+        [os.path.join(constants.CROSUTILS_DIR, 'install_toolchain'),
+         '--sysroot', self.path])
+
+    if not self.GetCachedField(_IMPLICIT_SYSROOT_DEPS):
+      emerge = [os.path.join(constants.CHROMITE_BIN_DIR, 'parallel_emerge'),
+                '--sysroot=%s' % self.path]
+      cros_build_lib.SudoRunCommand(
+          emerge + ['--root-deps=rdeps', '--usepkg', '--getbinpkg', '--select',
+                    'gcc-libs', 'linux-headers'])
+      self.SetCachedField(_IMPLICIT_SYSROOT_DEPS, 'yes')
diff --git a/lib/sysroot_lib_unittest b/lib/sysroot_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/sysroot_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/sysroot_lib_unittest.py b/lib/sysroot_lib_unittest.py
new file mode 100644
index 0000000..da71123
--- /dev/null
+++ b/lib/sysroot_lib_unittest.py
@@ -0,0 +1,116 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for the sysroot library."""
+
+from __future__ import print_function
+
+import os
+import re
+
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import sysroot_lib
+from chromite.lib import toolchain
+
+
class SysrootLibTest(cros_test_lib.MockTempDirTestCase):
  """Unittest for sysroot_lib.py"""

  def setUp(self):
    """Setup the test environment."""
    # Fake being root to avoid running all filesystem commands with
    # SudoRunCommand.
    self.PatchObject(os, 'getuid', return_value=0)

  def testGetStandardField(self):
    """Tests that standard field can be fetched correctly."""
    sysroot = sysroot_lib.Sysroot(self.tempdir)
    sysroot.WriteConfig('FOO="bar"')
    self.assertEqual('bar', sysroot.GetStandardField('FOO'))

    # Works with multiline strings
    multiline = """foo
bar
baz
"""
    sysroot.WriteConfig('TEST="%s"' % multiline)
    self.assertEqual(multiline, sysroot.GetStandardField('TEST'))

  def testReadWriteCache(self):
    """Tests that we can write and read to the cache."""
    sysroot = sysroot_lib.Sysroot(self.tempdir)

    # If a field is not defined we get None.
    self.assertEqual(None, sysroot.GetCachedField('foo'))

    # If we set a field, we can get it.
    sysroot.SetCachedField('foo', 'bar')
    self.assertEqual('bar', sysroot.GetCachedField('foo'))

    # Setting a field in an existing cache preserve the previous values.
    sysroot.SetCachedField('hello', 'bonjour')
    self.assertEqual('bar', sysroot.GetCachedField('foo'))
    self.assertEqual('bonjour', sysroot.GetCachedField('hello'))

    # Setting a field to None unsets it.
    sysroot.SetCachedField('hello', None)
    self.assertEqual(None, sysroot.GetCachedField('hello'))

  def testErrorOnBadCachedValue(self):
    """Tests that we detect bad value for the sysroot cache."""
    sysroot = sysroot_lib.Sysroot(self.tempdir)

    # NOTE(review): these look like shell-metacharacter cases — presumably
    # the cache file is shell-sourced; confirm against sysroot_lib.
    forbidden = [
        'hello"bonjour',
        'hello\\bonjour',
        'hello\nbonjour',
        'hello$bonjour',
        'hello`bonjour',
    ]
    for value in forbidden:
      with self.assertRaises(ValueError):
        sysroot.SetCachedField('FOO', value)

  def testProfileGeneration(self):
    """Tests that we generate the portage profile correctly."""
    # pylint: disable=protected-access
    overlay_dir = os.path.join(self.tempdir, 'overlays')
    sysroot_dir = os.path.join(self.tempdir, 'sysroot')
    overlays = [os.path.join(overlay_dir, letter) for letter in ('a', 'b', 'c')]
    for o in overlays:
      osutils.SafeMakedirs(o)
    sysroot = sysroot_lib.Sysroot(sysroot_dir)

    sysroot.WriteConfig(sysroot_lib._DictToKeyValue(
        {'ARCH': 'arm',
         'BOARD_OVERLAY': '\n'.join(overlays)}))

    sysroot._GenerateProfile()

    # No overlay ships a 'profiles/base' directory yet, so the generated
    # profile should list only the arch-specific profile.
    profile_link = os.path.join(sysroot.path, 'etc', 'portage', 'make.profile')
    profile_parent = osutils.ReadFile(
        os.path.join(profile_link, 'parent')).splitlines()
    self.assertTrue(os.path.islink(profile_link))
    self.assertEqual(1, len(profile_parent))
    self.assertTrue(re.match('chromiumos:.*arm.*', profile_parent[0]))

    # Once one overlay gains a base profile, it should appear in the
    # generated profile after the arch-specific profile.
    profile_dir = os.path.join(overlays[1], 'profiles', 'base')
    osutils.SafeMakedirs(profile_dir)

    sysroot._GenerateProfile()
    profile_parent = osutils.ReadFile(
        os.path.join(profile_link, 'parent')).splitlines()
    self.assertEqual(2, len(profile_parent))
    self.assertEqual(profile_dir, profile_parent[1])

  def testGenerateConfigNoToolchainRaisesError(self):
    """Tests _GenerateConfig() with no toolchain raises an error."""
    self.PatchObject(toolchain, 'FilterToolchains', autospec=True,
                     return_value={})
    sysroot = sysroot_lib.Sysroot(self.tempdir)

    with self.assertRaises(sysroot_lib.ConfigurationError):
      # pylint: disable=protected-access
      sysroot._GenerateConfig({}, ['foo_overlay'], ['foo_overlay'], '')
diff --git a/lib/table.py b/lib/table.py
new file mode 100644
index 0000000..f1549a4
--- /dev/null
+++ b/lib/table.py
@@ -0,0 +1,456 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Support generic spreadsheet-like table information."""
+
+from __future__ import print_function
+
+import inspect
+import re
+import sys
+
+from chromite.lib import cros_build_lib
+
+
class Table(object):
  """Class to represent column headers and rows of data."""

  __slots__ = (
      '_column_set',  # Set of column headers (for faster lookup)
      '_columns',     # List of column headers in order
      '_name',        # Name to associate with table
      '_rows',        # List of row dicts
  )

  EMPTY_CELL = ''

  # Internal sentinels marking where a quoted CSV value begins/ends while a
  # line is being parsed.  They are assumed never to appear in real data.
  CSV_BQ = '__BEGINQUOTE__'
  CSV_EQ = '__ENDQUOTE__'

  @staticmethod
  def _SplitCSVLine(line):
    r'''Split a single CSV line into separate values.

    Behavior illustrated by the following examples, with all but
    the last example taken from Google Docs spreadsheet behavior:
    'a,b,c,d':           ==> ['a', 'b', 'c', 'd'],
    'a, b, c, d':        ==> ['a', ' b', ' c', ' d'],
    'a,b,c,':            ==> ['a', 'b', 'c', ''],
    'a,"b c",d':         ==> ['a', 'b c', 'd'],
    'a,"b, c",d':        ==> ['a', 'b, c', 'd'],
    'a,"b, c, d",e':     ==> ['a', 'b, c, d', 'e'],
    'a,"""b, c""",d':    ==> ['a', '"b, c"', 'd'],
    'a,"""b, c"", d",e': ==> ['a', '"b, c", d', 'e'],
    'a,b\,c,d':          ==> ['a', 'b,c', 'd'],

    Return a list of values.
    '''
    # Split on commas, handling two special cases:
    # 1) Escaped commas are not separators.
    # 2) A quoted value can have non-separator commas in it.  Quotes
    #    should be removed.
    vals = []
    for val in re.split(r'(?<!\\),', line):
      if not val:
        vals.append(val)
        continue

      # Handle regular double quotes at beginning/end specially.
      if val[0] == '"':
        val = Table.CSV_BQ + val[1:]
      if val[-1] == '"' and (val[-2] != '"' or val[-3] == '"'):
        val = val[0:-1] + Table.CSV_EQ

      # Remove escape characters now.
      val = val.replace(r'\,', ',')  # \ before ,
      val = val.replace('""', '"')   # " before " (Google Spreadsheet syntax)

      prevval = vals[-1] if vals else None

      # If previous value started with quote and ended without one, then
      # the current value is just a continuation of the previous value.
      if prevval and prevval.startswith(Table.CSV_BQ):
        val = prevval + ',' + val
        # Once entire value is read, strip surrounding quotes
        if val.endswith(Table.CSV_EQ):
          vals[-1] = val[len(Table.CSV_BQ):-len(Table.CSV_EQ)]
        else:
          vals[-1] = val
      elif val.endswith(Table.CSV_EQ):
        vals.append(val[len(Table.CSV_BQ):-len(Table.CSV_EQ)])
      else:
        vals.append(val)

    # If an unpaired Table.CSV_BQ is still in vals, then replace with ".
    vals = [val.replace(Table.CSV_BQ, '"') for val in vals]

    return vals

  @staticmethod
  def LoadFromCSV(csv_file, name=None):
    """Create a new Table object by loading contents of |csv_file|.

    Args:
      csv_file: Either a file-like object (anything with a 'read'
        attribute) or a path to a CSV file.  When a path is given the
        file is opened and closed here; file-like objects stay open and
        remain owned by the caller.
      name: Optional name to associate with the table.

    Returns:
      A new Table whose headers come from the first line, or None if
      |csv_file| was empty.
    """
    # Duck-type file-like objects instead of 'type(csv_file) is file':
    # the |file| builtin is Python-2-only and the old check also rejected
    # wrappers such as StringIO.
    opened_here = False
    if hasattr(csv_file, 'read'):
      file_handle = csv_file
    else:
      file_handle = open(csv_file, 'r')
      opened_here = True

    table = None
    try:
      for line in file_handle:
        if line[-1] == '\n':
          line = line[0:-1]

        vals = Table._SplitCSVLine(line)

        if not table:
          # The first line holds the column headers.
          table = Table(vals, name=name)
        else:
          # Every other line is a data row.
          table.AppendRow(vals)
    finally:
      # Close only handles we opened; do not leak them on parse errors.
      if opened_here:
        file_handle.close()

    return table

  def __init__(self, columns, name=None):
    """Create an empty table.

    Args:
      columns: List of column header names, in order.
      name: Optional name to associate with the table.
    """
    self._columns = columns
    self._column_set = set(columns)
    self._rows = []
    self._name = name

  def __str__(self):
    """Return a table-like string representation of this table."""
    cols = ['%10s' % col for col in self._columns]
    text = 'Columns: %s\n' % ', '.join(cols)

    for ix, row in enumerate(self._rows):
      vals = ['%10s' % row[col] for col in self._columns]
      text += 'Row %3d: %s\n' % (ix, ', '.join(vals))
    return text

  def __nonzero__(self):
    """Define boolean equivalent for this table (true if it has columns)."""
    return bool(self._columns)

  # Python 3 spelling of __nonzero__.
  __bool__ = __nonzero__

  def __len__(self):
    """Length of table equals the number of rows."""
    return self.GetNumRows()

  def __eq__(self, other):
    """Return true if two tables have equal columns and rows."""
    if not isinstance(other, Table):
      # Defer to the other operand instead of raising AttributeError when
      # accessing its private attributes.
      return NotImplemented
    # pylint: disable=protected-access
    return self._columns == other._columns and self._rows == other._rows

  def __ne__(self, other):
    """Return true if two tables are not equal."""
    return not self == other

  def __getitem__(self, index):
    """Access one or more rows by index or slice."""
    return self.GetRowByIndex(index)

  def __delitem__(self, index):
    """Delete one or more rows by index or slice."""
    self.RemoveRowByIndex(index)

  def __iter__(self):
    """Declare that this class supports iteration (over rows)."""
    return self._rows.__iter__()

  def GetName(self):
    """Return name associated with table, None if not available."""
    return self._name

  def SetName(self, name):
    """Set the name associated with table."""
    self._name = name

  def Clear(self):
    """Remove all row data."""
    self._rows = []

  def GetNumRows(self):
    """Return the number of rows in the table."""
    return len(self._rows)

  def GetNumColumns(self):
    """Return the number of columns in the table."""
    return len(self._columns)

  def GetColumns(self):
    """Return list of column names in order."""
    return list(self._columns)

  def GetRowByIndex(self, index):
    """Access one or more rows by index or slice.

    If more than one row is returned they will be contained in a list.
    """
    return self._rows[index]

  def _GenRowFilter(self, id_values):
    """Return a method that returns true for rows matching |id_values|."""
    def Grep(row):
      """Filter function for rows with id_values."""
      for key in id_values:
        if id_values[key] != row.get(key, None):
          return False
      return True
    return Grep

  def GetRowsByValue(self, id_values):
    """Return list of rows matching key/value pairs in |id_values|."""
    # If row retrieval by value is heavily used for larger tables, then
    # the implementation should change to be more efficient, at the
    # expense of some pre-processing and extra storage.
    grep = self._GenRowFilter(id_values)
    return [r for r in self._rows if grep(r)]

  def GetRowIndicesByValue(self, id_values):
    """Return list of indices for rows matching k/v pairs in |id_values|."""
    grep = self._GenRowFilter(id_values)
    indices = []
    for ix, row in enumerate(self._rows):
      if grep(row):
        indices.append(ix)

    return indices

  def _PrepareValuesForAdd(self, values):
    """Prepare a |values| dict/list to be added as a row.

    If |values| is a dict, verify that only supported column
    values are included. Add empty string values for columns
    not seen in the row.  The original dict may be altered.

    If |values| is a list, translate it to a dict using known
    column order.  Append empty values as needed to match number
    of expected columns.

    Return prepared dict.

    Raises:
      LookupError: If a dict has an unknown column or a list has too many
        values.
    """
    if isinstance(values, dict):
      for col in values:
        if not col in self._column_set:
          raise LookupError("Tried adding data to unknown column '%s'" % col)

      for col in self._columns:
        if not col in values:
          values[col] = self.EMPTY_CELL

    elif isinstance(values, list):
      if len(values) > len(self._columns):
        raise LookupError('Tried adding row with too many columns')
      if len(values) < len(self._columns):
        shortage = len(self._columns) - len(values)
        values.extend([self.EMPTY_CELL] * shortage)

      values = dict(zip(self._columns, values))

    return values

  def AppendRow(self, values):
    """Add a single row of data to the table, according to |values|.

    The |values| argument can be either a dict or list.
    """
    row = self._PrepareValuesForAdd(values)
    self._rows.append(row)

  def SetRowByIndex(self, index, values):
    """Replace the row at |index| with values from |values| dict."""
    row = self._PrepareValuesForAdd(values)
    self._rows[index] = row

  def RemoveRowByIndex(self, index):
    """Remove the row at |index|."""
    del self._rows[index]

  def HasColumn(self, name):
    """Return True if column |name| is in this table, False otherwise."""
    return name in self._column_set

  def GetColumnIndex(self, name):
    """Return the column index for column |name|, -1 if not found."""
    for ix, col in enumerate(self._columns):
      if name == col:
        return ix
    return -1

  def GetColumnByIndex(self, index):
    """Return the column name at |index|"""
    return self._columns[index]

  def InsertColumn(self, index, name, value=None):
    """Insert a new column |name| into table at index |index|.

    If |value| is specified, all rows will have |value| in the new column.
    Otherwise, they will have the EMPTY_CELL value.

    Raises:
      LookupError: If the column already exists.
    """
    if self.HasColumn(name):
      raise LookupError('Column %s already exists in table.' % name)

    self._columns.insert(index, name)
    self._column_set.add(name)

    for row in self._rows:
      row[name] = value if value is not None else self.EMPTY_CELL

  def AppendColumn(self, name, value=None):
    """Same as InsertColumn, but new column is appended after existing ones."""
    self.InsertColumn(self.GetNumColumns(), name, value)

  def ProcessRows(self, row_processor):
    """Invoke |row_processor| on each row in sequence."""
    for row in self._rows:
      row_processor(row)

  def MergeTable(self, other_table, id_columns, merge_rules=None,
                 allow_new_columns=False, key=None, reverse=False,
                 new_name=None):
    """Merge |other_table| into this table, identifying rows by |id_columns|.

    The |id_columns| argument can either be a list of identifying columns names
    or a single column name (string).  The values in these columns will be used
    to identify the existing row that each row in |other_table| should be
    merged into.

    The |merge_rules| specify what to do when there is a merge conflict.  Every
    column where a conflict is anticipated should have an entry in the
    |merge_rules| dict.  The value should be one of:
    'join_with:<text>' = Join the two conflicting values with <text>
    'accept_this_val' = Keep value in 'this' table and discard 'other' value.
    'accept_other_val' = Keep value in 'other' table and discard 'this' value.
    callable = Keep return value from callable(col_name, this_val, other_val)

    A default merge rule can be specified with the key '__DEFAULT__' in
    |merge_rules|.

    By default, the |other_table| must not have any columns that don't already
    exist in this table.  To allow new columns to be created by virtue of their
    presence in |other_table| set |allow_new_columns| to true.

    To sort the final merged table, supply |key| and |reverse| arguments exactly
    as they work with the Sort method.
    """
    # If requested, allow columns in other_table to create new columns
    # in this table if this table does not already have them.
    if allow_new_columns:
      # pylint: disable=protected-access
      for ix, col in enumerate(other_table._columns):
        if not self.HasColumn(col):
          # Create a merge_rule on the fly for this new column.
          if not merge_rules:
            merge_rules = {}
          merge_rules[col] = 'accept_other_val'

          # Insert the new column right after the column that precedes it
          # in |other_table| so relative order is preserved.
          if ix == 0:
            self.InsertColumn(0, col)
          else:
            prevcol = other_table._columns[ix - 1]
            previx = self.GetColumnIndex(prevcol)
            self.InsertColumn(previx + 1, col)

    for other_row in other_table:
      self._MergeRow(other_row, id_columns, merge_rules=merge_rules)

    # Optionally re-sort the merged table.
    if key:
      self.Sort(key, reverse=reverse)

    if new_name:
      self.SetName(new_name)
    elif self.GetName() and other_table.GetName():
      self.SetName(self.GetName() + ' + ' + other_table.GetName())

  def _GetIdValuesForRow(self, row, id_columns):
    """Return a dict with values from |row| in |id_columns|."""
    id_values = dict((col, row[col]) for col in
                     cros_build_lib.iflatten_instance(id_columns))
    return id_values

  def _MergeRow(self, other_row, id_columns, merge_rules=None):
    """Merge |other_row| into this table.

    See MergeTables for description of |id_columns| and |merge_rules|.

    Raises:
      LookupError: If |other_row| has a column this table lacks.
      ValueError: If a conflicting value cannot be merged.
    """
    id_values = self._GetIdValuesForRow(other_row, id_columns)

    row_indices = self.GetRowIndicesByValue(id_values)
    if row_indices:
      # Merge into the first matching row; later matches are left alone.
      row_index = row_indices[0]
      row = self.GetRowByIndex(row_index)
      for col in other_row:
        if col in row:
          # Find the merge rule that applies to this column, if there is one.
          merge_rule = None
          if merge_rules:
            merge_rule = merge_rules.get(col, None)
            if not merge_rule and merge_rules:
              merge_rule = merge_rules.get('__DEFAULT__', None)

          try:
            val = self._MergeColValue(col, row[col], other_row[col],
                                      merge_rule=merge_rule)
          except ValueError:
            msg = "Failed to merge '%s' value in row %r" % (col, id_values)
            print(msg, file=sys.stderr)
            raise

          if val != row[col]:
            row[col] = val
        else:
          # Cannot add new columns to row this way.
          raise LookupError("Tried merging data to unknown column '%s'" % col)
      self.SetRowByIndex(row_index, row)
    else:
      # No matching row: the other row is simply appended.
      self.AppendRow(other_row)

  def _MergeColValue(self, col, val, other_val, merge_rule):
    """Merge |col| values |val| and |other_val| according to |merge_rule|.

    See MergeTable method for explanation of option |merge_rule|.

    Raises:
      ValueError: If values conflict and no usable merge rule was given.
    """
    if val == other_val:
      return val

    if not merge_rule:
      raise ValueError("Cannot merge column values without rule: '%s' vs '%s'" %
                       (val, other_val))
    elif callable(merge_rule):
      # Accept any callable (function, lambda, bound method, partial) —
      # inspect.isfunction rejected everything but plain functions.
      try:
        return merge_rule(col, val, other_val)
      except ValueError:
        pass  # Fall through to exception at end
    elif merge_rule == 'accept_this_val':
      return val
    elif merge_rule == 'accept_other_val':
      return other_val
    else:
      match = re.match(r'join_with:(.+)$', merge_rule)
      if match:
        return match.group(1).join(v for v in (val, other_val) if v)

    raise ValueError("Invalid merge rule (%s) for values '%s' and '%s'." %
                     (merge_rule, val, other_val))

  def Sort(self, key, reverse=False):
    """Sort the rows using the given |key| function."""
    self._rows.sort(key=key, reverse=reverse)

  def WriteCSV(self, filehandle, hiddencols=None):
    """Write this table out as comma-separated values to |filehandle|.

    To skip certain columns during the write, use the |hiddencols| set.

    NOTE(review): values are written verbatim; a value containing a comma
    or quote will not round-trip through LoadFromCSV — confirm whether
    callers ever store such values.
    """
    def ColFilter(col):
      """Filter function for columns not in hiddencols."""
      return not hiddencols or col not in hiddencols

    cols = [col for col in self._columns if ColFilter(col)]
    filehandle.write(','.join(cols) + '\n')
    for row in self._rows:
      vals = [row.get(col, self.EMPTY_CELL) for col in cols]
      filehandle.write(','.join(vals) + '\n')
diff --git a/lib/table_unittest b/lib/table_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/table_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/table_unittest.py b/lib/table_unittest.py
new file mode 100644
index 0000000..fbe5711
--- /dev/null
+++ b/lib/table_unittest.py
@@ -0,0 +1,340 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the table module."""
+
+from __future__ import print_function
+
+import cStringIO
+import sys
+import tempfile
+
+from chromite.lib import cros_test_lib
+from chromite.lib import table
+
+
+# pylint: disable=protected-access
+
+
+class TableTest(cros_test_lib.TempDirTestCase):
+  """Unit tests for the Table class."""
+
  # Column headers shared by all fixture tables.
  COL0 = 'Column1'
  COL1 = 'Column2'
  COL2 = 'Column3'
  COL3 = 'Column4'
  COLUMNS = [COL0, COL1, COL2, COL3]

  # Fixture rows; ROW0 deliberately leaves COL3 unset.
  ROW0 = {COL0: 'Xyz', COL1: 'Bcd', COL2: 'Cde'}
  ROW1 = {COL0: 'Abc', COL1: 'Bcd', COL2: 'Opq', COL3: 'Foo'}
  ROW2 = {COL0: 'Abc', COL1: 'Nop', COL2: 'Wxy', COL3: 'Bar'}

  # A row missing COL0, and the same row as the table stores it
  # (missing cells become empty strings).
  EXTRAROW = {COL1: 'Walk', COL2: 'The', COL3: 'Line'}
  EXTRAROWOUT = {COL0: '', COL1: 'Walk', COL2: 'The', COL3: 'Line'}

  # Variants of ROW0/ROW1 differing only in COL3, used by merge tests.
  ROW0a = {COL0: 'Xyz', COL1: 'Bcd', COL2: 'Cde', COL3: 'Yay'}
  ROW0b = {COL0: 'Xyz', COL1: 'Bcd', COL2: 'Cde', COL3: 'Boo'}
  ROW1a = {COL0: 'Abc', COL1: 'Bcd', COL2: 'Opq', COL3: 'Blu'}

  # Column layout with one extra column, and rows matching it.
  EXTRACOL = 'ExtraCol'
  EXTRACOLUMNS = [COL0, EXTRACOL, COL1, COL2]

  EROW0 = {COL0: 'Xyz', EXTRACOL: 'Yay', COL1: 'Bcd', COL2: 'Cde'}
  EROW1 = {COL0: 'Abc', EXTRACOL: 'Hip', COL1: 'Bcd', COL2: 'Opq'}
  EROW2 = {COL0: 'Abc', EXTRACOL: 'Yay', COL1: 'Nop', COL2: 'Wxy'}
+
  def _GetRowValsInOrder(self, row):
    """Take |row| dict and return correctly ordered values in a list.

    Missing columns are represented by the empty string.
    """
    vals = []
    for col in self.COLUMNS:
      vals.append(row.get(col, ''))

    return vals
+
  def _GetFullRowFor(self, row, cols):
    """Return a copy of |row| with an entry for every column in |cols|."""
    return dict((col, row.get(col, '')) for col in cols)
+
  def assertRowsEqual(self, row1, row2):
    """Assert |row1| equals |row2|, treating missing cells as empty."""
    # Determine column superset
    cols = set(row1.keys() + row2.keys())
    self.assertEquals(self._GetFullRowFor(row1, cols),
                      self._GetFullRowFor(row2, cols))
+
  def assertRowListsEqual(self, rows1, rows2):
    """Assert corresponding rows of |rows1| and |rows2| are equal."""
    # NOTE(review): zip() stops at the shorter list, so a length mismatch
    # is not detected here.
    for (row1, row2) in zip(rows1, rows2):
      self.assertRowsEqual(row1, row2)
+
  def setUp(self):
    """Create the default fixture table: COLUMNS with ROW0..ROW2."""
    self._table = self._CreateTableWithRows(self.COLUMNS,
                                            [self.ROW0, self.ROW1, self.ROW2])
+
  def _CreateTableWithRows(self, cols, rows):
    """Return a new table.Table with columns |cols| and rows |rows|.

    Copies of |cols| and each row are used so fixtures are not mutated.
    """
    mytable = table.Table(list(cols))
    if rows:
      for row in rows:
        mytable.AppendRow(dict(row))
    return mytable
+
  def testLen(self):
    """Test that len() returns the number of rows."""
    self.assertEquals(3, len(self._table))
+
  def testGetNumRows(self):
    """Test GetNumRows() counts the fixture rows."""
    self.assertEquals(3, self._table.GetNumRows())
+
  def testGetNumColumns(self):
    """Test GetNumColumns() counts the fixture columns."""
    self.assertEquals(4, self._table.GetNumColumns())
+
  def testGetColumns(self):
    """Test GetColumns() returns the headers in order."""
    self.assertEquals(self.COLUMNS, self._table.GetColumns())
+
  def testGetColumnIndex(self):
    """Test GetColumnIndex() for known columns."""
    self.assertEquals(0, self._table.GetColumnIndex(self.COL0))
    self.assertEquals(1, self._table.GetColumnIndex(self.COL1))
    self.assertEquals(2, self._table.GetColumnIndex(self.COL2))
+
  def testGetColumnByIndex(self):
    """Test GetColumnByIndex() returns headers by position."""
    self.assertEquals(self.COL0, self._table.GetColumnByIndex(0))
    self.assertEquals(self.COL1, self._table.GetColumnByIndex(1))
    self.assertEquals(self.COL2, self._table.GetColumnByIndex(2))
+
  def testGetByIndex(self):
    """Test row access through GetRowByIndex() and [] indexing."""
    self.assertRowsEqual(self.ROW0, self._table.GetRowByIndex(0))
    self.assertRowsEqual(self.ROW0, self._table[0])

    self.assertRowsEqual(self.ROW2, self._table.GetRowByIndex(2))
    self.assertRowsEqual(self.ROW2, self._table[2])
+
  def testSlice(self):
    """Test slice access returns lists of rows."""
    self.assertRowListsEqual([self.ROW0, self.ROW1], self._table[0:2])
    self.assertRowListsEqual([self.ROW2], self._table[-1:])
+
  def testGetByValue(self):
    """Test GetRowsByValue() with single-column filters."""
    rows = self._table.GetRowsByValue({self.COL0: 'Abc'})
    self.assertEquals([self.ROW1, self.ROW2], rows)
    rows = self._table.GetRowsByValue({self.COL2: 'Opq'})
    self.assertEquals([self.ROW1], rows)
    rows = self._table.GetRowsByValue({self.COL3: 'Foo'})
    self.assertEquals([self.ROW1], rows)
+
  def testGetIndicesByValue(self):
    """Test GetRowIndicesByValue() with single-column filters."""
    indices = self._table.GetRowIndicesByValue({self.COL0: 'Abc'})
    self.assertEquals([1, 2], indices)
    indices = self._table.GetRowIndicesByValue({self.COL2: 'Opq'})
    self.assertEquals([1], indices)
    indices = self._table.GetRowIndicesByValue({self.COL3: 'Foo'})
    self.assertEquals([1], indices)
+
  def testAppendRowDict(self):
    """Test AppendRow() with a dict; missing cells become empty strings."""
    self._table.AppendRow(dict(self.EXTRAROW))
    self.assertEquals(4, self._table.GetNumRows())
    self.assertEquals(self.EXTRAROWOUT, self._table[len(self._table) - 1])
+
  def testAppendRowList(self):
    """Test AppendRow() with an ordered list of values."""
    self._table.AppendRow(self._GetRowValsInOrder(self.EXTRAROW))
    self.assertEquals(4, self._table.GetNumRows())
    self.assertEquals(self.EXTRAROWOUT, self._table[len(self._table) - 1])
+
  def testSetRowDictByIndex(self):
    """Test SetRowByIndex() with a dict replaces the row in place."""
    self._table.SetRowByIndex(1, dict(self.EXTRAROW))
    self.assertEquals(3, self._table.GetNumRows())
    self.assertEquals(self.EXTRAROWOUT, self._table[1])
+
  def testSetRowListByIndex(self):
    """Test SetRowByIndex() with a list replaces the row in place."""
    self._table.SetRowByIndex(1, self._GetRowValsInOrder(self.EXTRAROW))
    self.assertEquals(3, self._table.GetNumRows())
    self.assertEquals(self.EXTRAROWOUT, self._table[1])
+
+  def testRemoveRowByIndex(self):
+    self._table.RemoveRowByIndex(1)
+    self.assertEquals(2, self._table.GetNumRows())
+    self.assertEquals(self.ROW2, self._table[1])
+
+  def testRemoveRowBySlice(self):
+    del self._table[0:2]
+    self.assertEquals(1, self._table.GetNumRows())
+    self.assertEquals(self.ROW2, self._table[0])
+
+  def testIteration(self):
+    ix = 0
+    for row in self._table:
+      self.assertEquals(row, self._table[ix])
+      ix += 1
+
+  def testClear(self):
+    self._table.Clear()
+    self.assertEquals(0, len(self._table))
+
+  def testMergeRows(self):
+    # This merge should fail without a merge rule.  Capture stderr to avoid
+    # scary error message in test output.
+    stderr = sys.stderr
+    sys.stderr = cStringIO.StringIO()
+    self.assertRaises(ValueError, self._table._MergeRow, self.ROW0a, self.COL0)
+    sys.stderr = stderr
+
+    # Merge but stick with current row where different.
+    self._table._MergeRow(self.ROW0a, self.COL0,
+                          merge_rules={self.COL3: 'accept_this_val'})
+    self.assertEquals(3, len(self._table))
+    self.assertRowsEqual(self.ROW0, self._table[0])
+
+    # Merge and use new row where different.
+    self._table._MergeRow(self.ROW0a, self.COL0,
+                          merge_rules={self.COL3: 'accept_other_val'})
+    self.assertEquals(3, len(self._table))
+    self.assertRowsEqual(self.ROW0a, self._table[0])
+
+    # Merge and combine column values where different
+    self._table._MergeRow(self.ROW1a, self.COL2,
+                          merge_rules={self.COL3: 'join_with: '})
+    self.assertEquals(3, len(self._table))
+    final_row = dict(self.ROW1a)
+    final_row[self.COL3] = self.ROW1[self.COL3] + ' ' + self.ROW1a[self.COL3]
+    self.assertRowsEqual(final_row, self._table[1])
+
+  def testMergeTablesSameCols(self):
+    other_table = self._CreateTableWithRows(self.COLUMNS,
+                                            [self.ROW0b, self.ROW1a, self.ROW2])
+
+    self._table.MergeTable(other_table, self.COL2,
+                           merge_rules={self.COL3: 'join_with: '})
+
+    final_row0 = self.ROW0b
+    final_row1 = dict(self.ROW1a)
+    final_row1[self.COL3] = self.ROW1[self.COL3] + ' ' + self.ROW1a[self.COL3]
+    final_row2 = self.ROW2
+    self.assertRowsEqual(final_row0, self._table[0])
+    self.assertRowsEqual(final_row1, self._table[1])
+    self.assertRowsEqual(final_row2, self._table[2])
+
+  def testMergeTablesNewCols(self):
+    self.assertFalse(self._table.HasColumn(self.EXTRACOL))
+
+    other_rows = [self.EROW0, self.EROW1, self.EROW2]
+    other_table = self._CreateTableWithRows(self.EXTRACOLUMNS, other_rows)
+
+    self._table.MergeTable(other_table, self.COL2,
+                           allow_new_columns=True,
+                           merge_rules={self.COL3: 'join_by_space'})
+
+    self.assertTrue(self._table.HasColumn(self.EXTRACOL))
+    self.assertEquals(5, self._table.GetNumColumns())
+    self.assertEquals(1, self._table.GetColumnIndex(self.EXTRACOL))
+
+    final_row0 = dict(self.ROW0)
+    final_row0[self.EXTRACOL] = self.EROW0[self.EXTRACOL]
+    final_row1 = dict(self.ROW1)
+    final_row1[self.EXTRACOL] = self.EROW1[self.EXTRACOL]
+    final_row2 = dict(self.ROW2)
+    final_row2[self.EXTRACOL] = self.EROW2[self.EXTRACOL]
+    self.assertRowsEqual(final_row0, self._table[0])
+    self.assertRowsEqual(final_row1, self._table[1])
+    self.assertRowsEqual(final_row2, self._table[2])
+
+  def testSort1(self):
+    self.assertRowsEqual(self.ROW0, self._table[0])
+    self.assertRowsEqual(self.ROW1, self._table[1])
+    self.assertRowsEqual(self.ROW2, self._table[2])
+
+    # Sort by COL3
+    self._table.Sort(lambda row: row[self.COL3])
+
+    self.assertEquals(3, len(self._table))
+    self.assertRowsEqual(self.ROW0, self._table[0])
+    self.assertRowsEqual(self.ROW2, self._table[1])
+    self.assertRowsEqual(self.ROW1, self._table[2])
+
+    # Reverse sort by COL3
+    self._table.Sort(lambda row: row[self.COL3], reverse=True)
+
+    self.assertEquals(3, len(self._table))
+    self.assertRowsEqual(self.ROW1, self._table[0])
+    self.assertRowsEqual(self.ROW2, self._table[1])
+    self.assertRowsEqual(self.ROW0, self._table[2])
+
+  def testSort2(self):
+    """Test multiple key sort."""
+    self.assertRowsEqual(self.ROW0, self._table[0])
+    self.assertRowsEqual(self.ROW1, self._table[1])
+    self.assertRowsEqual(self.ROW2, self._table[2])
+
+    # Sort by COL0 then COL1
+    def sorter(row):
+      return (row[self.COL0], row[self.COL1])
+    self._table.Sort(sorter)
+
+    self.assertEquals(3, len(self._table))
+    self.assertRowsEqual(self.ROW1, self._table[0])
+    self.assertRowsEqual(self.ROW2, self._table[1])
+    self.assertRowsEqual(self.ROW0, self._table[2])
+
+    # Reverse the sort
+    self._table.Sort(sorter, reverse=True)
+
+    self.assertEquals(3, len(self._table))
+    self.assertRowsEqual(self.ROW0, self._table[0])
+    self.assertRowsEqual(self.ROW2, self._table[1])
+    self.assertRowsEqual(self.ROW1, self._table[2])
+
+  def testSplitCSVLine(self):
+    """Test splitting of csv line."""
+    tests = {
+        'a,b,c,d':           ['a', 'b', 'c', 'd'],
+        'a, b, c, d':        ['a', ' b', ' c', ' d'],
+        'a,b,c,':            ['a', 'b', 'c', ''],
+        'a,"b c",d':         ['a', 'b c', 'd'],
+        'a,"b, c",d':        ['a', 'b, c', 'd'],
+        'a,"b, c, d",e':     ['a', 'b, c, d', 'e'],
+        'a,"""b, c""",d':    ['a', '"b, c"', 'd'],
+        'a,"""b, c"", d",e': ['a', '"b, c", d', 'e'],
+
+        # The following are not real Google Spreadsheet cases.
+        r'a,b\,c,d':         ['a', 'b,c', 'd'],
+        'a,",c':             ['a', '",c'],
+        'a,"",c':            ['a', '', 'c'],
+    }
+    for line in tests:
+      vals = table.Table._SplitCSVLine(line)
+      self.assertEquals(vals, tests[line])
+
+  def testWriteReadCSV(self):
+    """Write and Read CSV and verify contents preserved."""
+    # This also tests the Table == and != operators.
+    _, path = tempfile.mkstemp(text=True)
+    tmpfile = open(path, 'w')
+    self._table.WriteCSV(tmpfile)
+    tmpfile.close()
+    mytable = table.Table.LoadFromCSV(path)
+    self.assertEquals(mytable, self._table)
+    self.assertFalse(mytable != self._table)
+
+  def testInsertColumn(self):
+    self._table.InsertColumn(1, self.EXTRACOL, 'blah')
+    goldenrow = dict(self.ROW1)
+    goldenrow[self.EXTRACOL] = 'blah'
+    self.assertRowsEqual(goldenrow, self._table.GetRowByIndex(1))
+    self.assertEquals(self.EXTRACOL, self._table.GetColumnByIndex(1))
+
+  def testAppendColumn(self):
+    self._table.AppendColumn(self.EXTRACOL, 'blah')
+    goldenrow = dict(self.ROW1)
+    goldenrow[self.EXTRACOL] = 'blah'
+    self.assertRowsEqual(goldenrow, self._table.GetRowByIndex(1))
+    col_size = self._table.GetNumColumns()
+    self.assertEquals(self.EXTRACOL, self._table.GetColumnByIndex(col_size - 1))
+
+  def testProcessRows(self):
+    def Processor(row):
+      row[self.COL0] = row[self.COL0] + ' processed'
+    self._table.ProcessRows(Processor)
+
+    final_row0 = dict(self.ROW0)
+    final_row0[self.COL0] += ' processed'
+    final_row1 = dict(self.ROW1)
+    final_row1[self.COL0] += ' processed'
+    final_row2 = dict(self.ROW2)
+    final_row2[self.COL0] += ' processed'
+    self.assertRowsEqual(final_row0, self._table[0])
+    self.assertRowsEqual(final_row1, self._table[1])
+    self.assertRowsEqual(final_row2, self._table[2])
diff --git a/lib/terminal.py b/lib/terminal.py
new file mode 100644
index 0000000..862fe00
--- /dev/null
+++ b/lib/terminal.py
@@ -0,0 +1,83 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Terminal utilities
+
+This module handles terminal interaction including ANSI color codes.
+"""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import cros_build_lib
+
+
+class Color(object):
+  """Conditionally wraps text in ANSI color escape sequences."""
+  BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
+  BOLD = -1
+  COLOR_START = '\033[1;%dm'
+  BOLD_START = '\033[1m'
+  RESET = '\033[0m'
+
+  def __init__(self, enabled=None):
+    """Create a new Color object, optionally disabling color output.
+
+    Args:
+      enabled: True if color output should be enabled. If False then this
+        class will not add color codes at all.
+    """
+    self._enabled = self.UserEnabled() if enabled is None else enabled
+
+  def Start(self, color):
+    """Returns a start color code.
+
+    Args:
+      color: Color to use, e.g. BLACK, RED, etc.
+
+    Returns:
+      If color is enabled, returns an ANSI sequence to start the given color,
+      otherwise returns empty string
+    """
+    if self._enabled:
+      return self.COLOR_START % (color + 30)
+    return ''
+
+  def Stop(self):
+    """Returns a stop color code.
+
+    Returns:
+      If color is enabled, returns an ANSI color reset sequence, otherwise
+      returns empty string
+    """
+    if self._enabled:
+      return self.RESET
+    return ''
+
+  def Color(self, color, text):
+    """Returns text with conditionally added color escape sequences.
+
+    Args:
+      color: Text color -- one of the color constants defined in this class.
+      text: The text to color.
+
+    Returns:
+      If self._enabled is False, returns the original text. If it's True,
+      returns text with color escape sequences based on the value of color.
+    """
+    if not self._enabled:
+      return text
+    if color == self.BOLD:
+      start = self.BOLD_START
+    else:
+      start = self.COLOR_START % (color + 30)
+    return start + text + self.RESET
+
+  @staticmethod
+  def UserEnabled():
+    """See if the global colorization preference is enabled ($NOCOLOR env)"""
+    return not cros_build_lib.BooleanShellValue(
+        os.environ.get('NOCOLOR'), msg='$NOCOLOR env var is invalid',
+        default=False)
diff --git a/lib/timeout_util.py b/lib/timeout_util.py
new file mode 100644
index 0000000..3e360d1
--- /dev/null
+++ b/lib/timeout_util.py
@@ -0,0 +1,268 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions for implementing timeouts."""
+
+from __future__ import print_function
+
+import contextlib
+import datetime
+import functools
+import signal
+import time
+
+from chromite.lib import cros_logging as logging
+from chromite.lib import signals
+
+
+class TimeoutError(Exception):
+  """Raised when code within Timeout has been run too long."""
+
+
+def Timedelta(num, zero_ok=False):
+  """Normalize |num| (in seconds) into a datetime.timedelta."""
+  if not isinstance(num, datetime.timedelta):
+    num = datetime.timedelta(seconds=num)
+  if zero_ok:
+    if num.total_seconds() < 0:
+      raise ValueError('timing must be >= 0, not %s' % (num,))
+  else:
+    if num.total_seconds() <= 0:
+      raise ValueError('timing must be greater than 0, not %s' % (num,))
+  return num
+
+
+@contextlib.contextmanager
+def Timeout(max_run_time,
+            error_message="Timeout occurred- waited %(time)s seconds."):
+  """ContextManager that alarms if code is run for too long.
+
+  Timeout can run nested and raises a TimeoutError if the timeout
+  is reached. Timeout can also nest underneath FatalTimeout.
+
+  Args:
+    max_run_time: How long to wait before sending SIGALRM.  May be a number
+      (in seconds) or a datetime.timedelta object.
+    error_message: String to wrap in the TimeoutError exception on timeout.
+  """
+  max_run_time = int(Timedelta(max_run_time).total_seconds())
+
+  # pylint: disable=W0613
+  def kill_us(sig_num, frame):
+    raise TimeoutError(error_message % {'time': max_run_time})
+
+  original_handler = signal.signal(signal.SIGALRM, kill_us)
+  previous_time = int(time.time())
+
+  # Signal the min in case the leftover time was smaller than this timeout.
+  remaining_timeout = signal.alarm(0)
+  if remaining_timeout:
+    signal.alarm(min(remaining_timeout, max_run_time))
+  else:
+    signal.alarm(max_run_time)
+
+  try:
+    yield
+  finally:
+    # Cancel the alarm request and restore the original handler.
+    signal.alarm(0)
+    signal.signal(signal.SIGALRM, original_handler)
+
+    # Ensure the previous handler will fire if it was meant to.
+    if remaining_timeout > 0:
+      # Signal the previous handler if it would have already passed.
+      time_left = remaining_timeout - (int(time.time()) - previous_time)
+      if time_left <= 0:
+        signals.RelaySignal(original_handler, signal.SIGALRM, None)
+      else:
+        signal.alarm(time_left)
+
+
+@contextlib.contextmanager
+def FatalTimeout(max_run_time):
+  """ContextManager that exits the program if code is run for too long.
+
+  This implementation is fairly simple, thus multiple timeouts
+  cannot be active at the same time.
+
+  Additionally, if the timeout has elapsed, it'll trigger a SystemExit
+  exception within the invoking code, ultimately propagating that past
+  itself.  If the underlying code tries to suppress the SystemExit, once
+  a minute it'll retrigger SystemExit until control is returned to this
+  manager.
+
+  Args:
+    max_run_time: How long to wait.  May be a number (in seconds) or a
+      datetime.timedelta object.
+  """
+  max_run_time = int(Timedelta(max_run_time).total_seconds())
+
+  # pylint: disable=W0613
+  def kill_us(sig_num, frame):
+    # While this SystemExit *should* crash it's way back up the
+    # stack to our exit handler, we do have live/production code
+    # that uses blanket except statements which could suppress this.
+    # As such, keep scheduling alarms until our exit handler runs.
+    # Note that there is a potential conflict via this code, and
+    # RunCommand's kill_timeout; thus we set the alarming interval
+    # fairly high.
+    signal.alarm(60)
+
+    # The cbuildbot stage that gets aborted by this timeout should be treated as
+    # failed by buildbot.
+    error_message = ("Timeout occurred- waited %i seconds, failing." %
+                     max_run_time)
+    logging.PrintBuildbotStepFailure()
+    logging.error(error_message)
+    raise SystemExit(error_message)
+
+  original_handler = signal.signal(signal.SIGALRM, kill_us)
+  remaining_timeout = signal.alarm(max_run_time)
+  if remaining_timeout:
+    # Restore things to the way they were.
+    signal.signal(signal.SIGALRM, original_handler)
+    signal.alarm(remaining_timeout)
+    # ... and now complain.  Unfortunately we can't easily detect this
+    # upfront, thus the reset dance above.
+    raise Exception("_Timeout cannot be used in parallel to other alarm "
+                    "handling code; failing")
+  try:
+    yield
+  finally:
+    # Cancel the alarm request and restore the original handler.
+    signal.alarm(0)
+    signal.signal(signal.SIGALRM, original_handler)
+
+
+def TimeoutDecorator(max_time):
+  """Decorator used to ensure a func is interrupted if it's running too long."""
+  # Save off the built-in versions of time.time, signal.signal, and
+  # signal.alarm, in case they get mocked out later. We want to ensure that
+  # tests don't accidentally mock out the functions used by Timeout.
+  def _Save():
+    return time.time, signal.signal, signal.alarm
+  def _Restore(values):
+    (time.time, signal.signal, signal.alarm) = values
+  builtins = _Save()
+
+  def NestedTimeoutDecorator(func):
+    @functools.wraps(func)
+    def TimeoutWrapper(*args, **kwargs):
+      new = _Save()
+      try:
+        _Restore(builtins)
+        with Timeout(max_time):
+          _Restore(new)
+          try:
+            func(*args, **kwargs)
+          finally:
+            _Restore(builtins)
+      finally:
+        _Restore(new)
+
+    return TimeoutWrapper
+
+  return NestedTimeoutDecorator
+
+
+def WaitForReturnTrue(*args, **kwargs):
+  """Periodically run a function, waiting in between runs.
+
+  Continues to run until the function returns True.
+
+  Args:
+    See WaitForReturnValue([True], ...)
+
+  Raises:
+    TimeoutError when the timeout is exceeded.
+  """
+  WaitForReturnValue([True], *args, **kwargs)
+
+
+def WaitForReturnValue(values, *args, **kwargs):
+  """Periodically run a function, waiting in between runs.
+
+  Continues to run until the function return value is in the list
+  of accepted |values|.  See WaitForSuccess for more details.
+
+  Args:
+    values: A list or set of acceptable return values.
+    *args, **kwargs: See WaitForSuccess for remaining arguments.
+
+  Returns:
+    The value most recently returned by |func|.
+
+  Raises:
+    TimeoutError when the timeout is exceeded.
+  """
+  def _Retry(return_value):
+    return return_value not in values
+
+  return WaitForSuccess(_Retry, *args, **kwargs)
+
+
+def WaitForSuccess(retry_check, func, timeout, period=1, side_effect_func=None,
+                   func_args=None, func_kwargs=None, fallback_timeout=10):
+  """Periodically run a function, waiting in between runs.
+
+  Continues to run given function until return value is accepted by retry check.
+
+  To retry based on raised exceptions see GenericRetry in retry_util.
+
+  Args:
+    retry_check: A functor that will be passed the return value of |func| as
+      the only argument.  If |func| should be retried |retry_check| should
+      return True.
+    func: The function to run to test for a value.
+    timeout: The maximum amount of time to wait.  May be a number (in seconds)
+      or a datetime.timedelta object.
+    period: How long between calls to |func|.  May be a number (in seconds) or
+      a datetime.timedelta object.
+    side_effect_func: Optional function to be called between polls of func,
+      typically to output logging messages. The remaining time will be passed
+      as a datetime.timedelta object.
+    func_args: Optional list of positional arguments to be passed to |func|.
+    func_kwargs: Optional dictionary of keyword arguments to be passed to
+                 |func|.
+    fallback_timeout: We set a secondary timeout based on sigalarm this many
+                      seconds after the initial timeout. This should NOT be
+                      considered robust, but can allow timeouts inside blocking
+                      methods.
+
+  Returns:
+    The value most recently returned by |func| that was not flagged for retry.
+
+  Raises:
+    TimeoutError when the timeout is exceeded.
+  """
+  timeout = Timedelta(timeout, zero_ok=True)
+  period = Timedelta(period, zero_ok=True)
+  fallback_timeout = Timedelta(fallback_timeout)
+  func_args = func_args or []
+  func_kwargs = func_kwargs or {}
+
+  end = datetime.datetime.now() + timeout
+
+  # Use a sigalarm after an extra delay, in case a function we call is
+  # blocking for some reason. This should NOT be considered reliable.
+  with Timeout(timeout + fallback_timeout):
+    while True:
+      # Guarantee we always run at least once.
+      value = func(*func_args, **func_kwargs)
+      if not retry_check(value):
+        return value
+
+      # Run the user's callback func if available.
+      if side_effect_func:
+        delta = end - datetime.datetime.now()
+        if delta.total_seconds() < 0:
+          delta = datetime.timedelta(seconds=0)
+        side_effect_func(delta)
+
+      # If we're just going to sleep past the timeout period, abort now.
+      delta = end - datetime.datetime.now()
+      if delta <= period:
+        raise TimeoutError('Timed out after %s' % timeout)
+
+      time.sleep(period.total_seconds())
diff --git a/lib/timeout_util_unittest b/lib/timeout_util_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/timeout_util_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/timeout_util_unittest.py b/lib/timeout_util_unittest.py
new file mode 100644
index 0000000..600bc0d
--- /dev/null
+++ b/lib/timeout_util_unittest.py
@@ -0,0 +1,161 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test suite for timeout_util.py"""
+
+from __future__ import print_function
+
+import datetime
+import time
+
+from chromite.lib import cros_test_lib
+from chromite.lib import timeout_util
+
+
+# pylint: disable=W0212,R0904
+
+
+class TestTimeouts(cros_test_lib.TestCase):
+  """Tests for timeout_util.Timeout."""
+
+  def testTimeout(self):
+    """Tests that we can nest Timeout correctly."""
+    self.assertFalse('mock' in str(time.sleep).lower())
+    with timeout_util.Timeout(30):
+      with timeout_util.Timeout(20):
+        with timeout_util.Timeout(1):
+          self.assertRaises(timeout_util.TimeoutError, time.sleep, 10)
+
+        # Should not raise a timeout exception: remaining timeout (20s) > 1s.
+        time.sleep(1)
+
+  def testTimeoutNested(self):
+    """Tests that we still re-raise an alarm if both are reached."""
+    with timeout_util.Timeout(1):
+      try:
+        with timeout_util.Timeout(2):
+          self.assertRaises(timeout_util.TimeoutError, time.sleep, 1)
+
+      # Craziness to catch nested timeouts.
+      except timeout_util.TimeoutError:
+        pass
+      else:
+        self.fail('Should have thrown an exception')
+
+
+class TestWaitFors(cros_test_lib.TestCase):
+  """Tests for assorted timeout_utils WaitForX methods."""
+
+  def setUp(self):
+    self.values_ix = 0
+    self.timestart = None
+    self.timestop = None
+
+  def GetFunc(self, return_values):
+    """Return a functor that returns given values in sequence with each call."""
+    self.values_ix = 0
+    self.timestart = None
+    self.timestop = None
+
+    def _Func():
+      if not self.timestart:
+        self.timestart = datetime.datetime.utcnow()
+
+      val = return_values[self.values_ix]
+      self.values_ix += 1
+
+      self.timestop = datetime.datetime.utcnow()
+      return val
+
+    return _Func
+
+  def GetTryCount(self):
+    """Get number of times func was tried."""
+    return self.values_ix
+
+  def GetTrySeconds(self):
+    """Get number of seconds that span all func tries."""
+    delta = self.timestop - self.timestart
+    return int(delta.seconds + 0.5)
+
+  def _TestWaitForSuccess(self, maxval, timeout, **kwargs):
+    """Run through a test for WaitForSuccess."""
+
+    func = self.GetFunc(range(20))
+    def _RetryCheck(val):
+      return val < maxval
+
+    return timeout_util.WaitForSuccess(_RetryCheck, func, timeout, **kwargs)
+
+  def _TestWaitForReturnValue(self, values, timeout, **kwargs):
+    """Run through a test for WaitForReturnValue."""
+    func = self.GetFunc(range(20))
+    return timeout_util.WaitForReturnValue(values, func, timeout, **kwargs)
+
+  def testWaitForSuccess1(self):
+    """Test success after a few tries."""
+    self.assertEquals(4, self._TestWaitForSuccess(4, 10, period=1))
+    self.assertEquals(5, self.GetTryCount())
+    self.assertEquals(4, self.GetTrySeconds())
+
+  def testWaitForSuccess2(self):
+    """Test timeout after a couple tries."""
+    self.assertRaises(timeout_util.TimeoutError, self._TestWaitForSuccess,
+                      4, 3, period=1)
+    self.assertEquals(3, self.GetTryCount())
+    self.assertEquals(2, self.GetTrySeconds())
+
+  def testWaitForSuccess3(self):
+    """Test success on first try."""
+    self.assertEquals(0, self._TestWaitForSuccess(0, 10, period=1))
+    self.assertEquals(1, self.GetTryCount())
+    self.assertEquals(0, self.GetTrySeconds())
+
+  def testWaitForSuccess4(self):
+    """Test success after a few tries with longer period."""
+    self.assertEquals(3, self._TestWaitForSuccess(3, 10, period=2))
+    self.assertEquals(4, self.GetTryCount())
+    self.assertEquals(6, self.GetTrySeconds())
+
+  def testWaitForReturnValue1(self):
+    """Test value found after a few tries."""
+    self.assertEquals(4, self._TestWaitForReturnValue((4, 5), 10, period=1))
+    self.assertEquals(5, self.GetTryCount())
+    self.assertEquals(4, self.GetTrySeconds())
+
+  def testWaitForReturnValue2(self):
+    """Test value found on first try."""
+    self.assertEquals(0, self._TestWaitForReturnValue((0, 1), 10, period=1))
+    self.assertEquals(1, self.GetTryCount())
+    self.assertEquals(0, self.GetTrySeconds())
+
+  def testWaitForCallback(self):
+    """Verify side_effect_func works."""
+    side_effect_called = [False]
+    def _SideEffect(remaining):
+      self.assertTrue(isinstance(remaining, datetime.timedelta))
+      side_effect_called[0] = True
+    self.assertEquals(1, self._TestWaitForSuccess(
+        1, 10, period=0.1, side_effect_func=_SideEffect))
+    self.assertTrue(side_effect_called[0])
+
+  def testWaitForCallbackSleepsLong(self):
+    """Verify a long running side effect doesn't call time.sleep(<negative>)."""
+    side_effect_called = [False]
+    def _SideEffect(_remaining):
+      time.sleep(0.3)
+      side_effect_called[0] = True
+    self.assertRaises(timeout_util.TimeoutError, self._TestWaitForSuccess,
+                      10, 0, period=0.1, side_effect_func=_SideEffect)
+    self.assertTrue(side_effect_called[0])
+
+  def testWaitForCallbackAfterTimeout(self):
+    """If side_effect is called after the timeout, remaining should be zero."""
+    side_effect_called = [False]
+    def _SideEffect(remaining):
+      self.assertGreaterEqual(remaining.total_seconds(), 0)
+      side_effect_called[0] = True
+    self.assertRaises(timeout_util.TimeoutError, self._TestWaitForSuccess,
+                      10, 0, period=0.1, side_effect_func=_SideEffect)
+    self.assertTrue(side_effect_called[0])
diff --git a/lib/toolchain.py b/lib/toolchain.py
new file mode 100644
index 0000000..cedc9a0
--- /dev/null
+++ b/lib/toolchain.py
@@ -0,0 +1,119 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for managing the toolchains in the chroot."""
+
+from __future__ import print_function
+
+import cStringIO
+
+from chromite.cbuildbot import constants
+from chromite.lib import brick_lib
+from chromite.lib import cros_build_lib
+from chromite.lib import gs
+from chromite.lib import portage_util
+from chromite.lib import toolchain_list
+
+if cros_build_lib.IsInsideChroot():
+  # Only import portage after we've checked that we're inside the chroot.
+  # Outside may not have portage, in which case the above may not happen.
+  # We'll check in main() if the operation needs portage.
+
+  # pylint: disable=F0401
+  import portage
+
+
+def GetHostTuple():
+  """Returns compiler tuple for the host system."""
+  # pylint: disable=E1101
+  return portage.settings['CHOST']
+
+
+# Tree interface functions. They help with retrieving data about the current
+# state of the tree:
+def GetAllTargets():
+  """Get the complete list of targets.
+
+  Returns:
+    A dict mapping cross-target names to their settings for the current tree.
+  """
+  targets = GetToolchainsForBoard('all')
+
+  # Remove the host target as that is not a cross-target. Replace with 'host'.
+  del targets[GetHostTuple()]
+  return targets
+
+
+def GetToolchainsForBoard(board, buildroot=constants.SOURCE_ROOT):
+  """Get a dictionary mapping toolchain targets to their options for a board.
+
+  Args:
+    board: board name in question (e.g. 'daisy').
+    buildroot: path to buildroot.
+
+  Returns:
+    A dict mapping toolchain targets to their option settings for the board.
+  """
+  overlays = portage_util.FindOverlays(
+      constants.BOTH_OVERLAYS, None if board in ('all', 'sdk') else board,
+      buildroot=buildroot)
+  toolchains = toolchain_list.ToolchainList(overlays=overlays)
+  targets = toolchains.GetMergedToolchainSettings()
+  if board == 'sdk':
+    targets = FilterToolchains(targets, 'sdk', True)
+  return targets
+
+
+def GetToolchainsForBrick(brick_locator):
+  """Get a dictionary mapping toolchain targets to their options for a brick.
+
+  Args:
+    brick_locator: locator for the brick.
+
+  Returns:
+    A dict mapping toolchain targets to their option settings for the brick.
+  """
+  toolchains = toolchain_list.ToolchainList(
+      brick=brick_lib.Brick(brick_locator))
+  return toolchains.GetMergedToolchainSettings()
+
+
+def FilterToolchains(targets, key, value):
+  """Filter out targets based on their attributes.
+
+  Args:
+    targets: dict of toolchains
+    key: metadata to examine
+    value: expected value for metadata
+
+  Returns:
+    dict where all targets whose metadata |key| does not match |value|
+    have been deleted
+  """
+  return dict((k, v) for k, v in targets.iteritems() if v[key] == value)
+
+
+def GetSdkURL(for_gsutil=False, suburl=''):
+  """Construct a Google Storage URL for accessing SDK related archives
+
+  Args:
+    for_gsutil: Do you want a URL for passing to `gsutil`?
+    suburl: A url fragment to tack onto the end
+
+  Returns:
+    The fully constructed URL
+  """
+  return gs.GetGsURL(constants.SDK_GS_BUCKET, for_gsutil=for_gsutil,
+                     suburl=suburl)
+
+
+def GetArchForTarget(target):
+  """Returns the arch used by the given toolchain.
+
+  Args:
+    target: a toolchain.
+  """
+  info = cros_build_lib.RunCommand(['crossdev', '--show-target-cfg', target],
+                                   capture_output=True, quiet=True).output
+  return cros_build_lib.LoadKeyValueFile(cStringIO.StringIO(info)).get('arch')
diff --git a/lib/toolchain_list.py b/lib/toolchain_list.py
new file mode 100644
index 0000000..432436b
--- /dev/null
+++ b/lib/toolchain_list.py
@@ -0,0 +1,141 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A type used to represent a toolchain and its setting overrides."""
+
+from __future__ import print_function
+
+import copy
+import collections
+import json
+import os
+
+from chromite.lib import osutils
+
+
# One toolchain entry: |target| is the toolchain tuple name (e.g.
# 'x86_64-cros-linux-gnu'); |setting_overrides| is a dict of settings that
# override the defaults for that target.
_ToolchainTuple = collections.namedtuple('_ToolchainTuple',
                                         ('target', 'setting_overrides'))


# Settings key that marks a toolchain as the default one.
_DEFAULT_TOOLCHAIN_KEY = 'default'
+
+
class NoDefaultToolchainDefinedError(Exception):
  """Brillo brick stacks are required to define a default toolchain.

  Raised by ToolchainList.GetMergedToolchainSettings() when a brick stack
  marks no toolchain with 'default': True.
  """
+
+
class MismatchedToolchainConfigsError(Exception):
  """We have no defined resolution for conflicting toolchain configs.

  Raised when two sources set the same setting of the same toolchain to
  different values.
  """
+
+
class ToolchainList(object):
  """Represents a list of toolchains."""

  def __init__(self, brick=None, overlays=None):
    """Construct an instance.

    Args:
      brick: brick_lib.Brick object.  We'll add the toolchains used by the brick
          and its dependencies to |self|.
      overlays: list of overlay directories to add toolchains from.

    Raises:
      ValueError: unless exactly one of |brick| and |overlays| is provided.
    """
    if brick is None and overlays is None:
      raise ValueError('Must specify either brick or overlays.')
    if brick is not None and overlays is not None:
      raise ValueError('Must specify one of brick or overlays.')

    self._toolchains = []
    # Brick stacks must mark a default toolchain explicitly; overlay stacks
    # fall back to treating their first toolchain as the default.
    self._require_explicit_default_toolchain = True
    if brick:
      for each_brick in brick.BrickStack():
        self._AddToolchainsFromBrick(each_brick)
    else:
      self._require_explicit_default_toolchain = False
      for overlay_path in overlays:
        self._AddToolchainsFromOverlayDir(overlay_path)

  def _AddToolchainsFromOverlayDir(self, overlay_dir):
    """Add toolchains to |self| from the given overlay.

    Does not include overlays that this overlay depends on.

    Args:
      overlay_dir: absolute path to an overlay directory.
    """
    config_path = os.path.join(overlay_dir, 'toolchain.conf')
    if not os.path.exists(config_path):
      # Not all overlays define toolchains.
      return

    config_lines = osutils.ReadFile(config_path).splitlines()
    for line in config_lines:
      # Split by hash sign so that comments are ignored.
      # Then split the line to get the tuple and its options.
      line_pieces = line.split('#', 1)[0].split(None, 1)
      if not line_pieces:
        continue
      target = line_pieces[0]
      # Any text after the target name is a JSON dict of setting overrides.
      settings = json.loads(line_pieces[1]) if len(line_pieces) > 1 else {}
      self._AddToolchain(target, setting_overrides=settings)

  def _AddToolchainsFromBrick(self, brick):
    """Add toolchains to |self| defined by the given brick.

    Args:
      brick: brick_lib.Brick object.
    """
    # The 'toolchains' config value is a list of (target, settings) pairs,
    # so the fallback for bricks without toolchains must be a list too.  The
    # previous dict fallback only worked because an empty dict happens to
    # iterate to nothing.
    for target, settings in brick.config.get('toolchains', []):
      self._AddToolchain(target, setting_overrides=settings)

  def _AddToolchain(self, target, setting_overrides=None):
    """Add a toolchain to |self|.

    Args:
      target: string target (e.g. 'x86_64-cros-linux-gnu').
      setting_overrides: dictionary of setting overrides for this toolchain.
    """
    if setting_overrides is None:
      setting_overrides = dict()
    self._toolchains.append(_ToolchainTuple(
        target=target, setting_overrides=setting_overrides))

  def GetMergedToolchainSettings(self):
    """Returns a dictionary of merged toolchain settings.

    Returns:
      dict mapping each toolchain target to its settings dict, with defaults
      filled in for 'sdk', 'crossdev' and 'default'.

    Raises:
      NoDefaultToolchainDefinedError: if an explicit default is required but
          none of the toolchains is marked as default.
      MismatchedToolchainConfigsError: if two sources set the same setting of
          the same toolchain to different values.
    """
    targets = {}
    # Deep copy so that inserting a synthesized default below never mutates
    # the instance's own toolchain list.
    toolchains = copy.deepcopy(self._toolchains)
    if not toolchains:
      return targets

    have_default = any(setting_overrides.get(_DEFAULT_TOOLCHAIN_KEY, False)
                       for _, setting_overrides in toolchains)
    if not have_default:
      if self._require_explicit_default_toolchain:
        raise NoDefaultToolchainDefinedError(
            'Expected to find a toolchain marked as default.')
      # Overlay stacks implicitly default to their first toolchain.
      default_toolchain = _ToolchainTuple(toolchains[0].target,
                                          {_DEFAULT_TOOLCHAIN_KEY: True})
      toolchains.insert(0, default_toolchain)

    # We might get toolchain setting overrides from a couple different overlays.
    # Merge all these overrides together, disallowing conflicts.
    for toolchain in toolchains:
      existing_overrides = targets.setdefault(toolchain.target, dict())
      # items() rather than the Python-2-only iteritems() keeps this portable;
      # iteration behavior is identical here.
      for key, value in toolchain.setting_overrides.items():
        if key in existing_overrides and existing_overrides[key] != value:
          raise MismatchedToolchainConfigsError(
              'For toolchain %s, found %s to be set to both %r and %r.' %
              (toolchain.target, key, existing_overrides[key], value))
        existing_overrides[key] = value

    # Now that we've merged all the setting overrides, apply them to defaults.
    for target in list(targets):
      settings = {
          'sdk': True,
          'crossdev': '',
          'default': False,
      }
      settings.update(targets[target])
      targets[target] = settings
    return targets
diff --git a/lib/toolchain_unittest b/lib/toolchain_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/toolchain_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/toolchain_unittest.py b/lib/toolchain_unittest.py
new file mode 100644
index 0000000..1e44214
--- /dev/null
+++ b/lib/toolchain_unittest.py
@@ -0,0 +1,149 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for toolchain."""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from chromite.lib import brick_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import toolchain
+from chromite.lib import toolchain_list
+from chromite.lib import workspace_lib
+
+
# toolchain.conf fixture for the first overlay: the first target listed
# becomes the implicit default for overlay stacks; inline JSON after a target
# supplies its setting overrides.
BASE_TOOLCHAIN_CONF = """# The root of all evil is money, err, this config.
base-target-name # This will become the base target.

# This toolchain is bonus!
bonus-toolchain {"a setting": "bonus value"}  # Bonus!

"""

# toolchain.conf fixture for a later overlay; its 'bonus-toolchain' settings
# merge with the ones from BASE_TOOLCHAIN_CONF.
ADDITIONAL_TOOLCHAIN_CONF = """# A helpful toolchain related comment.
extra-toolchain  # Unlikely to win any performance tests.

bonus-toolchain {"stable": true}
"""

# Brick config fixtures: 'toolchains' is a list of (target, settings) pairs.
MODERN_BSP_BRICK_CONFIG = {
    'name': 'bsp-brick',
    'toolchains': [('base-target-name', {'default': True}),
                   ('bonus-toolchain', {'a setting': 'bonus value'})
                  ],
    'dependencies': ['//custom-firmware-brick'],
}

MODERN_FIRMWARE_BRICK_CONFIG = {
    'name': 'custom-firmware-brick',
    'toolchains': [('bonus-toolchain', {'stable': True}),
                   ('extra-toolchain', {})],
}

# A brick that defines no toolchains at all.
TYPICAL_BRICK_WITHOUT_TOOLCHAINS = {
    'name': 'custom-firmware-brick',
}

# Merged result expected from the fixtures above; 'sdk', 'crossdev' and
# 'default' are filled in with defaults when not overridden.
EXPECTED_TOOLCHAINS = {
    'bonus-toolchain': {
        'sdk': True,
        'crossdev': '',
        'default': False,
        'a setting': 'bonus value',
        'stable': True,
    },
    'extra-toolchain': {'sdk': True, 'crossdev': '', 'default': False},
    'base-target-name': {'sdk': True, 'crossdev': '', 'default': True},
}
+
+
class ToolchainTest(cros_test_lib.MockTempDirTestCase):
  """Tests for lib.toolchain."""

  def _MakeBrick(self, config):
    # Create a brick under tempdir named after the config, so multiple bricks
    # can coexist in one test.
    return brick_lib.Brick(os.path.join(self.tempdir, config['name']),
                           initial_config=config)

  def setUp(self):
    # Point workspace discovery at tempdir so brick locators resolve there.
    self.PatchObject(workspace_lib, 'WorkspacePath', return_value=self.tempdir)

  def testArchForToolchain(self):
    """Tests that we correctly parse crossdev's output."""
    rc_mock = cros_build_lib_unittest.RunCommandMock()

    # crossdev output without an 'arch' key should yield None.
    noarch = """target=foo
category=bla
"""
    rc_mock.SetDefaultCmdResult(output=noarch)
    with rc_mock:
      self.assertEqual(None, toolchain.GetArchForTarget('fake_target'))

    amd64arch = """arch=amd64
target=foo
"""
    rc_mock.SetDefaultCmdResult(output=amd64arch)
    with rc_mock:
      self.assertEqual('amd64', toolchain.GetArchForTarget('fake_target'))

  @mock.patch('chromite.lib.toolchain.portage_util.FindOverlays')
  def testReadsBoardToolchains(self, find_overlays_mock):
    """Tests that we correctly parse toolchain configs for an overlay stack."""
    # Create some fake overlays and put toolchain confs in a subset of them.
    overlays = [os.path.join(self.tempdir, 'overlay%d' % i) for i in range(3)]
    for overlay in overlays:
      osutils.SafeMakedirs(overlay)
    for overlay, contents in [(overlays[0], BASE_TOOLCHAIN_CONF),
                              (overlays[2], ADDITIONAL_TOOLCHAIN_CONF)]:
      osutils.WriteFile(os.path.join(overlay, 'toolchain.conf'), contents)
    find_overlays_mock.return_value = overlays
    actual_targets = toolchain.GetToolchainsForBoard('board_value')
    self.assertEqual(EXPECTED_TOOLCHAINS, actual_targets)

  def testReadsBrickToolchains(self):
    """Tests that we can read the toolchain for a brick stack."""
    # Creates the brick in a subdirectory of tempdir so that we can create other
    # bricks without interfering with it.
    self._MakeBrick(MODERN_FIRMWARE_BRICK_CONFIG)
    top_brick = self._MakeBrick(MODERN_BSP_BRICK_CONFIG)
    self.assertEqual(EXPECTED_TOOLCHAINS,
                     toolchain.GetToolchainsForBrick(top_brick.brick_locator))

  def testShouldDetectMissingDefaultsInBricks(self):
    """Tests that we check for a default toolchain in bricks."""
    # No toolchain in this brick carries 'default': True.
    brick = self._MakeBrick(
        {'name': 'brick-name', 'toolchains': [('base-toolchain', {})]})
    self.assertRaises(toolchain_list.NoDefaultToolchainDefinedError,
                      toolchain.GetToolchainsForBrick,
                      brick.brick_locator)

  def testShouldDetectConflictingOverrides(self):
    """Tests that we disallow toolchains with obvious conflicting settings."""
    # Two bricks in the same stack give 'setting' different values for the
    # same toolchain target.
    conflicting_brick = self._MakeBrick(
        {'name': 'conflicting-brick',
         'toolchains': [
             ('base-toolchain', {'default': True,
                                 'setting': 'conflicting value'}),
         ],
        })
    brick = self._MakeBrick(
        {'name': 'bsp-brick',
         'toolchains': [
             ('base-toolchain', {'default': True,
                                 'setting': 'bsp value'}),
         ],
         'dependencies': [conflicting_brick.brick_locator],
        })
    self.assertRaises(toolchain_list.MismatchedToolchainConfigsError,
                      toolchain.GetToolchainsForBrick,
                      brick.brick_locator)

  def testToleratesBricksWithoutToolchains(self):
    """Tests that we correctly handle bricks that are toolchain agnostic."""
    simple_brick = self._MakeBrick(TYPICAL_BRICK_WITHOUT_TOOLCHAINS)
    toolchain.GetToolchainsForBrick(simple_brick.brick_locator)
diff --git a/lib/tracker_access.py b/lib/tracker_access.py
new file mode 100644
index 0000000..3dda67a
--- /dev/null
+++ b/lib/tracker_access.py
@@ -0,0 +1,166 @@
+# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions for accessing the issue tracker in a pythonic way."""
+
+from __future__ import print_function
+
+import pprint
+import sys
+
+# pylint: disable=F0401
+import gdata.client
+import gdata.projecthosting.client
+# pylint: enable=F0401
+
+
+DEFAULT_TRACKER_SOURCE = "chromite-tracker-access-1.0"
+VERBOSE = True  # Set to True to get extra debug info...
+
+
class TrackerAccess(object):
  """Class for accessing the tracker on code.google.com."""

  def __init__(self, email="", password="",
               tracker_source=DEFAULT_TRACKER_SOURCE):
    """TrackerAccess constructor.

    Args:
      email: The email address to Login with; may be "" for anonymous access.
      password: The password that goes with the email address; may be "" if
                the email is "".
      tracker_source: A string describing this program.  This can be anything
                      you like but should give some indication of which app is
                      making the request.
    """
    self._email = email
    self._password = password
    self._tracker_source = tracker_source
    # Created lazily by the first call to Login().
    self._tracker_client = None

  def Login(self):
    """Login, if needed.  This may be safely called more than once.

    Commands will call this function as their first line, so the client
    of this class need not call it themselves unless trying to debug login
    problems.

    This function should be called even if we're accessing anonymously.
    """
    if self._tracker_client is not None:
      # Already logged in (or anonymous client already created); nothing to do.
      return

    client = gdata.projecthosting.client.ProjectHostingClient()
    self._tracker_client = client
    # Credentials are optional; without them we stay anonymous.
    if self._email and self._password:
      client.client_login(self._email, self._password,
                          source=self._tracker_source,
                          service="code", account_type='GOOGLE')

  def GetKeyedLabels(self, project_name, issue_id):
    """Get labels of the form "Key-Value" attached to the given issue.

    Any labels that don't have a dash in them are ignored.

    Args:
      project_name: The tracker project to query.
      issue_id: The ID of the issue to query; should be an int but a string
          will probably work too.

    Returns:
      A dictionary mapping key/value pairs from the issue's labels, like:

      {'Area': 'Build',
       'Iteration': '15',
       'Mstone': 'R9.x',
       'Pri': '1',
       'Type': 'Bug'}
    """
    self.Login()

    query = gdata.projecthosting.client.Query(issue_id=issue_id)
    try:
      feed = self._tracker_client.get_issues(project_name, query=query)
    except gdata.client.RequestError as e:
      if VERBOSE:
        print("ERROR: Unable to access bug %s:%s: %s" %
              (project_name, issue_id, str(e)), file=sys.stderr)
      return {}

    # There should be exactly one result...
    assert len(feed.entry) == 1, "Expected exactly 1 result"
    (entry,) = feed.entry

    # We only care about labels that look like: Key-Value
    # We'll return a dictionary of those.
    keyed_labels = {}
    for label in entry.label:
      text = label.text
      if "-" not in text:
        continue
      label_key, _, label_val = text.partition("-")
      keyed_labels[label_key] = label_val

    return keyed_labels
+
+
def _TestGetKeyedLabels(project_name, email, passwordFile, *args):
  """Test code for GetKeyedLabels().

  Args:
    project_name: The name of the project we're looking at.
    email: The email address to use to login.  May be ""
    passwordFile: A file containing the password for the email address.
                  May be "" if email is "" for anon access.
    args: A list of bug IDs to query.
  """
  bug_ids = args
  # If password was specified as a file, read it.  Use a context manager so
  # the file handle is closed promptly instead of leaked until GC.
  if passwordFile:
    with open(passwordFile, "r") as password_file:
      password = password_file.read().strip()
  else:
    password = ""

  ta = TrackerAccess(email, password)

  if not bug_ids:
    print("No bugs were specified")
  else:
    for bug_id in bug_ids:
      print(bug_id, ta.GetKeyedLabels(project_name, int(bug_id)))
+
+
+def _DoHelp(commands, *args):
+  """Print help for the script."""
+
+  if len(args) >= 2 and args[0] == "help" and args[1] in commands:
+    # If called with arguments 'help' and 'command', show that commands's doc.
+    command_name = args[1]
+    print(commands[command_name].__doc__)
+  else:
+    # Something else: show generic help...
+    print(
+        "Usage %s <command> <command args>\n"
+        "\n"
+        "Known commands: \n"
+        "  %s\n"
+        % (sys.argv[0], pprint.pformat(["help"] + sorted(commands))))
+
+
def main():
  """Main function of the script."""
  # Dispatch table from command name to implementation.
  commands = {
      "TestGetKeyedLabels": _TestGetKeyedLabels,
  }

  args = sys.argv[1:]
  if args and args[0] in commands:
    commands[args[0]](*args[1:])
  else:
    # Argument 1 isn't in list of commands; show help and pass all arguments...
    _DoHelp(commands, *args)
diff --git a/lib/unittest_lib.py b/lib/unittest_lib.py
new file mode 100644
index 0000000..d6ae5a4
--- /dev/null
+++ b/lib/unittest_lib.py
@@ -0,0 +1,83 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittest-only utility functions library."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import cros_build_lib
+from chromite.lib import osutils
+
+
class BuildELFError(Exception):
  """Generic error building an ELF file.

  Raised by BuildELF() when compiling/linking the generated source fails.
  """
+
+
def BuildELF(filename, defined_symbols=None, undefined_symbols=None,
             used_libs=None, executable=False, static=False):
  """Builds a dynamic ELF with the provided import and exports.

  Compiles and links a dynamic program that exports some functions, as libraries
  do, and requires some symbols from other libraries. Dependencies should live
  in the same directory as the result.

  Args:
    filename: The output filename where the ELF is created.
    defined_symbols: The list of symbols this ELF exports.
    undefined_symbols: The list of symbols this ELF requires from other ELFs.
    used_libs: The list of libraries this ELF loads dynamically, including only
        the name of the library. For example, 'bz2' rather than 'libbz2.so.1.0'.
    executable: Whether the file has a main() function.
    static: Whether the file is statically linked (implies executable=True).

  Raises:
    ValueError: if static is True but executable is False.
    BuildELFError: if gcc fails to compile or link the generated source.
  """
  if defined_symbols is None:
    defined_symbols = []
  if undefined_symbols is None:
    undefined_symbols = []
  if used_libs is None:
    used_libs = []
  if static and not executable:
    raise ValueError('static requires executable to be True.')

  # Generate C source that declares each undefined symbol, references them all
  # from a hidden helper, and defines each exported symbol.
  source = ''.join('void %s();\n' % sym for sym in undefined_symbols)
  source += """
void __defined_symbols(const char*) __attribute__ ((visibility ("hidden")));
void __defined_symbols(const char* sym) {
  %s
}
""" % ('\n  '.join('%s();' % sym for sym in undefined_symbols))

  source += ''.join("""
void %s() __attribute__ ((visibility ("default")));
void %s() { __defined_symbols("%s"); }
""" % (sym, sym, sym) for sym in defined_symbols)

  if executable:
    source += """
int main() {
  __defined_symbols("main");
  return 42;
}
"""
  source_fn = filename + '_tmp.c'
  osutils.WriteFile(source_fn, source)

  outdir = os.path.dirname(filename)
  cmd = ['gcc', '-o', filename, source_fn]
  if not executable:
    cmd += ['-shared', '-fPIC']
  if static:
    cmd += ['-static']
  cmd += ['-L.', '-Wl,-rpath=./']
  cmd += ['-l%s' % lib for lib in used_libs]
  try:
    cros_build_lib.RunCommand(
        cmd, cwd=outdir, redirect_stdout=True, redirect_stderr=True,
        print_cmd=False)
  # Use the 'as' form: the old 'except E, e' comma syntax is a SyntaxError on
  # Python 3 and inconsistent with the rest of this codebase.
  except cros_build_lib.RunCommandError as e:
    # NOTE(review): e.message is a Python-2-only attribute; str(e) would be
    # the portable spelling — confirm before changing the message format.
    raise BuildELFError('%s\n%s' % (e.message, e.result.error))
  finally:
    # Always remove the temporary source file, even on failure.
    os.unlink(source_fn)
diff --git a/lib/upgrade_table.py b/lib/upgrade_table.py
new file mode 100644
index 0000000..ef750f9
--- /dev/null
+++ b/lib/upgrade_table.py
@@ -0,0 +1,81 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""UpgradeTable class is used in Portage package upgrade process."""
+
+from __future__ import print_function
+
+from chromite.lib import table
+
+
class UpgradeTable(table.Table):
  """Class to represent upgrade data in memory, can be written to csv."""

  # Column names.  Note that 'ARCH' is replaced with a real arch name when
  # these are accessed as attributes off an UpgradeTable object.
  COL_PACKAGE = 'Package'
  COL_SLOT = 'Slot'
  COL_OVERLAY = 'Overlay'
  COL_CURRENT_VER = 'Current ARCH Version'
  COL_STABLE_UPSTREAM_VER = 'Stable Upstream ARCH Version'
  COL_LATEST_UPSTREAM_VER = 'Latest Upstream ARCH Version'
  COL_STATE = 'State On ARCH'
  COL_DEPENDS_ON = 'Dependencies On ARCH'
  COL_USED_BY = 'Required By On ARCH'
  COL_TARGET = 'Root Target'
  COL_UPGRADED = 'Upgraded ARCH Version'

  # COL_STATE values should be one of the following:
  STATE_UNKNOWN = 'unknown'
  STATE_LOCAL_ONLY = 'local only'
  STATE_UPSTREAM_ONLY = 'upstream only'
  STATE_NEEDS_UPGRADE = 'needs upgrade'
  STATE_PATCHED = 'patched locally'
  STATE_DUPLICATED = 'duplicated locally'
  STATE_NEEDS_UPGRADE_AND_PATCHED = 'needs upgrade and patched locally'
  STATE_NEEDS_UPGRADE_AND_DUPLICATED = 'needs upgrade and duplicated locally'
  STATE_CURRENT = 'current'

  @staticmethod
  def GetColumnName(col, arch=None):
    """Translate from generic column name to specific given |arch|."""
    if not arch:
      return col
    return col.replace('ARCH', arch)

  def __init__(self, arch, upgrade=False, name=None):
    self._arch = arch

    # These constants serve two roles, for csv output:
    # 1) Restrict which column names are valid.
    # 2) Specify the order of those columns.
    # Access via getattr goes through __getattribute__, so each column name
    # already has ARCH substituted.
    column_attrs = [
        'COL_PACKAGE',
        'COL_SLOT',
        'COL_OVERLAY',
        'COL_CURRENT_VER',
        'COL_STABLE_UPSTREAM_VER',
        'COL_LATEST_UPSTREAM_VER',
        'COL_STATE',
        'COL_DEPENDS_ON',
        'COL_USED_BY',
        'COL_TARGET',
    ]
    if upgrade:
      column_attrs.append('COL_UPGRADED')

    table.Table.__init__(self, [getattr(self, attr) for attr in column_attrs],
                         name=name)

  def __getattribute__(self, name):
    """When accessing self.COL_*, substitute ARCH name."""
    if not name.startswith('COL_'):
      return object.__getattribute__(self, name)
    generic = getattr(UpgradeTable, name)
    return UpgradeTable.GetColumnName(generic, arch=self._arch)

  def GetArch(self):
    """Get the architecture associated with this UpgradeTable."""
    return self._arch
diff --git a/lib/upgrade_table_unittest b/lib/upgrade_table_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/upgrade_table_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/upgrade_table_unittest.py b/lib/upgrade_table_unittest.py
new file mode 100644
index 0000000..85a626f
--- /dev/null
+++ b/lib/upgrade_table_unittest.py
@@ -0,0 +1,83 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the upgrade_table module."""
+
+from __future__ import print_function
+
+from chromite.lib import cros_test_lib
+from chromite.lib import upgrade_table as utable
+
+
class UpgradeTableTest(cros_test_lib.TestCase):
  """Unittests for UpgradeTable."""
  ARCH = 'some-arch'
  NAME = 'some-name'

  def _CreateTable(self, upgrade_mode, arch=ARCH, name=NAME):
    return utable.UpgradeTable(arch, upgrade=upgrade_mode, name=name)

  def testGetArch(self):
    # The arch given at construction time is reported back regardless of the
    # upgrade flag.
    for arch, upgrade_mode in (('arch1', True), ('arch2', False)):
      t = self._CreateTable(upgrade_mode, arch=arch)
      self.assertEquals(t.GetArch(), arch)

  def _AssertEqualsAfterArchSub(self, arch, table_col_name,
                                static_table_col_name):
    self.assertEquals(table_col_name,
                      static_table_col_name.replace('ARCH', arch))

  def testColumnNameArchSubstitute(self):
    arch = 'foobar'
    t1 = self._CreateTable(True, arch=arch)

    # Some column names are independent of ARCH.
    for attr in ('COL_PACKAGE', 'COL_SLOT', 'COL_OVERLAY', 'COL_TARGET'):
      self.assertEquals(getattr(t1, attr), getattr(utable.UpgradeTable, attr))

    # Other column names require ARCH substitution.
    for attr in ('COL_CURRENT_VER', 'COL_STABLE_UPSTREAM_VER',
                 'COL_LATEST_UPSTREAM_VER', 'COL_STATE', 'COL_DEPENDS_ON',
                 'COL_USED_BY', 'COL_UPGRADED'):
      self._AssertEqualsAfterArchSub(arch, getattr(t1, attr),
                                     getattr(utable.UpgradeTable, attr))

  def testColumnExistence(self):
    no_upgrade = self._CreateTable(False)
    with_upgrade = self._CreateTable(True)

    # All these columns should be in both tables, with same name.
    shared_cols = [
        no_upgrade.COL_PACKAGE,
        no_upgrade.COL_SLOT,
        no_upgrade.COL_OVERLAY,
        no_upgrade.COL_CURRENT_VER,
        no_upgrade.COL_STABLE_UPSTREAM_VER,
        no_upgrade.COL_LATEST_UPSTREAM_VER,
        no_upgrade.COL_STATE,
        no_upgrade.COL_DEPENDS_ON,
        no_upgrade.COL_USED_BY,
        no_upgrade.COL_TARGET,
    ]
    for col in shared_cols:
      self.assertTrue(no_upgrade.HasColumn(col))
      self.assertTrue(with_upgrade.HasColumn(col))

    # The UPGRADED column should only be in the table with upgrade_mode=True.
    upgraded_col = no_upgrade.COL_UPGRADED
    self.assertFalse(no_upgrade.HasColumn(upgraded_col))
    self.assertTrue(with_upgrade.HasColumn(upgraded_col))
diff --git a/lib/user_db.py b/lib/user_db.py
new file mode 100644
index 0000000..82b3579
--- /dev/null
+++ b/lib/user_db.py
@@ -0,0 +1,296 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Logic to read the set of users and groups installed on a system."""
+
+from __future__ import print_function
+
+import collections
+import os
+
+from chromite.lib import cros_logging as logging
+from chromite.lib import locking
+from chromite.lib import osutils
+
+
# These fields must be in the order expected in /etc/passwd entries.
# uid/gid are ints; the remaining fields are strings.
User = collections.namedtuple(
    'User', ('user', 'password', 'uid', 'gid', 'gecos', 'home', 'shell'))

# These fields must be in the order expected in /etc/group entries.
# gid is an int; |users| is a list of member user names.
Group = collections.namedtuple(
    'Group', ('group', 'password', 'gid', 'users'))
+
+
def UserToEntry(user):
  """Returns the database file entry corresponding to |user|."""
  # Colon-separated /etc/passwd format; uid/gid are rendered as decimal.
  fields = (user.user, user.password, str(user.uid), str(user.gid),
            user.gecos, user.home, user.shell)
  return ':'.join(fields)
+
+
def GroupToEntry(group):
  """Returns the database file entry corresponding to |group|."""
  # Colon-separated /etc/group format; member names are comma-joined.
  members = ','.join(group.users)
  return ':'.join((group.group, group.password, str(group.gid), members))
+
+
+class UserDB(object):
+  """An object that understands the users and groups installed on a system."""
+
+  # Number of times to attempt to acquire the write lock on a database.
+  # The max wait time for the lock is the nth triangular number of seconds.
+  # So in this case, T(24) * 1 second = 300 seconds.
+  _DB_LOCK_RETRIES = 24
+
+  def __init__(self, sysroot):
+    self._sysroot = sysroot
+    self._user_cache = None
+    self._group_cache = None
+
+  @property
+  def _user_db_file(self):
+    """Returns path to user database (aka /etc/passwd in the sysroot)."""
+    return os.path.join(self._sysroot, 'etc', 'passwd')
+
+  @property
+  def _group_db_file(self):
+    """Returns path to group database (aka /etc/group in the sysroot)."""
+    return os.path.join(self._sysroot, 'etc', 'group')
+
+  @property
+  def _users(self):
+    """Returns a list of User tuples."""
+    if self._user_cache is not None:
+      return self._user_cache
+
+    self._user_cache = {}
+    passwd_contents = osutils.ReadFile(self._user_db_file)
+
+    for line in passwd_contents.splitlines():
+      pieces = line.split(':')
+      if len(pieces) != 7:
+        logging.warning('Ignored invalid line in users file: "%s"', line)
+        continue
+
+      user, password, uid, gid, gecos, home, shell = pieces
+
+      try:
+        uid_as_int = int(uid)
+        gid_as_int = int(gid)
+      except ValueError:
+        logging.warning('Ignored invalid uid (%s) or gid (%s).', uid, gid)
+        continue
+
+      if user in self._user_cache:
+        logging.warning('Ignored duplicate user definition for "%s".', user)
+        continue
+
+      self._user_cache[user] = User(user=user, password=password,
+                                    uid=uid_as_int, gid=gid_as_int,
+                                    gecos=gecos, home=home, shell=shell)
+    return self._user_cache
+
+  @property
+  def _groups(self):
+    """Returns a list of Group tuples."""
+    if self._group_cache is not None:
+      return self._group_cache
+
+    self._group_cache = {}
+    group_contents = osutils.ReadFile(self._group_db_file)
+
+    for line in group_contents.splitlines():
+      pieces = line.split(':')
+      if len(pieces) != 4:
+        logging.warning('Ignored invalid line in group file: "%s"', line)
+        continue
+
+      group, password, gid, users = pieces
+
+      try:
+        gid_as_int = int(gid)
+      except ValueError:
+        logging.warning('Ignored invalid or gid (%s).', gid)
+        continue
+
+      if group in self._group_cache:
+        logging.warning('Ignored duplicate group definition for "%s".',
+                        group)
+        continue
+
+      users = users.split(',')
+
+      self._group_cache[group] = Group(group=group, password=password,
+                                       gid=gid_as_int, users=users)
+    return self._group_cache
+
+  def GetUserEntry(self, username, skip_lock=False):
+    """Returns a user's database entry.
+
+    Args:
+      username: name of user to get the entry for.
+      skip_lock: True iff we should skip getting a lock before reading the
+        database.
+
+    Returns:
+      database entry as a string.
+    """
+    if skip_lock:
+      return UserToEntry(self._users[username])
+
+    # Clear the user cache to force ourselves to reparse while holding a lock.
+    self._user_cache = None
+
+    with locking.PortableLinkLock(
+        self._user_db_file + '.lock', max_retry=self._DB_LOCK_RETRIES):
+      return UserToEntry(self._users[username])
+
+  def GetGroupEntry(self, groupname, skip_lock=False):
+    """Returns a group's database entry.
+
+    Args:
+      groupname: name of group to get the entry for.
+      skip_lock: True iff we should skip getting a lock before reading the
+        database.
+
+    Returns:
+      database entry as a string.
+    """
+    if skip_lock:
+      return GroupToEntry(self._groups[groupname])
+
+    # Clear the group cache to force ourselves to reparse while holding a lock.
+    self._group_cache = None
+
+    with locking.PortableLinkLock(
+        self._group_db_file + '.lock', max_retry=self._DB_LOCK_RETRIES):
+      return GroupToEntry(self._groups[groupname])
+
+  def UserExists(self, username):
+    """Returns True iff a user called |username| exists in the database.
+
+    Args:
+      username: name of a user (e.g. 'root')
+
+    Returns:
+      True iff the given |username| has an entry in /etc/passwd.
+    """
+    return username in self._users
+
+  def GroupExists(self, groupname):
+    """Returns True iff a group called |groupname| exists in the database.
+
+    Args:
+      groupname: name of a group (e.g. 'root')
+
+    Returns:
+      True iff the given |groupname| has an entry in /etc/group.
+    """
+    return groupname in self._groups
+
+  def ResolveUsername(self, username):
+    """Resolves a username to a uid.
+
+    Args:
+      username: name of a user (e.g. 'root')
+
+    Returns:
+      The uid of the given username.  Raises ValueError on failure.
+    """
+    user = self._users.get(username)
+    if user:
+      return user.uid
+
+    raise ValueError('Could not resolve unknown user "%s" to uid.' % username)
+
+  def ResolveGroupname(self, groupname):
+    """Resolves a groupname to a gid.
+
+    Args:
+      groupname: name of a group (e.g. 'wheel')
+
+    Returns:
+      The gid of the given groupname.  Raises ValueError on failure.
+    """
+    group = self._groups.get(groupname)
+    if group:
+      return group.gid
+
+    raise ValueError('Could not resolve unknown group "%s" to gid.' % groupname)
+
+  def AddUser(self, user):
+    """Atomically add a user to the database.
+
+    If a user named |user.user| already exists, this method will simply return.
+
+    Args:
+      user: user_db.User object to add to database.
+    """
+    # Try to avoid grabbing the lock in the common case that a user already
+    # exists.
+    if self.UserExists(user.user):
+      logging.info('Not installing user "%s" because it already existed.',
+                   user.user)
+      return
+
+    # Clear the user cache to force ourselves to reparse.
+    self._user_cache = None
+
+    with locking.PortableLinkLock(self._user_db_file + '.lock',
+                                  max_retry=self._DB_LOCK_RETRIES):
+      # Check that |user| exists under the lock in case we're racing to create
+      # this user.
+      if self.UserExists(user.user):
+        logging.info('Not installing user "%s" because it already existed.',
+                     user.user)
+        return
+
+      self._users[user.user] = user
+      new_users = sorted(self._users.itervalues(), key=lambda u: u.uid)
+      contents = '\n'.join([UserToEntry(u) for u in new_users])
+      osutils.WriteFile(self._user_db_file, contents, atomic=True, sudo=True)
+      print('Added user "%s" to %s:' % (user.user, self._user_db_file))
+      print(' - password entry: %s' % user.password)
+      print(' - id: %d' % user.uid)
+      print(' - group id: %d' % user.gid)
+      print(' - gecos: %s' % user.gecos)
+      print(' - home: %s' % user.home)
+      print(' - shell: %s' % user.shell)
+
+  def AddGroup(self, group):
+    """Atomically add a group to the database.
+
+    If a group named |group.group| already exists, this method will simply
+    return.
+
+    Args:
+      group: user_db.Group object to add to database.
+    """
+    # Try to avoid grabbing the lock in the common case that a group already
+    # exists.
+    if self.GroupExists(group.group):
+      logging.info('Not installing group "%s" because it already existed.',
+                   group.group)
+      return
+
+    # Clear the group cache to force ourselves to reparse.
+    self._group_cache = None
+
+    with locking.PortableLinkLock(self._group_db_file + '.lock',
+                                  max_retry=self._DB_LOCK_RETRIES):
+      # Check that |group| exists under the lock in case we're racing to create
+      # this group.
+      if self.GroupExists(group.group):
+        logging.info('Not installing group "%s" because it already existed.',
+                     group.group)
+        return
+
+      self._groups[group.group] = group
+      new_groups = sorted(self._groups.itervalues(), key=lambda g: g.gid)
+      contents = '\n'.join([GroupToEntry(g) for g in new_groups])
+      osutils.WriteFile(self._group_db_file, contents, atomic=True, sudo=True)
+      print('Added group "%s" to %s:' % (group.group, self._group_db_file))
+      print(' - group id: %d' % group.gid)
+      print(' - password entry: %s' % group.password)
+      print(' - user list: %s' % ','.join(group.users))
diff --git a/lib/user_db_unittest b/lib/user_db_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/user_db_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/user_db_unittest.py b/lib/user_db_unittest.py
new file mode 100644
index 0000000..71d53e1
--- /dev/null
+++ b/lib/user_db_unittest.py
@@ -0,0 +1,99 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test that our interface to the user and group database works."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import user_db
+
+
+MOCK_PASSWD_CONTENTS = 'root:x:0:0:root:/root:/bin/bash'
+
+MOCK_GROUP_CONTENTS = 'root:x:0:'
+
+
+class UserDBTest(cros_test_lib.MockTempDirTestCase):
+  """Tests for chromite.lib.user_db."""
+
+  def _SetupDatabases(self, passwd_contents, group_contents):
+    osutils.WriteFile(os.path.join(self.tempdir, 'etc', 'passwd'),
+                      passwd_contents, makedirs=True)
+    osutils.WriteFile(os.path.join(self.tempdir, 'etc', 'group'),
+                      group_contents, makedirs=True)
+
+  def setUp(self):
+    """Set up a test environment."""
+    self._SetupDatabases(MOCK_PASSWD_CONTENTS, MOCK_GROUP_CONTENTS)
+    self._user_db = user_db.UserDB(self.tempdir)
+    self.PatchObject(os, 'getuid', return_value=0)
+
+  def testAcceptsKnownUser(self):
+    """Check that we do appropriate things with valid users."""
+    self.assertTrue(self._user_db.UserExists('root'))
+    self.assertEqual(0, self._user_db.ResolveUsername('root'))
+
+  def testAcceptsKnownGroup(self):
+    """Check that we do appropriate things with valid groups."""
+    self.assertTrue(self._user_db.GroupExists('root'))
+    self.assertEqual(0, self._user_db.ResolveGroupname('root'))
+
+  def testRejectsUnknownUser(self):
+    """Check that we do appropriate things with invalid users."""
+    self.assertFalse(self._user_db.UserExists('foot'))
+    self.assertRaises(ValueError, self._user_db.ResolveUsername, 'foot')
+
+  def testRejectsUnknownGroup(self):
+    """Check that we do appropriate things with invalid groups."""
+    self.assertFalse(self._user_db.GroupExists('wheel'))
+    self.assertRaises(ValueError, self._user_db.ResolveGroupname, 'wheel')
+
+  def testToleratesMalformedLines(self):
+    """Check that we skip over invalid lines in databases."""
+    bad_user_contents = '\n'.join(['no colon on this line',
+                                   '::::::',
+                                   'root:x:not a uid:0:root:/root:/bin/bash',
+                                   'root:x:0:not a gid:root:/root:/bin/bash',
+                                   'root:x:0:0:root:/root',
+                                   'root:x:0:0:root:/root:/bin/bash:',
+                                   'bar:x:1:1:bar user:/home/bar:/bin/sh'])
+    bad_group_contents = '\n'.join(['no colon on this line',
+                                    ':::',
+                                    'root:x:not a gid:',
+                                    'root:x:0',
+                                    'root:x:0::',
+                                    'bar:x:1:'])
+    self._SetupDatabases(bad_user_contents, bad_group_contents)
+    db = user_db.UserDB(self.tempdir)
+    self.assertTrue(db.UserExists('bar'))
+    self.assertTrue(db.GroupExists('bar'))
+    self.assertFalse(db.UserExists('root'))
+    self.assertFalse(db.GroupExists('root'))
+
+  def testCanAddUser(self):
+    """Test that we can correctly add a user to a database."""
+    new_user = user_db.User(user='foo', password='!', uid=1000, gid=1000,
+                            gecos='test', home='/dev/null', shell='/bin/false')
+    self.assertFalse(self._user_db.UserExists(new_user.user))
+    self._user_db.AddUser(new_user)
+    self.assertTrue(self._user_db.UserExists(new_user.user))
+
+    # New instances should just see the new user.
+    new_db = user_db.UserDB(self.tempdir)
+    self.assertTrue(new_db.UserExists(new_user.user))
+
+  def testCanAddGroup(self):
+    """Test that we can correctly add a group to a database."""
+    new_group = user_db.Group(group='foo', password='!', gid=1000, users=[])
+    self.assertFalse(self._user_db.GroupExists(new_group.group))
+    self._user_db.AddGroup(new_group)
+    self.assertTrue(self._user_db.GroupExists(new_group.group))
+
+    # New instances should just see the new group.
+    new_db = user_db.UserDB(self.tempdir)
+    self.assertTrue(new_db.GroupExists(new_group.group))
diff --git a/lib/vm.py b/lib/vm.py
new file mode 100644
index 0000000..9f4f8d2
--- /dev/null
+++ b/lib/vm.py
@@ -0,0 +1,248 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""VM-related helper functions/classes."""
+
+from __future__ import print_function
+
+import os
+import shutil
+import time
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import path_util
+from chromite.lib import remote_access
+
+
+class VMError(Exception):
+  """A base exception for VM errors."""
+
+
+class VMCreationError(VMError):
+  """Raised when failed to create a VM image."""
+
+
+def VMIsUpdatable(path):
+  """Check if the existing VM image is updatable.
+
+  Args:
+    path: Path to the VM image.
+
+  Returns:
+    True if VM is updatable; False otherwise.
+  """
+  table = cros_build_lib.GetImageDiskPartitionInfo(path, unit='MB')
+  # Assume if size of the two root partitions match, the image
+  # is updatable.
+  return table['ROOT-B'].size == table['ROOT-A'].size
+
+
+def CreateVMImage(image=None, board=None, updatable=True, dest_dir=None):
+  """Returns the path of the image built to run in a VM.
+
+  By default, the returned VM is a test image that can run full update
+  testing on it. If there exists a VM image with the matching
+  |updatable| setting, this method returns the path to the existing
+  image. If |dest_dir| is set, it will copy/create the VM image to the
+  |dest_dir|.
+
+  Args:
+    image: Path to the (non-VM) image. Defaults to None to use the latest
+      image for the board.
+    board: Board that the image was built with. If None, attempts to use the
+      configured default board.
+    updatable: Create a VM image that supports AU.
+    dest_dir: If set, create/copy the VM image to |dest|; otherwise,
+      use the folder where |image| resides.
+  """
+  if not image and not board:
+    raise VMCreationError(
+        'Cannot create VM when both image and board are None.')
+
+  image_dir = os.path.dirname(image)
+  src_path = dest_path = os.path.join(image_dir, constants.VM_IMAGE_BIN)
+
+  if dest_dir:
+    dest_path = os.path.join(dest_dir, constants.VM_IMAGE_BIN)
+
+  exists = False
+  # Do not create a new VM image if a matching image already exists.
+  exists = os.path.exists(src_path) and (
+      not updatable or VMIsUpdatable(src_path))
+
+  if exists and dest_dir:
+    # Copy the existing VM image to dest_dir.
+    shutil.copyfile(src_path, dest_path)
+
+  if not exists:
+    # No existing VM image that we can reuse. Create a new VM image.
+    logging.info('Creating %s', dest_path)
+    cmd = [os.path.join(constants.CROSUTILS_DIR, 'image_to_vm.sh'),
+           '--test_image']
+
+    if image:
+      cmd.append('--from=%s' % path_util.ToChrootPath(image_dir))
+
+    if updatable:
+      cmd.extend(['--disk_layout', '2gb-rootfs-updatable'])
+
+    if board:
+      cmd.extend(['--board', board])
+
+    # image_to_vm.sh only runs in chroot, but dest_dir may not be
+    # reachable from chroot. In that case, we copy it to a temporary
+    # directory in chroot, and then move it to dest_dir.
+    tempdir = None
+    if dest_dir:
+      # Create a temporary directory in chroot to store the VM
+      # image. This is to avoid the case where dest_dir is not
+      # reachable within chroot.
+      tempdir = cros_build_lib.RunCommand(
+          ['mktemp', '-d'],
+          capture_output=True,
+          enter_chroot=True).output.strip()
+      cmd.append('--to=%s' % tempdir)
+
+    msg = 'Failed to create the VM image'
+    try:
+      cros_build_lib.RunCommand(cmd, enter_chroot=True,
+                                cwd=constants.SOURCE_ROOT)
+    except cros_build_lib.RunCommandError as e:
+      logging.error('%s: %s', msg, e)
+      if tempdir:
+        osutils.RmDir(
+            path_util.FromChrootPath(tempdir), ignore_missing=True)
+      raise VMCreationError(msg)
+
+    if dest_dir:
+      # Move VM from tempdir to dest_dir.
+      shutil.move(
+          path_util.FromChrootPath(
+              os.path.join(tempdir, constants.VM_IMAGE_BIN)), dest_path)
+      osutils.RmDir(path_util.FromChrootPath(tempdir), ignore_missing=True)
+
+  if not os.path.exists(dest_path):
+    raise VMCreationError(msg)
+
+  return dest_path
+
+
+class VMStartupError(VMError):
+  """Raised when failed to start a VM instance."""
+
+
+class VMStopError(VMError):
+  """Raised when failed to stop a VM instance."""
+
+
+class VMInstance(object):
+  """This is a wrapper of a VM instance."""
+
+  MAX_LAUNCH_ATTEMPTS = 5
+  TIME_BETWEEN_LAUNCH_ATTEMPTS = 30
+
+  # VM needs a longer timeout.
+  SSH_CONNECT_TIMEOUT = 120
+
+  def __init__(self, image_path, port=None, tempdir=None,
+               debug_level=logging.DEBUG):
+    """Initializes VMInstance with a VM image path.
+
+    Args:
+      image_path: Path to the VM image.
+      port: SSH port of the VM.
+      tempdir: Temporary working directory.
+      debug_level: Debug level for logging.
+    """
+    self.image_path = image_path
+    self.tempdir = tempdir
+    self._tempdir_obj = None
+    if not self.tempdir:
+      self._tempdir_obj = osutils.TempDir(prefix='vm_wrapper', sudo_rm=True)
+      self.tempdir = self._tempdir_obj.tempdir
+    self.kvm_pid_path = os.path.join(self.tempdir, 'kvm.pid')
+    self.port = (remote_access.GetUnusedPort() if port is None
+                 else remote_access.NormalizePort(port))
+    self.debug_level = debug_level
+    self.ssh_settings = remote_access.CompileSSHConnectSettings(
+        ConnectTimeout=self.SSH_CONNECT_TIMEOUT)
+    self.agent = remote_access.RemoteAccess(
+        remote_access.LOCALHOST, self.tempdir, self.port,
+        debug_level=self.debug_level, interactive=False)
+    self.device_addr = 'ssh://%s:%d' % (remote_access.LOCALHOST, self.port)
+
+  def _Start(self):
+    """Run the command to start VM."""
+    cmd = [os.path.join(constants.CROSUTILS_DIR, 'bin', 'cros_start_vm'),
+           '--ssh_port', str(self.port),
+           '--image_path', self.image_path,
+           '--no_graphics',
+           '--kvm_pid', self.kvm_pid_path]
+    try:
+      self._RunCommand(cmd, capture_output=True)
+    except cros_build_lib.RunCommandError as e:
+      msg = 'VM failed to start'
+      logging.warning('%s: %s', msg, e)
+      raise VMStartupError(msg)
+
+  def Connect(self):
+    """Returns True if we can connect to VM via SSH."""
+    try:
+      self.agent.RemoteSh(['true'], connect_settings=self.ssh_settings)
+    except Exception:
+      return False
+
+    return True
+
+  def Stop(self, ignore_error=False):
+    """Stops a running VM.
+
+    Args:
+      ignore_error: If set True, do not raise an exception on error.
+    """
+    cmd = [os.path.join(constants.CROSUTILS_DIR, 'bin', 'cros_stop_vm'),
+           '--kvm_pid', self.kvm_pid_path]
+    result = self._RunCommand(cmd, capture_output=True, error_code_ok=True)
+    if result.returncode:
+      msg = 'Failed to stop VM'
+      if ignore_error:
+        logging.warning('%s: %s', msg, result.error)
+      else:
+        logging.error('%s: %s', msg, result.error)
+        raise VMStopError(msg)
+
+  def Start(self):
+    """Start VM and wait until we can ssh into it.
+
+    This command is more robust than just naively starting the VM as it will
+    try to start the VM multiple times if the VM fails to start up. This is
+    inspired by retry_until_ssh in crosutils/lib/cros_vm_lib.sh.
+    """
+    for _ in range(self.MAX_LAUNCH_ATTEMPTS):
+      try:
+        self._Start()
+      except VMStartupError:
+        logging.warning('VM failed to start.')
+        continue
+
+      if self.Connect():
+        # VM is started up successfully if we can connect to it.
+        break
+
+      logging.warning('Cannot connect to VM...')
+      self.Stop(ignore_error=True)
+      time.sleep(self.TIME_BETWEEN_LAUNCH_ATTEMPTS)
+    else:
+      raise VMStartupError('Max attempts (%d) to start VM exceeded.'
+                           % self.MAX_LAUNCH_ATTEMPTS)
+
+    logging.info('VM started at port %d', self.port)
+
+  def _RunCommand(self, *args, **kwargs):
+    """Runs a command on the host machine."""
+    kwargs.setdefault('debug_level', self.debug_level)
+    return cros_build_lib.RunCommand(*args, **kwargs)
diff --git a/lib/workon_helper.py b/lib/workon_helper.py
new file mode 100644
index 0000000..fd2c082
--- /dev/null
+++ b/lib/workon_helper.py
@@ -0,0 +1,707 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Routines and a delegate for dealing with locally worked on packages."""
+
+from __future__ import print_function
+
+import collections
+import glob
+import os
+import re
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import portage_util
+from chromite.lib import sysroot_lib
+
+
+# A package is a canonical CP atom.
+# A package may have 0 or more repositories, given as strings.
+# Each repository may be mapped into our workspace at some path.
+PackageInfo = collections.namedtuple('PackageInfo',
+                                     ('package', 'repos', 'src_paths'))
+
+
+def _IsWorkonEbuild(include_chrome, ebuild_path, ebuild_contents=None):
+  """Returns True iff the ebuild at |ebuild_path| is a workon ebuild.
+
+  This means roughly that the ebuild is compatible with our cros_workon based
+  system.  For most packages, this means that it inherits the cros-workon
+  overlay.
+
+  Args:
+    include_chrome: True iff we should include Chrome and chromium-source
+        packages.
+    ebuild_path: path to the ebuild in question.
+    ebuild_contents: None, or the contents of the ebuild at |ebuild_path|.
+        If None, _IsWorkonEbuild will read the contents of the ebuild when
+        necessary.
+
+  Returns:
+    True iff the ebuild can be used with cros_workon.
+  """
+  # TODO(rcui): remove special casing of chromeos-chrome here when we make it
+  # inherit from cros-workon / chromium-source class (chromium-os:19259).
+  if (include_chrome and
+      portage_util.EbuildToCP(ebuild_path) == constants.CHROME_CP):
+    return True
+
+  workon_eclasses = 'cros-workon'
+  if include_chrome:
+    workon_eclasses += '|chromium-source'
+
+  ebuild_contents = ebuild_contents or osutils.ReadFile(ebuild_path)
+  if re.search('^inherit .*(%s)' % workon_eclasses,
+               ebuild_contents, re.M):
+    return True
+
+  return False
+
+
+def _GetLinesFromFile(path, line_prefix, line_suffix):
+  """Get a unique set of lines from a file, stripping off a prefix and suffix.
+
+  Rejects lines that do not start with |line_prefix| or end with |line_suffix|.
+  Returns an empty set if the file at |path| does not exist.
+  Discards duplicate lines.
+
+  Args:
+    path: path to file.
+    line_prefix: prefix of line to look for and strip if found.
+    line_suffix: suffix of line to look for and strip if found.
+
+  Returns:
+    A list of filtered lines from the file at |path|.
+  """
+  if not os.path.exists(path):
+    return set()
+
+  # Note that there is an opportunity to race with the file system here.
+  lines = set()
+  for line in osutils.ReadFile(path).splitlines():
+    if not line.startswith(line_prefix) or not line.endswith(line_suffix):
+      logging.warning('Filtering out malformed line: %s', line)
+      continue
+    lines.add(line[len(line_prefix):-len(line_suffix)])
+
+  return lines
+
+
+def _WriteLinesToFile(path, lines, line_prefix, line_suffix):
+  """Write a set of lines to a file, adding prefixes, suffixes and newlines.
+
+  Args:
+    path: path to file.
+    lines: iterable of lines to write.
+    line_prefix: string to prefix each line with.
+    line_suffix: string to append to each line before a newline.
+  """
+  contents = ''.join(
+      ['%s%s%s\n' % (line_prefix, line, line_suffix) for line in lines])
+  osutils.WriteFile(path, contents, makedirs=True)
+
+
+def GetWorkonPath(source_root=constants.CHROOT_SOURCE_ROOT, sub_path=None):
+  """Get the path to files related to packages we're working locally on.
+
+  Args:
+    source_root: path to source root inside chroot.
+    sub_path: optional path to file relative to the workon root directory.
+
+  Returns:
+    path to the workon root directory or file within the root directory.
+  """
+  ret = os.path.join(source_root, '.config/cros_workon')
+  if sub_path:
+    ret = os.path.join(ret, sub_path)
+
+  return ret
+
+
+class WorkonError(Exception):
+  """Raised when invariants of the WorkonHelper are violated."""
+
+
+def _FilterWorkonOnlyEbuilds(ebuilds):
+  """Filter a list of ebuild paths to only those with no stable version.
+
+  Args:
+    ebuilds: list of string paths to ebuild files
+        (e.g. ['/prefix/sys-app/app/app-9999.ebuild'])
+
+  Returns:
+    list of ebuild paths meeting this criterion.
+  """
+  result = []
+  for ebuild_path in ebuilds:
+    ebuild_pattern = os.path.join(os.path.dirname(ebuild_path), '*.ebuild')
+    stable_ebuilds = [path for path in glob.glob(ebuild_pattern)
+                      if not path.endswith('-9999.ebuild')]
+    if not stable_ebuilds:
+      result.append(ebuild_path)
+
+  return result
+
+
+def ListAllWorkedOnAtoms(src_root=constants.CHROOT_SOURCE_ROOT):
+  """Get a list of all atoms we're currently working on.
+
+  Args:
+    src_root: path to source root inside chroot.
+
+  Returns:
+    Dictionary of atoms marked as worked on (e.g. ['chromeos-base/shill']) for
+    each system.
+  """
+  workon_dir = GetWorkonPath(source_root=src_root)
+  if not os.path.isdir(workon_dir):
+    return dict()
+
+  system_to_atoms = dict()
+  for file_name in os.listdir(workon_dir):
+    if file_name.endswith('.mask'):
+      continue
+    file_contents = osutils.ReadFile(os.path.join(workon_dir, file_name))
+
+    atoms = []
+    for line in file_contents.splitlines():
+      match = re.match('=(.*)-9999', line)
+      if match:
+        atoms.append(match.group(1))
+    if atoms:
+      system_to_atoms[os.path.basename(file_name)] = atoms
+
+  return system_to_atoms
+
+
+class WorkonHelper(object):
+  """Delegate that knows how to mark packages as being worked on locally.
+
+  This class assumes that we're executing in the build root.
+  """
+
+  def __init__(self, sysroot, friendly_name=None, verbose=False,
+               src_root=constants.CHROOT_SOURCE_ROOT):
+    """Construct an instance.
+
+    Args:
+      sysroot: path to sysroot to work on packages within.
+      friendly_name: friendly name of the system
+          (e.g. 'host', <board name>, or a brick friendly name).
+          Defaults to 'host' if sysroot is '/' or the last component of the
+          sysroot path.
+      verbose: boolean True iff we should print a lot more command output.
+          This is intended for debugging, and you should never cause a script
+          to depend on behavior enabled by this flag.
+      src_root: path to source root inside chroot.
+    """
+    self._sysroot = sysroot
+    if friendly_name:
+      self._system = friendly_name
+    else:
+      self._system = ('host' if sysroot == '/'
+                      else os.path.basename(sysroot.rstrip('/')))
+    self._verbose = verbose
+    self._src_root = src_root
+    self._cached_overlays = None
+    self._cached_arch = None
+    if not os.path.exists(self._sysroot):
+      raise WorkonError('Sysroot %s is not setup.' % self._sysroot)
+
+    profile = os.path.join(self._sysroot, 'etc', 'portage')
+    self._unmasked_symlink = os.path.join(
+        profile, 'package.unmask', 'cros-workon')
+    self._keywords_symlink = os.path.join(
+        profile, 'package.keywords', 'cros-workon')
+    self._masked_symlink = os.path.join(
+        profile, 'package.mask', 'cros-workon')
+
+    # Clobber and re-create the WORKON_FILE symlinks every time. This is a
+    # trivial operation and eliminates all kinds of corner cases as well as any
+    # possible future renames of WORKON_FILE.
+    # In particular, we build the chroot as a board (amd64-host), bundle it and
+    # unpack it on /. After unpacking, the symlinks will point to
+    # .config/cros_workon/amd64-host instead of .config/cros_workon/host.
+    # Regenerating the symlinks here corrects it. crbug.com/23096.
+    self._RefreshSymlinks()
+
+  @property
+  def workon_file_path(self):
+    """Returns path to the file holding our currently worked on atoms."""
+    return GetWorkonPath(source_root=self._src_root, sub_path=self._system)
+
+  @property
+  def masked_file_path(self):
+    """Returns path to file masking non-9999 ebuilds for worked on atoms."""
+    return self.workon_file_path + '.mask'
+
+  @property
+  def _arch(self):
+    if self._cached_arch is None:
+      self._cached_arch = sysroot_lib.Sysroot(
+          self._sysroot).GetStandardField('ARCH')
+
+    return self._cached_arch
+
+  @property
+  def _overlays(self):
+    """Returns overlays installed for the selected system."""
+    if self._cached_overlays is None:
+      sysroot = sysroot_lib.Sysroot(self._sysroot)
+      portdir_overlay = sysroot.GetStandardField('PORTDIR_OVERLAY')
+      if portdir_overlay:
+        self._cached_overlays = portdir_overlay.strip().splitlines()
+      else:
+        # This command is exceptionally slow, and we don't expect the list of
+        # overlays to change during the lifetime of WorkonHelper.
+        self._cached_overlays = portage_util.FindSysrootOverlays(self._sysroot)
+
+    return self._cached_overlays
+
+  def _SetWorkedOnAtoms(self, atoms):
+    """Sets the unmasked atoms.
+
+    This will generate both the unmasked atom list and the masked atoms list as
+    the two files mention the same atom list.
+
+    Args:
+      atoms: Atoms to unmask.
+    """
+    _WriteLinesToFile(self.workon_file_path, atoms, '=', '-9999')
+    _WriteLinesToFile(self.masked_file_path, atoms, '<', '-9999')
+    self._RefreshSymlinks()
+
+  def _RefreshSymlinks(self):
+    """Recreates the symlinks.
+
+    This will create the three symlinks needed:
+    * package.mask/cros-workon: list of packages to mask.
+    * package.unmask/cros-workon: list of packages to unmask.
+    * package.keywords/cros-workon: list of hidden packages to accept.
+    """
+    for target, symlink in ((self.masked_file_path, self._masked_symlink),
+                            (self.workon_file_path, self._unmasked_symlink),
+                            (self.workon_file_path, self._keywords_symlink)):
+      if os.path.exists(target):
+        osutils.SafeMakedirs(os.path.dirname(symlink), sudo=True)
+        osutils.SafeSymlink(target, symlink, sudo=True)
+      else:
+        logging.debug("Symlink %s already exists. Don't recreated it."
+                      % symlink)
+
+  def _AtomsToEbuilds(self, atoms):
+    """Maps from a list of CP atoms to a list of corresponding -9999 ebuilds.
+
+    Args:
+      atoms: iterable of portage atoms (e.g. ['sys-apps/dbus']).
+
+    Returns:
+      list of ebuilds corresponding to those atoms.
+    """
+    atoms_to_ebuilds = dict([(atom, None) for atom in atoms])
+
+    for overlay in self._overlays:
+      ebuild_paths = glob.glob(
+          os.path.join(overlay, '*-*', '*', '*-9999.ebuild'))
+      for ebuild_path in ebuild_paths:
+        atom = portage_util.EbuildToCP(ebuild_path)
+        if atom in atoms_to_ebuilds:
+          atoms_to_ebuilds[atom] = ebuild_path
+
+    ebuilds = []
+    for atom, ebuild in atoms_to_ebuilds.iteritems():
+      if ebuild is None:
+        raise WorkonError('Could not find ebuild for atom %s' % atom)
+      ebuilds.append(ebuild)
+
+    return ebuilds
+
+  def _GetCanonicalAtom(self, package):
+    """Transform a package name or name fragment to the canonical atom.
+
+    If there a multiple atoms that a package name fragment could map to,
+    picks an arbitrary one and prints a warning.
+
+    Args:
+      package: string package name or fragment of a name.
+
+    Returns:
+      string canonical atom name (e.g. 'sys-apps/dbus')
+    """
+    # Attempt to not hit portage if at all possible for speed.
+    if package in self._GetWorkedOnAtoms():
+      return package
+
+    # Ask portage directly what it thinks about that package.
+    ebuild_path = self._FindEbuildForPackage(package)
+
+    # If portage didn't know about that package, try and autocomplete it.
+    if ebuild_path is None:
+      possible_ebuilds = []
+      for ebuild in self._GetWorkonEbuilds(filter_on_arch=False):
+        if package in ebuild:
+          possible_ebuilds.append(ebuild)
+
+      if not possible_ebuilds:
+        logging.warning('Could not find canonical package for "%s"', package)
+        return None
+
+      if len(possible_ebuilds) > 1:
+        logging.warning('Multiple autocompletes found:')
+        for possible_ebuild in possible_ebuilds:
+          logging.warning('  %s', possible_ebuild)
+      autocompleted_package = portage_util.EbuildToCP(possible_ebuilds[0])
+      # Sanity check to avoid infinite loop.
+      if package == autocompleted_package:
+        logging.error('Resolved %s to itself', package)
+        return None
+      logging.info('Autocompleted "%s" to: "%s"',
+                   package, autocompleted_package)
+
+      return self._GetCanonicalAtom(autocompleted_package)
+
+    if not _IsWorkonEbuild(True, ebuild_path):
+      logging.warning(
+          '"%s" is a -9999 ebuild, but does not inherit from cros-workon?',
+          ebuild_path)
+      return None
+
+    return portage_util.EbuildToCP(ebuild_path)
+
+  def _GetCanonicalAtoms(self, packages):
+    """Transforms a list of package name fragments into a list of CP atoms.
+
+    Args:
+      packages: list of package name fragments.
+
+    Returns:
+      list of canonical portage atoms corresponding to the given fragments.
+    """
+    if not packages:
+      raise WorkonError('No packages specified')
+    if len(packages) == 1 and packages[0] == '.':
+      raise WorkonError('Working on the current package is no longer '
+                        'supported.')
+
+    atoms = []
+    for package_fragment in packages:
+      atom = self._GetCanonicalAtom(package_fragment)
+      if atom is None:
+        raise WorkonError('Error parsing package list')
+      atoms.append(atom)
+
+    return atoms
+
+  def _GetWorkedOnAtoms(self):
+    """Returns a list of CP atoms that we're currently working on."""
+    return _GetLinesFromFile(self.workon_file_path, '=', '-9999')
+
+  def _FindEbuildForPackage(self, package):
+    """Find an ebuild for a given atom (accepting even masked ebuilds).
+
+    Args:
+      package: package string.
+
+    Returns:
+      path to ebuild for given package.
+    """
+    return portage_util.FindEbuildForPackage(
+        package, self._sysroot, include_masked=True,
+        extra_env={'ACCEPT_KEYWORDS': '~%s' % self._arch})
+
+  def _GetWorkonEbuilds(self, filter_workon=False, filter_on_arch=True,
+                        include_chrome=True):
+    """Get a list of all cros-workon ebuilds in the current system.
+
+    Args:
+      filter_workon: True iff we should filter the list of ebuilds to those
+          packages which define only a workon ebuild (i.e. no stable version).
+      filter_on_arch: True iff we should only return ebuilds which are marked
+          as unstable for the architecture of the system we're interested in.
+      include_chrome: True iff we should also include chromeos-chrome and
+          related ebuilds.  These ebuilds can be worked on, but don't work
+          like normal cros-workon ebuilds.
+
+    Returns:
+      list of paths to ebuilds meeting the above criteria.
+    """
+    result = []
+    if filter_on_arch:
+      keyword_pat = re.compile(r'^KEYWORDS=".*~(\*|%s).*"$' % self._arch, re.M)
+
+    for overlay in self._overlays:
+      ebuild_paths = glob.glob(
+          os.path.join(overlay, '*-*', '*', '*-9999.ebuild'))
+      for ebuild_path in ebuild_paths:
+        ebuild_contents = osutils.ReadFile(ebuild_path)
+        if not _IsWorkonEbuild(include_chrome, ebuild_path,
+                               ebuild_contents=ebuild_contents):
+          continue
+        if filter_on_arch and not keyword_pat.search(ebuild_contents):
+          continue
+        result.append(ebuild_path)
+
+    if filter_workon:
+      result = _FilterWorkonOnlyEbuilds(result)
+
+    return result
+
+  def _GetLiveAtoms(self, filter_workon=False):
+    """Get a list of atoms currently marked as being locally compiled.
+
+    Args:
+      filter_workon: True iff the list should be filtered to only those
+          atoms without a stable version (i.e. the -9999 ebuild is the
+          only ebuild).
+
+    Returns:
+      list of canonical portage atoms.
+    """
+    atoms = self._GetWorkedOnAtoms()
+
+    if filter_workon:
+      ebuilds = _FilterWorkonOnlyEbuilds(self._AtomsToEbuilds(atoms))
+      return [portage_util.EbuildToCP(ebuild) for ebuild in ebuilds]
+
+    return atoms
+
+  def _AddProjectsToPartialManifests(self, atoms):
+    """Add projects corresponding to a list of atoms to the local manifest.
+
+    If we mark projects as workon that we don't have in our local checkout,
+    it is convenient to have them added to the manifest.  Note that users
+    will need to `repo sync` to pull down repositories added in this way.
+
+    Args:
+      atoms: iterable of atoms to ensure are in the manifest.
+    """
+    if git.ManifestCheckout.IsFullManifest(self._src_root):
+      # If we're a full manifest, there is nothing to do.
+      return
+
+    should_repo_sync = False
+    for ebuild_path in self._AtomsToEbuilds(atoms):
+      infos = portage_util.GetRepositoryForEbuild(ebuild_path, self._sysroot)
+      for info in infos:
+        if not info.project:
+          continue
+        cmd = ['loman', 'add', '--workon', info.project]
+        cros_build_lib.RunCommand(cmd, print_cmd=False)
+        should_repo_sync = True
+
+    if should_repo_sync:
+      print('Please run "repo sync" now.')
+
+  def ListAtoms(self, use_all=False, use_workon_only=False):
+    """Returns a list of interesting atoms.
+
+    By default, return a list of the atoms marked as being locally worked on
+    for the system in question.
+
+    Args:
+      use_all: If true, return a list of all atoms we could possibly work on
+          for the system in question.
+      use_workon_only: If true, return a list of all atoms we could possibly
+          work on that have no stable ebuild.
+
+    Returns:
+      a list of atoms (e.g. ['chromeos-base/shill', 'sys-apps/dbus']).
+    """
+    if use_workon_only or use_all:
+      ebuilds = self._GetWorkonEbuilds(filter_workon=use_workon_only)
+      packages = [portage_util.EbuildToCP(ebuild) for ebuild in ebuilds]
+    else:
+      packages = self._GetLiveAtoms()
+
+    return sorted(packages)
+
+  def StartWorkingOnPackages(self, packages, use_all=False,
+                             use_workon_only=False):
+    """Mark a list of packages as being worked on locally.
+
+    Args:
+      packages: list of package name fragments.  While each fragment could be
+          a complete portage atom, this helper will attempt to infer intent by
+          looking for fragments in a list of all possible atoms for the system
+          in question.
+      use_all: True iff we should ignore the package list, and instead consider
+          all possible atoms that we could mark as worked on locally.
+      use_workon_only: True iff we should ignore the package list, and instead
+          consider all possible atoms for the system in question that define
+          only the -9999 ebuild.
+    """
+    if use_all or use_workon_only:
+      ebuilds = self._GetWorkonEbuilds(filter_workon=use_workon_only)
+      atoms = [portage_util.EbuildToCP(ebuild) for ebuild in ebuilds]
+    else:
+      atoms = self._GetCanonicalAtoms(packages)
+    atoms = set(atoms)
+
+    # Read out what atoms we're already working on.
+    existing_atoms = self._GetWorkedOnAtoms()
+
+    # Warn the user if they're requested to work on an atom that's already
+    # marked as being worked on.
+    for atom in atoms & existing_atoms:
+      logging.warning('Already working on %s', atom)
+
+    # If we have no new atoms to work on, we can quit now.
+    new_atoms = atoms - existing_atoms
+    if not new_atoms:
+      return
+
+    # Write out all these atoms to the appropriate files.
+    current_atoms = new_atoms | existing_atoms
+    self._SetWorkedOnAtoms(current_atoms)
+
+    self._AddProjectsToPartialManifests(new_atoms)
+
+    # Legacy scripts used single quotes in their output, and we carry on this
+    # honorable tradition.
+    logging.info("Started working on '%s' for '%s'",
+                 ' '.join(new_atoms), self._system)
+
+  def StopWorkingOnPackages(self, packages, use_all=False,
+                            use_workon_only=False):
+    """Stop working on a list of packages currently marked as locally worked on.
+
+    Args:
+      packages: list of package name fragments.  These will be mapped to
+          canonical portage atoms via the same process as
+          StartWorkingOnPackages().
+      use_all: True iff instead of the provided package list, we should just
+          stop working on all currently worked on atoms for the system in
+          question.
+      use_workon_only: True iff instead of the provided package list, we should
+          stop working on all currently worked on atoms that define only a
+          -9999 ebuild.
+    """
+    if use_all or use_workon_only:
+      atoms = self._GetLiveAtoms(filter_workon=use_workon_only)
+    else:
+      atoms = self._GetCanonicalAtoms(packages)
+
+    current_atoms = self._GetWorkedOnAtoms()
+    stopped_atoms = []
+    for atom in atoms:
+      if not atom in current_atoms:
+        logging.warning('Not working on %s', atom)
+        continue
+
+      current_atoms.discard(atom)
+      stopped_atoms.append(atom)
+
+    self._SetWorkedOnAtoms(current_atoms)
+
+    if stopped_atoms:
+      # Legacy scripts used single quotes in their output, and we carry on this
+      # honorable tradition.
+      logging.info("Stopped working on '%s' for '%s'",
+                   ' '.join(stopped_atoms), self._system)
+
+  def GetPackageInfo(self, packages, use_all=False, use_workon_only=False):
+    """Get information about packages.
+
+    Args:
+      packages: list of package name fragments.  These will be mapped to
+          canonical portage atoms via the same process as
+          StartWorkingOnPackages().
+      use_all: True iff instead of the provided package list, we should return
+          information about all possible workon atoms for the system in
+          question.
+      use_workon_only: True iff instead of the provided package list, we should
+          return information about all possible atoms that define only a
+          -9999 ebuild.
+
+    Returns:
+      Returns a list of PackageInfo tuples.
+    """
+    if use_all or use_workon_only:
+      # You can't use info to find the source code from Chrome, since that
+      # workflow is different.
+      ebuilds = self._GetWorkonEbuilds(filter_workon=use_workon_only,
+                                       include_chrome=False)
+    else:
+      atoms = self._GetCanonicalAtoms(packages)
+      ebuilds = [self._FindEbuildForPackage(atom) for atom in atoms]
+
+    ebuild_to_repos = {}
+    for ebuild in ebuilds:
+      workon_vars = portage_util.EBuild.GetCrosWorkonVars(
+          ebuild, portage_util.EbuildToCP(ebuild))
+      projects = workon_vars.project if workon_vars else []
+      ebuild_to_repos[ebuild] = projects
+
+    repository_to_source_path = {}
+    repo_list_result = cros_build_lib.RunCommand(
+        'repo list', shell=True, enter_chroot=True, capture_output=True,
+        print_cmd=False)
+
+    for line in repo_list_result.output.splitlines():
+      pieces = line.split(' : ')
+      if len(pieces) != 2:
+        logging.debug('Ignoring malformed repo list output line: "%s"', line)
+        continue
+
+      source_path, repository = pieces
+      repository_to_source_path[repository] = source_path
+
+    result = []
+    for ebuild in ebuilds:
+      package = portage_util.EbuildToCP(ebuild)
+      repos = ebuild_to_repos.get(ebuild, [])
+      src_paths = [repository_to_source_path.get(repo) for repo in repos]
+      src_paths = [path for path in src_paths if path]
+      result.append(PackageInfo(package, repos, src_paths))
+
+    result.sort()
+    return result
+
+  def RunCommandInAtomSourceDirectory(self, atom, command):
+    """Run a command in the source directory of an atom.
+
+    Args:
+      atom: string atom to run the command in (e.g. 'chromeos-base/shill').
+      command: string shell command to run in the source directory of |atom|.
+    """
+    logging.info('Running "%s" on %s', command, atom)
+    ebuild_path = self._FindEbuildForPackage(atom)
+    if ebuild_path is None:
+      raise WorkonError('Error looking for atom %s' % atom)
+
+    for info in portage_util.GetRepositoryForEbuild(ebuild_path, self._sysroot):
+      cros_build_lib.RunCommand(command, shell=True, cwd=info.srcdir,
+                                print_cmd=False)
+
+  def RunCommandInPackages(self, packages, command, use_all=False,
+                           use_workon_only=False):
+    """Run a command in the source directory of a list of packages.
+
+    Args:
+      packages: list of package name fragments.
+      command: string shell command to run in the source directory of each
+      use_all: True iff we should ignore the package list, and instead consider
+          all possible workon-able atoms.
+      use_workon_only: True iff we should ignore the package list, and instead
+          consider all possible atoms for the system in question that define
+          only the -9999 ebuild.
+    """
+    if use_all or use_workon_only:
+      atoms = self._GetLiveAtoms(filter_workon=use_workon_only)
+    else:
+      atoms = self._GetCanonicalAtoms(packages)
+    for atom in atoms:
+      self.RunCommandInAtomSourceDirectory(atom, command)
+
+  def InstalledWorkonAtoms(self):
+    """Returns the set of installed cros_workon packages."""
+    installed_cp = set()
+    for pkg in portage_util.PortageDB(self._sysroot).InstalledPackages():
+      installed_cp.add('%s/%s' % (pkg.category, pkg.package))
+
+    return set(a for a in self.ListAtoms(use_all=True) if a in installed_cp)
diff --git a/lib/workon_helper_unittest b/lib/workon_helper_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/workon_helper_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/workon_helper_unittest.py b/lib/workon_helper_unittest.py
new file mode 100644
index 0000000..8b3271e
--- /dev/null
+++ b/lib/workon_helper_unittest.py
@@ -0,0 +1,323 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for workon_helper."""
+
+from __future__ import print_function
+
+import collections
+import os
+
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+from chromite.lib import portage_util
+from chromite.lib import sysroot_lib
+from chromite.lib import osutils
+from chromite.lib import workon_helper
+
+
+BOARD = 'this_is_a_board_name'
+
+WORKON_ONLY_ATOM = 'sys-apps/my-package'
+VERSIONED_WORKON_ATOM = 'sys-apps/versioned-package'
+NOT_WORKON_ATOM = 'sys-apps/not-workon-package'
+
+HOST_ATOM = 'host-apps/my-package'
+
+WORKED_ON_PATTERN = '=%s-9999'
+MASKED_PATTERN = '<%s-9999'
+
+OVERLAY_ROOT_DIR = 'overlays'
+BOARD_OVERLAY_DIR = 'overlay-' + BOARD
+HOST_OVERLAY_DIR = 'overlay-host'
+
+
+InstalledPackageMock = collections.namedtuple('InstalledPackage',
+                                              ('category', 'package'))
+
+
+class WorkonHelperTest(cros_test_lib.MockTempDirTestCase):
+  """Tests for chromite.lib.workon_helper."""
+
+  def _MakeFakeEbuild(self, overlay, atom, version, is_workon=True):
+    """Makes fake ebuilds with minimal real content.
+
+    Args:
+      overlay: overlay to put this ebuild in.
+      atom: 'category/package' string in the familiar portage sense.
+      version: version suffix for the ebuild (e.g. '9999').
+      is_workon: True iff this should be a workon-able package
+          (i.e. inherits cros-workon).
+    """
+    category, package = atom.split('/', 1)
+    ebuild_path = os.path.join(self._mock_srcdir, OVERLAY_ROOT_DIR, overlay,
+                               category, package,
+                               '%s-%s.ebuild' % (package, version))
+    content = 'KEYWORDS="~*"\n'
+    if is_workon:
+      content += 'inherit cros-workon\n'
+    osutils.WriteFile(ebuild_path, content, makedirs=True)
+    if atom not in self._valid_atoms:
+      self._valid_atoms[atom] = ebuild_path
+
+  def _MockFindOverlays(self, sysroot):
+    """Mocked out version of portage_util.FindOverlays().
+
+    Args:
+      sysroot: path to sysroot.
+
+    Returns:
+      List of paths to overlays.
+    """
+    if sysroot == '/':
+      return [os.path.join(self._overlay_root, HOST_OVERLAY_DIR)]
+    return [os.path.join(self._overlay_root, BOARD_OVERLAY_DIR)]
+
+  def _MockFindEbuildForPackage(self, package, _board=None, **_kwargs):
+    """Mocked out version of portage_util.FindEbuildForPackage().
+
+    Args:
+      package: complete atom string.
+      _board: ignored, see documentation in portage_util.  We intentionally
+          create atoms with different names for hosts/boards so that we can
+          ignore this distinction here.
+      _kwargs: ignored, see documentation in portage_util.
+
+    Returns:
+      An ebuild if we have previously created this atom.
+    """
+    return self._valid_atoms.get(package, None)
+
+  def setUp(self):
+    """Set up a test environment."""
+    self._valid_atoms = dict()
+    self._mock_srcdir = os.path.join(self.tempdir, 'src')
+    workon_dir = workon_helper.GetWorkonPath(source_root=self._mock_srcdir)
+    self._sysroot = os.path.join(self.tempdir, 'sysroot')
+    osutils.SafeMakedirs(self._sysroot)
+    osutils.SafeMakedirs(self._mock_srcdir)
+    for system in ('host', BOARD):
+      osutils.Touch(os.path.join(workon_dir, system), makedirs=True)
+      osutils.Touch(os.path.join(workon_dir, system + '.mask'), makedirs=True)
+    self._overlay_root = os.path.join(self._mock_srcdir, OVERLAY_ROOT_DIR)
+    # Make a bunch of packages to work on.
+    self._MakeFakeEbuild(BOARD_OVERLAY_DIR, WORKON_ONLY_ATOM, '9999')
+    self._MakeFakeEbuild(BOARD_OVERLAY_DIR, VERSIONED_WORKON_ATOM, '9999')
+    self._MakeFakeEbuild(BOARD_OVERLAY_DIR, VERSIONED_WORKON_ATOM, '0.0.1-r1')
+    self._MakeFakeEbuild(BOARD_OVERLAY_DIR, NOT_WORKON_ATOM, '0.0.1-r1',
+                         is_workon=False)
+    self._MakeFakeEbuild(HOST_OVERLAY_DIR, HOST_ATOM, '9999')
+    # Patch the modules interfaces to the rest of the world.
+    self.PatchObject(portage_util, 'FindEbuildForPackage',
+                     self._MockFindEbuildForPackage)
+
+    # Assume only versioned-packages is installed.
+    self.PatchObject(
+        portage_util.PortageDB, 'InstalledPackages',
+        return_value=[InstalledPackageMock('sys-apps', 'versioned-package')])
+    # This basically turns off behavior related to adding repositories to
+    # minilayouts.
+    self.PatchObject(git.ManifestCheckout, 'IsFullManifest', return_value=True)
+    self.PatchObject(
+        portage_util, 'GetRepositoryForEbuild', return_value=(
+            portage_util.RepositoryInfoTuple(srcdir=self._mock_srcdir,
+                                             project='workon-project'),
+        )
+    )
+    # We do a lot of work as root. Pretend to be root so that we never have to
+    # call sudo.
+    self.PatchObject(os, 'getuid', return_value=0)
+
+  def CreateHelper(self, host=False):
+    """Creates and returns a WorkonHelper object.
+
+    Args:
+      host: If True, create the WorkonHelper for the host.
+    """
+    if host:
+      overlay = os.path.join(self._overlay_root, HOST_OVERLAY_DIR)
+      name = 'host'
+    else:
+      overlay = os.path.join(self._overlay_root, BOARD_OVERLAY_DIR)
+      name = BOARD
+
+    # Setup the sysroots.
+    sysroot_lib.Sysroot(self._sysroot).WriteConfig(
+        'ARCH="amd64"\nPORTDIR_OVERLAY="%s"' % overlay)
+
+    # Create helpers for the host or board.
+    return workon_helper.WorkonHelper(
+        self._sysroot, name, src_root=self._mock_srcdir)
+
+  def assertWorkingOn(self, atoms, system=BOARD):
+    """Assert that the workon/mask files mention the given atoms.
+
+    Args:
+      atoms: list of atom strings (e.g. ['sys-apps/dbus', 'foo-cat/bar']).
+      system: string system to consider (either 'host' or a board name).
+    """
+    workon_path = workon_helper.GetWorkonPath(
+        source_root=self._mock_srcdir, sub_path=system)
+    self.assertEqual(sorted([WORKED_ON_PATTERN % atom for atom in atoms]),
+                     sorted(osutils.ReadFile(workon_path).splitlines()))
+    mask_path = workon_path + '.mask'
+    self.assertEqual(sorted([MASKED_PATTERN % atom for atom in atoms]),
+                     sorted(osutils.ReadFile(mask_path).splitlines()))
+
+  def testShouldDetectBoardNotSetUp(self):
+    """Check that we complain if a board has not been previously set up."""
+    with self.assertRaises(workon_helper.WorkonError):
+      workon_helper.WorkonHelper(os.path.join(self.tempdir, 'nonexistent'),
+                                 'this-board-is-not-setup.',
+                                 src_root=self._mock_srcdir)
+
+  def testShouldRegenerateSymlinks(self):
+    """Check that the symlinks are regenerated when using a new sysroot."""
+    # pylint: disable=protected-access
+    helper = self.CreateHelper()
+    workon_link = helper._unmasked_symlink
+
+    # The link exists after starting a package.
+    helper.StartWorkingOnPackages([WORKON_ONLY_ATOM])
+    self.assertTrue(os.path.exists(workon_link))
+
+    # The link exists after recreating a sysroot.
+    osutils.RmDir(self._sysroot)
+    osutils.SafeMakedirs(self._sysroot)
+    helper = self.CreateHelper()
+    self.assertTrue(os.path.exists(workon_link))
+
+    # The link exists when no packages are worked on.
+    helper.StartWorkingOnPackages([WORKON_ONLY_ATOM])
+    self.assertTrue(os.path.exists(workon_link))
+
+  def testCanStartSingleAtom(self):
+    """Check that we can mark a single atom as being worked on."""
+    helper = self.CreateHelper()
+    helper.StartWorkingOnPackages([WORKON_ONLY_ATOM])
+    self.assertWorkingOn([WORKON_ONLY_ATOM])
+
+  def testCanStartMultipleAtoms(self):
+    """Check that we can mark multiple atoms as being worked on."""
+    helper = self.CreateHelper()
+    expected_atoms = (WORKON_ONLY_ATOM, VERSIONED_WORKON_ATOM)
+    helper.StartWorkingOnPackages(expected_atoms)
+    self.assertWorkingOn(expected_atoms)
+
+  def testCanStartAtomsWithAll(self):
+    """Check that we can mark all possible workon atoms as started."""
+    helper = self.CreateHelper()
+    expected_atoms = (WORKON_ONLY_ATOM, VERSIONED_WORKON_ATOM)
+    helper.StartWorkingOnPackages([], use_all=True)
+    self.assertWorkingOn(expected_atoms)
+
+  def testCanStartAtomsWithWorkonOnly(self):
+    """Check that we can start atoms that have only a cros-workon ebuild."""
+    helper = self.CreateHelper()
+    expected_atoms = (WORKON_ONLY_ATOM,)
+    helper.StartWorkingOnPackages([], use_workon_only=True)
+    self.assertWorkingOn(expected_atoms)
+
+  def testCannotStartAtomTwice(self):
+    """Check that starting an atom twice has no effect."""
+    helper = self.CreateHelper()
+    helper.StartWorkingOnPackages([WORKON_ONLY_ATOM])
+    helper.StartWorkingOnPackages([WORKON_ONLY_ATOM])
+    self.assertWorkingOn([WORKON_ONLY_ATOM])
+
+  def testCanStopSingleAtom(self):
+    """Check that we can stop a previously started atom."""
+    helper = self.CreateHelper()
+    helper.StartWorkingOnPackages([WORKON_ONLY_ATOM])
+    self.assertWorkingOn([WORKON_ONLY_ATOM])
+    helper.StopWorkingOnPackages([WORKON_ONLY_ATOM])
+    self.assertWorkingOn([])
+
+  def testCanStopMultipleAtoms(self):
+    """Check that we can stop multiple previously worked on atoms."""
+    helper = self.CreateHelper()
+    expected_atoms = (WORKON_ONLY_ATOM, VERSIONED_WORKON_ATOM)
+    helper.StartWorkingOnPackages(expected_atoms)
+    self.assertWorkingOn(expected_atoms)
+    helper.StopWorkingOnPackages([WORKON_ONLY_ATOM])
+    self.assertWorkingOn([VERSIONED_WORKON_ATOM])
+    helper.StopWorkingOnPackages([VERSIONED_WORKON_ATOM])
+    self.assertWorkingOn([])
+    # Now do it all at once.
+    helper.StartWorkingOnPackages(expected_atoms)
+    self.assertWorkingOn(expected_atoms)
+    helper.StopWorkingOnPackages(expected_atoms)
+    self.assertWorkingOn([])
+
+  def testCanStopAtomsWithAll(self):
+    """Check that we can stop all worked on atoms."""
+    helper = self.CreateHelper()
+    expected_atoms = (WORKON_ONLY_ATOM, VERSIONED_WORKON_ATOM)
+    helper.StartWorkingOnPackages(expected_atoms)
+    helper.StopWorkingOnPackages([], use_all=True)
+    self.assertWorkingOn([])
+
+  def testCanStopAtomsWithWorkonOnly(self):
+    """Check that we can stop all workon only atoms."""
+    helper = self.CreateHelper()
+    expected_atoms = (WORKON_ONLY_ATOM, VERSIONED_WORKON_ATOM)
+    helper.StartWorkingOnPackages(expected_atoms)
+    helper.StopWorkingOnPackages([], use_workon_only=True)
+    self.assertWorkingOn([VERSIONED_WORKON_ATOM])
+
+  def testShouldDetectUnknownAtom(self):
+    """Check that we reject requests to work on unknown atoms."""
+    with self.assertRaises(workon_helper.WorkonError):
+      helper = self.CreateHelper()
+      helper.StopWorkingOnPackages(['sys-apps/not-a-thing'])
+
+  def testCanListAllWorkedOnAtoms(self):
+    """Check that we can list all worked on atoms across boards."""
+    helper = self.CreateHelper()
+    self.assertEqual(dict(),
+                     workon_helper.ListAllWorkedOnAtoms(
+                         src_root=self._mock_srcdir))
+    helper.StartWorkingOnPackages([WORKON_ONLY_ATOM])
+    self.assertEqual({BOARD: [WORKON_ONLY_ATOM]},
+                     workon_helper.ListAllWorkedOnAtoms(
+                         src_root=self._mock_srcdir))
+    host_helper = self.CreateHelper(host=True)
+    host_helper.StartWorkingOnPackages([HOST_ATOM])
+    self.assertEqual({BOARD: [WORKON_ONLY_ATOM], 'host': [HOST_ATOM]},
+                     workon_helper.ListAllWorkedOnAtoms(
+                         src_root=self._mock_srcdir))
+
+  def testCanListWorkedOnAtoms(self):
+    """Check that we can list the atoms we're currently working on."""
+    helper = self.CreateHelper()
+    self.assertEqual(helper.ListAtoms(), [])
+    helper.StartWorkingOnPackages([WORKON_ONLY_ATOM])
+    self.assertEqual(helper.ListAtoms(), [WORKON_ONLY_ATOM])
+
+  def testCanListAtomsWithAll(self):
+    """Check that we can list all possible atoms to work on."""
+    helper = self.CreateHelper()
+    self.assertEqual(sorted(helper.ListAtoms(use_all=True)),
+                     sorted([WORKON_ONLY_ATOM, VERSIONED_WORKON_ATOM]))
+
+  def testCanListAtomsWithWorkonOnly(self):
+    """Check that we can list all workon only atoms."""
+    helper = self.CreateHelper()
+    self.assertEqual(helper.ListAtoms(use_workon_only=True),
+                     [WORKON_ONLY_ATOM])
+
+  def testCanRunCommand(self):
+    """Test that we can run a command in package source directories."""
+    helper = self.CreateHelper()
+    file_name = 'foo'
+    file_path = os.path.join(self._mock_srcdir, file_name)
+    self.assertNotExists(file_path)
+    helper.RunCommandInPackages([WORKON_ONLY_ATOM], 'touch %s' % file_name)
+    self.assertExists(file_path)
+
+  def testInstalledWorkonAtoms(self):
+    """Test that we can list all the cros workon atoms that are installed."""
+    helper = self.CreateHelper()
+    self.assertEqual(set([VERSIONED_WORKON_ATOM]),
+                     helper.InstalledWorkonAtoms())
diff --git a/lib/workspace_lib.py b/lib/workspace_lib.py
new file mode 100644
index 0000000..bdcd98c
--- /dev/null
+++ b/lib/workspace_lib.py
@@ -0,0 +1,329 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for discovering the directories associated with workspaces.
+
+Workspaces have a variety of important concepts:
+
+* The bootstrap repository. BOOTSTRAP/chromite/bootstrap is expected to be in
+the user's path. Most commands run from here redirect to the active SDK.
+
+* The workspace directory. This directory (identified by presence of
+WORKSPACE_CONFIG), contains code, and is associated with exactly one SDK
+instance. It is normally discovered based on CWD.
+
+* The SDK root. This directory contains a specific SDK version, and is stored in
+BOOTSTRAP/sdk_checkouts/<version>.
+
+This library contains helper methods for finding all of the relevant directories
+here.
+"""
+
+from __future__ import print_function
+
+import json
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import osutils
+
+MAIN_CHROOT_DIR_IN_VM = '/chroots'
+
+# The presence of this file signifies the root of a workspace.
+WORKSPACE_CONFIG = 'workspace-config.json'
+WORKSPACE_LOCAL_CONFIG = '.local.json'
+WORKSPACE_CHROOT_DIR = '.chroot'
+WORKSPACE_IMAGES_DIR = 'build/images'
+WORKSPACE_LOGS_DIR = 'build/logs'
+
+# Prefixes used by locators.
+_BOARD_LOCATOR_PREFIX = 'board:'
+_WORKSPACE_LOCATOR_PREFIX = '//'
+
+
+class LocatorNotResolved(Exception):
+  """Given locator could not be resolved."""
+
+
+class ConfigFileError(Exception):
+  """Configuration file writing or reading failed."""
+
+
+def WorkspacePath(workspace_reference_dir=None):
+  """Returns the path to the current workspace.
+
+  This method works both inside and outside the chroot, though results will
+  be different.
+
+  Args:
+    workspace_reference_dir: Any directory inside the workspace. If None,
+      will use CWD (outside chroot), or bind mount location (inside chroot).
+      You should normally use the default.
+
+  Returns:
+    Path to root directory of the workspace (if valid), or None.
+  """
+  if workspace_reference_dir is None:
+    if cros_build_lib.IsInsideChroot():
+      workspace_reference_dir = constants.CHROOT_WORKSPACE_ROOT
+    else:
+      workspace_reference_dir = os.getcwd()
+
+  workspace_config = osutils.FindInPathParents(
+      WORKSPACE_CONFIG,
+      os.path.abspath(workspace_reference_dir))
+
+  return os.path.dirname(workspace_config) if workspace_config else None
+
+
+def ChrootPath(workspace_path):
+  """Returns the path to the chroot associated with the given workspace.
+
+  Each workspace has its own associated chroot. This method returns the chroot
+  path set in the workspace config if present, or else the default location,
+  which varies depending on whether or not we run in a VM.
+
+  Args:
+    workspace_path: Root directory of the workspace (WorkspacePath()).
+
+  Returns:
+    Path to where the chroot is, or where it should be created.
+  """
+  config_value = GetChrootDir(workspace_path)
+  if config_value:
+    # If the config value is a relative path, we base it in the workspace path.
+    # Otherwise, it is an absolute path and will be returned as is.
+    return os.path.join(workspace_path, config_value)
+
+  # The default for a VM.
+  if osutils.IsInsideVm():
+    return os.path.join(MAIN_CHROOT_DIR_IN_VM, os.path.basename(workspace_path))
+
+  # The default for all other cases.
+  return os.path.join(workspace_path, WORKSPACE_CHROOT_DIR)
+
+
+def SetChrootDir(workspace_path, chroot_dir):
+  """Set which chroot directory a workspace uses.
+
+  This value will overwrite the default value, if set. This is normally only
+  used if the user overwrites the default value. This method is NOT atomic.
+
+  Args:
+    workspace_path: Root directory of the workspace (WorkspacePath()).
+    chroot_dir: Directory in which this workspace's chroot should be created.
+  """
+  # Read the config, update its chroot_dir, and write it.
+  config = _ReadLocalConfig(workspace_path)
+  config['chroot_dir'] = chroot_dir
+  _WriteLocalConfig(workspace_path, config)
+
+
+def GetChrootDir(workspace_path):
+  """Get override of chroot directory for a workspace.
+
+  You should normally call ChrootPath so that the default value will be
+  found if no explicit value has been set.
+
+  Args:
+    workspace_path: Root directory of the workspace (WorkspacePath()).
+
+  Returns:
+    chroot directory string or None.
+  """
+  # Config should always return a dictionary.
+  config = _ReadLocalConfig(workspace_path)
+
+  # If chroot_dir is present, use it, else return None.
+  return config.get('chroot_dir')
+
+
+def GetActiveSdkVersion(workspace_path):
+  """Find which SDK version a workspace is associated with.
+
+  This SDK may or may not exist in the bootstrap cache. There may be no
+  SDK version associated with a workspace.
+
+  Args:
+    workspace_path: Root directory of the workspace (WorkspacePath()).
+
+  Returns:
+    version string or None.
+  """
+  # Config should always return a dictionary.
+  config = _ReadLocalConfig(workspace_path)
+
+  # If version is present, use it, else return None.
+  return config.get('version')
+
+
+def SetActiveSdkVersion(workspace_path, version):
+  """Set which SDK version a workspace is associated with.
+
+  This method is NOT atomic.
+
+  Args:
+    workspace_path: Root directory of the workspace (WorkspacePath()).
+    version: Version string of the SDK. (Eg. 1.2.3)
+  """
+  # Read the config, update its version, and write it.
+  config = _ReadLocalConfig(workspace_path)
+  config['version'] = version
+  _WriteLocalConfig(workspace_path, config)
+
+
+def _ReadLocalConfig(workspace_path):
+  """Read a local config for a workspace.
+
+  Args:
+    workspace_path: Root directory of the workspace (WorkspacePath()).
+
+  Returns:
+    Local workspace config as a Python dictionary.
+  """
+  try:
+    return ReadConfigFile(os.path.join(workspace_path, WORKSPACE_LOCAL_CONFIG))
+  except IOError:
+    # If the file doesn't exist, it's an empty dictionary.
+    return {}
+
+
+def _WriteLocalConfig(workspace_path, config):
+  """Save out a new local config for a workspace.
+
+  Args:
+    workspace_path: Root directory of the workspace (WorkspacePath()).
+    config: New local workspace config contents as a Python dictionary.
+  """
+  WriteConfigFile(os.path.join(workspace_path, WORKSPACE_LOCAL_CONFIG), config)
+
+
+def IsLocator(name):
+  """Returns True if name is a specific locator."""
+  if not name:
+    raise ValueError('Locator is empty')
+  return (name.startswith(_WORKSPACE_LOCATOR_PREFIX)
+          or name.startswith(_BOARD_LOCATOR_PREFIX))
+
+
+def LocatorToPath(locator):
+  """Returns the absolute path for this locator.
+
+  Args:
+    locator: a locator.
+
+  Returns:
+    The absolute path defined by this locator.
+
+  Raises:
+    ValueError: If |locator| is invalid.
+    LocatorNotResolved: If |locator| is valid but could not be resolved.
+  """
+  if locator.startswith(_WORKSPACE_LOCATOR_PREFIX):
+    workspace_path = WorkspacePath()
+    if workspace_path is None:
+      raise LocatorNotResolved(
+          'Workspace not found while trying to resolve %s' % locator)
+    return os.path.join(workspace_path,
+                        locator[len(_WORKSPACE_LOCATOR_PREFIX):])
+
+  if locator.startswith(_BOARD_LOCATOR_PREFIX):
+    return os.path.join(constants.SOURCE_ROOT, 'src', 'overlays',
+                        'overlay-%s' % locator[len(_BOARD_LOCATOR_PREFIX):])
+
+  raise ValueError('Invalid locator %s' % locator)
+
+
+def PathToLocator(path):
+  """Converts a path to a locator.
+
+  This does not raise an error if the path does not map to a locator. Some
+  valid (legacy) brick paths do not map to any locator: chromiumos-overlay,
+  private board overlays, etc...
+
+  Args:
+    path: absolute or relative to CWD path to a workspace object or board
+      overlay.
+
+  Returns:
+    The locator for this path if it exists, None otherwise.
+  """
+  workspace_path = WorkspacePath()
+  path = os.path.abspath(path)
+
+  if workspace_path is None:
+    return None
+
+  # If path is in the current workspace, return the relative path prefixed with
+  # the workspace prefix.
+  if os.path.commonprefix([path, workspace_path]) == workspace_path:
+    return _WORKSPACE_LOCATOR_PREFIX + os.path.relpath(path, workspace_path)
+
+  # If path is in the src directory of the checkout, this is a board overlay.
+  # Encode it as board locator.
+  src_path = os.path.join(constants.SOURCE_ROOT, 'src')
+  if os.path.commonprefix([path, src_path]) == src_path:
+    parts = os.path.split(os.path.relpath(path, src_path))
+    if parts[0] == 'overlays':
+      board_name = '-'.join(parts[1].split('-')[1:])
+      return _BOARD_LOCATOR_PREFIX + board_name
+
+  return None
+
+
+def LocatorToFriendlyName(locator):
+  """Returns a friendly name for a given locator.
+
+  Args:
+    locator: a locator.
+  """
+  if IsLocator(locator) and locator.startswith(_WORKSPACE_LOCATOR_PREFIX):
+    return locator[len(_WORKSPACE_LOCATOR_PREFIX):].replace('/', '.')
+
+  raise ValueError('Not a valid workspace locator: %s' % locator)
+
+
+def WriteConfigFile(path, config):
+  """Writes |config| to a file at |path|.
+
+  Configuration files in a workspace should all use the same format
+  whenever possible. Currently it's JSON, but centralizing config
+  read/write makes it easier to change when needed.
+
+  Args:
+    path: path to write.
+    config: configuration dictionary to write.
+
+  Raises:
+    ConfigFileError: |config| cannot be written as JSON.
+  """
+  # TODO(dpursell): Add support for comments in config files.
+  try:
+    osutils.WriteFile(
+        path,
+        json.dumps(config, sort_keys=True, indent=4, separators=(',', ': ')),
+        makedirs=True)
+  except TypeError as e:
+    raise ConfigFileError('Writing config file %s failed: %s' % (path, e))
+
+
+def ReadConfigFile(path):
+  """Reads a configuration file at |path|.
+
+  For use with WriteConfigFile().
+
+  Args:
+    path: file path.
+
+  Returns:
+    Result of parsing the JSON file.
+
+  Raises:
+    ConfigFileError: JSON parsing failed.
+  """
+  try:
+    return json.loads(osutils.ReadFile(path))
+  except ValueError as e:
+    raise ConfigFileError('%s is not in valid JSON format: %s' % (path, e))
diff --git a/lib/workspace_lib_unittest b/lib/workspace_lib_unittest
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/lib/workspace_lib_unittest
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/lib/workspace_lib_unittest.py b/lib/workspace_lib_unittest.py
new file mode 100644
index 0000000..b5378f9
--- /dev/null
+++ b/lib/workspace_lib_unittest.py
@@ -0,0 +1,229 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for the workspace_lib library."""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import workspace_lib
+
+# pylint: disable=protected-access
+
+class WorkspaceLibTest(cros_test_lib.TempDirTestCase):
+  """Unittests for workspace_lib.py"""
+
+  def setUp(self):
+    # Define assorted paths to test against.
+    self.bogus_dir = os.path.join(self.tempdir, 'bogus')
+
+    self.workspace_dir = os.path.join(self.tempdir, 'workspace')
+    self.workspace_config = os.path.join(self.workspace_dir,
+                                         workspace_lib.WORKSPACE_CONFIG)
+    self.workspace_nested = os.path.join(self.workspace_dir, 'foo', 'bar')
+    # Create workspace directories and files.
+    osutils.Touch(self.workspace_config, makedirs=True)
+    osutils.SafeMakedirs(self.workspace_nested)
+
+  @mock.patch('os.getcwd')
+  @mock.patch.object(cros_build_lib, 'IsInsideChroot', return_value=False)
+  def testWorkspacePathOutsideChroot(self, _mock_inside, mock_cwd):
+    # Set default to a dir outside the workspace.
+    mock_cwd.return_value = self.bogus_dir
+
+    # Inside the workspace, specified dir.
+    self.assertEqual(self.workspace_dir,
+                     workspace_lib.WorkspacePath(self.workspace_dir))
+    self.assertEqual(self.workspace_dir,
+                     workspace_lib.WorkspacePath(self.workspace_nested))
+
+    # Outside the workspace, specified dir.
+    self.assertEqual(None, workspace_lib.WorkspacePath(self.tempdir))
+    self.assertEqual(None, workspace_lib.WorkspacePath(self.bogus_dir))
+
+    # Inside the workspace, default dir.
+    mock_cwd.return_value = self.workspace_dir
+    self.assertEqual(self.workspace_dir, workspace_lib.WorkspacePath())
+
+    mock_cwd.return_value = self.workspace_nested
+    self.assertEqual(self.workspace_dir, workspace_lib.WorkspacePath())
+
+    # Outside the workspace, default dir.
+    mock_cwd.return_value = self.tempdir
+    self.assertEqual(None, workspace_lib.WorkspacePath())
+
+    mock_cwd.return_value = self.bogus_dir
+    self.assertEqual(None, workspace_lib.WorkspacePath())
+
+  @mock.patch.object(cros_build_lib, 'IsInsideChroot', return_value=True)
+  def testWorkspacePathInsideChroot(self, _mock_inside):
+    orig_root = constants.CHROOT_WORKSPACE_ROOT
+    try:
+      # Set default to a dir outside the workspace.
+      constants.CHROOT_WORKSPACE_ROOT = self.bogus_dir
+
+      # Inside the workspace, specified dir.
+      self.assertEqual(self.workspace_dir,
+                       workspace_lib.WorkspacePath(self.workspace_dir))
+      self.assertEqual(self.workspace_dir,
+                       workspace_lib.WorkspacePath(self.workspace_nested))
+
+      # Outside the workspace, specified dir.
+      self.assertEqual(None, workspace_lib.WorkspacePath(self.tempdir))
+      self.assertEqual(None, workspace_lib.WorkspacePath(self.bogus_dir))
+
+      # Inside the workspace, default dir.
+      constants.CHROOT_WORKSPACE_ROOT = self.workspace_dir
+      self.assertEqual(self.workspace_dir, workspace_lib.WorkspacePath())
+
+      constants.CHROOT_WORKSPACE_ROOT = self.workspace_nested
+      self.assertEqual(self.workspace_dir, workspace_lib.WorkspacePath())
+
+      # Outside the workspace, default dir.
+      constants.CHROOT_WORKSPACE_ROOT = self.tempdir
+      self.assertEqual(None, workspace_lib.WorkspacePath())
+
+      constants.CHROOT_WORKSPACE_ROOT = self.bogus_dir
+      self.assertEqual(None, workspace_lib.WorkspacePath())
+
+    finally:
+      # Restore our constant to its real value.
+      constants.CHROOT_WORKSPACE_ROOT = orig_root
+
+  def testChrootPath(self):
+    # Check the default value.
+    self.assertEqual(os.path.join(self.workspace_dir, '.chroot'),
+                     workspace_lib.ChrootPath(self.workspace_dir))
+
+    # Set a new absolute value, check that we get it back.
+    workspace_lib.SetChrootDir(self.workspace_dir, self.bogus_dir)
+    self.assertEqual(self.bogus_dir,
+                     workspace_lib.ChrootPath(self.workspace_dir))
+
+    # Set a new relative path, check that it is properly appended to the
+    # workspace path.
+    workspace_lib.SetChrootDir(self.workspace_dir, 'some/path')
+    self.assertEqual(os.path.join(self.workspace_dir, 'some/path'),
+                     workspace_lib.ChrootPath(self.workspace_dir))
+
+  @mock.patch.object(osutils, 'IsInsideVm', return_value=True)
+  def testChrootPathUnderVm(self, _mock_inside_vm):
+    """Make sure that inside the VM, chroot dir is under /chroots/..."""
+    self.assertEqual(
+        os.path.join(workspace_lib.MAIN_CHROOT_DIR_IN_VM,
+                     os.path.basename(self.workspace_dir)),
+        workspace_lib.ChrootPath(self.workspace_dir)
+    )
+
+  def testReadWriteLocalConfig(self):
+    # Non-existent config should read as an empty dictionary.
+    config = workspace_lib._ReadLocalConfig(self.workspace_dir)
+    self.assertEqual({}, config)
+
+    # Write out an empty dict, and make sure we can read it back.
+    workspace_lib._WriteLocalConfig(self.workspace_dir, {})
+    config = workspace_lib._ReadLocalConfig(self.workspace_dir)
+    self.assertEqual({}, config)
+
+    # Write out a value, and verify we can read it.
+    workspace_lib._WriteLocalConfig(self.workspace_dir, {'version': 'foo'})
+    config = workspace_lib._ReadLocalConfig(self.workspace_dir)
+    self.assertEqual({'version': 'foo'}, config)
+
+    # Overwrite value, and verify we can read it.
+    workspace_lib._WriteLocalConfig(self.workspace_dir, {'version': 'bar'})
+    config = workspace_lib._ReadLocalConfig(self.workspace_dir)
+    self.assertEqual({'version': 'bar'}, config)
+
+  def testReadWriteActiveSdkVersion(self):
+    # If no version is set, value should be None.
+    version = workspace_lib.GetActiveSdkVersion(self.workspace_dir)
+    self.assertEqual(None, version)
+
+    # Set value, and make sure we can read it.
+    workspace_lib.SetActiveSdkVersion(self.workspace_dir, 'foo')
+    version = workspace_lib.GetActiveSdkVersion(self.workspace_dir)
+    self.assertEqual('foo', version)
+
+    # Set different value, and make sure we can read it.
+    workspace_lib.SetActiveSdkVersion(self.workspace_dir, 'bar')
+    version = workspace_lib.GetActiveSdkVersion(self.workspace_dir)
+    self.assertEqual('bar', version)
+
+    # Create config with unrelated values, should be same as no config.
+    workspace_lib._WriteLocalConfig(self.workspace_dir, {'foo': 'bar'})
+    version = workspace_lib.GetActiveSdkVersion(self.workspace_dir)
+    self.assertEqual(None, version)
+
+    # Set version, and make sure it works.
+    workspace_lib.SetActiveSdkVersion(self.workspace_dir, '1.2.3')
+    version = workspace_lib.GetActiveSdkVersion(self.workspace_dir)
+    self.assertEqual('1.2.3', version)
+
+    # Ensure all of config is there afterwards.
+    config = workspace_lib._ReadLocalConfig(self.workspace_dir)
+    self.assertEqual({'version': '1.2.3', 'foo': 'bar'}, config)
+
+  @mock.patch('os.getcwd')
+  @mock.patch.object(cros_build_lib, 'IsInsideChroot', return_value=False)
+  def testPathToLocator(self, _mock_inside, mock_cwd):
+    """Tests the path to locator conversion."""
+    ws = self.workspace_dir
+    mock_cwd.return_value = ws
+
+    foo_path = workspace_lib.PathToLocator(os.path.join(ws, 'foo'))
+    baz_path = workspace_lib.PathToLocator(os.path.join(ws, 'bar', 'foo',
+                                                        'baz'))
+    daisy_path = workspace_lib.PathToLocator(os.path.join(constants.SOURCE_ROOT,
+                                                          'src', 'overlays',
+                                                          'overlay-daisy'))
+    some_path = workspace_lib.PathToLocator(os.path.join(constants.SOURCE_ROOT,
+                                                         'srcs', 'bar'))
+
+    self.assertEqual('//foo', foo_path)
+    self.assertEqual('//bar/foo/baz', baz_path)
+    self.assertEqual('board:daisy', daisy_path)
+    self.assertEqual(None, some_path)
+
+    def assertReversible(loc):
+      path = workspace_lib.LocatorToPath(loc)
+      self.assertEqual(loc, workspace_lib.PathToLocator(path))
+
+    assertReversible('//foo')
+    assertReversible('//foo/bar/baz')
+    assertReversible('board:gizmo')
+
+
+class ConfigurationTest(cros_test_lib.TempDirTestCase):
+  """Test WriteConfigFile() and ReadConfigFile()."""
+
+  def testWriteReadConfigFile(self):
+    """Tests WriteConfigFile() then ReadConfigFile()."""
+    path = os.path.join(self.tempdir, 'foo.json')
+    config = {'foo': 1, 'bar': 2}
+
+    workspace_lib.WriteConfigFile(path, config)
+    self.assertDictEqual(config, workspace_lib.ReadConfigFile(path))
+
+  def testWriteConfigFileInvalid(self):
+    """Tests writing an invalid configuration file."""
+    path = os.path.join(self.tempdir, 'foo.json')
+    config = Exception()
+
+    with self.assertRaises(workspace_lib.ConfigFileError):
+      workspace_lib.WriteConfigFile(path, config)
+
+  def testReadConfigFileInvalid(self):
+    """Tests reading an invalid configuration file."""
+    path = os.path.join(self.tempdir, 'foo.json')
+    osutils.WriteFile(path, 'invalid contents')
+
+    with self.assertRaises(workspace_lib.ConfigFileError):
+      workspace_lib.ReadConfigFile(path)
diff --git a/licensing/__init__.py b/licensing/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/licensing/__init__.py
diff --git a/licensing/about_credits.tmpl b/licensing/about_credits.tmpl
new file mode 100644
index 0000000..095271e
--- /dev/null
+++ b/licensing/about_credits.tmpl
@@ -0,0 +1,119 @@
+<!doctype html>
+<!-- Generated by chromite/licensing/licenses; do not edit.
+     See http://dev.chromium.org/chromium-os/licensing-for-chromiumos-developers
+-->
+<html>
+<head>
+<meta charset="UTF-8">
+<title>Credits</title>
+<link rel="stylesheet" href="chrome://resources/css/text_defaults.css">
+<style>
+body {
+  background-color:white;
+  font-size:84%;
+  max-width:1020px;
+}
+.page-title {
+  font-size:164%;
+  font-weight:bold;
+}
+.product {
+  background-color:#c3d9ff;
+  overflow:auto;
+  padding:2px;
+  margin-top:16px;
+  border-radius:5px;
+}
+.product .title {
+  font-size:110%;
+  font-weight:bold;
+  float:left;
+  margin:3px;
+}
+.product .homepage {
+  text-align:right;
+  float:right;
+  margin:3px;
+}
+.product .homepage:after {
+  content:" - ";
+}
+.product .show {
+  text-align:right;
+  float:right;
+  margin:3px;
+}
+.licence {
+  clear:both;
+  background-color:#e8eef7;
+  padding:16px;
+  border-radius:3px;
+  display:none;
+}
+.licence pre {
+  white-space: pre-wrap
+}
+.licence h3 {
+  margin-top:0px;
+}
+.license-packages {
+  font-size:80%;
+}
+.dialog #print-link {
+  display: none;
+}
+.dialog .homepage {
+  display: none;
+}
+</style>
+<script>
+//<![CDATA[
+function toggle(o) {
+  var licence = o.nextSibling;
+
+  while (licence && licence.className != 'licence') {
+    licence = licence.nextSibling;
+  }
+  if (!licence) return false;
+  return toggleblock(licence, o);
+}
+
+function toggleblock(licence, o) {
+  if (licence.style && licence.style.display == 'block') {
+    licence.style.display = 'none';
+    o.innerHTML = 'show license text';
+  } else {
+    licence.style.display = 'block';
+    o.innerHTML = 'hide license text';
+  }
+  return false;
+}
+
+function toggleall() {
+  var a = window.document.getElementsByClassName('licence');
+  for (var i = 0; i < a.length; i++) {
+    toggleblock(a[i], a[i].previousSibling.previousSibling.previousSibling.previousSibling);
+  }
+  return false;
+}
+//]]>
+</script>
+</head>
+<body>
+<span class="page-title" style="float:left;">Credits</span>
+<span style="float:right">
+<a href="javascript:toggleall()">Toggle All</a>
+<a id="print-link" href="javascript:window.print();">Print</a>
+</span>
+<div style="clear:both; overflow:auto;">
+<h2>List of Packages used in Google Chrome OS:</h2>
+<!-- Chromium <3s the following projects -->
+{{entries}}
+</div>
+
+<h2>List of Shared Licenses used in Google Chrome OS:</h2>
+{{licenses}}
+
+<script src="chrome://os-credits/keyboard_utils.js"></script>
+</body>
+</html>
diff --git a/licensing/about_credits_entry.tmpl b/licensing/about_credits_entry.tmpl
new file mode 100644
index 0000000..ffc579a
--- /dev/null
+++ b/licensing/about_credits_entry.tmpl
@@ -0,0 +1,12 @@
+<div class="product">
+<span class="title">{{name}}</span>
+<a class="show" href="#" onclick="return toggle(this);">show license text</a>
+<span class="homepage"><a href="{{url}}">homepage</a></span>
+<div class="licence">
+<pre>{{licenses_txt}}</pre>
+<ul>
+{{licenses_ptr}}
+</ul>
+</div>
+</div>
+
diff --git a/licensing/about_credits_shared_license_entry.tmpl b/licensing/about_credits_shared_license_entry.tmpl
new file mode 100644
index 0000000..457d0c4
--- /dev/null
+++ b/licensing/about_credits_shared_license_entry.tmpl
@@ -0,0 +1,12 @@
+<div class="product">
+<a name="{{license_name}}" class="title">{{license_type}} License {{license_name}}</a>
+<a class="show" href="#" onclick="return toggle(this);">show license text</a>
+<div class="licence">
+<pre>
+{{license}}
+</pre>
+<div class="license-packages">
+Used by these packages: {{license_packages}}
+</div>
+</div>
+</div>
diff --git a/licensing/ebuild_license_hook b/licensing/ebuild_license_hook
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/licensing/ebuild_license_hook
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/licensing/ebuild_license_hook.py b/licensing/ebuild_license_hook.py
new file mode 100644
index 0000000..ddec3a3
--- /dev/null
+++ b/licensing/ebuild_license_hook.py
@@ -0,0 +1,26 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Emerge hook to pre-parse and verify license information.
+
+Called from src/scripts/hooks/install/gen-package-licenses.sh as part of a
+package emerge.
+"""
+
+from __future__ import print_function
+
+from chromite.lib import commandline
+
+from chromite.licensing import licenses_lib
+
+
+def main(args):
+  parser = commandline.ArgumentParser(usage=__doc__)
+  parser.add_argument('--builddir', type='path', dest='builddir',
+                      help='Take $PORTAGE_BUILDDIR as argument.')
+
+  opts = parser.parse_args(args)
+  opts.Freeze()
+
+  licenses_lib.HookPackageProcess(opts.builddir)
diff --git a/licensing/extra_package_licenses/libarchive-3.1.2.LICENSE b/licensing/extra_package_licenses/libarchive-3.1.2.LICENSE
new file mode 100644
index 0000000..b258806
--- /dev/null
+++ b/licensing/extra_package_licenses/libarchive-3.1.2.LICENSE
@@ -0,0 +1,60 @@
+The libarchive distribution as a whole is Copyright by Tim Kientzle
+and is subject to the copyright notice reproduced at the bottom of
+this file.
+
+Each individual file in this distribution should have a clear
+copyright/licensing statement at the beginning of the file.  If any do
+not, please let me know and I will rectify it.  The following is
+intended to summarize the copyright status of the individual files;
+the actual statements in the files are controlling.
+
+* Except as listed below, all C sources (including .c and .h files)
+  and documentation files are subject to the copyright notice reproduced
+  at the bottom of this file.
+
+* The following source files are also subject in whole or in part to
+  a 3-clause UC Regents copyright; please read the individual source
+  files for details:
+   libarchive/archive_entry.c
+   libarchive/archive_read_support_filter_compress.c
+   libarchive/archive_write_set_filter_compress.c
+   libarchive/mtree.5
+   tar/matching.c
+
+* The following source files are in the public domain:
+   tar/getdate.c
+
+* The build files---including Makefiles, configure scripts,
+  and auxiliary scripts used as part of the compile process---have
+  widely varying licensing terms.  Please check individual files before
+  distributing them to see if those restrictions apply to you.
+
+I intend for all new source code to use the license below and hope over
+time to replace code with other licenses with new implementations that
+do use the license below.  The varying licensing of the build scripts
+seems to be an unavoidable mess.
+
+
+Copyright (c) 2003-2009 <author(s)>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer
+   in this position and unchanged.
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR(S) ``AS IS'' AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+IN NO EVENT SHALL THE AUTHOR(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/licensing/licenses b/licensing/licenses
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/licensing/licenses
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/licensing/licenses.py b/licensing/licenses.py
new file mode 100644
index 0000000..92e5e13
--- /dev/null
+++ b/licensing/licenses.py
@@ -0,0 +1,223 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate an HTML file containing license info for all installed packages.
+
+Documentation on this script is also available here:
+http://www.chromium.org/chromium-os/licensing-for-chromiumos-developers
+
+End user (i.e. package owners) documentation is here:
+http://www.chromium.org/chromium-os/licensing-for-chromiumos-package-owners
+
+Usage:
+For this script to work, you must have built the architecture
+this is being run against, _after_ you've last run repo sync.
+Otherwise, it will query newer source code and then fail to work on packages
+that are out of date in your build.
+
+Recommended build:
+  cros_sdk
+  export BOARD=x86-alex
+  sudo rm -rf /build/$BOARD
+  cd ~/trunk/src/scripts
+  # If you wonder why we need to build Chromium OS just to run
+  # `emerge -p -v virtual/target-os` on it, we don't.
+  # However, later we run ebuild unpack, and this will apply patches and run
+  # configure. Configure will fail due to aclocal macros missing in
+  # /build/x86-alex/usr/share/aclocal (those are generated during build).
+  # This will take about 10 min on a Z620.
+  ./build_packages --board=$BOARD --nowithautotest --nowithtest --nowithdev
+                   --nowithfactory
+  cd ~/trunk/chromite/licensing
+  # This removes left over packages from an earlier build that could cause
+  # conflicts.
+  eclean-$BOARD packages
+  %(prog)s [--debug] [--all-packages] --board $BOARD [-o o.html] 2>&1 | tee out
+
+The workflow above is what you would do to generate a licensing file by hand
+given a chromeos tree.
+Note that building packages now creates a license.yaml fork in the package
+which you can see with
+qtbz2 -x -O  /build/x86-alex/packages/dev-util/libc-bench-0.0.1-r8.tbz2 |
+     qxpak -x -O - license.yaml
+This gets automatically installed in
+/build/x86-alex/var/db/pkg/dev-util/libc-bench-0.0.1-r8/license.yaml
+
+Unless you run with --generate, the script will now gather those license
+bits and generate a license file from there.
+License bits for each package are generated by default from
+src/scripts/hooks/install/gen-package-licenses.sh which gets run automatically
+by emerge as part of a package build (by running this script with
+--hook /path/to/tmp/portage/build/tree/for/that/package
+
+If license bits are missing, they are generated on the fly if you were running
+with sudo. If you didn't use sudo, this on the fly late generation will fail
+and act as a warning that your prebuilts were missing package build time
+licenses.
+
+You can check the licenses and/or generate a HTML file for a list of
+packages using --package or -p:
+  %(prog)s --package "dev-libs/libatomic_ops-7.2d" --package
+  "net-misc/wget-1.14" --board $BOARD -o out.html
+
+Note that you'll want to use --generate to force regeneration of the licensing
+bits from a package source you may have just modified but not rebuilt.
+
+If you want to check licensing against all ChromeOS packages, you should
+run ./build_packages --board=$BOARD to build everything and then run
+this script with --all-packages.
+
+By default, when no package is specified, this script processes all
+packages for $BOARD. The output HTML file is meant to update
+http://src.chromium.org/viewvc/chrome/trunk/src/chrome/browser/resources/ +
+  chromeos/about_os_credits.html?view=log
+(gclient config svn://svn.chromium.org/chrome/trunk/src)
+For an example CL, see https://codereview.chromium.org/13496002/
+
+The detailed process is listed below.
+
+* Check out the branch you intend to generate the HTML file for. Use
+  the internal manifest for this purpose.
+    repo init -b <branch_name> -u <URL>
+
+  The list of branches (e.g. release-R33-5116.B) are available here:
+  https://chromium.googlesource.com/chromiumos/manifest/+refs
+
+* Generate the HTML file by following the steps mentioned
+  previously. Check whether your changes are valid with:
+    bin/diff_license_html output.html-M33 output.html-M34
+  and review the diff.
+
+* Update the about_os_credits.html in the svn repository. Create a CL
+  and upload it for review.
+    gcl change <change_name>
+    gcl upload <change_name>
+
+  When uploading, you may get a warning for file being too large to
+  upload. In this case, your CL can still be reviewed. Always include
+  the diff in your commit message so that the reviewers know what the
+  changes are. You can add reviewers on the review page by clicking on
+  "Edit issue".  (A quick reference:
+  http://www.chromium.org/developers/quick-reference)
+
+  Make sure you click on 'Publish+Mail Comments' after adding reviewers
+  (the review URL looks like this https://codereview.chromium.org/183883018/ ).
+
+* After receiving LGTMs, commit your change with 'gcl commit <change_name>'.
+
+If you don't get this in before the freeze window, it'll need to be merged into
+the branch being released, which is done by adding a Merge-Requested label.
+Once it's been updated to "Merge-Approved" by a TPM, please merge into the
+required release branch. You can ask karen@ for merge approve help.
+Example: http://crbug.com/221281
+
+Note however that this is only during the transition period.
+build-image will be modified to generate the license for each board and save
+the file in /opt/google/chrome/resources/about_os_credits.html or as defined
+in http://crbug.com/271832 .
+"""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+
+from chromite.licensing import licenses_lib
+
+
+EXTRA_LICENSES_DIR = os.path.join(licenses_lib.SCRIPT_DIR,
+                                  'extra_package_licenses')
+
+# These packages exist as workarounds....
+EXTRA_PACKAGES = (
+    ('sys-kernel/Linux-2.6',
+     ['http://www.kernel.org/'], ['GPL-2'], []),
+    ('app-arch/libarchive-3.1.2',
+     ['http://www.libarchive.org/'], ['BSD', 'public-domain'],
+     ['libarchive-3.1.2.LICENSE']),
+)
+
+
+def LoadPackageInfo(board, all_packages, generateMissing, packages):
+  """Do the work when we're not called as a hook."""
+  logging.info("Using board %s.", board)
+
+  builddir = os.path.join(cros_build_lib.GetSysroot(board=board),
+                          'tmp', 'portage')
+
+  if not os.path.exists(builddir):
+    raise AssertionError(
+        "FATAL: %s missing.\n"
+        "Did you give the right board and build that tree?" % builddir)
+
+  detect_packages = not packages
+  if detect_packages:
+    # If no packages were specified, we look up the full list.
+    packages = licenses_lib.ListInstalledPackages(board, all_packages)
+
+  if not packages:
+    raise AssertionError('FATAL: Could not get any packages for board %s' %
+                         board)
+
+  logging.debug("Initial Package list to work through:\n%s",
+                '\n'.join(sorted(packages)))
+  licensing = licenses_lib.Licensing(board, packages, generateMissing)
+
+  licensing.LoadPackageInfo()
+  logging.debug("Package list to skip:\n%s",
+                '\n'.join([p for p in sorted(packages)
+                           if licensing.packages[p].skip]))
+  logging.debug("Package list left to work through:\n%s",
+                '\n'.join([p for p in sorted(packages)
+                           if not licensing.packages[p].skip]))
+  licensing.ProcessPackageLicenses()
+  if detect_packages:
+    # If we detected 'all' packages, we have to add in these extras.
+    for fullnamewithrev, homepages, names, files in EXTRA_PACKAGES:
+      license_texts = [osutils.ReadFile(os.path.join(EXTRA_LICENSES_DIR, f))
+                       for f in files]
+      licensing.AddExtraPkg(fullnamewithrev, homepages, names, license_texts)
+
+  return licensing
+
+
+def main(args):
+  parser = commandline.ArgumentParser(usage=__doc__)
+  parser.add_argument("-b", "--board",
+                      help="which board to run for, like x86-alex")
+  parser.add_argument("-p", "--package", action="append", default=[],
+                      dest="packages",
+                      help="check the license of the package, e.g., "
+                      "dev-libs/libatomic_ops-7.2d")
+  parser.add_argument("-a", "--all-packages", action="store_true",
+                      dest="all_packages",
+                      help="Run licensing against all packages in the "
+                      "build tree, instead of just virtual/target-os "
+                      "dependencies.")
+  parser.add_argument("-g", "--generate-licenses", action="store_true",
+                      dest="gen_licenses",
+                      help="Generate license information, if missing.")
+  parser.add_argument("-o", "--output", type="path",
+                      help="which html file to create with output")
+  opts = parser.parse_args(args)
+
+
+  if not opts.board:
+    raise AssertionError("No board given (--board)")
+
+  if not opts.output and not opts.gen_licenses:
+    raise AssertionError("You must specify --output and/or --generate-licenses")
+
+  if opts.gen_licenses and os.geteuid() != 0:
+    raise AssertionError("Run with sudo if you use --generate-licenses.")
+
+  licensing = LoadPackageInfo(
+      opts.board, opts.all_packages, opts.gen_licenses, opts.packages)
+
+  if opts.output:
+    licensing.GenerateHTMLLicenseOutput(opts.output)
diff --git a/licensing/licenses_lib.py b/licensing/licenses_lib.py
new file mode 100644
index 0000000..67b13cb
--- /dev/null
+++ b/licensing/licenses_lib.py
@@ -0,0 +1,1210 @@
+# Copyright 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Library for validating ebuild license information, and generating credits.
+
+Documentation on this script is also available here:
+  http://www.chromium.org/chromium-os/licensing
+"""
+
+from __future__ import print_function
+
+import cgi
+import codecs
+import os
+import re
+import tempfile
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import portage_util
+
+# We are imported by src/repohooks/pre-upload.py in a non chroot environment
+# where yaml may not be there, so we don't error on that since it's not needed
+# in that case.
+try:
+  import yaml
+except ImportError:
+  yaml = None
+
+# When True, RunCommand invocations below print the command being run
+# (passed as print_cmd=debug).
+debug = True
+
+# See http://crbug.com/207004 for discussion.
+PER_PKG_LICENSE_DIR = 'var/db/pkg'
+
+STOCK_LICENSE_DIRS = [
+    os.path.join(constants.SOURCE_ROOT,
+                 'src/third_party/portage-stable/licenses'),
+]
+
+# There are licenses for custom software we got and isn't part of
+# upstream gentoo.
+CUSTOM_LICENSE_DIRS = [
+    os.path.join(constants.SOURCE_ROOT,
+                 'src/third_party/chromiumos-overlay/licenses'),
+]
+
+COPYRIGHT_ATTRIBUTION_DIR = (
+    os.path.join(
+        constants.SOURCE_ROOT,
+        'src/third_party/chromiumos-overlay/licenses/copyright-attribution'))
+
+# Virtual packages don't need to have a license and often don't, so we skip
+# them. chromeos-base contains google platform packages that are covered by
+# the general license at top of tree, so we skip those too.
+SKIPPED_CATEGORIES = [
+    'virtual',
+]
+
+SKIPPED_PACKAGES = [
+    # Fix these packages by adding a real license in the code.
+    # You should not skip packages just because the license scraping doesn't
+    # work. Stick those special cases into PACKAGE_LICENSES.
+    # Packages should only be here because they are sub/split packages already
+    # covered by the license of the main package.
+
+    # These are Chrome-OS-specific packages, copyright BSD-Google
+    'sys-kernel/chromeos-kernel',  # already manually credit Linux
+]
+
+SKIPPED_LICENSES = [
+    # Some of our packages contain binary blobs for which we have special
+    # negotiated licenses, and no need to display anything publicly. Strongly
+    # consider using Google-TOS instead, if possible.
+    'Proprietary-Binary',
+
+    # If you have an early repo for which license terms have yet to be decided
+    # use this. It will cause licensing for the package to be mostly ignored.
+    # Official should error for any package with this license.
+    'TAINTED', # TODO(dgarrett): Error on official builds with this license.
+]
+
+# Filename patterns (case-insensitive) that identify license files when
+# scanning unpacked package sources.
+LICENSE_NAMES_REGEX = [
+    r'^copyright$',
+    r'^copyright[.]txt$',
+    r'^copyright[.]regex$',                        # llvm
+    r'^copying.*$',
+    r'^licen[cs]e.*$',
+    r'^licensing.*$',                              # libatomic_ops
+    r'^ipa_font_license_agreement_v1[.]0[.]txt$',  # ja-ipafonts
+    r'^PKG-INFO$',                                 # copyright assignment for
+                                                   # some python packages
+                                                   # (netifaces, unittest2)
+]
+
+# These are _temporary_ license mappings for packages that do not have a valid
+# shared/custom license, or LICENSE file we can use.
+# Once this script runs earlier (during the package build process), it will
+# block new source without a LICENSE file if the ebuild contains a license
+# that requires copyright assignment (BSD and friends).
+# At that point, new packages will get fixed to include LICENSE instead of
+# adding workaround mappings like those below.
+# The way you now fix copyright attribution cases is to create a custom file
+# with the right license directly in COPYRIGHT_ATTRIBUTION_DIR.
+PACKAGE_LICENSES = {
+    # TODO: replace the naive license parsing code in this script with a hook
+    # into portage's license parsing. See http://crbug.com/348779
+
+    # Chrome (the browser) is complicated, it has a morphing license that is
+    # either BSD-Google, or BSD-Google,Google-TOS depending on how it was
+    # built. We bypass this problem for now by hardcoding the Google-TOS bit as
+    # per ChromeOS with non free bits
+    'chromeos-base/chromeos-chrome': ['BSD-Google', 'Google-TOS'],
+
+    # Currently the code cannot parse LGPL-3 || ( LGPL-2.1 MPL-1.1 )
+    'dev-python/pycairo': ['LGPL-3', 'LGPL-2.1'],
+}
+
+# Any license listed here that is found in the ebuild will make the code look
+# for license files inside the package source code in order to get copyright
+# attribution from them.
+COPYRIGHT_ATTRIBUTION_LICENSES = [
+    'BSD',    # requires distribution of copyright notice
+    'BSD-2',  # so does BSD-2 http://opensource.org/licenses/BSD-2-Clause
+    'BSD-3',  # and BSD-3? http://opensource.org/licenses/BSD-3-Clause
+    'BSD-4',  # and 4?
+    'BSD-with-attribution',
+    'MIT',
+    'MIT-with-advertising',
+    'Old-MIT',
+]
+
+# The following licenses are not invalid or to show as a less helpful stock
+# license, but it's better to look in the source code for a more specific
+# license if there is one, but not an error if no better one is found.
+# Note that you don't want to set just anything here since any license here
+# will be included once in stock form and a second time in custom form if
+# found (there is no good way to know that a license we found on disk is the
+# better version of the stock version, so we show both).
+LOOK_IN_SOURCE_LICENSES = [
+    'as-is',  # The stock license is very vague, source always has more details.
+    'PSF-2',  # The custom license in python is more complete than the template.
+
+    # As far as I know, we have no requirement to do copyright attribution for
+    # these licenses, but the license included in the code has slightly better
+    # information than the stock Gentoo one (including copyright attribution).
+    'BZIP2',     # Single use license, do copyright attribution.
+    'OFL',       # Almost single use license, do copyright attribution.
+    'OFL-1.1',   # Almost single use license, do copyright attribution.
+    'UoI-NCSA',  # Only used by NSCA, might as well show their custom copyright.
+]
+
+# This used to provide overrides. I can't find a valid reason to add any more
+# here, though.
+PACKAGE_HOMEPAGES = {
+    # Example:
+    # 'x11-proto/glproto': ['http://www.x.org/'],
+}
+
+# These are tokens found in LICENSE= in an ebuild that aren't licenses we
+# can actually read from disk.
+# You should not use this to blacklist real licenses.
+LICENCES_IGNORE = [
+    ')',              # Ignore OR tokens from LICENSE="|| ( LGPL-2.1 MPL-1.1 )"
+    '(',
+    '||',
+]
+
+# Find the directory of this script.
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+
+# The template files we depend on for generating HTML.
+TMPL = os.path.join(SCRIPT_DIR, 'about_credits.tmpl')
+ENTRY_TMPL = os.path.join(SCRIPT_DIR, 'about_credits_entry.tmpl')
+SHARED_LICENSE_TMPL = os.path.join(
+    SCRIPT_DIR, 'about_credits_shared_license_entry.tmpl')
+
+
+# This is called directly by src/repohooks/pre-upload.py
+def GetLicenseTypesFromEbuild(ebuild_path):
+  """Returns a list of license types from the ebuild file.
+
+  This function does not always return the correct list, but it is
+  faster than using portageq for not having to access chroot. It is
+  intended to be used for tasks such as presubmission checks.
+
+  Args:
+    ebuild_path: ebuild to read.
+
+  Returns:
+    list of licenses read from ebuild.
+
+  Raises:
+    ValueError: ebuild errors.
+  """
+  ebuild_env_tmpl = """
+has() { [[ " ${*:2} " == *" $1 "* ]]; }
+inherit() {
+  local overlay_list="%(overlay_list)s"
+  local eclass overlay f
+  for eclass; do
+    has ${eclass} ${_INHERITED_} && continue
+    _INHERITED_+=" ${eclass}"
+    for overlay in %(overlay_list)s; do
+      f="${overlay}/eclass/${eclass}.eclass"
+      if [[ -e ${f} ]]; then
+        source "${f}"
+        break
+      fi
+     done
+  done
+}
+source %(ebuild)s"""
+
+  # TODO: the overlay_list hard-coded here should be changed to look
+  # at the current overlay, and then the master overlays. E.g. for an
+  # ebuild file in overlay-parrot, we will look at parrot overlay
+  # first, and then look at portage-stable and chromiumos, which are
+  # listed as masters in overlay-parrot/metadata/layout.conf.
+  tmpl_env = {
+      'ebuild': ebuild_path,
+      'overlay_list': '%s %s' % (
+          os.path.join(constants.SOURCE_ROOT,
+                       'src/third_party/chromiumos-overlay'),
+          os.path.join(constants.SOURCE_ROOT,
+                       'src/third_party/portage-stable'))
+  }
+
+  with tempfile.NamedTemporaryFile(bufsize=0) as f:
+    osutils.WriteFile(f.name, ebuild_env_tmpl % tmpl_env)
+    env = osutils.SourceEnvironment(
+        f.name, whitelist=['LICENSE'], ifs=' ', multiline=True)
+
+  if not env.get('LICENSE'):
+    raise ValueError('No LICENSE found in the ebuild.')
+  if re.search(r'[,;]', env['LICENSE']):
+    raise ValueError(
+        'LICENSE field in the ebuild should be whitespace-limited.')
+
+  return env['LICENSE'].split()
+
+
+class PackageLicenseError(Exception):
+  """Raised when getting license information for a package fails.
+
+  Raising this will cause the overall processing to report failure
+  in the end.
+  """
+
+
+class PackageInfo(object):
+  """Package specific information, mostly about licenses."""
+
+  def __init__(self, board, fullnamerev):
+    """Package info initializer.
+
+    Args:
+      board: The board this package was built for.
+      fullnamerev: package name of the form 'x11-base/X.Org-1.9.3-r23'
+    """
+
+    self.board = board  # This field may be None, based on entry path.
+
+    #
+    # Populate these fields from fullnamerev:
+    #   category, name, version, revision
+    #
+    try:
+      cpv = portage_util.SplitCPV(fullnamerev)
+    except TypeError:
+      cpv = None
+
+    # A bad package can either raise a TypeError exception or return None.
+    if not cpv:
+      raise AssertionError(
+          'portage couldn\'t find %s, missing version number?' % fullnamerev)
+
+    #
+    # These define the package uniquely.
+    #
+
+    self.category, self.name, self.version, self.revision = (
+        cpv.category, cpv.package, cpv.version_no_rev, cpv.rev)
+
+    if self.revision is not None:
+      self.revision = str(self.revision).lstrip('r')
+
+    #
+    # These fields hold license information used to generate the credits page.
+    #
+
+    # This contains licenses names for this package.
+    self.license_names = set()
+
+    # Full Text of discovered license information.
+    self.license_text_scanned = []
+
+    self.homepages = []
+
+    #
+    # These fields show the results of processing.
+    #
+
+    # After reading basic package information, we can mark the package as
+    # one to skip in licensing.
+    self.skip = False
+
+    # Intellegently populate initial skip information.
+    self.LookForSkip()
+
+  @property
+  def fullnamerev(self):
+    """e.g. libnl/libnl-3.2.24-r12"""
+    s = '%s-%s' % (self.fullname, self.version)
+    if self.revision:
+      s += '-r%s' % self.revision
+    return s
+
+  @property
+  def fullname(self):
+    """e.g. libnl/libnl-3.2.24"""
+    return '%s/%s' % (self.category, self.name)
+
+  @property
+  def license_dump_path(self):
+    """e.g. /build/x86-alex/var/db/pkg/sys-apps/dtc-1.4.0/license.yaml.
+
+    Only valid for packages that have already been emerged.
+    """
+    return os.path.join(cros_build_lib.GetSysroot(self.board),
+                        PER_PKG_LICENSE_DIR, self.fullnamerev, 'license.yaml')
+
+  def _RunEbuildPhases(self, ebuild_path, phases):
+    """Run a list of ebuild phases on an ebuild.
+
+    Args:
+      ebuild_path: exact path of the ebuild file.
+      phases: list of phases like ['clean', 'fetch'] or ['unpack'].
+
+    Returns:
+      ebuild command output
+    """
+    ebuild_cmd = cros_build_lib.GetSysrootToolPath(
+        cros_build_lib.GetSysroot(self.board), 'ebuild')
+    return cros_build_lib.RunCommand(
+        [ebuild_cmd, ebuild_path] + phases, print_cmd=debug,
+        redirect_stdout=True)
+
+  def _GetOverrideLicense(self):
+    """Look in COPYRIGHT_ATTRIBUTION_DIR for license with copyright attribution.
+
+    For dev-util/bsdiff-4.3-r5, the code will look for
+    dev-util/bsdiff-4.3-r5
+    dev-util/bsdiff-4.3
+    dev-util/bsdiff
+
+    It is ok to have more than one bsdiff license file, and an empty file acts
+    as a rubout (i.e. an empty dev-util/bsdiff-4.4 will shadow dev-util/bsdiff
+    and tell the licensing code to look in the package source for a license
+    instead of using dev-util/bsdiff as an override).
+
+    Returns:
+      False (no license found) or a multiline license string.
+    """
+    license_read = None
+    # dev-util/bsdiff-4.3-r5 -> bsdiff-4.3-r5
+    filename = os.path.basename(self.fullnamerev)
+    license_path = os.path.join(COPYRIGHT_ATTRIBUTION_DIR,
+                                os.path.dirname(self.fullnamerev))
+    pv = portage_util.SplitPV(filename)
+    pv_no_rev = '%s-%s' % (pv.package, pv.version_no_rev)
+    for filename in (pv.pv, pv_no_rev, pv.package):
+      file_path = os.path.join(license_path, filename)
+      logging.debug('Looking for override copyright attribution license in %s',
+                    file_path)
+      if os.path.exists(file_path):
+        # Turn
+        # /../merlin/trunk/src/third_party/chromiumos-overlay/../dev-util/bsdiff
+        # into
+        # chromiumos-overlay/../dev-util/bsdiff
+        short_dir_path = os.path.join(*file_path.rsplit(os.path.sep, 5)[1:])
+        license_read = 'Copyright Attribution License %s:\n\n' % short_dir_path
+        license_read += ReadUnknownEncodedFile(
+            file_path, 'read copyright attribution license')
+        break
+
+    return license_read
+
+  def _ExtractLicenses(self, src_dir, need_copyright_attribution):
+    """Scrounge for text licenses in the source of package we'll unpack.
+
+    This is only called if we couldn't get usable licenses from the ebuild,
+    or one of them is BSD/MIT like which forces us to look for a file with
+    copyright attribution in the source code itself.
+
+    First, we have a shortcut where we scan COPYRIGHT_ATTRIBUTION_DIR to see if
+    we find a license for this package. If so, we use that.
+    Typically it'll be used if the unpacked source does not have the license
+    that we're required to display for copyright attribution (in some cases it's
+    plain absent, in other cases, it could be in a filename we don't look for).
+
+    Otherwise, we scan the unpacked source code for what looks like license
+    files as defined in LICENSE_NAMES_REGEX.
+
+    Raises:
+      AssertionError: on runtime errors
+      PackageLicenseError: couldn't find copyright attribution file.
+    """
+    license_override = self._GetOverrideLicense()
+    if license_override:
+      self.license_text_scanned = [license_override]
+      return
+
+    if not src_dir:
+      ebuild_path = self._FindEbuildPath()
+      self._RunEbuildPhases(ebuild_path, ['clean', 'fetch'])
+      raw_output = self._RunEbuildPhases(ebuild_path, ['unpack'])
+      output = raw_output.output.splitlines()
+      # Output is spammy, it looks like this:
+      #  * gc-7.2d.tar.gz RMD160 SHA1 SHA256 size ;-) ...                 [ ok ]
+      #  * checking gc-7.2d.tar.gz ;-) ...                                [ ok ]
+      #  * Running stacked hooks for pre_pkg_setup
+      #  *    sysroot_build_bin_dir ...
+      #  [ ok ]
+      #  * Running stacked hooks for pre_src_unpack
+      #  *    python_multilib_setup ...
+      #  [ ok ]
+      # >>> Unpacking source...
+      # >>> Unpacking gc-7.2d.tar.gz to /build/x86-alex/tmp/po/[...]ps-7.2d/work
+      # >>> Source unpacked in /build/x86-alex/tmp/portage/[...]ops-7.2d/work
+      # So we only keep the last 2 lines, the others we don't care about.
+      output = [line for line in output if line[0:3] == '>>>' and
+                line != '>>> Unpacking source...']
+      for line in output:
+        logging.info(line)
+
+      portageq_cmd = cros_build_lib.GetSysrootToolPath(
+          cros_build_lib.GetSysroot(self.board), 'portageq')
+      args = [portageq_cmd, 'envvar', 'PORTAGE_TMPDIR']
+      result = cros_build_lib.RunCommand(args, print_cmd=debug,
+                                         redirect_stdout=True)
+      tmpdir = result.output.splitlines()[0]
+      # tmpdir gets something like /build/daisy/tmp/
+      src_dir = os.path.join(tmpdir, 'portage', self.fullnamerev, 'work')
+
+      if not os.path.exists(src_dir):
+        raise AssertionError(
+            'Unpack of %s didn\'t create %s. Version mismatch' %
+            (self.fullnamerev, src_dir))
+
+    # You may wonder how deep should we go?
+    # In case of packages with sub-packages, it could be deep.
+    # Let's just be safe and get everything we can find.
+    # In the case of libatomic_ops, it's actually required to look deep
+    # to find the MIT license:
+    # dev-libs/libatomic_ops-7.2d/work/gc-7.2/libatomic_ops/doc/LICENSING.txt
+    args = ['find', src_dir, '-type', 'f']
+    result = cros_build_lib.RunCommand(args, print_cmd=debug,
+                                       redirect_stdout=True).output.splitlines()
+    # Truncate results to look like this: swig-2.0.4/COPYRIGHT
+    files = [x[len(src_dir):].lstrip('/') for x in result]
+    license_files = []
+    for name in files:
+      # When we scan a source tree managed by git, this can contain license
+      # files that are not part of the source. Exclude those.
+      # (e.g. .git/refs/heads/licensing)
+      if '.git/' in name:
+        continue
+      basename = os.path.basename(name)
+      # Looking for license.* brings up things like license.gpl, and we
+      # never want a GPL license when looking for copyright attribution,
+      # so we skip them here. We also skip regexes that can return
+      # license.py (seen in some code).
+      if re.search(r'.*GPL.*', basename) or re.search(r'\.py$', basename):
+        continue
+      for regex in LICENSE_NAMES_REGEX:
+        if re.search(regex, basename, re.IGNORECASE):
+          license_files.append(name)
+          break
+
+    if not license_files:
+      if need_copyright_attribution:
+        logging.error("""
+%s: unable to find usable license.
+Typically this will happen because the ebuild says it's MIT or BSD, but there
+was no license file that this script could find to include along with a
+copyright attribution (required for BSD/MIT).
+
+If this is Google source, please change
+LICENSE="BSD"
+to
+LICENSE="BSD-Google"
+
+If not, go investigate the unpacked source in %s,
+and find which license to assign.  Once you found it, you should copy that
+license to a file under %s
+(or you can modify LICENSE_NAMES_REGEX to pickup a license file that isn't
+being scraped currently).""",
+                      self.fullnamerev, src_dir, COPYRIGHT_ATTRIBUTION_DIR)
+        raise PackageLicenseError()
+      else:
+        # We can get called for a license like as-is where it's preferable
+        # to find a better one in the source, but not fatal if we didn't.
+        logging.info('Was not able to find a better license for %s '
+                     'in %s to replace the more generic one from ebuild',
+                     self.fullnamerev, src_dir)
+
+    # Examples of multiple license matches:
+    # dev-lang/swig-2.0.4-r1: swig-2.0.4/COPYRIGHT swig-2.0.4/LICENSE
+    # dev-libs/glib-2.32.4-r1: glib-2.32.4/COPYING pkg-config-0.26/COPYING
+    # dev-libs/libnl-3.2.14: libnl-doc-3.2.14/COPYING libnl-3.2.14/COPYING
+    # dev-libs/libpcre-8.30-r2: pcre-8.30/LICENCE pcre-8.30/COPYING
+    # dev-libs/libusb-0.1.12-r6: libusb-0.1.12/COPYING libusb-0.1.12/LICENSE
+    # dev-libs/pyzy-0.1.0-r1: db/COPYING pyzy-0.1.0/COPYING
+    # net-misc/strongswan-5.0.2-r4: strongswan-5.0.2/COPYING
+    #                               strongswan-5.0.2/LICENSE
+    # sys-process/procps-3.2.8_p11: debian/copyright procps-3.2.8/COPYING
+    logging.info('License(s) for %s: %s', self.fullnamerev,
+                 ' '.join(license_files))
+    for license_file in sorted(license_files):
+      # Joy and pink ponies. Some license_files are encoded as latin1 while
+      # others are utf-8 and of course you can't know but only guess.
+      license_path = os.path.join(src_dir, license_file)
+      license_txt = ReadUnknownEncodedFile(license_path, 'Adding License')
+
+      self.license_text_scanned += [
+          'Scanned Source License %s:\n\n%s' % (license_file, license_txt)]
+
+    # We used to clean up here, but there have been many instances where
+    # looking at unpacked source to see where the licenses were, was useful
+    # so let's disable this for now
+    # self._RunEbuildPhases(['clean'])
+
+  def LookForSkip(self):
+    """Look for a reason to skip over this package.
+
+    Sets self.skip to True if a reason was found.
+
+    Returns:
+      True if a reason was found.
+    """
+    if self.category in SKIPPED_CATEGORIES:
+      logging.info('%s in SKIPPED_CATEGORIES, skip package', self.fullname)
+      self.skip = True
+
+    if self.fullname in SKIPPED_PACKAGES:
+      logging.info('%s in SKIPPED_PACKAGES, skip package', self.fullname)
+      self.skip = True
+
+    # TODO(dgarrett): There are additional reasons that should be handled here.
+
+    return self.skip
+
+  def _FindEbuildPath(self):
+    """Discover the path to a package's associated ebuild.
+
+    This method is not valid during the emerge hook process.
+
+    Returns:
+      full path file name of the ebuild file for this package.
+
+    Raises:
+      AssertionError if it can't be discovered for some reason.
+    """
+    equery_cmd = cros_build_lib.GetSysrootToolPath(
+        cros_build_lib.GetSysroot(self.board), 'equery')
+    args = [equery_cmd, '-q', '-C', 'which', self.fullnamerev]
+    try:
+      path = cros_build_lib.RunCommand(args, print_cmd=True,
+                                       redirect_stdout=True).output.strip()
+    except cros_build_lib.RunCommandError:
+      path = None
+
+    # Path can be false because of an exception, or a command result.
+    if not path:
+      raise AssertionError('_FindEbuildPath for %s failed.\n'
+                           'Is your tree clean? Try a rebuild?' %
+                           self.fullnamerev)
+
+    logging.debug('%s -> %s', ' '.join(args), path)
+
+    if not os.access(path, os.F_OK):
+      raise AssertionError('Can\'t access %s', path)
+
+    return path
+
+  def GetLicenses(self, build_info_dir, src_dir):
+    """Populate the license related fields.
+
+    Fields populated:
+      license_names, license_text_scanned, homepages, skip
+
+    Some packages have static license mappings applied to them that get
+    retrieved from the ebuild.
+
+    For others, we figure out whether the package source should be scanned to
+    add licenses found there.
+
+    Args:
+      build_info_dir: Path to the build_info for the ebuild. This can be from
+        the working directory during the emerge hook, or in the portage pkg db.
+      src_dir: Directory to the expanded source code for this package. If None,
+        the source will be expanded, if needed (slow).
+
+    Raises:
+      AssertionError: on runtime errors
+      PackageLicenseError: couldn't find license in ebuild and source.
+    """
+    # If the total size installed is zero, we installed no content to license.
+    if _BuildInfo(build_info_dir, 'SIZE').strip() == '0':
+      self.skip = True
+      return
+
+    self.homepages = _BuildInfo(build_info_dir, 'HOMEPAGE').split()
+    ebuild_license_names = _BuildInfo(build_info_dir, 'LICENSE').split()
+
+    # If this ebuild only uses skipped licenses, skip it.
+    if (ebuild_license_names and
+        all(l in SKIPPED_LICENSES for l in ebuild_license_names)):
+      self.skip = True
+
+    if self.skip:
+      return
+
+    if self.fullname in PACKAGE_HOMEPAGES:
+      self.homepages = PACKAGE_HOMEPAGES[self.fullname]
+
+    # Packages with missing licenses or licenses that need mapping (like
+    # BSD/MIT) are hardcoded here:
+    if self.fullname in PACKAGE_LICENSES:
+      ebuild_license_names = PACKAGE_LICENSES[self.fullname]
+      logging.info('Static license mapping for %s: %s', self.fullnamerev,
+                   ','.join(ebuild_license_names))
+    else:
+      logging.info('Read licenses for %s: %s', self.fullnamerev,
+                   ','.join(ebuild_license_names))
+
+    # Lots of packages in chromeos-base have their license set to BSD instead
+    # of BSD-Google:
+    new_license_names = []
+    for license_name in ebuild_license_names:
+      # TODO: temp workaround for http;//crbug.com/348750 , remove when the bug
+      # is fixed.
+      if (license_name == 'BSD' and
+          self.fullnamerev.startswith('chromeos-base/') and
+          'BSD-Google' not in ebuild_license_names):
+        license_name = 'BSD-Google'
+        logging.warning(
+            'Fixed BSD->BSD-Google for %s because it\'s in chromeos-base. '
+            'Please fix the LICENSE field in the ebuild', self.fullnamerev)
+      # TODO: temp workaround for http;//crbug.com/348749 , remove when the bug
+      # is fixed.
+      if license_name == 'Proprietary':
+        license_name = 'Google-TOS'
+        logging.warning(
+            'Fixed Proprietary -> Google-TOS for %s. '
+            'Please fix the LICENSE field in the ebuild', self.fullnamerev)
+      new_license_names.append(license_name)
+    ebuild_license_names = new_license_names
+
+    # The ebuild license field can look like:
+    # LICENSE="GPL-3 LGPL-3 Apache-2.0" (this means AND, as in all 3)
+    # for third_party/portage-stable/app-admin/rsyslog/rsyslog-5.8.11.ebuild
+    # LICENSE="|| ( LGPL-2.1 MPL-1.1 )"
+    # for third_party/portage-stable/x11-libs/cairo/cairo-1.8.8.ebuild
+
+    # The parser isn't very smart and only has basic support for the
+    # || ( X Y ) OR logic to do the following:
+    # In order to save time needlessly unpacking packages and looking or a
+    # cleartext license (which is really a crapshoot), if we have a license
+    # like BSD that requires looking for copyright attribution, but we can
+    # chose another license like GPL, we do that.
+
+    if not self.skip and not ebuild_license_names:
+      logging.error('%s: no license found in ebuild. FIXME!', self.fullnamerev)
+      # In a bind, you could comment this out. I'm making the output fail to
+      # get your attention since this error really should be fixed, but if you
+      # comment out the next line, the script will try to find a license inside
+      # the source.
+      raise PackageLicenseError()
+
+    # This is not invalid, but the parser can't deal with it, so if it ever
+    # happens, error out to tell the programmer to do something.
+    # dev-python/pycairo-1.10.0-r4: LGPL-3 || ( LGPL-2.1 MPL-1.1 )
+    if '||' in ebuild_license_names[1:]:
+      logging.error('%s: Can\'t parse || in the middle of a license: %s',
+                    self.fullnamerev, ' '.join(ebuild_license_names))
+      raise PackageLicenseError()
+
+    or_licenses_and_one_is_no_attribution = False
+    # We do a quick early pass first so that the longer pass below can
+    # run accordingly.
+    for license_name in [x for x in ebuild_license_names
+                         if x not in LICENCES_IGNORE]:
+      # Here we have an OR case, and one license that we can use stock, so
+      # we remember that in order to be able to skip license attributions if
+      # any were in the OR.
+      if (ebuild_license_names[0] == '||' and
+          license_name not in COPYRIGHT_ATTRIBUTION_LICENSES):
+        or_licenses_and_one_is_no_attribution = True
+
+    need_copyright_attribution = False
+    scan_source_for_licenses = False
+
+    for license_name in [x for x in ebuild_license_names
+                         if x not in LICENCES_IGNORE]:
+      # Licenses like BSD or MIT can't be used as is because they do not contain
+      # copyright self. They have to be replaced by copyright file given in the
+      # source code, or manually mapped by us in PACKAGE_LICENSES
+      if license_name in COPYRIGHT_ATTRIBUTION_LICENSES:
+        # To limit needless efforts, if a package is BSD or GPL, we ignore BSD
+        # and use GPL to avoid scanning the package, but we can only do this if
+        # or_licenses_and_one_is_no_attribution has been set above.
+        # This ensures that if we have License: || (BSD3 BSD4), we will
+        # look in the source.
+        if or_licenses_and_one_is_no_attribution:
+          logging.info('%s: ignore license %s because ebuild LICENSES had %s',
+                       self.fullnamerev, license_name,
+                       ' '.join(ebuild_license_names))
+        else:
+          logging.info('%s: can\'t use %s, will scan source code for copyright',
+                       self.fullnamerev, license_name)
+          need_copyright_attribution = True
+          scan_source_for_licenses = True
+      else:
+        self.license_names.add(license_name)
+        # We can't display just 2+ because it only contains text that says to
+        # read v2 or v3.
+        if license_name == 'GPL-2+':
+          self.license_names.add('GPL-2')
+        if license_name == 'LGPL-2+':
+          self.license_names.add('LGPL-2')
+
+      if license_name in LOOK_IN_SOURCE_LICENSES:
+        logging.info('%s: Got %s, will try to find better license in source...',
+                     self.fullnamerev, license_name)
+        scan_source_for_licenses = True
+
+    if self.license_names:
+      logging.info('%s: using stock|cust license(s) %s',
+                   self.fullnamerev, ','.join(self.license_names))
+
+    # If the license(s) could not be found, or one requires copyright
+    # attribution, dig in the source code for license files:
+    # For instance:
+    # Read licenses from ebuild for net-dialup/ppp-2.4.5-r3: BSD,GPL-2
+    # We need get the substitution file for BSD and add it to GPL.
+    if scan_source_for_licenses:
+      self._ExtractLicenses(src_dir, need_copyright_attribution)
+
+    # This shouldn't run, but leaving as sanity check.
+    if not self.license_names and not self.license_text_scanned:
+      raise AssertionError('Didn\'t find usable licenses for %s' %
+                           self.fullnamerev)
+
+  def SaveLicenseDump(self, save_file):
+    """Save PackageInfo contents to a YAML file.
+
+    This is used to cache license results between the emerge hook phase and
+    credits page generation.
+
+    Args:
+      save_file: File to save the yaml contents into.
+    """
+    logging.debug('Saving license to %s', save_file)
+    yaml_dump = self.__dict__.items()
+    osutils.WriteFile(save_file, yaml.dump(yaml_dump), makedirs=True)
+
+
class Licensing(object):
  """Do the actual work of extracting licensing info and outputting html."""

  def __init__(self, board, package_fullnames, gen_licenses):
    """Set up licensing state for one board and its package list.

    Args:
      board: board name string (e.g. x86-alex).
      package_fullnames: list of package names of the form cat/package-ver-rev.
      gen_licenses: bool; if True, missing license dumps are generated on
        the fly in ProcessPackageLicenses instead of raising.
    """
    # eg x86-alex
    self.board = board
    # List of stock and custom licenses referenced in ebuilds. Used to
    # print a report. Dict value says which packages use that license.
    self.licenses = {}

    # Licenses are supposed to be generated at package build time and be
    # ready for us, but in case they're not, they can be generated.
    self.gen_licenses = gen_licenses

    # Per-package rendered html entry, filled by _GeneratePackageLicenseText.
    self.package_text = {}
    # Per-package entry template; loaded in GenerateHTMLLicenseOutput.
    self.entry_template = None

    # We need to have a dict for the list of packages objects, index by package
    # fullnamerev, so that when we scan our licenses at the end, and find out
    # some shared licenses are only used by one package, we can access that
    # package object by name, and add the license directly in that object.
    self.packages = {}
    self._package_fullnames = package_fullnames

  @property
  def sorted_licenses(self):
    """License names seen so far, sorted case-insensitively."""
    return sorted(self.licenses.keys(), key=str.lower)

  def _LoadLicenseDump(self, pkg):
    """Restore a PackageInfo's attributes from its cached YAML dump.

    Args:
      pkg: PackageInfo whose license_dump_path is read.
    """
    save_file = pkg.license_dump_path
    logging.debug('Getting license from %s for %s', save_file, pkg.name)
    # NOTE(review): yaml.load is unsafe on untrusted input; these dumps are
    # build artifacts written by SaveLicenseDump, so this is accepted here.
    yaml_dump = yaml.load(osutils.ReadFile(save_file))
    for key, value in yaml_dump:
      pkg.__dict__[key] = value

  def LicensedPackages(self, license_name):
    """Return list of packages using a given license."""
    return self.licenses[license_name]

  def LoadPackageInfo(self):
    """Populate basic package info for all packages from their ebuild."""
    for package_name in self._package_fullnames:
      pkg = PackageInfo(self.board, package_name)
      self.packages[package_name] = pkg

  def ProcessPackageLicenses(self):
    """Iterate through all packages provided and gather their licenses.

    GetLicenses will scrape licenses from the code and/or gather stock license
    names. We gather the list of stock and custom ones for later processing.

    Do not call this after adding virtual packages with AddExtraPkg.
    """
    for package_name in self.packages:
      pkg = self.packages[package_name]

      if pkg.skip:
        logging.debug('Package %s is in skip list', package_name)
        continue

      # Other skipped packages get dumped with incomplete info and the skip flag
      if not os.path.exists(pkg.license_dump_path):
        if not self.gen_licenses:
          raise PackageLicenseError('License for %s is missing' % package_name)

        logging.error('>>> License for %s is missing, creating now <<<',
                      package_name)
        build_info_path = os.path.join(
            cros_build_lib.GetSysroot(pkg.board),
            PER_PKG_LICENSE_DIR, pkg.fullnamerev)
        pkg.GetLicenses(build_info_path, None)

        # We dump packages where licensing failed too.
        pkg.SaveLicenseDump(pkg.license_dump_path)

      # Load the pre-cached version, if the in-memory version is incomplete.
      if not pkg.license_names:
        logging.debug('loading dump for %s', pkg.fullnamerev)
        self._LoadLicenseDump(pkg)

  def AddExtraPkg(self, fullnamerev, homepages, license_names, license_texts):
    """Allow adding pre-created virtual packages.

    GetLicenses will not work on them, so add them after having run
    ProcessPackages.

    Args:
      fullnamerev: package name of the form x11-base/X.Org-1.9.3-r23
      homepages: list of url strings.
      license_names: list of license name strings.
      license_texts: custom license text to use, mostly for attribution.
    """
    pkg = PackageInfo(self.board, fullnamerev)
    pkg.homepages = homepages
    pkg.license_names = license_names
    pkg.license_text_scanned = license_texts
    self.packages[fullnamerev] = pkg

  # Called directly by src/repohooks/pre-upload.py
  @staticmethod
  def FindLicenseType(license_name):
    """Says if a license is stock Gentoo, custom, or doesn't exist."""

    for directory in STOCK_LICENSE_DIRS:
      path = os.path.join(directory, license_name)
      if os.path.exists(path):
        return 'Gentoo Package Stock'

    for directory in CUSTOM_LICENSE_DIRS:
      path = os.path.join(directory, license_name)
      if os.path.exists(path):
        return 'Custom'

    if license_name in SKIPPED_LICENSES:
      return 'Custom'

    raise AssertionError("""
license %s could not be found in %s
If the license in the ebuild is correct,
a) a stock license should be added to portage-stable/licenses :
running `cros_portage_upgrade` inside of the chroot should clone this repo
to /tmp/portage/:
https://chromium.googlesource.com/chromiumos/overlays/portage/+/gentoo
find the new licenses under licenses, and add them to portage-stable/licenses

b) if it's a non gentoo package with a custom license, you can copy that license
to third_party/chromiumos-overlay/licenses/

Try re-running the script with -p cat/package-ver --generate
after fixing the license.""" %
                         (license_name,
                          '\n'.join(STOCK_LICENSE_DIRS + CUSTOM_LICENSE_DIRS))
                        )

  @staticmethod
  def ReadSharedLicense(license_name):
    """Read and return stock or cust license file specified in an ebuild."""

    license_path = None
    for directory in STOCK_LICENSE_DIRS + CUSTOM_LICENSE_DIRS:
      path = os.path.join(directory, license_name)
      if os.path.exists(path):
        license_path = path
        break

    if license_path:
      return ReadUnknownEncodedFile(license_path, 'read license')
    else:
      raise AssertionError('license %s could not be found in %s'
                           % (license_name,
                              '\n'.join(STOCK_LICENSE_DIRS +
                                        CUSTOM_LICENSE_DIRS))
                          )

  @staticmethod
  def EvaluateTemplate(template, env):
    """Expand a template with vars like {{foo}} using a dict of expansions."""
    # TODO switch to stock python templates.
    for key, val in env.iteritems():
      template = template.replace('{{%s}}' % key, val)
    return template

  def _GeneratePackageLicenseText(self, pkg):
    """Concatenate all licenses related to a pkg.

    This means a combination of ebuild shared licenses and licenses read from
    the pkg source tree, if any.

    Args:
      pkg: PackageInfo object

    Raises:
      AssertionError: on runtime errors
    """
    license_text = []
    for license_text_scanned in pkg.license_text_scanned:
      license_text.append(license_text_scanned)
      # Visual separator between multiple scanned licenses.
      license_text.append('%s\n' % ('-=' * 40))

    license_pointers = []
    # sln: shared license name.
    for sln in pkg.license_names:
      # Says whether it's a stock gentoo or custom license.
      license_type = self.FindLicenseType(sln)
      license_pointers.append(
          "<li><a href='#%s'>%s License %s</a></li>" % (
              sln, license_type, sln))

    # This should get caught earlier, but one extra check.
    if not license_text + license_pointers:
      raise AssertionError('Ended up with no license_text for %s' %
                           pkg.fullnamerev)

    env = {
        'name': '%s-%s' % (pkg.name, pkg.version),
        'url': cgi.escape(pkg.homepages[0]) if pkg.homepages else '',
        'licenses_txt': cgi.escape('\n'.join(license_text)) or '',
        'licenses_ptr': '\n'.join(license_pointers) or '',
    }
    self.package_text[pkg] = self.EvaluateTemplate(self.entry_template, env)

  def GenerateHTMLLicenseOutput(self, output_file,
                                output_template=TMPL,
                                entry_template=ENTRY_TMPL,
                                license_template=SHARED_LICENSE_TMPL):
    """Generate the combined html license file used in ChromeOS.

    Args:
      output_file: resulting HTML license output.
      output_template: template for the entire HTML file.
      entry_template: template for per package entries.
      license_template: template for shared license entries.
    """
    self.entry_template = ReadUnknownEncodedFile(entry_template)
    sorted_license_txt = []

    # Keep track of which licenses are used by which packages.
    for pkg in self.packages.values():
      if pkg.skip:
        continue
      for sln in pkg.license_names:
        self.licenses.setdefault(sln, []).append(pkg.fullnamerev)

    # Find licenses only used once, and roll them in the package that uses them.
    # We use keys() because licenses is modified in the loop, so we can't use
    # an iterator.
    for sln in self.licenses.keys():
      if len(self.licenses[sln]) == 1:
        pkg_fullnamerev = self.licenses[sln][0]
        logging.info('Collapsing shared license %s into single use license '
                     '(only used by %s)', sln, pkg_fullnamerev)
        license_type = self.FindLicenseType(sln)
        license_txt = self.ReadSharedLicense(sln)
        single_license = '%s License %s:\n\n%s' % (license_type, sln,
                                                   license_txt)
        pkg = self.packages[pkg_fullnamerev]
        pkg.license_text_scanned.append(single_license)
        pkg.license_names.remove(sln)
        del self.licenses[sln]

    # Emit per-package entries sorted by (name, version, revision).
    for pkg in sorted(self.packages.values(),
                      key=lambda x: (x.name.lower(), x.version, x.revision)):
      if pkg.skip:
        logging.debug('Skipping package %s', pkg.fullnamerev)
        continue
      self._GeneratePackageLicenseText(pkg)
      sorted_license_txt += [self.package_text[pkg]]

    # Now generate the bottom of the page that will contain all the shared
    # licenses and a list of who is pointing to them.
    license_template = ReadUnknownEncodedFile(license_template)

    licenses_txt = []
    for license_name in self.sorted_licenses:
      env = {
          'license_name': license_name,
          'license': cgi.escape(self.ReadSharedLicense(license_name)),
          'license_type': self.FindLicenseType(license_name),
          'license_packages': ' '.join(self.LicensedPackages(license_name)),
      }
      licenses_txt += [self.EvaluateTemplate(license_template, env)]

    file_template = ReadUnknownEncodedFile(output_template)
    env = {
        'entries': '\n'.join(sorted_license_txt),
        'licenses': '\n'.join(licenses_txt),
    }
    osutils.WriteFile(output_file,
                      self.EvaluateTemplate(file_template, env).encode('UTF-8'))
+
+
def ListInstalledPackages(board, all_packages=False):
  """Return a list of all packages installed for a particular board.

  Args:
    board: board name to query (e.g. x86-alex).
    all_packages: If True, all packages visible in the build chroot are used
      to generate the licensing file. This is not what you want for a release
      license file, but it's a way to run licensing checks against all
      packages. If False, only generate a licensing file that contains
      packages used for a release build (as determined by the dependencies
      for virtual/target-os).

  Returns:
    List of package name strings (cat/package-ver-rev).

  Raises:
    AssertionError: if a package line is found that is not a binary
      reinstall ('R'), which indicates a stale binhost for the board.
  """
  if all_packages:
    # The following returns all packages that were part of the build tree
    # (many get built or used during the build, but do not get shipped).
    # Note that it also contains packages that are in the build as
    # defined by build_packages but not part of the image we ship.
    equery_cmd = cros_build_lib.GetSysrootToolPath(
        cros_build_lib.GetSysroot(board), 'equery')
    args = [equery_cmd, 'list', '*']
    packages = cros_build_lib.RunCommand(args, print_cmd=debug,
                                         redirect_stdout=True
                                        ).output.splitlines()
  else:
    # The following returns all packages that were part of the build tree
    # (many get built or used during the build, but do not get shipped).
    # Note that it also contains packages that are in the build as
    # defined by build_packages but not part of the image we ship.
    emerge_cmd = cros_build_lib.GetSysrootToolPath(
        cros_build_lib.GetSysroot(board), 'emerge')
    args = [emerge_cmd, '--with-bdeps=y', '--usepkgonly',
            '--emptytree', '--pretend', '--color=n', 'virtual/target-os']
    emerge = cros_build_lib.RunCommand(args, print_cmd=debug,
                                       redirect_stdout=True).output.splitlines()
    # Another option which we've decided not to use, is bdeps=n.  This outputs
    # just the packages we ship, but does not packages that were used to build
    # them, including a package like flex which generates a .a that is included
    # and shipped in ChromeOS.
    # We've decided to credit build packages, even if we're not legally required
    # to (it's always nice to do), and that way we get corner case packages like
    # flex. This is why we use bdep=y and not bdep=n.

    packages = []
    # [binary   R    ] x11-libs/libva-1.1.1 to /build/x86-alex/
    pkg_rgx = re.compile(r'\[[^]]+R[^]]+\] (.+) to /build/.*')
    # If we match something else without the 'R' like
    # [binary     U  ] chromeos-base/pepper-flash-13.0.0.133-r1 [12.0.0.77-r1]
    # this is bad and we should die on this.
    pkg_rgx2 = re.compile(r'(\[[^]]+\] .+) to /build/.*')
    for line in emerge:
      match = pkg_rgx.search(line)
      match2 = pkg_rgx2.search(line)
      if match:
        packages.append(match.group(1))
      elif match2:
        # Fix: the original passed two separate arguments to AssertionError
        # (a comma where string formatting was intended), which produced a
        # tuple-shaped message. Build a single formatted message instead.
        raise AssertionError(
            'Package incorrectly installed, try eclean-%s\n%s' %
            (board, match2.group(1)))

  return packages
+
+
+def _HandleIllegalXMLChars(text):
+  """Handles illegal XML Characters.
+
+  XML 1.0 acceptable character range:
+  Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | \
+           [#x10000-#x10FFFF]
+
+  This function finds all illegal characters in the text and filters
+  out all whitelisted characters (e.g. ^L).
+
+  Args:
+    text: text to examine.
+
+  Returns:
+    Filtered |text| and a list of non-whitelisted illegal characters found.
+  """
+  whitelist_re = re.compile(u'[\x0c]')
+  text = whitelist_re.sub('', text)
+  # illegal_chars_re includes all illegal characters (whitelisted or
+  # not), so we can expand the whitelist without modifying this line.
+  illegal_chars_re = re.compile(
+      u'[\x00-\x08\x0b\x0c\x0e-\x1F\uD800-\uDFFF\uFFFE\uFFFF]')
+  return (text, illegal_chars_re.findall(text))
+
+
def ReadUnknownEncodedFile(file_path, logging_text=None):
  """Read a file of unknown encoding (UTF-8 or latin) by trying in sequence.

  Args:
    file_path: what to read.
    logging_text: what to display for logging depending on file read.

  Returns:
    File content, possibly converted from latin1 to UTF-8.

  Raises:
    Assertion error: if non-whitelisted illegal XML characters
      are found in the file.
    ValueError: returned if we get invalid XML.
  """
  # First attempt UTF-8; fall back to latin1, which accepts any byte value.
  try:
    with codecs.open(file_path, encoding='utf-8') as fp:
      file_txt = fp.read()
    if logging_text:
      logging.info('%s %s (UTF-8)', logging_text, file_path)
  except UnicodeDecodeError:
    with codecs.open(file_path, encoding='latin1') as fp:
      file_txt = fp.read()
    if logging_text:
      logging.info('%s %s (latin1)', logging_text, file_path)

  file_txt, bad_chars = _HandleIllegalXMLChars(file_txt)
  if bad_chars:
    raise ValueError('Illegal XML characters %s found in %s.' %
                     (bad_chars, file_path))

  return file_txt
+
+
def _BuildInfo(build_info_path, filename):
  """Fetch contents of a file from portage build_info directory.

  Portage maintains a build_info directory that exists both during the process
  of emerging an ebuild, and (in a different location) after the ebuild has
  been emerged.

  Various useful data files exist there like:
   'CATEGORY', 'PF', 'SIZE', 'HOMEPAGE', 'LICENSE'

  Args:
    build_info_path: Path to the build_info directory to read from.
    filename: Name of the file to read.

  Returns:
    Contents of the file as a string, or "".
  """
  path = os.path.join(build_info_path, filename)

  # Buildinfo properties we read are in US-ASCII, not Unicode.  Some
  # properties like HOMEPAGE may be absent; treat those as empty.
  try:
    return osutils.ReadFile(path).rstrip()
  except IOError:
    return ""
+
+
def HookPackageProcess(pkg_build_path):
  """Different entry point to populate a packageinfo.

  This is called instead of LoadPackageInfo when called by a package build.

  Args:
    pkg_build_path: package build directory being processed by emerge.
  """
  build_info_dir = os.path.join(pkg_build_path, 'build-info')

  # Reconstruct the package's fullnamerev from the portage buildinfo files.
  category = _BuildInfo(build_info_dir, 'CATEGORY')
  pf = _BuildInfo(build_info_dir, 'PF')
  fullnamerev = '%s/%s' % (category, pf)
  logging.debug('Computed package name %s from %s',
                fullnamerev, pkg_build_path)

  pkg = PackageInfo(None, fullnamerev)
  pkg.GetLicenses(build_info_dir, os.path.join(pkg_build_path, 'work'))
  pkg.SaveLicenseDump(os.path.join(build_info_dir, 'license.yaml'))
diff --git a/mobmonitor/README b/mobmonitor/README
new file mode 100644
index 0000000..e5ddcb9
--- /dev/null
+++ b/mobmonitor/README
@@ -0,0 +1,260 @@
+----------------------------------
+----------------------------------
+Details on using the Mob* Monitor:
+----------------------------------
+----------------------------------
+
+
+Overview:
+---------
+
+The Mob* Monitor provides a way to monitor the health state of a particular
+service. Service health is defined by a set of satisfiable checks, called
+health checks.
+
+The Mob* Monitor executes health checks that are written for a particular
+service and collects information on the health state. Users can query
+the health state of a service via an RPC/RESTful interface.
+
+When a service is unhealthy, the Mob* Monitor can be requested to execute
+repair actions that are defined in the service's check file package.
+
+
+Check Files and Check File Packages:
+------------------------------------
+
+Check file packages are located in the check file directory. Each 'package'
+is a Python package.
+
+The layout of the checkfile directory is as follows:
+
+checkfile_directory:
+    service1:
+        __init__.py
+        service_actions.py
+        more_service_actions.py
+        easy_check.py
+        harder_check.py
+        ...
+    service2:
+        __init__.py
+        service2_actions.py
+        service_check.py
+        ....
+    .
+    .
+    .
+    serviceN:
+        ...
+
+Each service check file package should be flat, that is, no subdirectories will
+be walked to collect health checks.
+
+Check files define health checks and must end in '_check.py'. The Mob* Monitor
+does not enforce how or where in the package you define repair actions.
+
+
+Health Checks:
+--------------
+
+Health checks are the basic conditions that altogether define whether or not a
+service is healthy from the perspective of the Mob* Monitor.
+
+A health check is a python object that implements the following interface:
+
+  - Check()
+
+    Tests the health condition.
+
+    -> Returns 0 if the health check was completely satisfied.
+    -> Returns a positive integer if the check was successful, but could
+       have been better.
+    -> Returns a negative integer if the check was unsuccessful.
+
+  - Diagnose(errcode)
+
+    Maps an error code to a description and a set of actions that can be
+    used to repair or improve the condition.
+
+    -> Returns a tuple of (description, actions) where:
+         description is a string describing the state.
+         actions is a list of repair functions.
+
+
+Health checks can (optionally) also define the following attributes:
+
+  - CHECK_INTERVAL_SEC: Defines the interval (in seconds) between health check
+                        executions. This defaults to 10 seconds if not defined.
+
+
+A check file may contain as many health checks as the writer feels is
+necessary. There is no restriction on what else may be included in the
+check file. The writer is free to write many health check files.
+
+
+Repair Actions:
+---------------
+
+Repair actions are used to repair or improve the health state of a service. The
+appropriate repair actions to take are returned in a health check's Diagnose
+method.
+
+Repair actions are functions and can be defined anywhere in the service check
+package.
+
+It is suggested that repair actions are defined in files ending in 'actions.py'
+which are imported by health check files.
+
+
+Health Check and Action Example:
+--------------------------------
+
+Suppose we have a service named 'myservice'. The check file package should have
+the following layout:
+
+checkdir:
+    myservice:
+        __init__.py
+        myservice_check.py
+        repair_actions.py
+
+
+The 'myservice_check.py' file should look like the following:
+
+    from myservice import repair_actions
+
+    def IsKeyFileInstalled():
+      """Checks if the key file is installed.
+
+      Returns:
+        True if USB key is plugged in, False otherwise.
+      """
+      ....
+      return result
+
+
+    class MyHealthCheck(object):
+
+      CHECK_INTERVAL_SEC = 10
+
+      def Check(self):
+        if IsKeyFileInstalled():
+          return 0
+
+        return -1
+
+      def Diagnose(self, errcode):
+        if -1 == errcode:
+          return ('Key file is missing.', [repair_actions.InstallKeyFile])
+
+        return ('Unknown failure.', [])
+
+
+And the 'repair_actions.py' file should look like:
+
+
+    def InstallKeyFile(**kwargs):
+      """Installs the key file."""
+      ...
+
+
+
+Communicating with the Mob* Monitor:
+------------------------------------
+
+A small RPC library is provided for communicating with the Mob* Monitor
+which can be found in the module 'chromite.mobmonitor.rpc.rpc'.
+
+Communication is done via the RpcExecutor class defined in the above module.
+The RPC interface provided by RpcExecutor is as follows:
+
+  - GetServiceList()
+
+    Returns a list of the names of the services that are being monitored.
+    There will be one name for each recognized service check directory.
+
+  - GetStatus(service)
+
+    Returns the health status of a service with name |service|. The |service|
+    name may be omitted, in this case, the status of every service is
+    retrieved.
+
+    A service's health status is a named tuple with the following fields:
+      - service: The name of the service.
+      - health: A boolean as to whether or not the service is healthy.
+      - healthchecks: A list of healthchecks that did not succeed. Referring
+          back to the 'Health Checks' section above, a check writer can
+          specify return codes for health checks that tell the monitor that
+          the health check result was satisfactory, but not optimal. These
+          quasi-healthy checks will also be listed here.
+
+    A healthcheck returned in a service's health status is a named tuple with
+    the following fields:
+      - name: The name of the health check.
+      - health: A boolean as to whether or not the health check succeeded.
+      - description: A description of the health check's state.
+      - actions: A list of the names of actions that may be taken to repair or
+          improve this health condition.
+
+    A service is unhealthy if at least one health check failed. A failed health
+    check will have its health field marked as False.
+
+    A healthy service will display its health field as True and will not list
+    any health checks.
+
+    A service may also be quasi-healthy. In this case, the health field will
+    be True, but health conditions that could be improved are listed.
+
+  - RepairService(service, action, args, kwargs)
+
+    Request the Mob* Monitor to execute a repair action for the specified
+    service. |args| is a list of positional arguments and |kwargs| is a
+    dict of keyword arguments.
+
+    The monitor will return the status of the service post repair execution.
+
+
+Using the RPC library:
+
+    from chromite.mobmonitor.rpc import rpc
+
+    def testStatus():
+      # RpcExecutor takes optional keyword args for |host| and |port|.
+      # They default to 'localhost' and 9991 respectively.
+      rpcexec = rpc.RpcExecutor()
+      service_list = rpcexec.GetServiceList()
+      for service in service_list:
+        print(rpcexec.GetStatus(service))
+      rpcexec.RepairService('someservice', 'someaction', [1, 2], {'z': 3})
+
+
+Using the mobmoncli:
+
+  A command line interface is provided for communicating with the Mob* Monitor.
+  The mobmoncli script is installed and part of the PATH on moblabs.
+  It provides the same interface discussed above for the RpcExecutor.
+
+  See chromite.mobmonitor.scripts.mobmoncli for a list of options that can be
+  passed.
+
+  Usage examples:
+
+    Getting a list of every service:
+      $ mobmoncli GetServiceList
+
+    Getting every service status:
+      $ mobmoncli GetStatus
+
+    Getting a particular service status
+      $ mobmoncli GetStatus -s myservice
+
+    Repairing a service:
+      $ mobmoncli RepairService -s myservice -a myaction
+
+    Passing arguments to a repair action:
+      $ mobmoncli RepairService -s myservice -a myotheraction -i 1,2,a=3
+
+      The inputs are a comma-separated list. Each item in the list may or
+      may not be equal-sign separated. If they are equal-sign separated,
+      that item is treated as a keyword argument, else as a positional
+      argument to the repair function.
diff --git a/mobmonitor/__init__.py b/mobmonitor/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/mobmonitor/__init__.py
diff --git a/mobmonitor/checkfile/__init__.py b/mobmonitor/checkfile/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/mobmonitor/checkfile/__init__.py
diff --git a/mobmonitor/checkfile/manager.py b/mobmonitor/checkfile/manager.py
new file mode 100644
index 0000000..3b09cf8
--- /dev/null
+++ b/mobmonitor/checkfile/manager.py
@@ -0,0 +1,506 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Store and manage Mob* Monitor checkfiles."""
+
+from __future__ import print_function
+
+import cherrypy
+import collections
+import imp
+import inspect
+import os
+import time
+
+from cherrypy.process import plugins
+from chromite.lib import cros_logging as logging
+
+
LOGGER = logging.getLogger(__name__)


# Execution states recorded for a health check run (see the
# service_check_results bookkeeping in CheckFileManager).
HCEXECUTION_IN_PROGRESS = 0
HCEXECUTION_COMPLETED = 1

# Check() return value meaning perfectly healthy; positive return values
# are quasi-healthy, negative ones unhealthy (see DetermineHealthcheckStatus).
HCSTATUS_HEALTHY = 0

IN_PROGRESS_DESCRIPTION = 'Health check is currently executing.'
NULL_DESCRIPTION = ''
# NOTE(review): shared mutable list used as the "no actions" sentinel in
# HEALTHCHECK_STATUS tuples; callers must not mutate it in place.
EMPTY_ACTIONS = []
HEALTHCHECK_STATUS = collections.namedtuple('healthcheck_status',
                                            ['name', 'health', 'description',
                                             'actions'])

# Callable attributes an object must expose to count as a health check.
HEALTH_CHECK_METHODS = ['Check', 'Diagnose']

# Default values applied to health checks that omit these attributes.
CHECK_INTERVAL_DEFAULT_SEC = 10
HEALTH_CHECK_DEFAULT_ATTRIBUTES = {
    'CHECK_INTERVAL_SEC': CHECK_INTERVAL_DEFAULT_SEC}

# Where checkfile packages live, and the filename suffix identifying
# check modules within a package.
CHECKFILE_DIR = '/etc/mobmonitor/checkfiles/'
CHECKFILE_ENDING = '_check.py'

SERVICE_STATUS = collections.namedtuple('service_status',
                                        ['service', 'health', 'healthchecks'])

ACTION_INFO = collections.namedtuple('action_info',
                                     ['action', 'info', 'args', 'kwargs'])
+
+
class CollectionError(Exception):
  """Raised when an error occurs during checkfile collection."""
+
+
def MapHealthcheckStatusToDict(hcstatus):
  """Convert a manager.HEALTHCHECK_STATUS named tuple to a dictionary.

  Args:
    hcstatus: A HEALTHCHECK_STATUS object.

  Returns:
    A dictionary version of the HEALTHCHECK_STATUS object.
  """
  # Actions are function objects; expose only their names.
  action_names = [action.__name__ for action in hcstatus.actions]
  return {'name': hcstatus.name,
          'health': hcstatus.health,
          'description': hcstatus.description,
          'actions': action_names}
+
+
def MapServiceStatusToDict(status):
  """Convert a manager.SERVICE_STATUS named tuple to a dictionary.

  Args:
    status: A SERVICE_STATUS object.

  Returns:
    A dictionary version of the SERVICE_STATUS object.
  """
  return {'service': status.service,
          'health': status.health,
          'healthchecks': [MapHealthcheckStatusToDict(hc)
                           for hc in status.healthchecks]}
+
+
def MapActionInfoToDict(actioninfo):
  """Convert a manager.ACTION_INFO named tuple to a dictionary.

  Args:
    actioninfo: An ACTION_INFO object.

  Returns:
    A dictionary version of the ACTION_INFO object.
  """
  return {'action': actioninfo.action,
          'info': actioninfo.info,
          'args': actioninfo.args,
          'kwargs': actioninfo.kwargs}
+
+
def isHealthcheckHealthy(hcstatus):
  """Test if a health check is perfectly healthy.

  Args:
    hcstatus: A HEALTHCHECK_STATUS named tuple.

  Returns:
    True if the hcstatus is perfectly healthy, False otherwise.
  """
  is_status = isinstance(hcstatus, HEALTHCHECK_STATUS)
  # Perfectly healthy: health flag set, nothing left to describe or repair.
  return (is_status and hcstatus.health and
          not hcstatus.description and not hcstatus.actions)
+
+
def isServiceHealthy(status):
  """Test if a service is perfectly healthy.

  Args:
    status: A SERVICE_STATUS named tuple.

  Returns:
    True if the status is perfectly healthy, False otherwise.
  """
  is_status = isinstance(status, SERVICE_STATUS)
  # Perfectly healthy: health flag set and no failing/quasi-healthy checks.
  return is_status and status.health and not status.healthchecks
+
+
def DetermineHealthcheckStatus(hcname, healthcheck):
  """Run a health check and package its outcome.

  Args:
    hcname: A string. The name of the health check.
    healthcheck: A healthcheck object.

  Returns:
    A HEALTHCHECK_STATUS named tuple.
  """
  # Both Check() and Diagnose() stay inside the try: checkfiles may contain
  # all kinds of errors, and we do not want the Mob* Monitor to fail, so
  # catch generic exceptions from either call.
  try:
    result = healthcheck.Check()
    # Non-negative results are considered healthy (positive means the
    # condition could still be improved).
    health = result >= HCSTATUS_HEALTHY

    if result == HCSTATUS_HEALTHY:
      description, actions = NULL_DESCRIPTION, EMPTY_ACTIONS
    else:
      description, actions = healthcheck.Diagnose(result)

    return HEALTHCHECK_STATUS(hcname, health, description, actions)

  except Exception as e:
    LOGGER.error('Failed to execute health check %s: %s', hcname, e,
                 exc_info=True)
    return HEALTHCHECK_STATUS(hcname, False,
                              'Failed to execute the health check.'
                              ' Please review the health check file.',
                              EMPTY_ACTIONS)
+
+
def IsHealthCheck(obj):
  """A sanity check to see if a class implements the health check interface.

  Args:
    obj: A Python object.

  Returns:
    True if obj has callable 'Check' and 'Diagnose' attributes (the names
    listed in HEALTH_CHECK_METHODS). False otherwise.
  """
  return all(callable(getattr(obj, m, None)) for m in HEALTH_CHECK_METHODS)
+
+
def ApplyHealthCheckAttributes(obj):
  """Set default values for health check attributes.

  Args:
    obj: A Python object.

  Returns:
    The same object, with any missing attributes set to their defaults.
  """
  # Only fill in attributes the health check did not define itself.
  for attr in HEALTH_CHECK_DEFAULT_ATTRIBUTES:
    if not hasattr(obj, attr):
      setattr(obj, attr, HEALTH_CHECK_DEFAULT_ATTRIBUTES[attr])

  return obj
+
+
def ImportFile(service, modulepath):
  """Import and collect health checks from the given module.

  Args:
    service: The name of the service this check module belongs to and
      for which the objects to import belong to.
    modulepath: The path of the module to import.

  Returns:
    A tuple containing the healthchecks defined in the module and the
    time of the module's last modification.

  Raises:
    SyntaxError may be raised by imp.load_source if the python file
      specified by modulepath has errors.
  """
  # Derive the module name from the service and file name: service
  # 'testservice' with path '/path/to/checkdir/testservice/test_check.py'
  # becomes 'testservice.test_check'.
  basename = os.path.basename(os.path.splitext(modulepath)[0])
  module = imp.load_source('%s.%s' % (service, basename), modulepath)

  healthchecks = []
  for name in dir(module):
    candidate = getattr(module, name)
    if inspect.isclass(candidate) and IsHealthCheck(candidate):
      healthchecks.append(ApplyHealthCheckAttributes(candidate()))

  return healthchecks, os.path.getmtime(modulepath)
+
+
+class CheckFileManager(object):
+  """Manage the health checks that are associated with each service."""
+
  def __init__(self, interval_sec=1, checkdir=CHECKFILE_DIR):
    """Set up checkfile bookkeeping for all monitored services.

    Args:
      interval_sec: Interval in seconds (presumably between checkfile
        collection/execution cycles driven by the monitor loop, which is
        outside this view -- confirm at call site).
      checkdir: Directory containing the per-service checkfile packages.

    Raises:
      CollectionError: If |checkdir| does not exist.
    """
    if not os.path.exists(checkdir):
      raise CollectionError('Check directory does not exist: %s' % checkdir)

    self.interval_sec = interval_sec
    self.checkdir = checkdir
    # Set externally after construction (not visible here -- confirm at
    # call site).
    self.monitor = None

    # service_checks is a dict of the following form:
    #
    #   {service_name: {hcname: (mtime, healthcheck)}}
    #
    # service_name: A string and is the name of the service.
    # hcname: A string and is the name of the health check.
    # mtime: The epoch time of the last modification of the check file.
    # healthcheck: The health check object.
    self.service_checks = {}

    # service_check_results is a dict of the following form:
    #
    #   {service_name: {hcname: (execution_status, exec_time,
    #                            healthcheck_status)}}
    #
    # service_name: As above.
    # hcname: As above.
    # execution_status: An integer. This will be one of the HCEXECUTION
    #   variables defined at the top of the file.
    # exec_time: The time of last execution.
    # healthcheck_status: A HEALTHCHECK_STATUS named tuple.
    self.service_check_results = {}

    # service_states is dict of the following form:
    #
    #   {service_name: service_status}
    #
    # service_name: As above.
    # service_status: A SERVICE_STATUS named tuple.
    self.service_states = {}
+
+  def Update(self, service, objects, mtime):
+    """Update the healthcheck objects for each service.
+
+    Args:
+      service: The service that the healthcheck corresponds to.
+      objects: A list of healthcheck objects.
+      mtime: The time of last modification of the healthcheck module.
+    """
+    for obj in objects:
+      name = obj.__class__.__name__
+      self.service_checks.setdefault(service, {})
+
+      stored_mtime, _ = self.service_checks[service].get(name, (None, None))
+      if stored_mtime is None or mtime > stored_mtime:
+        self.service_checks[service][name] = (mtime, obj)
+        LOGGER.info('Updated healthcheck "%s" for service "%s" at time "%s"',
+                    name, service, mtime)
+
+  def Execute(self, force=False):
+    """Execute all health checks and collect healthcheck status information.
+
+    Args:
+      force: Ignore the health check interval and execute the health checks.
+    """
+    for service, healthchecks in self.service_checks.iteritems():
+      # Set default result dictionary if this is a new service.
+      self.service_check_results.setdefault(service, {})
+
+      for hcname, (_mtime, healthcheck) in healthchecks.iteritems():
+        # Update if the record is stale or non-existent.
+        etime = time.time()
+        _, exec_time, status = self.service_check_results[service].get(
+            hcname, (None, None, None))
+
+        if exec_time is None or force or (
+            etime > healthcheck.CHECK_INTERVAL_SEC + exec_time):
+          # Record the execution status.
+          status = HEALTHCHECK_STATUS(hcname, True, IN_PROGRESS_DESCRIPTION,
+                                      EMPTY_ACTIONS)
+          self.service_check_results[service][hcname] = (
+              HCEXECUTION_IN_PROGRESS, etime, status)
+
+          # TODO (msartori): Implement crbug.com/501959.
+          #   This bug deals with handling slow health checks.
+
+          status = DetermineHealthcheckStatus(hcname, healthcheck)
+
+          # Update the execution and healthcheck status.
+          self.service_check_results[service][hcname] = (
+              HCEXECUTION_COMPLETED, etime, status)
+
+  def ConsolidateServiceStates(self):
+    """Consolidate health check results and determine service health states."""
+    for service, results in self.service_check_results.iteritems():
+      self.service_states.setdefault(service, {})
+
+      quasi_or_unhealthy_checks = []
+      for (_exec_status, _exec_stime, hcstatus) in results.itervalues():
+        if not isHealthcheckHealthy(hcstatus):
+          quasi_or_unhealthy_checks.append(hcstatus)
+
+      health = all([hc.health for hc in quasi_or_unhealthy_checks])
+
+      self.service_states[service] = SERVICE_STATUS(service, health,
+                                                    quasi_or_unhealthy_checks)
+
+  def CollectionExecutionCallback(self):
+    """Callback for cherrypy Monitor. Collect checkfiles from the checkdir."""
+    # Find all service check file packages.
+    _, service_dirs, _ = next(os.walk(self.checkdir))
+    for service_name in service_dirs:
+      service_package = os.path.join(self.checkdir, service_name)
+
+      # Import the package.
+      try:
+        file_, path, desc = imp.find_module(service_name, [self.checkdir])
+        imp.load_module(service_name, file_, path, desc)
+      except Exception as e:
+        LOGGER.warning('Failed to import package %s: %s', service_name, e,
+                       exc_info=True)
+        continue
+
+      # Collect all of the service's health checks.
+      for file_ in os.listdir(service_package):
+        filepath = os.path.join(service_package, file_)
+        if os.path.isfile(filepath) and file_.endswith(CHECKFILE_ENDING):
+          try:
+            healthchecks, mtime = ImportFile(service_name, filepath)
+            self.Update(service_name, healthchecks, mtime)
+          except Exception as e:
+            LOGGER.warning('Failed to import module %s.%s: %s',
+                           service_name, file_[:-3], e,
+                           exc_info=True)
+
+    self.Execute()
+    self.ConsolidateServiceStates()
+
+  def StartCollectionExecution(self):
+    # The Monitor frequency is mis-named. It's the time between
+    # each callback execution.
+    self.monitor = plugins.Monitor(cherrypy.engine,
+                                   self.CollectionExecutionCallback,
+                                   frequency=self.interval_sec)
+    self.monitor.subscribe()
+
+  def GetServiceList(self):
+    """Return a list of the monitored services.
+
+    Returns:
+      A list of the services for which we have checks defined.
+    """
+    return self.service_states.keys()
+
+  def GetStatus(self, service):
+    """Query the current health state of the service.
+
+    Args:
+      service: The name of service that we are querying the health state of.
+
+    Returns:
+      A SERVICE_STATUS named tuple which has the following fields:
+        service: A string. The service name.
+        health: A boolean. True if all checks passed, False if not.
+        healthchecks: A list of failed or quasi-healthy checks for the service.
+          Each member of the list is a HEALTHCHECK_STATUS and details the
+          appropriate repair actions for that particular health check.
+
+      If service is not specified, a list of all service states is returned.
+    """
+    if not service:
+      return self.service_states.values()
+
+    return self.service_states.get(service, SERVICE_STATUS(service, False, []))
+
+  def ActionInfo(self, service, healthcheck, action):
+    """Describes a currently valid action for the given service and healthcheck.
+
+    An action is valid if the following hold:
+      The |service| is recognized and is in an unhealthy or quasi-healthy state.
+      The |healthcheck| is recognized and is in an unhealthy or quasi-healthy
+        state and it belongs to |service|.
+      The |action| is one specified as a suitable repair action by the
+        Diagnose method of some non-healthy healthcheck of |service|.
+
+    Args:
+      service: A string. The name of a service being monitored.
+      healthcheck: A string. The name of a healthcheck belonging to |service|.
+      action: A string. The name of an action returned by some healthcheck's
+        Diagnose method.
+
+    Returns:
+      An ACTION_INFO named tuple which has the following fields:
+        action: A string. The given |action| string.
+        info: A string. The docstring of |action|.
+        args: A list of strings. The positional arguments for |action|.
+        kwargs: A dictionary representing the default keyword arguments
+          for |action|. The keys will be the kwarg names and the values
+          will be the default arguments.
+    """
+    status = self.service_states.get(service, None)
+    if not status:
+      return ACTION_INFO(action, 'Service not recognized.', [], {})
+    elif isServiceHealthy(status):
+      return ACTION_INFO(action, 'Service is healthy.', [], {})
+
+    hc = [x for x in status.healthchecks if x.name == healthcheck]
+    if not hc:
+      return ACTION_INFO(action, 'Healthcheck not recognized.', [], {})
+    hc = hc[0]
+    if isHealthcheckHealthy(hc):
+      return ACTION_INFO(action, 'Healthcheck is healthy.', [], {})
+
+    func = None
+    for a in hc.actions:
+      if a.__name__ == action:
+        func = a
+        break
+
+    if func is None:
+      return ACTION_INFO(action, 'Action not recognized.', [], {})
+
+    # Collect information on the repair action.
+    argspec = inspect.getargspec(func)
+    func_args = argspec.args or []
+    func_args = [x for x in func_args if x not in ['self', 'cls']]
+    func_defaults = argspec.defaults or {}
+
+    num_args = len(func_args)
+    num_defaults = len(func_defaults)
+
+    args = func_args[:num_args-num_defaults]
+    kwargs = dict(zip(func_args[num_args-num_defaults:], func_defaults))
+
+    info = func.__doc__
+
+    return ACTION_INFO(action, info, args, kwargs)
+
+  def RepairService(self, service, healthcheck, action, args, kwargs):
+    """Execute the repair action on the specified service.
+
+    Args:
+      service: The name of the service to be repaired.
+      healthcheck: The particular healthcheck we are repairing.
+      action: The name of the action to execute.
+      args: A list of positional arguments for the given repair action.
+      kwargs: A dictionary of keyword arguments for the given repair action.
+
+    Returns:
+      The same return value of GetStatus(service).
+    """
+    # No repair occurs if the service is not specified or is perfectly healthy.
+    status = self.service_states.get(service, None)
+    if status is None:
+      return SERVICE_STATUS(service, False, [])
+    elif isServiceHealthy(status):
+      return self.GetStatus(service)
+
+    # No repair occurs if the healthcheck is not specifed or perfectly healthy.
+    hc = [x for x in status.healthchecks if x.name == healthcheck]
+    if not hc or isHealthcheckHealthy(hc[0]):
+      return SERVICE_STATUS(healthcheck, False, [])
+    hc = hc[0]
+
+    # Get the repair action from the healthcheck.
+    repair_func = None
+    for a in hc.actions:
+      if a.__name__ == action:
+        repair_func = a
+        break
+
+    # TODO (msartori): Implement crbug.com/503373
+    if repair_func is not None:
+      try:
+        repair_func(*args, **kwargs)
+
+        # Update the service status and return.
+        # While actions are 'service-centric' from the perspective of the
+        # monitor, actions may have system-wide effect, so we must re-check
+        # all services.
+        self.Execute(force=True)
+        self.ConsolidateServiceStates()
+      except Exception, e:
+        LOGGER.error('Failed to execute the repair action "%s"'
+                     ' for service "%s": %s', action, service, e,
+                     exc_info=True)
+    else:
+      LOGGER.error('Failed to retrieve a suitable repair function for'
+                   ' service="%s" and action="%s".', service, action,
+                   exc_info=True)
+
+    return self.GetStatus(service)
diff --git a/mobmonitor/checkfile/manager_unittest b/mobmonitor/checkfile/manager_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/mobmonitor/checkfile/manager_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/mobmonitor/checkfile/manager_unittest.py b/mobmonitor/checkfile/manager_unittest.py
new file mode 100644
index 0000000..d666bb9
--- /dev/null
+++ b/mobmonitor/checkfile/manager_unittest.py
@@ -0,0 +1,1014 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for Mob* Monitor checkfile manager."""
+
+from __future__ import print_function
+
+import imp
+import mock
+import os
+import subprocess
+import time
+import threading
+
+from cherrypy.process import plugins
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.mobmonitor.checkfile import manager
+from chromite.mobmonitor.rpc import rpc
+
+# Test health check and related attributes
class TestHealthCheck(object):
  """Minimal healthy check used as a test double."""

  def Check(self):
    """Always report a healthy (zero) error code."""
    return 0

  def Diagnose(self, _errcode):
    """Report an unknown error with no repair actions."""
    return ('Unknown Error.', [])
+
+
class TestHealthCheckHasAttributes(object):
  """Healthy test double that declares its own check interval."""

  # Explicit interval so tests can verify defaults are not applied.
  CHECK_INTERVAL_SEC = 10

  def Check(self):
    """Always report a healthy (zero) error code."""
    return 0

  def Diagnose(self, _errcode):
    """Report an unknown error with no repair actions."""
    return ('Unknown Error.', [])
+
+
class TestHealthCheckUnhealthy(object):
  """Test double that starts unhealthy and can be repaired."""

  def __init__(self):
    # Error code reported by Check; -1 until Repair resets it to 0.
    self.x = -1

  def Check(self):
    """Return the current error code."""
    return self.x

  def Diagnose(self, errcode):
    """Map the failing error code to its repair action."""
    if errcode != -1:
      return ('Unknown Error.', [])
    return ('Stub Error.', [self.Repair])

  def Repair(self):
    self.x = 0
+
+
class TestHealthCheckMultipleActions(object):
  """Unhealthy test double exposing actions with varied signatures."""

  def __init__(self):
    # Error code reported by Check; any action resets it to 0.
    self.x = -1

  def Check(self):
    """Return the current error code."""
    return self.x

  def Diagnose(self, errcode):
    """Map the failing error code to the full set of repair actions."""
    if errcode != -1:
      return ('Unknown Error.', [])
    return ('Stub Error.', [self.NoParams, self.PositionalParams,
                            self.DefaultParams, self.MixedParams])

  def NoParams(self):
    """Action taking no parameters."""
    self.x = 0

  # pylint: disable=unused-argument
  def PositionalParams(self, x, y, z):
    """Action taking only positional parameters."""
    self.x = 0

  def DefaultParams(self, x=1, y=2, z=3):
    """Action taking only keyword parameters."""
    self.x = 0

  def MixedParams(self, x, y, z=1):
    """Action taking both positional and keyword parameters."""
    self.x = 0
  # pylint: enable=unused-argument
+
+
class TestHealthCheckQuasihealthy(object):
  """Test double reporting a non-fatal (quasi-healthy) error code."""

  def Check(self):
    """Always report a positive, non-fatal error code."""
    return 1

  def Diagnose(self, errcode):
    """Map the quasi-healthy error code to its repair action."""
    if errcode != 1:
      return ('Unknown Error.', [])
    return ('Stub Error.', [self.RepairStub])

  def RepairStub(self):
    """Repair action that does nothing."""
+
+
class TestHealthCheckBroken(object):
  """Test double whose interface methods always raise."""

  def Check(self):
    """Raise instead of returning an error code."""
    raise ValueError()

  def Diagnose(self, _errcode):
    """Raise instead of returning a (description, actions) pair."""
    raise ValueError()
+
+
def TestAction():
  """Stub repair action; always reports success."""
  return True
+
+
# Fixed values shared by the test cases below.
TEST_SERVICE_NAME = 'test-service'
TEST_MTIME = 100  # Fake checkfile modification time.
TEST_EXEC_TIME = 400  # Fake health check execution time.
CHECKDIR = '.'  # Checkdir argument; '.' always exists.
+
# Strings that are used to mock actual check modules.

# A module defining three valid health checks.
CHECKFILE_MANY_SIMPLE = '''
SERVICE = 'test-service'

class MyHealthCheck2(object):
  def Check(self):
    return 0

  def Diagnose(self, errcode):
    return ('Unknown error.', [])

class MyHealthCheck3(object):
  def Check(self):
    return 0

  def Diagnose(self, errcode):
    return ('Unknown error.', [])

class MyHealthCheck4(object):
  def Check(self):
    return 0

  def Diagnose(self, errcode):
    return ('Unknown error.', [])
'''

# A module defining two valid health checks plus one class that lacks a
# Check method and so must be skipped during import.
CHECKFILE_MANY_SIMPLE_ONE_BAD = '''
SERVICE = 'test-service'

class MyHealthCheck(object):
  def Check(self):
    return 0

  def Diagnose(self, errcode):
    return ('Unknown error.', [])

class NotAHealthCheck(object):
  def Diagnose(self, errcode):
    return ('Unknown error.', [])

class MyHealthCheck2(object):
  def Check(self):
    return 0

  def Diagnose(self, errcode):
    return ('Unknown error.', [])
'''

# Modules defining no health checks at all.
NOT_A_CHECKFILE = '''
class NotAHealthCheck(object):
  def NotCheckNorDiagnose(self):
    return -1
'''

ANOTHER_NOT_A_CHECKFILE = '''
class AnotherNotAHealthCheck(object):
  def AnotherNotCheckNorDiagnose(self):
    return -2
'''

# A module defining standalone repair actions only.
ACTION_FILE = '''
def TestAction():
  return True

def AnotherAction():
  return False
'''
+
+
class RunCommand(threading.Thread):
  """Helper class for executing the Mob* Monitor with a timeout."""

  def __init__(self, cmd, timeout):
    """Set up the thread without starting the subprocess.

    Args:
      cmd: Command (list form) handed to subprocess.Popen by run().
      timeout: Seconds to wait on each join attempt in Stop().
    """
    threading.Thread.__init__(self)
    self.cmd = cmd
    self.timeout = timeout
    self.p = None

    # Filled in by run() after the process exits. proc_stderr stays None
    # because run() redirects stderr into stdout.
    self.proc_stdout = None
    self.proc_stderr = None

  def run(self):
    """Thread body: launch the command and wait for it to finish."""
    self.p = subprocess.Popen(self.cmd, stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT)
    self.proc_stdout, self.proc_stderr = self.p.communicate()

  def Stop(self):
    """Stop the command, escalating terminate -> kill, and return its output."""
    self.join(self.timeout)

    if self.is_alive():
      self.p.terminate()
      self.join(self.timeout)

      if self.is_alive():
        self.p.kill()
        self.join(self.timeout)

    return self.proc_stdout
+
+
class CheckFileManagerHelperTest(cros_test_lib.MockTestCase):
  """Unittests for CheckFileManager helper functions."""

  def testMapHealthcheckStatusToDict(self):
    """Test mapping a manager.HEALTHCHECK_STATUS to a dict."""
    def _func():
      pass

    status = manager.HEALTHCHECK_STATUS('test', False, 'desc', [_func])
    expect = {'name': 'test', 'health': False, 'description': 'desc',
              'actions': ['_func']}
    self.assertEquals(expect, manager.MapHealthcheckStatusToDict(status))

  def testMapServiceStatusToDict(self):
    """Test mapping a manager.SERVICE_STATUS to a dict."""
    def _func():
      pass

    hcstatus = manager.HEALTHCHECK_STATUS('test', False, 'desc', [_func])
    hcexpect = {'name': 'test', 'health': False, 'description': 'desc',
                'actions': ['_func']}
    status = manager.SERVICE_STATUS('test-service', False, [hcstatus])
    expect = {'service': 'test-service', 'health': False,
              'healthchecks': [hcexpect]}
    self.assertEquals(expect, manager.MapServiceStatusToDict(status))

  def testMapActionInfoToDict(self):
    """Test mapping a manager.ACTION_INFO to a dict."""
    actioninfo = manager.ACTION_INFO('test', 'test', [1], {'a': 1})
    expect = {'action': 'test', 'info': 'test', 'args': [1],
              'kwargs': {'a': 1}}
    self.assertEquals(expect, manager.MapActionInfoToDict(actioninfo))

  def testIsHealthcheckHealthy(self):
    """Test checking whether health check statuses are healthy."""
    # Test a healthy health check.
    hch = manager.HEALTHCHECK_STATUS('healthy', True, manager.NULL_DESCRIPTION,
                                     manager.EMPTY_ACTIONS)
    self.assertTrue(manager.isHealthcheckHealthy(hch))

    # Test a quasi-healthy health check.
    hcq = manager.HEALTHCHECK_STATUS('quasi-healthy', True, 'Quasi-Healthy',
                                     ['QuasiAction'])
    self.assertFalse(manager.isHealthcheckHealthy(hcq))

    # Test an unhealthy health check.
    hcu = manager.HEALTHCHECK_STATUS('unhealthy', False, 'Unhealthy',
                                     ['UnhealthyAction'])
    self.assertFalse(manager.isHealthcheckHealthy(hcu))

    # Test an object that is not a health check status.
    s = manager.SERVICE_STATUS('service_status', True, [])
    self.assertFalse(manager.isHealthcheckHealthy(s))

  def testIsServiceHealthy(self):
    """Test checking whether service statuses are healthy."""
    # Define some health check statuses.
    hch = manager.HEALTHCHECK_STATUS('healthy', True, manager.NULL_DESCRIPTION,
                                     manager.EMPTY_ACTIONS)
    hcq = manager.HEALTHCHECK_STATUS('quasi-healthy', True, 'Quasi-Healthy',
                                     ['QuasiAction'])
    hcu = manager.HEALTHCHECK_STATUS('unhealthy', False, 'Unhealthy',
                                     ['UnhealthyAction'])

    # Test a healthy service.
    s = manager.SERVICE_STATUS('healthy', True, [])
    self.assertTrue(manager.isServiceHealthy(s))

    # Test a quasi-healthy service.
    s = manager.SERVICE_STATUS('quasi-healthy', True, [hch, hcq])
    self.assertFalse(manager.isServiceHealthy(s))

    # Test an unhealthy service.
    s = manager.SERVICE_STATUS('unhealthy', False, [hcu])
    self.assertFalse(manager.isServiceHealthy(s))

    # Test an object that is not a service status.
    self.assertFalse(manager.isServiceHealthy(hch))

  def testDetermineHealthcheckStatusHealthy(self):
    """Test DetermineHealthCheckStatus on a healthy check."""
    hcname = TestHealthCheck.__name__
    testhc = TestHealthCheck()
    expected = manager.HEALTHCHECK_STATUS(hcname, True,
                                          manager.NULL_DESCRIPTION,
                                          manager.EMPTY_ACTIONS)
    self.assertEquals(expected,
                      manager.DetermineHealthcheckStatus(hcname, testhc))

  # NOTE(review): method name is missing an 'e' ('Determin') — rename
  #   candidate, kept as-is since test runners may select tests by name.
  def testDeterminHealthcheckStatusUnhealthy(self):
    """Test DetermineHealthcheckStatus on an unhealthy check."""
    hcname = TestHealthCheckUnhealthy.__name__
    testhc = TestHealthCheckUnhealthy()
    desc, actions = testhc.Diagnose(testhc.Check())
    expected = manager.HEALTHCHECK_STATUS(hcname, False, desc, actions)
    self.assertEquals(expected,
                      manager.DetermineHealthcheckStatus(hcname, testhc))

  def testDetermineHealthcheckStatusQuasihealth(self):
    """Test DetermineHealthcheckStatus on a quasi-healthy check."""
    hcname = TestHealthCheckQuasihealthy.__name__
    testhc = TestHealthCheckQuasihealthy()
    desc, actions = testhc.Diagnose(testhc.Check())
    expected = manager.HEALTHCHECK_STATUS(hcname, True, desc, actions)
    self.assertEquals(expected,
                      manager.DetermineHealthcheckStatus(hcname, testhc))

  def testDetermineHealthcheckStatusBrokenCheck(self):
    """Test DetermineHealthcheckStatus raises on a broken health check."""
    hcname = TestHealthCheckBroken.__name__
    testhc = TestHealthCheckBroken()
    result = manager.DetermineHealthcheckStatus(hcname, testhc)

    self.assertEquals(hcname, result.name)
    self.assertFalse(result.health)
    self.assertFalse(result.actions)

  def testIsHealthCheck(self):
    """Test that IsHealthCheck properly asserts the health check interface."""

    class NoAttrs(object):
      """Test health check missing 'check' and 'diagnose' methods."""

    class NoCheckAttr(object):
      """Test health check missing 'check' method."""
      def Diagnose(self, errcode):
        pass

    class NoDiagnoseAttr(object):
      """Test health check missing 'diagnose' method."""
      def Check(self):
        pass

    class GoodHealthCheck(object):
      """Test health check that implements 'check' and 'diagnose' methods."""
      def Check(self):
        pass

      def Diagnose(self, errcode):
        pass

    self.assertFalse(manager.IsHealthCheck(NoAttrs()))
    self.assertFalse(manager.IsHealthCheck(NoCheckAttr()))
    self.assertFalse(manager.IsHealthCheck(NoDiagnoseAttr()))
    self.assertTrue(manager.IsHealthCheck(GoodHealthCheck()))

  def testApplyHealthCheckAttributesNoAttrs(self):
    """Test that we can apply attributes to a health check."""
    testhc = TestHealthCheck()
    result = manager.ApplyHealthCheckAttributes(testhc)
    self.assertEquals(result.CHECK_INTERVAL_SEC,
                      manager.CHECK_INTERVAL_DEFAULT_SEC)

  def testApplyHealthCheckAttributesHasAttrs(self):
    """Test that we do not override an acceptable attribute."""
    testhc = TestHealthCheckHasAttributes()
    check_interval = testhc.CHECK_INTERVAL_SEC
    result = manager.ApplyHealthCheckAttributes(testhc)
    self.assertEquals(result.CHECK_INTERVAL_SEC, check_interval)

  def testImportFileAllHealthChecks(self):
    """Test that health checks and service name are collected."""
    # NOTE(review): splitext normally returns a (root, ext) tuple; mocking
    #   it to a bare string still works here since only basename is taken.
    self.StartPatcher(mock.patch('os.path.splitext'))
    os.path.splitext.return_value = '/path/to/test_check.py'

    self.StartPatcher(mock.patch('os.path.getmtime'))
    os.path.getmtime.return_value = TEST_MTIME

    # Build a fake check module in memory (Python 2 exec-statement form).
    checkmodule = imp.new_module('test_check')
    exec CHECKFILE_MANY_SIMPLE in checkmodule.__dict__
    self.StartPatcher(mock.patch('imp.load_source'))
    imp.load_source.return_value = checkmodule

    healthchecks, mtime = manager.ImportFile(TEST_SERVICE_NAME, '/')

    self.assertEquals(len(healthchecks), 3)
    self.assertEquals(mtime, TEST_MTIME)

  def testImportFileSomeHealthChecks(self):
    """Test importing when not all classes are actually health checks."""
    self.StartPatcher(mock.patch('os.path.splitext'))
    os.path.splitext.return_value = '/path/to/test_check.py'

    self.StartPatcher(mock.patch('os.path.getmtime'))
    os.path.getmtime.return_value = TEST_MTIME

    # Build a fake check module in memory (Python 2 exec-statement form).
    checkmodule = imp.new_module('test_check')
    exec CHECKFILE_MANY_SIMPLE_ONE_BAD in checkmodule.__dict__
    self.StartPatcher(mock.patch('imp.load_source'))
    imp.load_source.return_value = checkmodule

    healthchecks, mtime = manager.ImportFile(TEST_SERVICE_NAME, '/')

    self.assertEquals(len(healthchecks), 2)
    self.assertEquals(mtime, TEST_MTIME)
+
+
+class CheckFileManagerTest(cros_test_lib.MockTestCase):
+  """Unittests for CheckFileManager."""
+
+  def testCollectionExecutionCallbackCheckfiles(self):
+    """Test the CollectionExecutionCallback on collecting checkfiles."""
+    self.StartPatcher(mock.patch('os.walk'))
+    os.walk.return_value = iter([[CHECKDIR, [TEST_SERVICE_NAME], []]])
+
+    self.StartPatcher(mock.patch('os.listdir'))
+    os.listdir.return_value = ['test_check.py']
+
+    self.StartPatcher(mock.patch('os.path.isfile'))
+    os.path.isfile.return_value = True
+
+    self.StartPatcher(mock.patch('imp.find_module'))
+    imp.find_module.return_value = (None, None, None)
+    self.StartPatcher(mock.patch('imp.load_module'))
+
+    myobj = TestHealthCheck()
+    manager.ImportFile = mock.Mock(return_value=[[myobj], TEST_MTIME])
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+    cfm.CollectionExecutionCallback()
+
+    manager.ImportFile.assert_called_once_with(
+        TEST_SERVICE_NAME, './%s/test_check.py' % TEST_SERVICE_NAME)
+
+    self.assertTrue(TEST_SERVICE_NAME in cfm.service_checks)
+    self.assertEquals(cfm.service_checks[TEST_SERVICE_NAME],
+                      {myobj.__class__.__name__: (TEST_MTIME, myobj)})
+
+  def testCollectionExecutionCallbackNoChecks(self):
+    """Test the CollectionExecutionCallback with no valid check files."""
+    self.StartPatcher(mock.patch('os.walk'))
+    os.walk.return_value = iter([['/checkdir/', [], ['test.py']]])
+
+    manager.ImportFile = mock.Mock(return_value=None)
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+    cfm.CollectionExecutionCallback()
+
+    self.assertFalse(manager.ImportFile.called)
+
+    self.assertFalse(TEST_SERVICE_NAME in cfm.service_checks)
+
+  def testStartCollectionExecution(self):
+    """Test the StartCollectionExecution method."""
+    plugins.Monitor = mock.Mock()
+
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+    cfm.StartCollectionExecution()
+
+    self.assertTrue(plugins.Monitor.called)
+
+  def testUpdateExistingHealthCheck(self):
+    """Test update when a health check exists and is not stale."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    myobj = TestHealthCheck()
+
+    cfm.service_checks[TEST_SERVICE_NAME] = {myobj.__class__.__name__:
+                                             (TEST_MTIME, myobj)}
+
+    myobj2 = TestHealthCheck()
+    cfm.Update(TEST_SERVICE_NAME, [myobj2], TEST_MTIME)
+    self.assertTrue(TEST_SERVICE_NAME in cfm.service_checks)
+    self.assertEquals(cfm.service_checks[TEST_SERVICE_NAME],
+                      {myobj.__class__.__name__: (TEST_MTIME, myobj)})
+
+  def testUpdateNonExistingHealthCheck(self):
+    """Test adding a new health check to the manager."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+    cfm.service_checks = {}
+
+    myobj = TestHealthCheck()
+    cfm.Update(TEST_SERVICE_NAME, [myobj], TEST_MTIME)
+
+    self.assertTrue(TEST_SERVICE_NAME in cfm.service_checks)
+    self.assertEquals(cfm.service_checks[TEST_SERVICE_NAME],
+                      {myobj.__class__.__name__: (TEST_MTIME, myobj)})
+
+  def testExecuteFresh(self):
+    """Test executing a health check when the result is still fresh."""
+    self.StartPatcher(mock.patch('time.time'))
+    exec_time_offset = TestHealthCheckHasAttributes.CHECK_INTERVAL_SEC / 2
+    time.time.return_value = TEST_EXEC_TIME + exec_time_offset
+
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+    cfm.service_checks = {TEST_SERVICE_NAME:
+                          {TestHealthCheckHasAttributes.__name__:
+                           (TEST_MTIME, TestHealthCheckHasAttributes())}}
+    cfm.service_check_results = {
+        TEST_SERVICE_NAME: {TestHealthCheckHasAttributes.__name__:
+                            (manager.HCEXECUTION_COMPLETED, TEST_EXEC_TIME,
+                             None)}}
+
+    cfm.Execute()
+
+    _, exec_time, _ = cfm.service_check_results[TEST_SERVICE_NAME][
+        TestHealthCheckHasAttributes.__name__]
+
+    self.assertEquals(exec_time, TEST_EXEC_TIME)
+
+  def testExecuteStale(self):
+    """Test executing a health check when the result is stale."""
+    self.StartPatcher(mock.patch('time.time'))
+    exec_time_offset = TestHealthCheckHasAttributes.CHECK_INTERVAL_SEC * 2
+    time.time.return_value = TEST_EXEC_TIME + exec_time_offset
+
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+    cfm.service_checks = {TEST_SERVICE_NAME:
+                          {TestHealthCheckHasAttributes.__name__:
+                           (TEST_MTIME, TestHealthCheckHasAttributes())}}
+    cfm.service_check_results = {
+        TEST_SERVICE_NAME: {TestHealthCheckHasAttributes.__name__:
+                            (manager.HCEXECUTION_COMPLETED, TEST_EXEC_TIME,
+                             None)}}
+
+    cfm.Execute()
+
+    _, exec_time, _ = cfm.service_check_results[TEST_SERVICE_NAME][
+        TestHealthCheckHasAttributes.__name__]
+
+    self.assertEquals(exec_time, TEST_EXEC_TIME + exec_time_offset)
+
+  def testExecuteNonExistent(self):
+    """Test executing a health check when the result is nonexistent."""
+    self.StartPatcher(mock.patch('time.time'))
+    time.time.return_value = TEST_EXEC_TIME
+
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+    cfm.service_checks = {TEST_SERVICE_NAME:
+                          {TestHealthCheck.__name__:
+                           (TEST_MTIME, TestHealthCheck())}}
+
+    cfm.Execute()
+
+    resultsdict = cfm.service_check_results.get(TEST_SERVICE_NAME)
+    self.assertTrue(resultsdict is not None)
+
+    exec_status, exec_time, _ = resultsdict.get(TestHealthCheck.__name__,
+                                                (None, None, None))
+    self.assertTrue(exec_status is not None)
+    self.assertTrue(exec_time is not None)
+
+    self.assertEquals(exec_status, manager.HCEXECUTION_COMPLETED)
+    self.assertEquals(exec_time, TEST_EXEC_TIME)
+
+  def testExecuteForce(self):
+    """Test executing a health check by ignoring the check interval."""
+    self.StartPatcher(mock.patch('time.time'))
+    exec_time_offset = TestHealthCheckHasAttributes.CHECK_INTERVAL_SEC / 2
+    time.time.return_value = TEST_EXEC_TIME + exec_time_offset
+
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+    cfm.service_checks = {TEST_SERVICE_NAME:
+                          {TestHealthCheckHasAttributes.__name__:
+                           (TEST_MTIME, TestHealthCheckHasAttributes())}}
+    cfm.service_check_results = {
+        TEST_SERVICE_NAME: {TestHealthCheckHasAttributes.__name__:
+                            (manager.HCEXECUTION_COMPLETED, TEST_EXEC_TIME,
+                             None)}}
+
+    cfm.Execute(force=True)
+
+    _, exec_time, _ = cfm.service_check_results[TEST_SERVICE_NAME][
+        TestHealthCheckHasAttributes.__name__]
+
+    self.assertEquals(exec_time, TEST_EXEC_TIME + exec_time_offset)
+
+  def testConsolidateServiceStatesUnhealthy(self):
+    """Test consolidating state for a service with unhealthy checks."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    # Setup some test check results
+    hcname = TestHealthCheck.__name__
+    statuses = [
+        manager.HEALTHCHECK_STATUS(hcname, False, 'Failed', ['Repair']),
+        manager.HEALTHCHECK_STATUS(hcname, True, 'Quasi', ['RepairQuasi']),
+        manager.HEALTHCHECK_STATUS(hcname, True, '', [])]
+
+    cfm.service_check_results.setdefault(TEST_SERVICE_NAME, {})
+    for i, status in enumerate(statuses):
+      name = '%s_%s' % (hcname, i)
+      cfm.service_check_results[TEST_SERVICE_NAME][name] = (
+          manager.HCEXECUTION_COMPLETED, TEST_EXEC_TIME,
+          status)
+
+    # Run and check the results.
+    cfm.ConsolidateServiceStates()
+    self.assertTrue(TEST_SERVICE_NAME in cfm.service_states)
+
+    _, health, healthchecks = cfm.service_states[TEST_SERVICE_NAME]
+    self.assertFalse(health)
+    self.assertEquals(2, len(healthchecks))
+    self.assertTrue(all([x in healthchecks for x in statuses[:2]]))
+
+  def testConsolidateServiceStatesQuasiHealthy(self):
+    """Test consolidating state for a service with quasi-healthy checks."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    # Setup some test check results
+    hcname = TestHealthCheck.__name__
+    statuses = [
+        manager.HEALTHCHECK_STATUS(hcname, True, 'Quasi', ['RepairQuasi']),
+        manager.HEALTHCHECK_STATUS(hcname, True, '', [])]
+
+    cfm.service_check_results.setdefault(TEST_SERVICE_NAME, {})
+    for i, status in enumerate(statuses):
+      name = '%s_%s' % (hcname, i)
+      cfm.service_check_results[TEST_SERVICE_NAME][name] = (
+          manager.HCEXECUTION_COMPLETED, TEST_EXEC_TIME,
+          status)
+
+    # Run and check the results.
+    cfm.ConsolidateServiceStates()
+    self.assertTrue(TEST_SERVICE_NAME in cfm.service_states)
+
+    _, health, healthchecks = cfm.service_states[TEST_SERVICE_NAME]
+    self.assertTrue(health)
+    self.assertEquals(1, len(healthchecks))
+    self.assertTrue(statuses[0] in healthchecks)
+
+  def testConsolidateServiceStatesHealthy(self):
+    """Test consolidating state for a healthy service."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    # Setup some test check results
+    hcname = TestHealthCheck.__name__
+    hcname2 = '%s_2' % hcname
+    statuses = [
+        manager.HEALTHCHECK_STATUS(hcname, True, '', []),
+        manager.HEALTHCHECK_STATUS(hcname2, True, '', [])]
+
+    cfm.service_check_results.setdefault(TEST_SERVICE_NAME, {})
+    cfm.service_check_results[TEST_SERVICE_NAME][hcname] = (
+        manager.HCEXECUTION_COMPLETED, TEST_EXEC_TIME, statuses[0])
+    cfm.service_check_results[TEST_SERVICE_NAME][hcname2] = (
+        manager.HCEXECUTION_IN_PROGRESS, TEST_EXEC_TIME, statuses[1])
+
+    # Run and check.
+    cfm.ConsolidateServiceStates()
+
+    self.assertTrue(TEST_SERVICE_NAME in cfm.service_states)
+
+    _, health, healthchecks = cfm.service_states.get(TEST_SERVICE_NAME)
+    self.assertTrue(health)
+    self.assertEquals(0, len(healthchecks))
+
+  def testGetServiceList(self):
+    """Test the GetServiceList RPC response."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    self.assertEquals([], cfm.GetServiceList())
+
+    status = manager.SERVICE_STATUS(TEST_SERVICE_NAME, True, [])
+    cfm.service_states[TEST_SERVICE_NAME] = status
+
+    self.assertEquals([TEST_SERVICE_NAME], cfm.GetServiceList())
+
+  def testGetStatusNonExistent(self):
+    """Test the GetStatus RPC response when the service does not exist."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    self.assertFalse(TEST_SERVICE_NAME in cfm.service_states)
+
+    status = manager.SERVICE_STATUS(TEST_SERVICE_NAME, False, [])
+    self.assertEquals(status, cfm.GetStatus(TEST_SERVICE_NAME))
+
+  def testGetStatusSingleService(self):
+    """Test the GetStatus RPC response for a single service."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    s1name = TEST_SERVICE_NAME
+    s2name = '%s_2' % s1name
+    status1 = manager.SERVICE_STATUS(s1name, True, [])
+    status2 = manager.SERVICE_STATUS(s2name, True, [])
+    cfm.service_states[s1name] = status1
+    cfm.service_states[s2name] = status2
+
+    self.assertEquals(status1, cfm.GetStatus(s1name))
+    self.assertEquals(status2, cfm.GetStatus(s2name))
+
+  def testGetStatusAllServices(self):
+    """Test the GetStatus RPC response when no service is specified."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    s1name = TEST_SERVICE_NAME
+    s2name = '%s_2' % s1name
+    status1 = manager.SERVICE_STATUS(s1name, True, [])
+    status2 = manager.SERVICE_STATUS(s2name, True, [])
+    cfm.service_states[s1name] = status1
+    cfm.service_states[s2name] = status2
+
+    result = cfm.GetStatus('')
+    self.assertEquals(2, len(result))
+    self.assertTrue(all([x in result for x in [status1, status2]]))
+
+  def testRepairServiceHealthy(self):
+    """Test the RepairService RPC when the service is healthy."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    healthy_status = manager.SERVICE_STATUS(TEST_SERVICE_NAME, True, [])
+    cfm.service_states[TEST_SERVICE_NAME] = healthy_status
+
+    self.assertEquals(healthy_status, cfm.RepairService(TEST_SERVICE_NAME,
+                                                        'HealthcheckName',
+                                                        'RepairFuncName',
+                                                        [], {}))
+
+  def testRepairServiceNonExistent(self):
+    """Test the RepairService RPC when the service does not exist."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    self.assertFalse(TEST_SERVICE_NAME in cfm.service_states)
+
+    expected = manager.SERVICE_STATUS(TEST_SERVICE_NAME, False, [])
+    result = cfm.RepairService(TEST_SERVICE_NAME, 'DummyHealthcheck',
+                               'DummyAction', [], {})
+    self.assertEquals(expected, result)
+
+  def testRepairServiceInvalidAction(self):
+    """Test the RepairService RPC when the action is not recognized."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    hcobj = TestHealthCheckUnhealthy()
+    cfm.service_checks[TEST_SERVICE_NAME] = {
+        hcobj.__class__.__name__: (TEST_MTIME, hcobj)}
+
+    unhealthy_status = manager.SERVICE_STATUS(
+        TEST_SERVICE_NAME, False,
+        [manager.HEALTHCHECK_STATUS(hcobj.__class__.__name__,
+                                    False, 'Always fails', [hcobj.Repair])])
+    cfm.service_states[TEST_SERVICE_NAME] = unhealthy_status
+
+    status = cfm.GetStatus(TEST_SERVICE_NAME)
+    self.assertFalse(status.health)
+    self.assertEquals(1, len(status.healthchecks))
+
+    status = cfm.RepairService(TEST_SERVICE_NAME, hcobj.__class__.__name__,
+                               'Blah', [], {})
+    self.assertFalse(status.health)
+    self.assertEquals(1, len(status.healthchecks))
+
+  def testRepairServiceInvalidActionArguments(self):
+    """Test the RepairService RPC when the action arguments are invalid."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    hcobj = TestHealthCheckUnhealthy()
+    cfm.service_checks[TEST_SERVICE_NAME] = {
+        hcobj.__class__.__name__: (TEST_MTIME, hcobj)}
+
+    unhealthy_status = manager.SERVICE_STATUS(
+        TEST_SERVICE_NAME, False,
+        [manager.HEALTHCHECK_STATUS(hcobj.__class__.__name__,
+                                    False, 'Always fails', [hcobj.Repair])])
+    cfm.service_states[TEST_SERVICE_NAME] = unhealthy_status
+
+    status = cfm.GetStatus(TEST_SERVICE_NAME)
+    self.assertFalse(status.health)
+    self.assertEquals(1, len(status.healthchecks))
+
+    status = cfm.RepairService(TEST_SERVICE_NAME, hcobj.__class__.__name__,
+                               'Repair', [1, 2, 3], {})
+    self.assertFalse(status.health)
+    self.assertEquals(1, len(status.healthchecks))
+
+  def testRepairService(self):
+    """Test the RepairService RPC to repair an unhealthy service."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    hcobj = TestHealthCheckUnhealthy()
+    cfm.service_checks[TEST_SERVICE_NAME] = {
+        hcobj.__class__.__name__: (TEST_MTIME, hcobj)}
+
+    unhealthy_status = manager.SERVICE_STATUS(
+        TEST_SERVICE_NAME, False,
+        [manager.HEALTHCHECK_STATUS(hcobj.__class__.__name__,
+                                    False, 'Always fails', [hcobj.Repair])])
+    cfm.service_states[TEST_SERVICE_NAME] = unhealthy_status
+
+    status = cfm.GetStatus(TEST_SERVICE_NAME)
+    self.assertFalse(status.health)
+    self.assertEquals(1, len(status.healthchecks))
+
+    status = cfm.RepairService(TEST_SERVICE_NAME,
+                               hcobj.__class__.__name__,
+                               hcobj.Repair.__name__,
+                               [], {})
+    self.assertTrue(status.health)
+    self.assertEquals(0, len(status.healthchecks))
+
+  def testActionInfoServiceNonExistent(self):
+    """Test the ActionInfo RPC when the service does not exist."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    self.assertFalse(TEST_SERVICE_NAME in cfm.service_states)
+
+    expect = manager.ACTION_INFO('test', 'Service not recognized.',
+                                 [], {})
+    result = cfm.ActionInfo(TEST_SERVICE_NAME, 'test', 'test')
+    self.assertEquals(expect, result)
+
+  def testActionInfoServiceHealthy(self):
+    """Test the ActionInfo RPC when the service is healthy."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    healthy_status = manager.SERVICE_STATUS(TEST_SERVICE_NAME, True, [])
+    cfm.service_states[TEST_SERVICE_NAME] = healthy_status
+
+    expect = manager.ACTION_INFO('test', 'Service is healthy.',
+                                 [], {})
+    result = cfm.ActionInfo(TEST_SERVICE_NAME, 'test', 'test')
+    self.assertEquals(expect, result)
+
+  def testActionInfoActionNonExistent(self):
+    """Test the ActionInfo RPC when the action does not exist."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    hcobj = TestHealthCheckUnhealthy()
+    cfm.service_checks[TEST_SERVICE_NAME] = {
+        hcobj.__class__.__name__: (TEST_MTIME, hcobj)}
+
+    unhealthy_status = manager.SERVICE_STATUS(
+        TEST_SERVICE_NAME, False,
+        [manager.HEALTHCHECK_STATUS(hcobj.__class__.__name__,
+                                    False, 'Always fails', [hcobj.Repair])])
+    cfm.service_states[TEST_SERVICE_NAME] = unhealthy_status
+
+    expect = manager.ACTION_INFO('test', 'Action not recognized.', [], {})
+    result = cfm.ActionInfo(TEST_SERVICE_NAME, hcobj.__class__.__name__,
+                            'test')
+    self.assertEquals(expect, result)
+
+  def testActionInfo(self):
+    """Test the ActionInfo RPC to collect information on a repair action."""
+    cfm = manager.CheckFileManager(checkdir=CHECKDIR)
+
+    hcobj = TestHealthCheckMultipleActions()
+    hcname = hcobj.__class__.__name__
+    actions = [hcobj.NoParams, hcobj.PositionalParams, hcobj.DefaultParams,
+               hcobj.MixedParams]
+
+    cfm.service_checks[TEST_SERVICE_NAME] = {hcname: (TEST_MTIME, hcobj)}
+
+    unhealthy_status = manager.SERVICE_STATUS(
+        TEST_SERVICE_NAME, False,
+        [manager.HEALTHCHECK_STATUS(hcname, False, 'Always fails', actions)])
+    cfm.service_states[TEST_SERVICE_NAME] = unhealthy_status
+
+    # Test ActionInfo when the action has no parameters.
+    expect = manager.ACTION_INFO('NoParams', 'NoParams Action.', [], {})
+    self.assertEquals(expect,
+                      cfm.ActionInfo(TEST_SERVICE_NAME, hcname, 'NoParams'))
+
+    # Test ActionInfo when the action has only positional parameters.
+    expect = manager.ACTION_INFO('PositionalParams', 'PositionalParams Action.',
+                                 ['x', 'y', 'z'], {})
+    self.assertEquals(expect,
+                      cfm.ActionInfo(TEST_SERVICE_NAME,
+                                     hcname, 'PositionalParams'))
+
+    # Test ActionInfo when the action has only default parameters.
+    expect = manager.ACTION_INFO('DefaultParams', 'DefaultParams Action.',
+                                 [], {'x': 1, 'y': 2, 'z': 3})
+    self.assertEquals(expect,
+                      cfm.ActionInfo(TEST_SERVICE_NAME,
+                                     hcname, 'DefaultParams'))
+
+    # Test ActionInfo when the action has positional and default parameters.
+    expect = manager.ACTION_INFO('MixedParams', 'MixedParams Action.',
+                                 ['x', 'y'], {'z': 1})
+    self.assertEquals(expect, cfm.ActionInfo(TEST_SERVICE_NAME,
+                                             hcname, 'MixedParams'))
+
+
@cros_test_lib.NetworkTest()
class CheckFileModificationTest(cros_test_lib.MockTempDirTestCase):
  """Unittests for checking when live changes are made to a checkfile."""

  MOBMONITOR_BASENAME = 'mobmonitor'
  MOBMONITOR_REL_CMD = 'scripts/mobmonitor.py'
  SERVICE_DIR = 'test_service'
  CHECKFILE_REL_PATH = 'test_check.py'
  NOTACHECK_REL_PATH = 'notacheck.py'
  CHERRYPY_RESTART_STR = 'ENGINE Restarting because %(checkfile)s changed.'
  CHECKFILE_MOD_ATTEMPTS = 3
  TIMEOUT_SEC = 5

  def CreateFile(self, relpath, filestr):
    """Create a file from a string in the temp dir.

    Args:
      relpath: Path of the file relative to the service directory.
      filestr: The content to write.

    Returns:
      The absolute path of the created file.
    """
    abspath = os.path.join(self.service_dir, relpath)
    osutils.WriteFile(abspath, filestr, makedirs=True)
    return abspath

  def ResetDirectory(self):
    """Reset files that are overwritten during test attempts."""
    self.checkfile = self.CreateFile(self.CHECKFILE_REL_PATH,
                                     CHECKFILE_MANY_SIMPLE)
    self.notacheck = self.CreateFile(self.NOTACHECK_REL_PATH,
                                     NOT_A_CHECKFILE)

  def RunCheckfileMod(self, expect_handler, modpath, modfilestr):
    """Test Mob* Monitor restart behaviour with checkfile modification.

    Args:
      expect_handler: Callable taking a boolean (True when the monitored
        service list was unchanged across the modification) and returning
        whether the outcome matches the expectation.
      modpath: Relative path of the file to overwrite mid-run.
      modfilestr: The new content for |modpath|.

    Returns:
      True if any attempt met the expectation, False otherwise.
    """
    # Retry the test several times, each time with more relaxed timeouts,
    # to try to control for flakiness as these testcases are dependent
    # on cherrypy startup time and module change detection time.
    for attempt in range(1, self.CHECKFILE_MOD_ATTEMPTS + 1):
      # Prepare the test directory for the test attempt.
      self.ResetDirectory()

      # Set the current timeout.
      timeout_sec = self.TIMEOUT_SEC * attempt

      # Start the Mob* Monitor in a separate thread.
      mobmon = RunCommand(self.cmd, timeout_sec)
      mobmon.start()

      # Wait for the monitor to start up fully.
      time.sleep(timeout_sec)

      # Get the list of services currently being monitored.
      curlist = self.rpc.GetServiceList()

      # Update the checkfile.
      self.checkfile = self.CreateFile(modpath, modfilestr)

      # Wait for the monitor to fully restart.
      time.sleep(timeout_sec)

      # Get the new list of monitored services.
      newlist = self.rpc.GetServiceList()

      # Stop the monitor and test the change in monitored services.
      mobmon.Stop()
      if expect_handler(curlist == newlist):
        return True

    # The test failed.
    return False

  def setUp(self):
    """Setup the check directory and the Mob* Monitor process."""
    # Create the test check directory and the test files.
    self.checkdir = self.tempdir
    self.service_dir = os.path.join(self.checkdir, self.SERVICE_DIR)
    self.checkfile = self.CreateFile(self.CHECKFILE_REL_PATH,
                                     CHECKFILE_MANY_SIMPLE)
    self.notacheck = self.CreateFile(self.NOTACHECK_REL_PATH,
                                     NOT_A_CHECKFILE)
    self.CreateFile('__init__.py', '')

    # Setup the Mob* Monitor command.
    # NOTE(review): this walks up from this file until a path component
    # named 'mobmonitor' is found; it assumes such a component exists on
    # the path, otherwise the loop would not terminate — confirm.
    path = os.path.abspath(__file__)
    while os.path.basename(path) != self.MOBMONITOR_BASENAME:
      path = os.path.dirname(path)
    path = os.path.join(path, self.MOBMONITOR_REL_CMD)
    self.cmd = ['python', path, '-d', self.checkdir]

    # Setup an rpc client for communicating with the Mob* Monitor.
    self.rpc = rpc.RpcExecutor()

  def testModifyCheckfile(self):
    """Test restart behaviour when modifying an imported checkfile."""
    # Expect the service list to have changed. ('not x' instead of the
    # non-idiomatic 'x == False' comparison.)
    expect_handler = lambda unchanged: not unchanged

    self.assertTrue(self.RunCheckfileMod(expect_handler,
                                         self.CHECKFILE_REL_PATH,
                                         NOT_A_CHECKFILE))

  def testModifyNotACheckfile(self):
    """Test that no restart occurs when a non-checkfile is modified."""
    # Expect the service list to be unchanged.
    expect_handler = lambda unchanged: unchanged

    self.assertTrue(self.RunCheckfileMod(expect_handler,
                                         self.NOTACHECK_REL_PATH,
                                         ANOTHER_NOT_A_CHECKFILE))
diff --git a/mobmonitor/rpc/__init__.py b/mobmonitor/rpc/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/mobmonitor/rpc/__init__.py
diff --git a/mobmonitor/rpc/rpc.py b/mobmonitor/rpc/rpc.py
new file mode 100644
index 0000000..4d1b9d9
--- /dev/null
+++ b/mobmonitor/rpc/rpc.py
@@ -0,0 +1,161 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module allows for communicating with the Mob* Monitor via RPC."""
+
+from __future__ import print_function
+
+import urllib
+import urllib2
+
+from chromite.lib import remote_access
+from chromite.lib import retry_util
+
+
+URLLIB_CALL_FORMAT_STR = '%(host)s/%(func)s/?%(args)s'
+RPC_RETRY_TIMES = 5
+RPC_SLEEP_SECS = 2
+RPC_LIST = ['GetServiceList', 'GetStatus', 'ActionInfo', 'RepairService']
+
+
class RpcError(Exception):
  """Raised when an error is encountered while preparing an RPC."""
+
+
class RpcExecutor(object):
  """Construct and send RPCs to the Mob* Monitor with retry.

  Each public method wraps one RPC exposed by the monitor's RESTful
  interface. Calls are retried up to RPC_RETRY_TIMES times, sleeping
  RPC_SLEEP_SECS between attempts, on urllib2.URLError.
  """

  def __init__(self, host='localhost', port=9991):
    # Precompute the base URL; NormalizePort validates the port value.
    self.host = 'http://%s:%s' % (host, remote_access.NormalizePort(port))

  def ConstructUrllibCall(self, func, **kwargs):
    """Build a Mob* Monitor RPC to be used with urllib.

    Args:
      func: The remote function to call.
      kwargs: The arguments to the remote function func.

    Returns:
      A string used by urllib2 to use the Mob* Monitor's
      exposed RESTful interface.
    """
    # Create a string that can be used by urllib2 to interact
    # with the Mob* Monitor's RESTful interface.
    #
    # For example, suppose we have:
    #   host = 'http://localhost:9991'
    #   func = 'repair_service'
    #   kwargs = {'service': 's1', 'action': 'a1'}
    #
    # Then args becomes:
    #   'service=s1&action=a1'
    #
    # And we return (note the trailing slash before the query string,
    # as produced by URLLIB_CALL_FORMAT_STR):
    #   'http://localhost:9991/repair_service/?service=s1&action=a1'
    #
    args = urllib.urlencode(kwargs)
    return URLLIB_CALL_FORMAT_STR % dict(host=self.host, func=func, args=args)

  def Execute(self, func, **kwargs):
    """Build and execute the RPC to the Mob* Monitor.

    Args:
      func: The remote function to call.
      kwargs: Arguments to above function.

    Returns:
      The result of the remote call, as the raw response body string.
    """
    def urllib_call():
      call = self.ConstructUrllibCall(func, **kwargs)
      return urllib2.urlopen(call).read()

    # Retry on URLError to ride out transient failures, e.g. while the
    # monitor process is still starting up or restarting.
    return retry_util.RetryException(urllib2.URLError, RPC_RETRY_TIMES,
                                     urllib_call, sleep=RPC_SLEEP_SECS)

  def GetServiceList(self):
    """List the monitored services.

    Returns:
      A list of the monitored services.
    """
    return self.Execute('GetServiceList')

  def GetStatus(self, service=None):
    """Get the service's health status.

    Args:
      service: A string. The service to query. If None, all services
        are queried.

    Returns:
      A namedtuple with the following fields:
        health_state: The service health state.
        description: A string which describes the health state.
        variables: A dictionary of variables pertaining to the service status.
        actions: A list of actions to take.

      If service is None, a list of such results is returned, one for
      each monitored service.
    """
    # Urllib encodes None as the string 'None'. Use the empty string instead.
    if service is None:
      service = ''

    return self.Execute('GetStatus', service=service)

  def ActionInfo(self, service=None, healthcheck=None, action=None):
    """Collect argument and usage information for |action|.

    See checkfile.manager.ActionInfo for more documentation on the
    behaviour of this RPC.

    Args:
      service: A string. The name of a service being monitored.
      healthcheck: A string. The name of a healthcheck belonging to |service|.
      action: A string. The name of an action returned by |healthcheck|'s
        Diagnose method.

    Returns:
      A named tuple with the following fields:
        action: The |action| string.
        info: The docstring of |action|.
        args: A list of the positional arguments for |action|.
        kwargs: A dictionary of default arguments for |action|.

    Raises:
      RpcError: Any of |service|, |healthcheck| or |action| is None.
    """
    if any([x is None for x in [service, healthcheck, action]]):
      raise RpcError('ActionInfo requires the service, the healthcheck'
                     ' and the action to be provided.'
                     ' Given: service=%s healthcheck=%s action=%s' % (
                         service, healthcheck, action))

    return self.Execute('ActionInfo', service=service, healthcheck=healthcheck,
                        action=action)

  def RepairService(self, service=None, healthcheck=None, action=None,
                    args=None, kwargs=None):
    """Apply the specified action to the specified service.

    Args:
      service: A string. The service to repair.
      healthcheck: A string. The healthcheck of |service| that we are fixing.
      action: A string. The action to take.
      args: The positional argument inputs to the repair action.
      kwargs: The keyword argument inputs to the repair action.

    Returns:
      The same output of running get_status(service=service).

    Raises:
      RpcError: Any of |service|, |healthcheck| or |action| is None.
    """
    if any([x is None for x in [service, healthcheck, action]]):
      raise RpcError('RepairService requires the service, the healthcheck'
                     ' and the action to be provided.'
                     ' Given: service=%s healthcheck=%s action=%s' % (
                         service, healthcheck, action))

    args = [] if args is None else args
    kwargs = {} if kwargs is None else kwargs

    return self.Execute('RepairService', service=service,
                        healthcheck=healthcheck, action=action, args=args,
                        kwargs=kwargs)
diff --git a/mobmonitor/scripts/__init__.py b/mobmonitor/scripts/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/mobmonitor/scripts/__init__.py
diff --git a/mobmonitor/scripts/mobmoncli.py b/mobmonitor/scripts/mobmoncli.py
new file mode 100755
index 0000000..45c4266
--- /dev/null
+++ b/mobmonitor/scripts/mobmoncli.py
@@ -0,0 +1,129 @@
+#!/usr/bin/python2
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Command-line interface for the Mob* Monitor."""
+
+from __future__ import print_function
+
+import re
+import sys
+
+from chromite.lib import commandline
+from chromite.lib import remote_access
+from chromite.mobmonitor.rpc import rpc
+
+
def InputsToArgs(inputs):
  """Convert repair action input string to an args list and kwargs dict.

  Args:
    inputs: A string. A well formed input string is a comma separated
      list of values and/or equal-sign separated key value pairs.
      A valid input string may be the following:
        'arg1,arg2,...,argN,kwarg1=foo,...,kwargN=bar'

  Returns:
    A list of the positional arguments contained in the |inputs| string and
    a dictionary of the key value pairs that made up the |inputs| string.
    All keys and values will be strings.

  Raises:
    ValueError: |inputs| is not well-formed, e.g. it contains empty
      comma-separated segments.
  """
  args, kwargs = ([], {})
  if not inputs:
    return args, kwargs

  # Reject inputs with empty segments ('', leading/trailing/double commas).
  pattern = r'([^,]+,)*([^,]+)$'
  if not re.match(pattern, inputs):
    # Bug fix: the original passed |inputs| as a second positional
    # argument to ValueError (logging-style comma) instead of
    # interpolating it into the message.
    raise ValueError('Action arguments are not well-formed.'
                     ' Expected: "a1,...,aN,kw1=foo,...,kwN=bar".'
                     ' Given: %s' % inputs)

  for kv in inputs.split(','):
    try:
      # Segments with exactly one '=' become keyword arguments; anything
      # else (no '=' or multiple '=') is treated as a positional argument.
      k, v = kv.split('=')
      kwargs[k] = v
    except ValueError:
      args.append(kv)

  return args, kwargs
+
+
class MobMonCli(object):
  """Provides command-line functionality for using the Mob* Monitor."""

  def __init__(self, host='localhost', port=9991):
    """Remember where the monitor lives.

    Args:
      host: The hostname of the Mob* Monitor.
      port: The port the Mob* Monitor listens on.
    """
    self.host = host
    self.port = remote_access.NormalizePort(port)

  def ExecuteRequest(self, request, service, healthcheck, action, inputs):
    """Execute the request if an appropriate RPC function is defined.

    Args:
      request: The name of the RPC.
      service: The name of the service involved in the RPC.
      healthcheck: The name of the healthcheck involved in the RPC.
      action: The action to be performed.
      inputs: A string. The inputs of the specified repair action.
    """
    rpcexec = rpc.RpcExecutor(self.host, self.port)

    # Unknown requests are rejected before any parsing work is done.
    if not hasattr(rpcexec, request):
      raise rpc.RpcError('The request "%s" is not recognized.' % request)

    args, kwargs = InputsToArgs(inputs)

    # Dispatch to the matching RPC wrapper.
    if request == 'GetServiceList':
      return rpcexec.GetServiceList()
    elif request == 'GetStatus':
      return rpcexec.GetStatus(service=service)
    elif request == 'ActionInfo':
      return rpcexec.ActionInfo(service=service, healthcheck=healthcheck,
                                action=action)
    elif request == 'RepairService':
      return rpcexec.RepairService(service=service, healthcheck=healthcheck,
                                   action=action, args=args, kwargs=kwargs)
+
+
def ParseArguments(argv):
  """Build the CLI argument parser and parse |argv|.

  Args:
    argv: The command-line arguments, excluding the program name.

  Returns:
    The parsed options object.
  """
  argparser = commandline.ArgumentParser()
  argparser.add_argument('request', choices=rpc.RPC_LIST)
  argparser.add_argument('-s', '--service', help='The service to act upon.')
  argparser.add_argument('-c', '--healthcheck',
                         help='The healthcheck to act upon.')
  argparser.add_argument('-a', '--action', help='The action to execute.')
  argparser.add_argument('--host', default='localhost',
                         help='The hostname of the Mob* Monitor.')
  argparser.add_argument('-p', '--port', type=int, default=9991,
                         help='The Mob* Monitor port.')
  argparser.add_argument('-i', '--inputs',
                         help='Repair action inputs. Inputs are specified'
                              ' as a comma-separated list of values or key'
                              ' value pairs such as: "arg1,arg2,...,argN,'
                              'kwarg1=foo,...,kwargN=bar"')
  return argparser.parse_args(argv)
+
+
def main(argv):
  """Command line interface for the Mob* Monitor.

  The basic syntax is:
    mobmon <request> [args]
    mobmon --help
  """
  opts = ParseArguments(argv)

  # Forward the parsed request to the monitor and show its response.
  cli = MobMonCli(opts.host, opts.port)
  response = cli.ExecuteRequest(opts.request, opts.service,
                                opts.healthcheck, opts.action,
                                opts.inputs)
  print(response)
+
+
# Allow direct invocation: 'python mobmoncli.py <request> [args]'.
if __name__ == '__main__':
  main(sys.argv[1:])
diff --git a/mobmonitor/scripts/mobmoncli_unittest b/mobmonitor/scripts/mobmoncli_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/mobmonitor/scripts/mobmoncli_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/mobmonitor/scripts/mobmoncli_unittest.py b/mobmonitor/scripts/mobmoncli_unittest.py
new file mode 100644
index 0000000..5e38577
--- /dev/null
+++ b/mobmonitor/scripts/mobmoncli_unittest.py
@@ -0,0 +1,82 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the Mob* Monitor CLI script."""
+
+from __future__ import print_function
+
+import mock
+
+from chromite.lib import cros_test_lib
+from chromite.mobmonitor.rpc import rpc
+from chromite.mobmonitor.scripts import mobmoncli
+
+
class MobMonCliHelper(cros_test_lib.MockTestCase):
  """Unittests for MobMonCli helper functions."""

  def testInputsToArgs(self):
    """Test converting string inputs to an args list and kwargs dict."""
    inputs = '1,2,3,4,a=5,b=6,c=7'
    expected_args = ['1', '2', '3', '4']
    expected_kwargs = {'a': '5', 'b': '6', 'c': '7'}
    self.assertEqual((expected_args, expected_kwargs),
                     mobmoncli.InputsToArgs(inputs))

  def testInputsNone(self):
    """Test InputsToArgs when the user does not pass arguments."""
    self.assertEqual(([], {}), mobmoncli.InputsToArgs(None))

  def testInputsToArgMalformed(self):
    """Test InputsToArgs when the inputs are not well-formed."""
    # Each case contains an empty comma-separated segment.
    bad_args = [',', 'a=1,', ',1', '1,2,a=1,,b=2']
    for bad_arg in bad_args:
      with self.assertRaises(ValueError):
        mobmoncli.InputsToArgs(bad_arg)
+
+
class MobMonCliTest(cros_test_lib.MockTestCase):
  """Unittests for the MobMonCli."""

  def setUp(self):
    """Setup for MobMonCli tests."""
    self.cli = mobmoncli.MobMonCli()

  def _PatchRpcExecutor(self):
    """Patch out rpc.RpcExecutor; return the mock instance it will create.

    Factors out the patching boilerplate previously duplicated in every
    test method.
    """
    patcher = mock.patch('chromite.mobmonitor.rpc.rpc.RpcExecutor')
    rpc_executor = patcher.start()
    self.addCleanup(patcher.stop)
    mock_executor = mock.MagicMock()
    rpc_executor.return_value = mock_executor
    return mock_executor

  def testBadRequest(self):
    """Test that we error when an unrecognized request is passed."""
    with self.assertRaises(rpc.RpcError):
      self.cli.ExecuteRequest('InvalidRequest', 'TestService', '', '', '')

  def testGetServiceList(self):
    """Test that we correctly execute a GetServiceList RPC."""
    mock_executor = self._PatchRpcExecutor()
    self.cli.ExecuteRequest('GetServiceList', 'TestService', '', '', '')
    self.assertTrue(mock_executor.GetServiceList.called)

  def testGetStatus(self):
    """Test that we correctly execute a GetStatus RPC."""
    mock_executor = self._PatchRpcExecutor()
    self.cli.ExecuteRequest('GetStatus', 'TestService', '', '', '')
    self.assertTrue(mock_executor.GetStatus.called)

  def testActionInfo(self):
    """Test that we correctly execute an ActionInfo RPC."""
    mock_executor = self._PatchRpcExecutor()
    self.cli.ExecuteRequest('ActionInfo', 'TestService',
                            'healthcheck', 'action', '')
    self.assertTrue(mock_executor.ActionInfo.called)

  def testRepairService(self):
    """Test that we correctly execute a RepairService RPC."""
    mock_executor = self._PatchRpcExecutor()
    self.cli.ExecuteRequest('RepairService', 'TestService', '', '', '')
    self.assertTrue(mock_executor.RepairService.called)
diff --git a/mobmonitor/scripts/mobmonitor.py b/mobmonitor/scripts/mobmonitor.py
new file mode 100755
index 0000000..2bc70b8
--- /dev/null
+++ b/mobmonitor/scripts/mobmonitor.py
@@ -0,0 +1,187 @@
+#!/usr/bin/python2
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""The Mob* Monitor web interface."""
+
+from __future__ import print_function
+
+import cherrypy
+import json
+import os
+import sys
+
+from logging import handlers as logging_handlers
+
+from chromite.lib import remote_access
+from chromite.lib import commandline
+from chromite.lib import cros_logging as logging
+from chromite.mobmonitor.checkfile import manager
+
+
+STATICDIR = '/etc/mobmonitor/static'
+
+LOGDIR = '/var/log/mobmonitor/'
+LOGFILE = 'mobmonitor.log'
+LOGFILE_SIZE_BYTES = 1024 * 1024
+LOGFILE_COUNT = 10
+
+
+class MobMonitorRoot(object):
+  """The central object supporting the Mob* Monitor web interface."""
+
+  def __init__(self, checkfile_manager, staticdir=STATICDIR):
+    if not os.path.exists(staticdir):
+      raise IOError('Static directory does not exist: %s' % staticdir)
+
+    self.staticdir = staticdir
+    self.checkfile_manager = checkfile_manager
+
+  @cherrypy.expose
+  def index(self):
+    """Presents a welcome message."""
+    return open(os.path.join(self.staticdir, 'templates', 'index.html'))
+
+  @cherrypy.expose
+  def GetServiceList(self):
+    """Return a list of the monitored services.
+
+    Returns:
+      A list of the monitored services.
+    """
+    return json.dumps(self.checkfile_manager.GetServiceList())
+
+  @cherrypy.expose
+  def GetStatus(self, service=None):
+    """Return the health status of the specified service.
+
+    Args:
+      service: The service whose health status is being queried. If service
+        is None, return the health status of all monitored services.
+
+    Returns:
+      A list of dictionaries. Each dictionary contains the keys:
+        service: The name of the service.
+        health: A boolean describing the overall service health.
+        healthchecks: A list of unhealthy or quasi-healthy health checks.
+    """
+    service_statuses = self.checkfile_manager.GetStatus(service)
+    if not isinstance(service_statuses, list):
+      service_statuses = [service_statuses]
+
+    result = [
+        manager.MapServiceStatusToDict(status) for status in service_statuses]
+    return json.dumps(result)
+
+  @cherrypy.expose
+  def ActionInfo(self, service, healthcheck, action):
+    """Return usage and argument information for |action|.
+
+    Args:
+      service: A string. The name of a service being monitored.
+      healthcheck: A string. The name of the healthcheck the action belongs to.
+      action: A string. The name of an action specified by some healthcheck's
+        Diagnose method.
+
+    Returns:
+      TBD
+    """
+    result = self.checkfile_manager.ActionInfo(service, healthcheck, action)
+    return json.dumps(manager.MapActionInfoToDict(result))
+
+  @cherrypy.expose
+  def RepairService(self, service, healthcheck, action, args, kwargs):
+    """Execute the repair action on the specified service.
+
+    Args:
+      service: The service that the specified action will be applied to.
+      healthcheck: The particular healthcheck we are repairing.
+      action: The action to be applied.
+      args: A list of the positional arguments for the given repair action.
+      kwargs: A dictionary of keyword arguments for the given repair action.
+    """
+    # The mobmonitor's RPC library encodes arguments as strings when
+    # making a remote call to the monitor. The checkfile manager expects
+    # lists and dicts for the arguments, so we convert them here.
+    args = json.loads(args.replace('\'', '"'))
+    kwargs = json.loads(kwargs.replace('\'', '"'))
+
+    status = self.checkfile_manager.RepairService(service, healthcheck, action,
+                                                  args, kwargs)
+    return json.dumps(manager.MapServiceStatusToDict(status))
+
+
+def SetupLogging(logdir):
+  logging.basicConfig(
+      level=logging.DEBUG,
+      format='%(asctime)s:%(name)s:%(levelname)-8s %(message)s',
+      datefmt='%Y-%m-%d %H:%M',
+      filename=os.path.join(logdir, LOGFILE),
+      filemode='w'
+  )
+  rotate = logging_handlers.RotatingFileHandler(
+      os.path.join(logdir, LOGFILE), maxBytes=LOGFILE_SIZE_BYTES,
+      backupCount=LOGFILE_COUNT)
+  logging.getLogger().addHandler(rotate)
+
+
+def ParseArguments(argv):
+  """Creates the argument parser."""
+  parser = commandline.ArgumentParser(description=__doc__)
+
+  parser.add_argument('-d', '--checkdir',
+                      default='/etc/mobmonitor/checkfiles/',
+                      help='The Mob* Monitor checkfile directory.')
+  parser.add_argument('-p', '--port', type=int, default=9991,
+                      help='The Mob* Monitor port.')
+  parser.add_argument('-s', '--staticdir', default=STATICDIR,
+                      help='Mob* Monitor web ui static content directory')
+  parser.add_argument('--logdir', dest='logdir', type='path', default=LOGDIR,
+                      help='Mob* Monitor log file directory.')
+
+  return parser.parse_args(argv)
+
+
+def main(argv):
+  options = ParseArguments(argv)
+  options.Freeze()
+
+  # Configure logger.
+  SetupLogging(options.logdir)
+
+  # Configure global cherrypy parameters.
+  cherrypy.config.update(
+      {'server.socket_host': '0.0.0.0',
+       'server.socket_port': remote_access.NormalizePort(options.port)
+      })
+
+  mobmon_appconfig = {
+      '/':
+          {'tools.staticdir.root': options.staticdir
+          },
+      '/static':
+          {'tools.staticdir.on': True,
+           'tools.staticdir.dir': ''
+          },
+      '/static/css':
+          {'tools.staticdir.dir': 'css'
+          },
+      '/static/js':
+          {'tools.staticdir.dir': 'js'
+          }
+  }
+
+  # Setup the mobmonitor
+  checkfile_manager = manager.CheckFileManager(checkdir=options.checkdir)
+  mobmonitor = MobMonitorRoot(checkfile_manager, staticdir=options.staticdir)
+
+  # Start the checkfile collection and execution background task.
+  checkfile_manager.StartCollectionExecution()
+
+  # Start the Mob* Monitor.
+  cherrypy.quickstart(mobmonitor, config=mobmon_appconfig)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/mobmonitor/scripts/mobmonitor_unittest b/mobmonitor/scripts/mobmonitor_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/mobmonitor/scripts/mobmonitor_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/mobmonitor/scripts/mobmonitor_unittest.py b/mobmonitor/scripts/mobmonitor_unittest.py
new file mode 100644
index 0000000..6fc35b1
--- /dev/null
+++ b/mobmonitor/scripts/mobmonitor_unittest.py
@@ -0,0 +1,115 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for the main Mob* Monitor script."""
+
+from __future__ import print_function
+
+import json
+import os
+
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.mobmonitor.checkfile import manager
+from chromite.mobmonitor.scripts import mobmonitor
+
+
+class MockCheckFileManager(object):
+  """Mock CheckFileManager object that returns 'real' responses for testing."""
+
+  def __init__(self):
+    failed_check = manager.HEALTHCHECK_STATUS('hc1', False, 'Failed', [])
+
+    self.service_statuses = [
+        manager.SERVICE_STATUS('service1', True, []),
+        manager.SERVICE_STATUS('service2', False, [failed_check])]
+
+    self.action_info = manager.ACTION_INFO('DummyAction', '', ['x'], {})
+
+  def GetServiceList(self):
+    """Mock GetServiceList response."""
+    return ['test_service_1', 'test_service_2']
+
+  def GetStatus(self, service=None):
+    """Mock GetStatus response."""
+    if service is None:
+      return self.service_statuses
+
+    return self.service_statuses[0]
+
+  def ActionInfo(self, _service, _healthcheck, _action):
+    """Mock ActionInfo response."""
+    return self.action_info
+
+  def RepairService(self, _service, _healthcheck, _action, _args, _kwargs):
+    """Mock RepairService response."""
+    return self.service_statuses[0]
+
+
+class MobMonitorRootTest(cros_test_lib.MockTempDirTestCase):
+  """Unittests for the MobMonitorRoot."""
+
+  STATICDIR = 'static'
+
+  def setUp(self):
+    """Setup directories expected by the Mob* Monitor."""
+    self.mobmondir = self.tempdir
+    self.staticdir = os.path.join(self.mobmondir, self.STATICDIR)
+    osutils.SafeMakedirs(self.staticdir)
+
+  def testGetServiceList(self):
+    """Test the GetServiceList RPC."""
+    cfm = MockCheckFileManager()
+    root = mobmonitor.MobMonitorRoot(cfm, staticdir=self.staticdir)
+    self.assertEqual(cfm.GetServiceList(), json.loads(root.GetServiceList()))
+
+  def testGetStatus(self):
+    """Test the GetStatus RPC."""
+    cfm = MockCheckFileManager()
+    root = mobmonitor.MobMonitorRoot(cfm, staticdir=self.staticdir)
+
+    # Test the result for a single service.
+    status = cfm.service_statuses[0]
+    expect = {'service': status.service, 'health': status.health,
+              'healthchecks': []}
+    self.assertEquals([expect], json.loads(root.GetStatus(status.service)))
+
+    # Test the result for multiple services.
+    status1, status2 = cfm.service_statuses
+    check = status2.healthchecks[0]
+    expect = [{'service': status1.service, 'health': status1.health,
+               'healthchecks': []},
+              {'service': status2.service, 'health': status2.health,
+               'healthchecks': [{'name': check.name, 'health': check.health,
+                                 'description': check.description,
+                                 'actions': []}]}]
+    self.assertEquals(expect, json.loads(root.GetStatus()))
+
+  def testActionInfo(self):
+    """Test the ActionInfo RPC."""
+    cfm = MockCheckFileManager()
+    root = mobmonitor.MobMonitorRoot(cfm, staticdir=self.staticdir)
+
+    expect = {'action': 'DummyAction', 'info': '', 'args': ['x'], 'kwargs': {}}
+    self.assertEquals(expect,
+                      json.loads(root.ActionInfo('service2',
+                                                 'dummy_healthcheck',
+                                                 'DummyAction')))
+
+  def testRepairService(self):
+    """Test the RepairService RPC."""
+    cfm = MockCheckFileManager()
+    root = mobmonitor.MobMonitorRoot(cfm, staticdir=self.staticdir)
+
+    status = cfm.service_statuses[0]
+    expect = {'service': status.service, 'health': status.health,
+              'healthchecks': []}
+    string_args = '[1, 2]'
+    string_kwargs = '{"a": 1}'
+    self.assertEquals(expect,
+                      json.loads(root.RepairService('dummy_service',
+                                                    'dummy_healthcheck',
+                                                    'dummy_action',
+                                                    string_args,
+                                                    string_kwargs)))
diff --git a/mobmonitor/static/css/style.css b/mobmonitor/static/css/style.css
new file mode 100644
index 0000000..1df1118
--- /dev/null
+++ b/mobmonitor/static/css/style.css
@@ -0,0 +1,158 @@
+/* General Style Settings */
+body {
+  background-color: #788999;
+  margin: 0;
+  padding: 0;
+}
+
+table {
+  border-collapse: collapse;
+}
+
+td {
+  border: 1px solid #999;
+  padding: 5px;
+  text-align: left;
+}
+
+.bold {
+  font-weight: bold;
+}
+
+.circle-healthy {
+  width: 20px;
+  height: 20px;
+  border-radius: 10px;
+  background: green;
+}
+
+.circle-quasi-healthy {
+  width: 20px;
+  height: 20px;
+  border-radius: 10px;
+  background: #CC7A00;
+}
+
+.circle-unhealthy {
+  width: 20px;
+  height: 20px;
+  border-radius: 10px;
+  background: red;
+}
+
+/* Site Header Settings */
+.site-header {
+  background-color: #20262c;
+  padding-top: 50px;
+  padding-right: 0px;
+  padding-left: 0px;
+  padding-bottom: 50px;
+  margin-bottom: 0px;
+}
+
+.site-header-title {
+  max-width: 1000px;
+  width: 90%;
+  margin: 0px auto;
+  margin-top: 0px;
+  margin-right: auto;
+  margin-bottom: 0px;
+  margin-left: auto;
+  text-align: center;
+  font-size: 32pt;
+  font-weight: bold;
+  color: #ACD6FF;
+}
+
+.collect-logs {
+  float: right;
+}
+
+/* Health Display Style Settings */
+.health-display {
+  max-width: 1000px;
+  width: 90%;
+  margin: 0px auto;
+  margin-top: 0px;
+  margin-right: auto;
+  margin-bottom: 0px;
+  margin-left: auto;
+}
+
+.health-container {
+  border: 1px solid black;
+  background-color: #FFF;
+}
+
+.health-container-header {
+  background-color: #FFFAAA;
+
+  width: 100%;
+}
+
+.health-container-header-element {
+  float: left;
+  margin: 10px;
+}
+
+.health-container-content {
+  background-color: #FFF;
+  padding: 10px;
+}
+
+.healthcheck-table {
+  margin: auto;
+  border: 1px solid black;
+  width: 100%;
+}
+
+.healthcheck-table-column1 {
+  width: 65%;
+}
+
+.healthcheck-table-column2 {
+  width: 35%;
+}
+
+.healthcheck-info {
+}
+
+.run-repair-action {
+  margin-right: 5px;
+}
+
+.color-healthy {
+  color: green;
+}
+
+.color-unhealthy {
+  color: red;
+}
+
+.color-quasi-healthy {
+  color: #CC7A00;
+}
+
+/* Action Repair Style Settings */
+.actionlist-dropdown {
+  width:95%;
+}
+
+.label-help {
+  width: 95%;
+  display: inline-block;
+  padding: 5px;
+  font-size: 12px;
+  white-space: pre-wrap;
+}
+
+.input-text {
+  width: 95%;
+  margin-bottom: 10px;
+}
+
+.actionDisplay {
+  width: 95%;
+  height: 150px;
+  resize: vertical;
+}
diff --git a/mobmonitor/static/js/actionrepairdialog.js b/mobmonitor/static/js/actionrepairdialog.js
new file mode 100644
index 0000000..8b177ad
--- /dev/null
+++ b/mobmonitor/static/js/actionrepairdialog.js
@@ -0,0 +1,157 @@
+// Copyright 2015 The Chromium OS Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+'use strict';
+
+
+var ARGS_HELP_TEXT = 'Enter arguments as a comma separated list of the form: ' +
+                     'arg1,arg2,...,argN.';
+
+var ARGS_ACTION_HELP_TEXT = '\n\nYou must enter the arguments: ';
+
+var KWARGS_HELP_TEXT = 'Enter keyword arguments as a comma separated list of ' +
+                       'equal sign separated values of the form: ' +
+                       'kwarg1=value1,kwarg2=value2,...,kwargN=valueN.';
+
+var KWARGS_ACTION_HELP_TEXT = '\n\nYou may enter zero or more of the' +
+                              ' following arguments: ';
+
+var NO_ARGS_TEXT = '\n\nNo arguments for you to add.';
+
+
+function ActionRepairDialog(service, actionInfo) {
+  // The actionInfo parameter is an object with the following fields:
+  //  action: A string. The name of the repair action.
+  //  info: A string. A description of the repair action.
+  //  args: An array. The positional arguments taken by the action.
+  //  kwargs: An object. The keyword arguments taken by the action.
+
+  var actionRepairDialog = this;
+
+  var templateData = {
+    action: actionInfo.action,
+    info: actionInfo.info
+  };
+
+  this.service = service;
+  this.actionInfo = actionInfo;
+
+  this.dialogElement_ = $(
+      renderTemplate('actionrepairdialog', templateData)).dialog({
+    autoOpen: false,
+    width: 575,
+    modal: true,
+
+    close: function(event, ui) {
+      $(this).dialog('destroy').remove();
+    },
+
+    buttons: {
+      'Reset': function() {
+        actionRepairDialog.reset();
+      },
+      'Submit': function() {
+        actionRepairDialog.submit();
+      }
+    }
+  });
+
+  // Commonly used elements of the dialog ui.
+  var d = this.dialogElement_;
+  this.dialogArgs = $(d).find('#args')[0];
+  this.dialogArgsHelp = $(d).find('#argsHelp')[0];
+  this.dialogKwargs = $(d).find('#kwargs')[0];
+  this.dialogKwargsHelp = $(d).find('#kwargsHelp')[0];
+
+  // Set default action info.
+  this.reset();
+}
+
+ActionRepairDialog.prototype.open = function() {
+  this.dialogElement_.dialog('open');
+};
+
+ActionRepairDialog.prototype.close = function() {
+  this.dialogElement_.dialog('close');
+};
+
+ActionRepairDialog.prototype.reset = function() {
+  var actionInfo = this.actionInfo;
+
+  // Clear old input.
+  this.dialogArgs.value = '';
+  this.dialogKwargs.value = '';
+
+  // Set the argument information.
+  if (!isEmpty(actionInfo.args)) {
+    $(this.dialogArgsHelp).text(ARGS_HELP_TEXT + ARGS_ACTION_HELP_TEXT +
+                                actionInfo.args.join(','));
+    this.dialogArgs.disabled = false;
+  }
+  else {
+    $(this.dialogArgsHelp).text(ARGS_HELP_TEXT + NO_ARGS_TEXT);
+    this.dialogArgs.disabled = true;
+  }
+
+  // Set the kwarg information.
+  if (!isEmpty(actionInfo.kwargs)) {
+    var kwargs = [];
+    Object.keys(this.actionInfo.kwargs).forEach(function(key) {
+      kwargs.push(key + '=' + actionInfo.kwargs[key]);
+    });
+
+    this.dialogKwargs.value = kwargs.join(',');
+    this.dialogKwargs.disabled = false;
+    $(this.dialogKwargsHelp).text(
+        KWARGS_HELP_TEXT + KWARGS_ACTION_HELP_TEXT +
+        Object.keys(actionInfo.kwargs).join(','));
+  }
+  else {
+    $(this.dialogKwargsHelp).text(KWARGS_HELP_TEXT + NO_ARGS_TEXT);
+    this.dialogKwargs.disabled = true;
+  }
+};
+
+ActionRepairDialog.prototype.submit = function() {
+  // Caller must define the function 'submitHandler' on the created dialog.
+  // The submitHandler will be passed the following arguments:
+  //  service: A string.
+  //  action: A string.
+  //  args: An array.
+  //  kwargs: An object.
+
+  if (!this.submitHandler) {
+    alert('Caller must define submitHandler for ActionRepairDialog.');
+    return;
+  }
+
+  // Validate the argument input.
+  var args = this.dialogArgs.value;
+  var kwargs = this.dialogKwargs.value;
+
+  if (args && !/^([^,]+,)*[^,]+$/g.test(args)) {
+    alert('Arguments are not well-formed.\n' +
+          'Expected form: a1,a2,...,aN');
+    return;
+  }
+
+  if (kwargs && !/^([^,=]+=[^,=]+,)*[^,]+=[^,=]+$/g.test(kwargs)) {
+    alert('Keyword arguments are not well-formed.\n' +
+          'Expected form: kw1=foo,...,kwN=bar');
+    return;
+  }
+
+  // Submit the action.
+  var submitArgs = args ? args.split(',') : [];
+  var submitKwargs = {};
+  kwargs.split(',').forEach(function(elem, index, array) {
+    var kv = elem.split('=');
+    submitKwargs[kv[0]] = kv[1];
+  });
+
+
+  this.submitHandler(this.service, this.actionInfo.action, submitArgs,
+                     submitKwargs);
+  this.close();
+};
diff --git a/mobmonitor/static/js/healthdisplay.js b/mobmonitor/static/js/healthdisplay.js
new file mode 100644
index 0000000..d9895cf
--- /dev/null
+++ b/mobmonitor/static/js/healthdisplay.js
@@ -0,0 +1,150 @@
+// Copyright 2015 The Chromium OS Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+'use strict';
+
+var RECORD_TTL_MS = 10000;
+
+
+$.widget('mobmonitor.healthDisplay', {
+  options: {},
+
+  _create: function() {
+    this.element.addClass('health-display');
+
+    // Service status information. The variable serviceHealthStatusInfo
+    // is a mapping of the following form:
+    //
+    //  {serviceName: {lastUpdatedTimestampInMs: lastUpdatedTimestampInMs,
+    //                 serviceStatus: serviceStatus}}
+    //
+    //  Where serviceStatus objects are of the following form:
+    //
+    //  {serviceName: serviceNameString,
+    //   health: boolean,
+    //   healthchecks: [
+    //      {name: healthCheckName, health: boolean,
+    //       description: descriptionString,
+    //       actions: [actionNameString]}
+    //    ]
+    //  }
+    this.serviceHealthStatusInfo = {};
+  },
+
+  _destroy: function() {
+    this.element.removeClass('health-display');
+    this.element.empty();
+  },
+
+  _setOption: function(key, value) {
+    this._super(key, value);
+  },
+
+  // Private widget methods.
+  // TODO (msartori): Implement crbug.com/520746.
+  _updateHealthDisplayServices: function(services) {
+    var self = this;
+
+    function _removeService(service) {
+      // Remove the UI elements.
+      var id = '#' + SERVICE_CONTAINER_PREFIX + service;
+      $(id).empty();
+      $(id).remove();
+
+      // Remove raw service status info that we are holding.
+      delete self.serviceHealthStatusInfo[service];
+    }
+
+    function _addService(serviceStatus) {
+      // This function is used as a callback to the rpcGetStatus.
+      // rpcGetStatus returns a list of service health statuses.
+      // In this widget, we add services one at a time, so take
+      // the first element.
+      serviceStatus = serviceStatus[0];
+
+      // Create the new content for the healthDisplay widget.
+      var templateData = jQuery.extend({}, serviceStatus);
+      templateData.serviceId = SERVICE_CONTAINER_PREFIX + serviceStatus.service;
+      templateData.errors = serviceStatus.healthchecks.filter(function(v) {
+        return !v.health;
+      });
+      templateData.warnings = serviceStatus.healthchecks.filter(function(v) {
+        return v.health;
+      });
+
+      var healthContainer = renderTemplate('healthstatuscontainer',
+                                           templateData);
+
+      // Insert the new container into the display widget.
+      $(healthContainer).appendTo(self.element);
+
+      // Maintain alphabetical order in our display.
+      self.element.children().sort(function(a, b) {
+          return $(a).attr('id') < $(b).attr('id') ? -1 : 1;
+      }).appendTo(self.element);
+
+      // Save information to do with this service.
+      var curtime = $.now();
+      var service = serviceStatus.service;
+
+      self.serviceHealthStatusInfo[service] = {
+          lastUpdatedTimestampInMs: curtime,
+          serviceStatus: serviceStatus
+      };
+    }
+
+    // Remove services that are no longer monitored or are stale.
+    var now = $.now();
+
+    Object.keys(this.serviceHealthStatusInfo).forEach(
+        function(elem, index, array) {
+          if ($.inArray(elem, services) < 0 ||
+            now > self.serviceHealthStatusInfo[elem].lastUpdatedTimestampInMs +
+              RECORD_TTL_MS) {
+            _removeService(elem);
+          }
+    });
+
+    // Get sublist of services to update.
+    var updateList =
+        $(services).not(Object.keys(this.serviceHealthStatusInfo)).get();
+
+    // Update the services.
+    updateList.forEach(function(elem, index, array) {
+      rpcGetStatus(elem, _addService);
+    });
+  },
+
+  // Public widget methods.
+  refreshHealthDisplay: function() {
+    var self = this;
+    rpcGetServiceList(function(services) {
+      self._updateHealthDisplayServices(services);
+    });
+  },
+
+  needsRepair: function(service) {
+    var serviceStatus = this.serviceHealthStatusInfo[service].serviceStatus;
+    return serviceStatus.health == 'false' ||
+        serviceStatus.healthchecks.length > 0;
+  },
+
+  markStale: function(service) {
+    this.serviceHealthStatusInfo[service].lastUpdatedTimestampInMs = 0;
+  },
+
+  getServiceActions: function(service) {
+    var actionSet = {};
+    var healthchecks =
+        this.serviceHealthStatusInfo[service].serviceStatus.healthchecks;
+
+    for (var i = 0; i < healthchecks.length; i++) {
+      for (var j = 0; j < healthchecks[i].actions.length; j++) {
+        actionSet[healthchecks[i].actions[j]] = true;
+      }
+    }
+
+    return Object.keys(actionSet);
+  }
+});
diff --git a/mobmonitor/static/js/main.js b/mobmonitor/static/js/main.js
new file mode 100644
index 0000000..ec1fad6
--- /dev/null
+++ b/mobmonitor/static/js/main.js
@@ -0,0 +1,59 @@
+// Copyright 2015 The Chromium OS Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+'use strict';
+
+
+var HEALTH_DISPLAY_REFRESH_MS = 1000;
+
+
+$(document).ready(function() {
+  // Setup the health status widget.
+  $('#healthStatusDisplay').healthDisplay();
+  $('#healthStatusDisplay').healthDisplay('refreshHealthDisplay');
+
+  setInterval(function() {
+    $('#healthStatusDisplay').healthDisplay('refreshHealthDisplay');
+  }, HEALTH_DISPLAY_REFRESH_MS);
+
+  // Setup the log collection button.
+  $(document).on('click', '.collect-logs', function() {
+    console.log('Collecting logs!');
+  });
+
+  // Setup the repair action buttons
+  $(document).on('click', '.run-repair-action', function() {
+    // Retrieve the service and action for this repair button.
+    var action = $(this).attr('action');
+    var healthcheck = $(this).closest('.healthcheck-info').attr('hcname');
+    var service = $(this).closest('.health-container').attr('id');
+    if (service.indexOf(SERVICE_CONTAINER_PREFIX) === 0) {
+      service = service.replace(SERVICE_CONTAINER_PREFIX, '');
+    }
+
+    // Do not launch dialog if this service does not need repair.
+    if (!$('#healthStatusDisplay').healthDisplay('needsRepair', service)) {
+      return;
+    }
+
+    function repairServiceCallback(response) {
+      $('#healthStatusDisplay').healthDisplay('markStale', response.service);
+    }
+
+    rpcActionInfo(service, healthcheck, action, function(response) {
+      if (isEmpty(response.args) && isEmpty(response.kwargs)) {
+        rpcRepairService(service, healthcheck, action,
+                         [], {}, repairServiceCallback);
+        return;
+      }
+
+      var dialog = new ActionRepairDialog(service, response);
+      dialog.submitHandler = function(service, action, args, kwargs) {
+        rpcRepairService(service, healthcheck, action,
+                         args, kwargs, repairServiceCallback);
+      };
+      dialog.open();
+    });
+  });
+});
diff --git a/mobmonitor/static/js/rpc.js b/mobmonitor/static/js/rpc.js
new file mode 100644
index 0000000..7b27e60
--- /dev/null
+++ b/mobmonitor/static/js/rpc.js
@@ -0,0 +1,50 @@
+// Copyright 2015 The Chromium OS Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+'use strict';
+
+
+function rpcGetServiceList(callback) {
+  $.getJSON('/GetServiceList', callback);
+}
+
+function rpcGetStatus(service, callback) {
+  $.getJSON('/GetStatus', {service: service}, callback);
+}
+
+function rpcActionInfo(service, healthcheck, action, callback) {
+  var data = {
+    service: service,
+    healthcheck: healthcheck,
+    action: action
+  };
+
+  $.getJSON('/ActionInfo', data, callback);
+}
+
+function rpcRepairService(service, healthcheck, action,
+                          args, kwargs, callback) {
+
+  if (isEmpty(service))
+    throw new InvalidRpcArgumentError(
+        'Must specify service in RepairService RPC');
+
+  if (isEmpty(healthcheck))
+    throw new InvalidRpcArgumentError(
+        'Must specify healthcheck in RepairService RPC');
+
+  if (isEmpty(action))
+    throw new InvalidRpcArgumentError(
+        'Must specify action in RepairService RPC');
+
+  var data = {
+    service: service,
+    healthcheck: healthcheck,
+    action: action,
+    args: JSON.stringify(args),
+    kwargs: JSON.stringify(kwargs)
+  };
+
+  $.post('/RepairService', data, callback, 'json');
+}
diff --git a/mobmonitor/static/js/template.js b/mobmonitor/static/js/template.js
new file mode 100644
index 0000000..7950f0c
--- /dev/null
+++ b/mobmonitor/static/js/template.js
@@ -0,0 +1,31 @@
+// Copyright 2015 The Chromium OS Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+'use strict';
+
+
+function renderTemplate(templateName, templateData) {
+  if (!renderTemplate.cache) {
+    renderTemplate.cache = {};
+  }
+
+  if (!renderTemplate.cache[templateName]) {
+    var dir = '/static/templates';
+    var url = dir + '/' + templateName + '.html';
+
+    var templateString;
+    $.ajax({
+      url: url,
+      method: 'GET',
+      async: false,
+      success: function(data) {
+        templateString = data;
+      }
+    });
+
+    renderTemplate.cache[templateName] = Handlebars.compile(templateString);
+  }
+
+  return renderTemplate.cache[templateName](templateData);
+}
diff --git a/mobmonitor/static/js/util.js b/mobmonitor/static/js/util.js
new file mode 100644
index 0000000..1609c9b
--- /dev/null
+++ b/mobmonitor/static/js/util.js
@@ -0,0 +1,20 @@
+// Copyright 2015 The Chromium OS Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+'use strict';
+
+var SERVICE_CONTAINER_PREFIX = 'serviceHealthContainer';
+
+
+function InvalidRpcArgumentError(message) {
+  this.message = message;
+}
+InvalidRpcArgumentError.prototype = new Error;
+
+function isEmpty(x) {
+  if (typeof(x) === 'undefined' || x === null)
+    return true;
+
+  return $.isEmptyObject(x);
+}
diff --git a/mobmonitor/static/templates/actionrepairdialog.html b/mobmonitor/static/templates/actionrepairdialog.html
new file mode 100644
index 0000000..3a4fb5b
--- /dev/null
+++ b/mobmonitor/static/templates/actionrepairdialog.html
@@ -0,0 +1,13 @@
+<div id="actionRepairDialog" title="Setup {{action}}">
+  <label for="actionDescription">Description:</label>
+  <textarea style="font-size:10pt" readonly name="actionDescription" id="actionDescription" class="actionDisplay">{{info}}
+  </textarea><br><br>
+
+  <label for="args">Arguments:</label><br>
+  <label for="args" id="argsHelp" class="label-help">Help text for args</label>
+  <input style="font-size:10pt" type="text" name="args" id="args" value="" class="input-text">
+
+  <label for="kwargs">Keyword Arguments:</label>
+  <label for="kwargs" id="kwargsHelp" class="label-help">Help text for kwargs</label>
+  <input style="font-size:10pt" type="text" name="kwargs" id="kwargs" value="" class="input-text">
+</div>
diff --git a/mobmonitor/static/templates/healthstatuscontainer.html b/mobmonitor/static/templates/healthstatuscontainer.html
new file mode 100644
index 0000000..2840094
--- /dev/null
+++ b/mobmonitor/static/templates/healthstatuscontainer.html
@@ -0,0 +1,80 @@
+<div id={{serviceId}} class="health-container">
+  <div class="health-container-header">
+    {{#if healthchecks.length}}
+      {{#if health}}
+        <div class="circle-quasi-healthy health-container-header-element"></div>
+        <div class="health-container-header-element"><b>{{service}}</b> is <font class="color-quasi-healthy">healthy</font>.</div>
+      {{else}}
+        <div class="circle-unhealthy health-container-header-element"></div>
+        <div class="health-container-header-element"><b>{{service}}</b> is <font class="color-unhealthy">unhealthy</font>.</div>
+      {{/if}}
+    {{else}}
+        <div class="circle-healthy health-container-header-element"></div>
+        <div class="health-container-header-element"><b>{{service}}</b> is <font class="color-healthy">healthy</font>.</div>
+    {{/if}}
+
+    <!--
+      Clear must be used or else floating causes the height of other
+      divs to warp.
+    -->
+    <div style="clear:both;"></div>
+  </div>
+
+  {{#if healthchecks.length}}
+    <div class="health-container-content">
+      {{#if errors.length}}
+        <b>Errors:</b>
+        <table class="healthcheck-table">
+          <tr>
+            <td class="bold healthcheck-table-column1">Issue Description</td>
+            <td class="bold healthcheck-table-column2">Recommended Action(s)</td>
+          </tr>
+          {{#each errors}}
+            <tr class="healthcheck-info" hcname="{{name}}">
+              <td>{{description}}</td>
+              <td>
+                <ul>
+                  {{#each actions}}
+                    <li>
+                      <button class="run-repair-action" action="{{this}}">Run</button>
+                      {{this}}
+                    </li>
+                  {{/each}}
+                </ul>
+              </td>
+            </tr>
+          {{/each}}
+        </table>
+      {{/if}}
+
+      {{#if warnings.length}}
+        {{#if errors.length}}
+          <br>
+        {{/if}}
+        <b>Warnings:</b>
+        <table class="healthcheck-table">
+          <tr>
+            <td class="bold healthcheck-table-column1">Issue Description</td>
+            <td class="bold healthcheck-table-column2">Recommended Action(s)</td>
+          </tr>
+          {{#each warnings}}
+            <tr class="healthcheck-info" hcname="{{name}}">
+              <td>{{description}}</td>
+              <td>
+                <ul>
+                  {{#each actions}}
+                    <li>
+                      <button class="run-repair-action" action="{{this}}">Run</button>
+                      {{this}}
+                    </li>
+                  {{/each}}
+                </ul>
+              </td>
+            </tr>
+          {{/each}}
+        </table>
+      {{/if}}
+    </div>
+  {{/if}}
+
+</div>
diff --git a/mobmonitor/static/templates/index.html b/mobmonitor/static/templates/index.html
new file mode 100644
index 0000000..7365389
--- /dev/null
+++ b/mobmonitor/static/templates/index.html
@@ -0,0 +1,29 @@
+<html>
+  <head>
+    <title>Mob* Monitor</title>
+    <link href="/static/css/style.css" rel="stylesheet">
+    <link href="https://ajax.googleapis.com/ajax/libs/jqueryui/1.11.4/themes/smoothness/jquery-ui.css" rel="stylesheet">
+    <script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js"></script>
+    <script src="https://ajax.googleapis.com/ajax/libs/jqueryui/1.11.4/jquery-ui.min.js"></script>
+    <script src="https://cdnjs.cloudflare.com/ajax/libs/handlebars.js/3.0.3/handlebars.js"></script>
+    <script src="/static/js/util.js"></script>
+    <script src="/static/js/template.js"></script>
+    <script src="/static/js/rpc.js"></script>
+    <script src="/static/js/actionrepairdialog.js"></script>
+    <script src="/static/js/healthdisplay.js"></script>
+    <script src="/static/js/main.js"></script>
+  </head>
+
+  <body>
+    <header class="site-header">
+      <div class="site-header-title">Mob* Monitor</div>
+    </header>
+
+    <button class="collect-logs">Collect Logs</button>
+    <br><br>
+
+    <div id="healthStatusDisplay">
+    </div>
+
+  </body>
+</html>
diff --git a/mobmonitor/system/__init__.py b/mobmonitor/system/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/mobmonitor/system/__init__.py
diff --git a/mobmonitor/system/systeminfo.py b/mobmonitor/system/systeminfo.py
new file mode 100644
index 0000000..71b8ad4
--- /dev/null
+++ b/mobmonitor/system/systeminfo.py
@@ -0,0 +1,493 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module allows for easy access to common system information.
+
+In order to reduce stress due to excessive checking of system information,
+the records collected by the classes in this file are stored with a period
+of validity. If a record is requested, and there is an existing record
+of the same type, and it is still valid, it is returned. If there is no
+existing record, or it has gone stale, then the system information is
+actually collected.
+
+Each class uses a few attributes to regulate when it should do
+a collection; they are:
+  update_sec: An integer that is the period of validity for a record.
+  update_times: A dictionary that maps a resource name to an epoch time.
+  resources: A dictionary that maps a resource name to the actual record.
+
+On retrieving a new record, the time at which it is collected is stored
+in update_times with the record name as key, and the record itself is
+stored in resources. Every subsequent collection returns what is stored
+in the resources dict until the record goes stale.
+
+Users should not directly access the system information classes, but
+should instead use the 'getters' (ie. GetCpu, GetDisk, GetMemory) defined
+at the bottom of this file.
+
+Each of these getters is decorated with the CacheInfoClass decorator.
+This decorator caches instances of each storage class by the specified
+update interval. With this, multiple checkfiles can access the same
+information class instance, which can help to reduce additional and
+redundant system checks being performed.
+"""
+
+from __future__ import print_function
+
+import collections
+import functools
+import itertools
+import os
+import time
+
+from chromite.lib import cros_build_lib
+from chromite.lib import osutils
+
+
+SYSTEMFILE_PROC_MOUNTS = '/proc/mounts'
+SYSTEMFILE_PROC_MEMINFO = '/proc/meminfo'
+SYSTEMFILE_PROC_FILESYSTEMS = '/proc/filesystems'
+SYSTEMFILE_PROC_STAT = '/proc/stat'
+SYSTEMFILE_DEV_DISKBY = {
+    'ids': '/dev/disk/by-id',
+    'labels': '/dev/disk/by-label',
+}
+
+
+UPDATE_DEFAULT_SEC = 30
+UPDATE_MEMORY_SEC = UPDATE_DEFAULT_SEC
+UPDATE_DISK_SEC = UPDATE_DEFAULT_SEC
+UPDATE_CPU_SEC = 2
+
+RESOURCENAME_MEMORY = 'memory'
+RESOURCENAME_DISKPARTITIONS = 'diskpartitions'
+RESOURCENAME_DISKUSAGE = 'diskusage'
+RESOURCENAME_BLOCKDEVICE = 'blockdevice'
+RESOURCENAME_CPUPREVTIMES = 'cpuprevtimes'
+RESOURCENAME_CPUTIMES = 'cputimes'
+RESOURCENAME_CPULOADS = 'cpuloads'
+
+RESOURCE_MEMORY = collections.namedtuple('memory', ['total', 'available',
+                                                    'percent_used'])
+RESOURCE_DISKPARTITION = collections.namedtuple('diskpartition',
+                                                ['devicename', 'mountpoint',
+                                                 'filesystem'])
+RESOURCE_DISKUSAGE = collections.namedtuple('diskusage', ['total', 'used',
+                                                          'free',
+                                                          'percent_used'])
+RESOURCE_BLOCKDEVICE = collections.namedtuple('blockdevice',
+                                              ['device', 'size', 'ids',
+                                               'labels'])
+RESOURCE_CPUTIME = collections.namedtuple('cputime', ['cpu', 'total', 'idle',
+                                                      'nonidle'])
+RESOURCE_CPULOAD = collections.namedtuple('cpuload', ['cpu', 'load'])
+
+
+CPU_ATTRS = ('cpu', 'user', 'nice', 'system', 'idle', 'iowait', 'irq',
+             'softirq', 'steal', 'guest', 'guest_nice')
+CPU_IDLE_ATTRS = frozenset(['idle', 'iowait'])
+CPU_NONIDLE_ATTRS = frozenset(['user', 'nice', 'system', 'irq', 'softirq'])
+
+
+def CheckStorage(resource_basename):
+  """Decorate information functions to retrieve stored records if valid.
+
+  Args:
+    resource_basename: The data value basename we are checking our
+      local storage for.
+
+  Returns:
+    The real function decorator.
+  """
+
+  def func_deco(func):
+    """Return stored record if valid, else run function and update storage.
+
+    Args:
+      func: The collection function we are executing.
+
+    Returns:
+      The function wrapper.
+    """
+
+    @functools.wraps(func)
+    def wrapper(self, *args, **kwargs):
+      """Function wrapper.
+
+      Args:
+        args: Positional arguments that will be appended to dataname when
+          searching the local storage.
+
+      Returns:
+        The stored record or the new record as a result of running the
+        collection function.
+      """
+      dataname = resource_basename
+      if args:
+        dataname = '%s:%s' % (dataname, args[0])
+
+      if not self.NeedToUpdate(dataname):
+        return self.resources.get(dataname)
+
+      datavalue = func(self, *args, **kwargs)
+      self.Update(dataname, datavalue)
+
+      return datavalue
+
+    return wrapper
+
+  return func_deco
+
+
+def CacheInfoClass(class_name, update_default_sec):
+  """Cache system information class instances by update_sec interval time.
+
+  Args:
+    class_name: The name of the system information class.
+    update_default_sec: The default update interval for this class.
+
+  Returns:
+    The real function decorator.
+  """
+  def func_deco(func):
+    """Return the cached class instance.
+
+    Args:
+      func: The system information class 'getter'.
+
+    Returns:
+      The function wrapper.
+    """
+    cache = {}
+
+    @functools.wraps(func)
+    def wrapper(update_sec=update_default_sec):
+      """Function wrapper for caching system information class objects.
+
+      Args:
+        update_sec: The update interval for the class instance.
+
+      Returns:
+        The cached class instance that has this update interval.
+      """
+      key = '%s:%s' % (class_name, update_sec)
+
+      if key not in cache:
+        cache[key] = func(update_sec=update_sec)
+
+      return cache[key]
+
+    return wrapper
+
+  return func_deco
+
+
+class SystemInfoStorage(object):
+  """Store and access system information."""
+
+  def __init__(self, update_sec=UPDATE_DEFAULT_SEC):
+    self.update_sec = update_sec
+    self.update_times = {}
+    self.resources = {}
+
+  def Update(self, resource_name, data):
+    """Update local storage and collection times of the data.
+
+    Args:
+      resource_name: The key used for local storage and update times.
+      data: The data to store that is keyed by resource_name.
+    """
+    self.update_times[resource_name] = time.time()
+    self.resources[resource_name] = data
+
+  def NeedToUpdate(self, resource_name):
+    """Check if the record keyed by resource_name needs to be (re-)collected.
+
+    Args:
+      resource_name: A string representing some system value.
+
+    Returns:
+      A boolean. If True, the data must be collected. If False, the data
+      can be retrieved from the self.resources dict with key resource_name.
+    """
+    if resource_name not in self.resources:
+      return True
+
+    if resource_name not in self.update_times:
+      return True
+
+    return time.time() > self.update_sec + self.update_times.get(resource_name)
+
+
+class Memory(SystemInfoStorage):
+  """Access memory information."""
+
+  def __init__(self, update_sec=UPDATE_MEMORY_SEC):
+    super(Memory, self).__init__(update_sec=update_sec)
+
+  @CheckStorage(RESOURCENAME_MEMORY)
+  def MemoryUsage(self):
+    """Collect memory information from /proc/meminfo.
+
+    Returns:
+      A named tuple with the following fields:
+        total: Corresponds to MemTotal of /proc/meminfo.
+        available: Corresponds to (MemFree+Buffers+Cached) of /proc/meminfo.
+        percent_used: The percentage of memory that is used based on
+          total and available.
+    """
+    # See MOCK_PROC_MEMINFO in the unittest file for this module
+    # to see an example of the file this function is reading from.
+    memtotal, memfree, buffers, cached = (0, 0, 0, 0)
+    with open(SYSTEMFILE_PROC_MEMINFO, 'rb') as f:
+      for line in f:
+        if line.startswith('MemTotal'):
+          memtotal = int(line.split()[1]) * 1024
+        if line.startswith('MemFree'):
+          memfree = int(line.split()[1]) * 1024
+        if line.startswith('Buffers'):
+          buffers = int(line.split()[1]) * 1024
+        if line.startswith('Cached'):
+          cached = int(line.split()[1]) * 1024
+
+    available = memfree + buffers + cached
+    percent_used = float(memtotal - available) / memtotal * 100
+
+    memory = RESOURCE_MEMORY(memtotal, available, percent_used)
+
+    return memory
+
+
+class Disk(SystemInfoStorage):
+  """Access disk information."""
+
+  def __init__(self, update_sec=UPDATE_DISK_SEC):
+    super(Disk, self).__init__(update_sec=update_sec)
+
+  @CheckStorage(RESOURCENAME_DISKPARTITIONS)
+  def DiskPartitions(self):
+    """Collect basic information about disk partitions.
+
+    Returns:
+      A list of named tuples. Each named tuple has the following fields:
+        devicename: The name of the partition.
+        mountpoint: The mount point of the partition.
+        filesystem: The file system in use on the partition.
+    """
+    # Read /proc/mounts for mounted filesystems.
+    # See MOCK_PROC_MOUNTS in the unittest file for this module
+    # to see an example of the file this function is reading from.
+    mounts = []
+    with open(SYSTEMFILE_PROC_MOUNTS, 'rb') as f:
+      for line in f:
+        iterline = iter(line.split())
+        try:
+          mounts.append([next(iterline), next(iterline), next(iterline)])
+        except StopIteration:
+          pass
+
+    # Read /proc/filesystems for a list of physical filesystems
+    # See MOCK_PROC_FILESYSTEMS in the unittest file for this module
+    # to see an example of the file this function is reading from.
+    physmounts = []
+    with open(SYSTEMFILE_PROC_FILESYSTEMS, 'rb') as f:
+      for line in f:
+        if not line.startswith('nodev'):
+          physmounts.append(line.strip())
+
+    # From these two sources, create a list of partitions
+    diskpartitions = []
+    for mountname, mountpoint, filesystem in mounts:
+      if filesystem not in physmounts:
+        continue
+      diskpartition = RESOURCE_DISKPARTITION(mountname, mountpoint, filesystem)
+      diskpartitions.append(diskpartition)
+
+    return diskpartitions
+
+  @CheckStorage(RESOURCENAME_DISKUSAGE)
+  def DiskUsage(self, partition):
+    """Collects usage information for the specified partition.
+
+    Args:
+      partition: The partition for which to check usage. This is the
+        same as the 'devicename' attribute given in the return value
+        of DiskPartitions.
+
+    Returns:
+      A named tuple with the following fields:
+        total: The total space on the partition.
+        used: The total amount of used space on the partition.
+        free: The total amount of unused space on the partition.
+        percent_used: The percentage of the partition that is used
+          based on total and used.
+    """
+    # Collect the partition information
+    vfsdata = os.statvfs(partition)
+    total = vfsdata.f_frsize * vfsdata.f_blocks
+    free = vfsdata.f_frsize * vfsdata.f_bfree
+    used = total - free
+    percent_used = float(used) / total * 100
+
+    diskusage = RESOURCE_DISKUSAGE(total, used, free, percent_used)
+
+    return diskusage
+
+  @CheckStorage(RESOURCENAME_BLOCKDEVICE)
+  def BlockDevices(self, device=''):
+    """Collects information about block devices.
+
+    This method combines information from:
+      (1) Reading through the SYSTEMFILE_DEV_DISKBY directories.
+      (2) Executing the 'lsblk' command provided by osutils.ListBlockDevices.
+
+    Returns:
+      A list of named tuples. Each tuple has the following fields:
+        device: The name of the block device.
+        size: The size of the block device in bytes.
+        ids: A list of ids assigned to this device.
+        labels: A list of labels assigned to this device.
+    """
+    devicefilter = os.path.basename(device)
+
+    # Data collected from the SYSTEMFILE_DEV_DISKBY directories.
+    ids = {}
+    labels = {}
+
+    # Data collected from 'lsblk'.
+    sizes = {}
+
+    # Collect diskby information.
+    for prop, diskdir in SYSTEMFILE_DEV_DISKBY.iteritems():
+      cmd = ['find', diskdir, '-lname', '*%s' % devicefilter]
+      cmd_result = cros_build_lib.RunCommand(cmd, log_output=True)
+
+      if not cmd_result.output:
+        continue
+
+      results = cmd_result.output.split()
+      for result in results:
+        devicename = os.path.abspath(osutils.ResolveSymlink(result))
+        result = os.path.basename(result)
+
+        # Ensure that each of our data dicts have the same keys.
+        ids.setdefault(devicename, [])
+        labels.setdefault(devicename, [])
+        sizes.setdefault(devicename, 0)
+
+        if 'ids' == prop:
+          ids[devicename].append(result)
+        elif 'labels' == prop:
+          labels[devicename].append(result)
+
+    # Collect lsblk information.
+    for device in osutils.ListBlockDevices(in_bytes=True):
+      devicename = os.path.join('/dev', device.NAME)
+      if devicename in ids:
+        sizes[devicename] = int(device.SIZE)
+
+    return [RESOURCE_BLOCKDEVICE(device, sizes[device], ids[device],
+                                 labels[device])
+            for device in ids.iterkeys()]
+
+
+class Cpu(SystemInfoStorage):
+  """Access CPU information."""
+
+  def __init__(self, update_sec=UPDATE_CPU_SEC):
+    super(Cpu, self).__init__(update_sec=update_sec)
+
+    # CpuLoad depends on having two CpuTime collections at different
+    # points in time. One issue is that the first call to CpuLoad,
+    # without prior calls to CpuTime will return a trivial value, that
+    # is, all cpus will be reported to have zero load. We solve this
+    # by doing an initial CpuTime collection here.
+    self.CpuTime()
+    self.update_times.pop(RESOURCENAME_CPUTIMES)
+    self.Update(RESOURCENAME_CPUPREVTIMES,
+                self.resources.pop(RESOURCENAME_CPUTIMES))
+
+  @CheckStorage(RESOURCENAME_CPUTIMES)
+  def CpuTime(self):
+    """Collect information on CPU time.
+
+    Returns:
+      A list of named tuples. Each named tuple has the following fields:
+        cpu: An identifier for the CPU.
+        total: The total CPU time in the measurement.
+        idle: The total time spent in an idle state.
+        nonidle: The total time spent not in an idle state.
+    """
+    # Collect CPU time information from /proc/stat
+    cputimes = []
+
+    # See MOCK_PROC_STAT in the unittest file for this module
+    # to see an example of the file this function is reading from.
+    with open(SYSTEMFILE_PROC_STAT, 'rb') as f:
+      for line in f:
+        if not line.startswith('cpu'):
+          continue
+        cpudesc = dict(itertools.izip(CPU_ATTRS, line.split()))
+        idle, nonidle = (0, 0)
+        for attr, value in cpudesc.iteritems():
+          if attr in CPU_IDLE_ATTRS:
+            idle += int(value)
+          if attr in CPU_NONIDLE_ATTRS:
+            nonidle += int(value)
+        total = idle + nonidle
+        cputimes.append(RESOURCE_CPUTIME(cpudesc.get('cpu'),
+                                         total, idle, nonidle))
+
+    # Store the previous cpu times if we have a 'current' measurement
+    # that is about to be replaced. This is very helpful for calculating
+    # load estimates over the update interval.
+    if RESOURCENAME_CPUTIMES in self.resources:
+      self.Update(RESOURCENAME_CPUPREVTIMES,
+                  self.resources.get(RESOURCENAME_CPUTIMES))
+
+    return cputimes
+
+  @CheckStorage(RESOURCENAME_CPULOADS)
+  def CpuLoad(self):
+    """Estimate the CPU load.
+
+    Returns:
+      A list of named tuples. Each named tuple has the following fields:
+        cpu: An identifier for the CPU.
+        load: A number representing the load/usage ranging between 0 and 1.
+    """
+    prevcputimes = self.resources.get(RESOURCENAME_CPUPREVTIMES)
+    cputimes = self.CpuTime()
+    cpuloads = []
+    for prevtime, curtime in itertools.izip(prevcputimes, cputimes):
+      ct = curtime.total
+      ci = curtime.idle
+      pt = prevtime.total
+      pi = prevtime.idle
+
+      # Cpu load is estimated using a difference between cpu timing collections
+      # taken at different points in time. To estimate how much time in that
+      # interval was spent in a non-idle state, we calculate the percent change
+      # of the non-idle time using the relative differences in total and idle
+      # time between the two collections.
+      cpu = curtime.cpu
+      load = float((ct-pt)-(ci-pi))/(ct-pt) if (ct-pt) != 0 else 0
+      cpuloads.append(RESOURCE_CPULOAD(cpu, load))
+
+    return cpuloads
+
+
+@CacheInfoClass(Cpu.__name__, UPDATE_CPU_SEC)
+def GetCpu(update_sec=UPDATE_CPU_SEC):
+  return Cpu(update_sec=update_sec)
+
+
+@CacheInfoClass(Memory.__name__, UPDATE_MEMORY_SEC)
+def GetMemory(update_sec=UPDATE_MEMORY_SEC):
+  return Memory(update_sec=update_sec)
+
+
+@CacheInfoClass(Disk.__name__, UPDATE_DISK_SEC)
+def GetDisk(update_sec=UPDATE_DISK_SEC):
+  return Disk(update_sec=update_sec)
diff --git a/mobmonitor/system/systeminfo_unittest b/mobmonitor/system/systeminfo_unittest
new file mode 120000
index 0000000..ef3e37b
--- /dev/null
+++ b/mobmonitor/system/systeminfo_unittest
@@ -0,0 +1 @@
+../../scripts/wrapper.py
\ No newline at end of file
diff --git a/mobmonitor/system/systeminfo_unittest.py b/mobmonitor/system/systeminfo_unittest.py
new file mode 100644
index 0000000..f92b4fd
--- /dev/null
+++ b/mobmonitor/system/systeminfo_unittest.py
@@ -0,0 +1,509 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for systeminfo."""
+
+from __future__ import print_function
+
+import collections
+import io
+import mock
+import os
+import time
+
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import partial_mock
+from chromite.mobmonitor.system import systeminfo
+
+# Strings that are used to mock particular system files for testing.
+MOCK_PROC_MEMINFO = u'''
+MemTotal:       65897588 kB
+MemFree:        24802380 kB
+Buffers:         1867288 kB
+Cached:         29458948 kB
+SwapCached:            0 kB
+Active:         34858204 kB
+Inactive:        3662692 kB
+Active(anon):    7223176 kB
+Inactive(anon):   136892 kB
+Active(file):   27635028 kB
+Inactive(file):  3525800 kB
+Unevictable:       27268 kB
+Mlocked:           27268 kB
+SwapTotal:      67035132 kB
+SwapFree:       67035132 kB
+Dirty:               304 kB
+Writeback:             0 kB
+AnonPages:       7221488 kB
+Mapped:           593360 kB
+Shmem:            139136 kB
+Slab:            1740892 kB
+SReclaimable:    1539144 kB
+SUnreclaim:       201748 kB
+KernelStack:       21384 kB
+PageTables:       103680 kB
+NFS_Unstable:          0 kB
+Bounce:                0 kB
+WritebackTmp:          0 kB
+CommitLimit:    99983924 kB
+Committed_AS:   19670964 kB
+VmallocTotal:   34359738367 kB
+VmallocUsed:      359600 kB
+VmallocChunk:   34325703384 kB
+HardwareCorrupted:     0 kB
+AnonHugePages:   2924544 kB
+HugePages_Total:       0
+HugePages_Free:        0
+HugePages_Rsvd:        0
+HugePages_Surp:        0
+Hugepagesize:       2048 kB
+DirectMap4k:     1619840 kB
+DirectMap2M:    46536704 kB
+DirectMap1G:    18874368 kB
+'''
+
+MOCK_PROC_MOUNTS = u'''
+rootfs / rootfs rw 0 0
+sysfs /sys sysfs rw,nosuid,nodev,noexec,relatime 0 0
+proc /proc proc rw,nosuid,nodev,noexec,relatime 0 0
+udev /dev devtmpfs rw,relatime,size=32935004k,nr_inodes=8233751,mode=755 0 0
+devpts /dev/pts devpts rw,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000 0 0
+tmpfs /run tmpfs rw,nosuid,noexec,relatime,size=6589760k,mode=755 0 0
+securityfs /sys/kernel/security securityfs rw,relatime 0 0
+/dev/mapper/dhcp--100--106--128--134--vg-root / ext4 rw,relatime,errors=remount-ro,i_version,data=ordered 0 0
+none /sys/fs/cgroup tmpfs rw,relatime,size=4k,mode=755 0 0
+none /sys/fs/fuse/connections fusectl rw,relatime 0 0
+none /sys/kernel/debug debugfs rw,relatime 0 0
+none /run/lock tmpfs rw,nosuid,nodev,noexec,relatime,size=5120k 0 0
+none /run/shm tmpfs rw,nosuid,nodev,relatime 0 0
+none /run/user tmpfs rw,nosuid,nodev,noexec,relatime,size=102400k,mode=755 0 0
+none /sys/fs/pstore pstore rw,relatime 0 0
+/dev/sdb1 /work ext4 rw,relatime,data=ordered 0 0
+/dev/sda1 /boot ext2 rw,relatime,i_version,stripe=4 0 0
+none /dev/cgroup/cpu cgroup rw,relatime,cpuacct,cpu 0 0
+none /dev/cgroup/devices cgroup rw,relatime,devices 0 0
+/dev/sdb1 /usr/local/autotest ext4 rw,relatime,data=ordered 0 0
+rpc_pipefs /run/rpc_pipefs rpc_pipefs rw,relatime 0 0
+/dev/mapper/dhcp--100--106--128--134--vg-usr+local+google /usr/local/google ext4 rw,relatime,i_version,data=writeback 0 0
+binfmt_misc /proc/sys/fs/binfmt_misc binfmt_misc rw,nosuid,nodev,noexec,relatime 0 0
+systemd /sys/fs/cgroup/systemd cgroup rw,nosuid,nodev,noexec,relatime,name=systemd 0 0
+/etc/auto.auto /auto autofs rw,relatime,fd=7,pgrp=2440,timeout=600,minproto=5,maxproto=5,indirect 0 0
+/etc/auto.home /home autofs rw,relatime,fd=13,pgrp=2440,timeout=600,minproto=5,maxproto=5,indirect 0 0
+srcfsd /google/src fuse.srcfsd rw,nosuid,nodev,relatime,user_id=906,group_id=65534,default_permissions,allow_other 0 0
+/dev/mapper/dhcp--100--106--128--134--vg-root /home/build ext4 rw,relatime,errors=remount-ro,i_version,data=ordered 0 0
+/dev/mapper/dhcp--100--106--128--134--vg-root /auto/buildstatic ext4 rw,relatime,errors=remount-ro,i_version,data=ordered 0 0
+cros /sys/fs/cgroup/cros cgroup rw,relatime,cpuset 0 0
+gvfsd-fuse /run/user/307266/gvfs fuse.gvfsd-fuse rw,nosuid,nodev,relatime,user_id=307266,group_id=5000 0 0
+objfsd /google/obj fuse.objfsd rw,nosuid,nodev,relatime,user_id=903,group_id=65534,default_permissions,allow_other 0 0
+x20fsd /google/data fuse.x20fsd rw,nosuid,nodev,relatime,user_id=904,group_id=65534,allow_other 0 0
+'''
+
+MOCK_PROC_FILESYSTEMS = u'''
+nodev sysfs
+nodev rootfs
+nodev ramfs
+nodev bdev
+nodev proc
+nodev cgroup
+nodev cpuset
+nodev tmpfs
+nodev devtmpfs
+nodev debugfs
+nodev securityfs
+nodev sockfs
+nodev pipefs
+nodev anon_inodefs
+nodev devpts
+      ext3
+      ext2
+      ext4
+nodev hugetlbfs
+      vfat
+nodev ecryptfs
+      fuseblk
+nodev fuse
+nodev fusectl
+nodev pstore
+nodev mqueue
+nodev rpc_pipefs
+nodev nfs
+nodev nfs4
+nodev nfsd
+nodev binfmt_misc
+nodev autofs
+      xfs
+      jfs
+      msdos
+      ntfs
+      minix
+      hfs
+      hfsplus
+      qnx4
+      ufs
+      btrfs
+'''
+
+MOCK_PROC_STAT = u'''
+cpu  36790888 1263256 11740848 7297360559 3637601 444 116964 0 0 0
+intr 3386552889 27 3 0 0 0 0 0 0 1 0 0 0 4 0 0 0 33 26 0 0 0 0 0 3508578 0 0 0
+ctxt 9855640605
+btime 1431536959
+processes 3593799
+procs_running 2
+procs_blocked 0
+softirq 652938841 3508230 208834832 49297758 52442647 16919039 0 47196001 147658748 3188900 123892686
+'''
+
+
+class SystemInfoStorageTest(cros_test_lib.MockTestCase):
+  """Unittests for SystemInfoStorage."""
+
+  def _CreateSystemInfoStorage(self, update_sec):
+    """Setup a SystemInfoStorage object."""
+    return systeminfo.SystemInfoStorage(update_sec=update_sec)
+
+  def testUpdate(self):
+    """Test SystemInfoStorage Update."""
+    si = self._CreateSystemInfoStorage(1)
+    si.Update('test', 'testvalue')
+    self.assertEquals(si.resources.get('test'), 'testvalue')
+
+  def testNeedToUpdateNoData(self):
+    """Test NeedToUpdate with no data."""
+    si = self._CreateSystemInfoStorage(1)
+    self.assertEquals(si.NeedToUpdate('test'), True)
+
+  def testNeedToUpdateNoUpdateTime(self):
+    """Test NeedToUpdate with data that has no update time recorded."""
+    si = self._CreateSystemInfoStorage(1)
+    dataname, data = ('testname', 'testvalue')
+    si.resources[dataname] = data
+    self.assertEquals(si.NeedToUpdate(dataname), True)
+
+  def testNeedToUpdateStaleData(self):
+    """Test NeedToUpdate with data that is out of date."""
+    si = self._CreateSystemInfoStorage(1)
+
+    dataname, data = ('testname', 'testvalue')
+    curtime = time.time()
+
+    si.resources[dataname] = data
+    si.update_times[dataname] = curtime
+
+    self.StartPatcher(mock.patch('time.time'))
+    time.time.return_value = curtime + 2
+
+    self.assertEquals(si.NeedToUpdate(dataname), True)
+
+  def testNeedToUpdateFreshData(self):
+    """Test NeedToUpdate with data that is not out of date."""
+    si = self._CreateSystemInfoStorage(1)
+
+    dataname, data = ('testname', 'testvalue')
+    curtime = time.time()
+
+    si.resources[dataname] = data
+    si.update_times[dataname] = curtime
+
+    self.StartPatcher(mock.patch('time.time'))
+    time.time.return_value = curtime + 0.5
+
+    self.assertEquals(si.NeedToUpdate(dataname), False)
+
+
+class MemoryTest(cros_test_lib.MockTestCase):
+  """Unittests for Memory."""
+
+  def _CreateMemory(self, update_sec):
+    """Setup a Memory object."""
+    return systeminfo.Memory(update_sec=update_sec)
+
+  def testMemoryExisting(self):
+    """Test memory information collection when a record exists."""
+    mem = self._CreateMemory(1)
+
+    dataname, data = (systeminfo.RESOURCENAME_MEMORY, 'testvalue')
+    mem.Update(dataname, data)
+
+    self.assertEquals(mem.MemoryUsage(), 'testvalue')
+
+  def testMemory(self):
+    """Test memory info when there is no record, or record is stale."""
+    mem = self._CreateMemory(1)
+
+    mock_file = io.StringIO(MOCK_PROC_MEMINFO)
+    with mock.patch('__builtin__.open', return_value=mock_file, create=True):
+      mem.MemoryUsage()
+
+    # The correct values that mem.MemoryUsage() should produce based on the
+    # mocked /proc/meminfo file.
+    mock_memtotal = 67479130112
+    mock_available = 57475702784
+    mock_percent_used = 14.824475821482267
+
+    mock_memory = systeminfo.RESOURCE_MEMORY(mock_memtotal, mock_available,
+                                             mock_percent_used)
+
+    self.assertEquals(mem.resources.get(systeminfo.RESOURCENAME_MEMORY),
+                      mock_memory)
+
+
+class DiskTest(cros_test_lib.MockTestCase):
+  """Unittests for Disk."""
+
+  def _CreateDisk(self, update_sec):
+    """Setup a Disk object."""
+    return systeminfo.Disk(update_sec=update_sec)
+
+  def testDiskPartitionsExisting(self):
+    """Test disk partition information collection when a record exists."""
+    disk = self._CreateDisk(1)
+
+    dataname, data = (systeminfo.RESOURCENAME_DISKPARTITIONS, 'testvalue')
+    disk.Update(dataname, data)
+
+    self.assertEquals(disk.DiskPartitions(), 'testvalue')
+
+  def testDiskPartitions(self):
+    """Test disk partition info when there is no record, or record is stale."""
+    disk = self._CreateDisk(1)
+
+    mock_mounts_file = io.StringIO(MOCK_PROC_MOUNTS)
+    mock_filesystems_file = io.StringIO(MOCK_PROC_FILESYSTEMS)
+
+    def _file_returner(fname, _mode):
+      if systeminfo.SYSTEMFILE_PROC_MOUNTS == fname:
+        return mock_mounts_file
+      elif systeminfo.SYSTEMFILE_PROC_FILESYSTEMS == fname:
+        return mock_filesystems_file
+
+    with mock.patch('__builtin__.open', side_effect=_file_returner,
+                    create=True):
+      disk.DiskPartitions()
+
+    # The expected return value.
+    mock_disk_partitions = [
+        systeminfo.RESOURCE_DISKPARTITION(
+            '/dev/mapper/dhcp--100--106--128--134--vg-root', '/', 'ext4'),
+        systeminfo.RESOURCE_DISKPARTITION('/dev/sdb1', '/work', 'ext4'),
+        systeminfo.RESOURCE_DISKPARTITION('/dev/sda1', '/boot', 'ext2'),
+        systeminfo.RESOURCE_DISKPARTITION(
+            '/dev/sdb1', '/usr/local/autotest', 'ext4'),
+        systeminfo.RESOURCE_DISKPARTITION(
+            '/dev/mapper/dhcp--100--106--128--134--vg-usr+local+google',
+            '/usr/local/google', 'ext4'),
+        systeminfo.RESOURCE_DISKPARTITION(
+            '/dev/mapper/dhcp--100--106--128--134--vg-root',
+            '/home/build', 'ext4'),
+        systeminfo.RESOURCE_DISKPARTITION(
+            '/dev/mapper/dhcp--100--106--128--134--vg-root',
+            '/auto/buildstatic', 'ext4')]
+
+    self.assertEquals(
+        disk.resources.get(systeminfo.RESOURCENAME_DISKPARTITIONS),
+        mock_disk_partitions)
+
+  def testDiskUsageExisting(self):
+    """Test disk usage information collection when a record exists."""
+    disk = self._CreateDisk(1)
+
+    partition = 'fakepartition'
+    dataname = '%s:%s' % (systeminfo.RESOURCENAME_DISKUSAGE, partition)
+    data = 'testvalue'
+
+    disk.Update(dataname, data)
+
+    self.assertEquals(disk.DiskUsage(partition), 'testvalue')
+
+  def testDiskUsage(self):
+    """Test disk usage info when there is no record or record is stale."""
+    disk = self._CreateDisk(1)
+
+    partition = 'fakepartition'
+
+    # Mock value for test. These values came from statvfs'ing root.
+    mock_statvfs_return = collections.namedtuple(
+        'mock_disk', ['f_bsize', 'f_frsize', 'f_blocks', 'f_bfree', 'f_bavail',
+                      'f_files', 'f_ffree', 'f_favail', 'f_flag', 'f_namemax'])
+    mock_value = mock_statvfs_return(4096, 4096, 9578876, 5332865, 4840526,
+                                     2441216, 2079830, 2079830, 4096, 255)
+
+    self.StartPatcher(mock.patch('os.statvfs'))
+    os.statvfs.return_value = mock_value
+
+    # Expected results of the test.
+    mock_total = 39235076096
+    mock_free = 21843415040
+    mock_used = 17391661056
+    mock_percent_used = 44.326818720693325
+    mock_diskusage = systeminfo.RESOURCE_DISKUSAGE(mock_total, mock_used,
+                                                   mock_free,
+                                                   mock_percent_used)
+
+    self.assertEquals(disk.DiskUsage(partition), mock_diskusage)
+
+  def testBlockDevicesExisting(self):
+    """Test block device information collection when a record exists."""
+    disk = self._CreateDisk(1)
+
+    device = '/dev/sda1'
+    dataname = '%s:%s' % (systeminfo.RESOURCENAME_BLOCKDEVICE, device)
+    data = 'testvalue'
+
+    disk.Update(dataname, data)
+
+    self.assertEquals(disk.BlockDevices(device), data)
+
+  def testBlockDevice(self):
+    """Test block device info when there is no record or a record is stale."""
+    disk = self._CreateDisk(1)
+
+    mock_device = '/dev/sda1'
+    mock_size = 12345678987654321
+    mock_ids = ['ata-ST1000DM003-1ER162_Z4Y3WQDB-part1']
+    mock_labels = ['BOOT-PARTITION']
+    mock_lsblk = 'NAME="sda1" RM="0" TYPE="part" SIZE="%s"' % mock_size
+
+    self.StartPatcher(mock.patch('chromite.lib.osutils.ResolveSymlink'))
+    osutils.ResolveSymlink.return_value = '/dev/sda1'
+
+    with cros_build_lib_unittest.RunCommandMock() as rc_mock:
+      rc_mock.AddCmdResult(
+          partial_mock.In(systeminfo.SYSTEMFILE_DEV_DISKBY['ids']),
+          output='\n'.join(mock_ids))
+      rc_mock.AddCmdResult(
+          partial_mock.In(systeminfo.SYSTEMFILE_DEV_DISKBY['labels']),
+          output='\n'.join(mock_labels))
+      rc_mock.AddCmdResult(partial_mock.In('lsblk'), output=mock_lsblk)
+
+      mock_blockdevice = [systeminfo.RESOURCE_BLOCKDEVICE(mock_device,
+                                                          mock_size,
+                                                          mock_ids,
+                                                          mock_labels)]
+
+      self.assertEquals(disk.BlockDevices(mock_device), mock_blockdevice)
+
+
+class Cpu(cros_test_lib.MockTestCase):
+  """Unittests for Cpu."""
+
+  def _CreateCpu(self, update_sec):
+    """Setup a Cpu object."""
+    return systeminfo.Cpu(update_sec=update_sec)
+
+  def testCpuTimeExisting(self):
+    """Test cpu timing information collection when a record exists."""
+    cpu = self._CreateCpu(1)
+
+    dataname, data = (systeminfo.RESOURCENAME_CPUTIMES, 'testvalue')
+    cpu.Update(dataname, data)
+
+    self.assertEquals(cpu.CpuTime(), 'testvalue')
+
+  def testCpuTime(self):
+    """Test cpu timing info when there is no record or record is stale."""
+    cpu = self._CreateCpu(1)
+
+    mock_file = io.StringIO(MOCK_PROC_STAT)
+    with mock.patch('__builtin__.open', return_value=mock_file, create=True):
+      cpu.CpuTime()
+
+    # The expected return value.
+    mock_cpu_name = 'cpu'
+    mock_cpu_total = 7350910560
+    mock_cpu_idle = 7300998160
+    mock_cpu_nonidle = 49912400
+
+    mock_cputimes = [systeminfo.RESOURCE_CPUTIME(mock_cpu_name, mock_cpu_total,
+                                                 mock_cpu_idle,
+                                                 mock_cpu_nonidle)]
+
+    self.assertEquals(cpu.resources.get(systeminfo.RESOURCENAME_CPUTIMES),
+                      mock_cputimes)
+
+  def testCpuLoadExisting(self):
+    """Test cpu load information collection when a record exists."""
+    self.assertTrue(True)
+    cpu = self._CreateCpu(1)
+
+    dataname, data = (systeminfo.RESOURCENAME_CPULOADS, 'testvalue')
+    cpu.Update(dataname, data)
+
+    self.assertEquals(cpu.CpuLoad(), 'testvalue')
+
+  def testCpuLoad(self):
+    """Test cpu load collection info when we have previous timing data."""
+    cpu = self._CreateCpu(1)
+
+    # Create a cpu timing record.
+    mock_cpu_prev_name = 'cpu'
+    mock_cpu_prev_total = 7340409560
+    mock_cpu_prev_idle = 7300497160
+    mock_cpu_prev_nonidle = 39912400
+
+    mock_cputimes_prev = [systeminfo.RESOURCE_CPUTIME(mock_cpu_prev_name,
+                                                      mock_cpu_prev_total,
+                                                      mock_cpu_prev_idle,
+                                                      mock_cpu_prev_nonidle)]
+
+    cpu.Update(systeminfo.RESOURCENAME_CPUPREVTIMES, mock_cputimes_prev)
+
+    # Mock and execute.
+    mock_file = io.StringIO(MOCK_PROC_STAT)
+    with mock.patch('__builtin__.open', return_value=mock_file, create=True):
+      cpu.CpuLoad()
+
+    # The expected return value.
+    mock_cpu_name = 'cpu'
+    mock_cpu_load = 0.9522902580706599
+
+    mock_cpuloads = [systeminfo.RESOURCE_CPULOAD(mock_cpu_name, mock_cpu_load)]
+
+    self.assertEquals(cpu.resources.get(systeminfo.RESOURCENAME_CPULOADS),
+                      mock_cpuloads)
+
+
+class InfoClassCacheTest(cros_test_lib.MockTestCase):
+  """Unittests for checking that information class caching works."""
+
+  def testGetCpu(self):
+    """Test caching explicitly for Cpu information objects."""
+    cpus1 = [systeminfo.GetCpu(), systeminfo.GetCpu(),
+             systeminfo.GetCpu(systeminfo.UPDATE_CPU_SEC),
+             systeminfo.GetCpu(update_sec=systeminfo.UPDATE_CPU_SEC)]
+
+    cpus2 = [systeminfo.GetCpu(10), systeminfo.GetCpu(10),
+             systeminfo.GetCpu(update_sec=10)]
+
+    self.assertTrue(all(id(x) == id(cpus1[0]) for x in cpus1))
+    self.assertTrue(all(id(x) == id(cpus2[0]) for x in cpus2))
+    self.assertTrue(id(cpus1[0]) != id(cpus2[0]))
+
+  def testGetMemory(self):
+    """Test caching explicitly for Memory information objects."""
+    mems1 = [systeminfo.GetMemory(), systeminfo.GetMemory(),
+             systeminfo.GetMemory(systeminfo.UPDATE_MEMORY_SEC),
+             systeminfo.GetMemory(update_sec=systeminfo.UPDATE_MEMORY_SEC)]
+
+    mems2 = [systeminfo.GetMemory(10), systeminfo.GetMemory(10),
+             systeminfo.GetMemory(update_sec=10)]
+
+    self.assertTrue(all(id(x) == id(mems1[0]) for x in mems1))
+    self.assertTrue(all(id(x) == id(mems2[0]) for x in mems2))
+    self.assertTrue(id(mems1[0]) != id(mems2[0]))
+
+  def testGetDisk(self):
+    """Test caching explicitly for Disk information objects."""
+    disks1 = [systeminfo.GetDisk(), systeminfo.GetDisk(),
+              systeminfo.GetDisk(systeminfo.UPDATE_MEMORY_SEC),
+              systeminfo.GetDisk(update_sec=systeminfo.UPDATE_MEMORY_SEC)]
+
+    disks2 = [systeminfo.GetDisk(10), systeminfo.GetDisk(10),
+              systeminfo.GetDisk(update_sec=10)]
+
+    self.assertTrue(all(id(x) == id(disks1[0]) for x in disks1))
+    self.assertTrue(all(id(x) == id(disks2[0]) for x in disks2))
+    self.assertTrue(id(disks1[0]) != id(disks2[0]))
diff --git a/pylintrc b/pylintrc
new file mode 100644
index 0000000..0fc12be
--- /dev/null
+++ b/pylintrc
@@ -0,0 +1,283 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+[MASTER]
+
+# Specify a configuration file.
+#rcfile=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Profiled execution.
+#profile=no
+
+# Add <file or directory> to the black list. It should be a base name, not a
+# path. You may set this option multiple times.
+#ignore=CVS
+
+# Pickle collected data for later comparisons.
+#persistent=yes
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+load-plugins=chromite.cli.cros.lint
+
+
+[MESSAGES CONTROL]
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple times.
+# R9301: logging is deprecated. Use "from chromite.lib import cros_logging as
+# logging" to import chromite/lib/cros_logging
+enable=cros-logging-import
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifier separated by comma (,) or put this option
+# multiple times (only on the command line, not in the configuration file where
+# it should appear only once).
+# C0103: Invalid name "LOADERS" (should match [a-z_][a-z0-9_]{2,30}$)
+# C0111: Missing docstring
+# C0302: Too many lines in module (N)
+# E1103: Instance has no '*' member (but some types could not be inferred)
+# I0011: Locally disabling warning.
+# I0012: Locally enabling warning.
+# R0201: Method could be a function
+# R0902: Too many instance attributes (N/7)
+# R0903: Too few public methods (N/2)
+# R0911: Too many return statements (N/6)
+# R0912: Too many branches (N/12)
+# R0913: Too many arguments (N/5)
+# R0914: Too many local variables (N/15)
+# R0915: Too many statements (N/50)
+# W0122: Use of the exec statement
+# W0141: Used builtin function ''
+# W0142: Used * or ** magic
+# W0403: Relative import 'constants', should be 'chromite.cbuildbot.constants'
+# W0511: Used when a warning note as FIXME or XXX is detected.
+# W0703: Catching too general exception Exception.
+# R0904: Too many public methods
+# R0921: Abstract class not referenced.
+# R0922: Abstract class is only referenced N times.
+disable=C0103,C0111,C0302,E1103,I0011,I0012,R0201,R0902,R0903,R0911,R0912,R0913,R0914,R0915,W0122,W0141,W0142,W0403,W0511,W0703,R0904,R0921,R0922
+
+
+[REPORTS]
+
+# Set the output format. Available formats are text, parseable, colorized, msvs
+# (visual studio) and html
+#output-format=text
+
+# Put messages in a separate file for each module / package specified on the
+# command line instead of printing them on stdout. Reports (if any) will be
+# written in a file name "pylint_global.[txt|html]".
+#files-output=no
+
+# Tells whether to display a full report or only the messages
+# CHANGE: No report.
+reports=no
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables errors warning, statement which
+# respectively contain the number of errors / warnings messages and the total
+# number of statements analyzed. This is used by the global evaluation report
+# (RP0004).
+#evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Add a comment according to your evaluation note. This is used by the global
+# evaluation report (RP0004).
+#comment=no
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+#notes=FIXME,XXX,TODO
+
+
+[FORMAT]
+
+# Maximum number of characters on a single line.
+max-line-length=80
+
+# Maximum number of lines in a module
+#max-module-lines=1000
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+# CHANGE: Use "  " instead.
+indent-string='  '
+
+
+[TYPECHECK]
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+#ignore-mixin-members=yes
+
+# List of classes names for which member attributes should not be checked
+# (useful for classes with attributes dynamically set).
+ignored-classes=hashlib,numpy
+
+# When zope mode is activated, add a predefined set of Zope acquired attributes
+# to generated-members.
+#zope=no
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E0201 when accessed.
+# CHANGE: Added 'AndRaise', 'AndReturn', 'InAnyOrder' and 'MultipleTimes' for pymox.
+# CHANGE: Added tempdir for @osutils.TempDirDecorator.
+generated-members=REQUEST,acl_users,aq_parent,AndRaise,AndReturn,InAnyOrder,MultipleTimes,tempdir
+
+
+[BASIC]
+
+# Required attributes for module, separated by a comma
+#required-attributes=
+
+# List of builtins function names that should not be used, separated by a comma
+#bad-functions=map,filter,apply,input
+
+# Regular expression which should only match correct module names
+#module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Regular expression which should only match correct module level names
+#const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Regular expression which should only match correct class names
+#class-rgx=[A-Z_][a-zA-Z0-9]+$
+
+# Regular expression which should only match correct function names
+#
+# CHANGE: The ChromiumOS standard is different than PEP-8, so we need to
+# redefine this.
+#
+# Common exceptions to ChromiumOS standard:
+# - main: Standard for main function
+function-rgx=([A-Z_][a-zA-Z0-9]{2,30}|main)$
+
+# Regular expression which should only match correct method names
+#
+# CHANGE: The ChromiumOS standard is different than PEP-8, so we need to
+# redefine this. Here's what we allow:
+# - CamelCaps, starting with a capital letter.  No underscores in function
+#   names.  Can also have a "_" prefix (private method) or a "test" prefix
+#   (unit test).
+# - Methods that look like __xyz__, which are used to do things like
+#   __init__, __del__, etc.
+# - setUp, tearDown: For unit tests.
+method-rgx=((_|test)?[A-Z][a-zA-Z0-9]{2,30}|__[a-z]+__|setUp|tearDown)$
+
+# Regular expression which should only match correct instance attribute names
+#attr-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match correct argument names
+#argument-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match correct variable names
+#variable-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match correct list comprehension /
+# generator expression variable names
+#inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
+
+# Good variable names which should always be accepted, separated by a comma
+#good-names=i,j,k,ex,Run,_
+
+# Bad variable names which should always be refused, separated by a comma
+#bad-names=foo,bar,baz,toto,tutu,tata
+
+# Regular expression which should only match functions or classes name which do
+# not require a docstring
+#no-docstring-rgx=__.*__
+
+
+[SIMILARITIES]
+
+# Minimum lines number of a similarity.
+min-similarity-lines=20
+
+# Ignore comments when computing similarities.
+#ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+#ignore-docstrings=yes
+
+
+[VARIABLES]
+
+# Tells whether we should check for unused import in __init__ files.
+#init-import=no
+
+# A regular expression matching the beginning of the name of dummy variables
+# (i.e. not used).
+dummy-variables-rgx=_|unused_
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid to define new builtins when possible.
+#additional-builtins=
+
+
+[CLASSES]
+
+# List of interface methods to ignore, separated by a comma. This is used for
+# instance to not check methods defines in Zope's Interface base class.
+#ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
+
+# List of method names used to declare (i.e. assign) instance attributes.
+#defining-attr-methods=__init__,__new__,setUp
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method
+#max-args=5
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore
+#ignored-argument-names=_.*
+
+# Maximum number of locals for function / method body
+#max-locals=15
+
+# Maximum number of return / yield for function / method body
+#max-returns=6
+
+# Maximum number of branch for function / method body
+#max-branchs=12
+
+# Maximum number of statements in function / method body
+#max-statements=50
+
+# Maximum number of parents for a class (see R0901).
+max-parents=10
+
+# Maximum number of attributes for a class (see R0902).
+#max-attributes=7
+
+# Minimum number of public methods for a class (see R0903).
+#min-public-methods=2
+
+# Maximum number of public methods for a class (see R0904).
+#max-public-methods=20
+
+
+[IMPORTS]
+
+# Deprecated modules which should not be used, separated by a comma
+#deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+#import-graph=
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+#ext-import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+#int-import-graph=
diff --git a/scripts/__init__.py b/scripts/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/scripts/__init__.py
diff --git a/scripts/account_tool.py b/scripts/account_tool.py
new file mode 100644
index 0000000..3769b0e
--- /dev/null
+++ b/scripts/account_tool.py
@@ -0,0 +1,117 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script installs users and groups into sysroots."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import accounts_lib
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import osutils
+from chromite.lib import sysroot_lib
+from chromite.lib import user_db
+
+
+ACCOUNT_DB_FILENAME = 'accounts.json'
+
+ACTION_GET_ENTRY = 'get_entry'
+ACTION_INSTALL_USER = 'install_user'
+ACTION_INSTALL_GROUP = 'install_group'
+
+USER_DB = 'passwd'
+GROUP_DB = 'group'
+
+
+def GetOptions(argv):
+  """Returns the parsed command line arguments in |argv|."""
+  parser = commandline.ArgumentParser(description=__doc__)
+  command_parsers = parser.add_subparsers(dest='action')
+
+  get_ent_parser = command_parsers.add_parser(
+      ACTION_GET_ENTRY, help='Get an entry from an account database.')
+  get_ent_parser.add_argument(
+      '--nolock', action='store_true', default=False,
+      help='Skip locking the database before reading it.')
+  get_ent_parser.add_argument('sysroot', type='path',
+                              help='Path to sysroot containing the database')
+  get_ent_parser.add_argument('database', choices=(USER_DB, GROUP_DB),
+                              help='Name of database to get')
+  get_ent_parser.add_argument('name', type=str, help='Name of account to get')
+
+  user_parser = command_parsers.add_parser(
+      ACTION_INSTALL_USER, help='Install a user to a sysroot')
+  user_parser.add_argument('name', type=str,
+                           help='Name of user to install')
+  user_parser.add_argument('--uid', type=int,
+                           help='UID of the user')
+  user_parser.add_argument('--shell', type='path',
+                           help='Shell of user')
+  user_parser.add_argument('--home', type='path',
+                           help='Home directory of user')
+  user_parser.add_argument('--primary_group', type=str,
+                           help='Name of primary group for user')
+
+  group_parser = command_parsers.add_parser(
+      ACTION_INSTALL_GROUP, help='Install a group to a sysroot')
+  group_parser.add_argument('name', type=str,
+                            help='Name of group to install.')
+  group_parser.add_argument('--gid', type=int, help='GID of the group')
+
+  # Both group and user parsers need to understand the target sysroot.
+  for sub_parser in (user_parser, group_parser):
+    sub_parser.add_argument(
+        'sysroot', type='path', help='The sysroot to install the user into')
+
+  options = parser.parse_args(argv)
+  options.Freeze()
+  return options
+
+
+def main(argv):
+  cros_build_lib.AssertInsideChroot()
+  options = GetOptions(argv)
+
+  if options.action == ACTION_GET_ENTRY:
+    db = user_db.UserDB(options.sysroot)
+    if options.database == USER_DB:
+      print(db.GetUserEntry(options.name, skip_lock=options.nolock))
+    else:
+      print(db.GetGroupEntry(options.name, skip_lock=options.nolock))
+    return 0
+
+  overlays = sysroot_lib.Sysroot(options.sysroot).GetStandardField(
+      sysroot_lib.STANDARD_FIELD_PORTDIR_OVERLAY).split()
+
+  # TODO(wiley) This process could be optimized to avoid reparsing these
+  #             overlay databases each time.
+  account_db = accounts_lib.AccountDatabase()
+  for overlay_path in overlays:
+    database_path = os.path.join(overlay_path, ACCOUNT_DB_FILENAME)
+    if os.path.exists(database_path):
+      account_db.AddAccountsFromDatabase(database_path)
+
+  installed_users = user_db.UserDB(options.sysroot)
+
+  if options.action == ACTION_INSTALL_USER:
+    account_db.InstallUser(options.name, installed_users,
+                           uid=options.uid, shell=options.shell,
+                           homedir=options.home,
+                           primary_group=options.primary_group)
+
+    homedir = account_db.users[options.name].home
+    homedir_path = os.path.join(options.sysroot, homedir)
+
+    if homedir != '/dev/null' and not os.path.exists(homedir_path):
+      osutils.SafeMakedirs(homedir_path, sudo=True)
+      uid = account_db.users[options.name].uid
+      cros_build_lib.SudoRunCommand(
+          ['chown', '%d:%d' % (uid, uid), homedir_path], print_cmd=False)
+
+  elif options.action == ACTION_INSTALL_GROUP:
+    account_db.InstallGroup(options.name, installed_users, gid=options.gid)
+  else:
+    cros_build_lib.Die('Unsupported account type: %s' % options.action)
diff --git a/scripts/autotest-quickmerge-includepatterns b/scripts/autotest-quickmerge-includepatterns
new file mode 100644
index 0000000..b429c26
--- /dev/null
+++ b/scripts/autotest-quickmerge-includepatterns
@@ -0,0 +1,32 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+client/
+client/**
+
+server/
+server/**
+
+site_utils/
+site_utils/**
+
+test_suites/
+test_suites/**
+
+tko/
+tko/**
+
+utils/
+utils/**
+
+conmux/
+conmux/**
+
+frontend/
+frontend/**
+
+global_config.ini
+shadow_config.ini
+
+.quickmerge_sentinel
\ No newline at end of file
diff --git a/scripts/autotest_quickmerge.py b/scripts/autotest_quickmerge.py
new file mode 100644
index 0000000..c536447
--- /dev/null
+++ b/scripts/autotest_quickmerge.py
@@ -0,0 +1,413 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Fast alternative to `emerge-$BOARD autotest-all`
+
+Simple script to be run inside the chroot. Used as a fast approximation of
+emerge-$board autotest-all, by simply rsync'ing changes from trunk to sysroot.
+"""
+
+from __future__ import print_function
+
+import argparse
+import glob
+import os
+import re
+import sys
+from collections import namedtuple
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import portage_util
+
+if cros_build_lib.IsInsideChroot():
+  # Only import portage after we've checked that we're inside the chroot.
+  import portage
+
+
+INCLUDE_PATTERNS_FILENAME = 'autotest-quickmerge-includepatterns'
+AUTOTEST_PROJECT_NAME = 'chromiumos/third_party/autotest'
+AUTOTEST_EBUILD = 'chromeos-base/autotest'
+DOWNGRADE_EBUILDS = ['chromeos-base/autotest']
+
+IGNORE_SUBDIRS = ['ExternalSource',
+                  'logs',
+                  'results',
+                  'site-packages']
+
+# Data structure describing a single rsync filesystem change.
+#
+# change_description: An 11 character string, the rsync change description
+#                     for the particular file.
+# absolute_path: The absolute path of the created or modified file.
+ItemizedChange = namedtuple('ItemizedChange', ['change_description',
+                                               'absolute_path'])
+
+# Data structure describing the rsync new/modified files or directories.
+#
+# new_files: A list of ItemizedChange objects for new files.
+# modified_files: A list of ItemizedChange objects for modified files.
+# new_directories: A list of ItemizedChange objects for new directories.
+ItemizedChangeReport = namedtuple('ItemizedChangeReport',
+                                  ['new_files', 'modified_files',
+                                   'new_directories'])
+
+
+class PortagePackageAPIError(Exception):
+  """Exception thrown when unable to retrieve a portage package API."""
+
+
+def GetStalePackageNames(change_list, autotest_sysroot):
+  """Given a rsync change report, returns the names of stale test packages.
+
+  This function pulls out test package names for client-side tests, stored
+  within the client/site_tests directory tree, that had any files added or
+  modified and for whom any existing bzipped test packages may now be stale.
+
+  Args:
+    change_list: A list of ItemizedChange objects corresponding to changed
+                 or modified files.
+    autotest_sysroot: Absolute path of autotest in the sysroot,
+                      e.g. '/build/lumpy/usr/local/build/autotest'
+
+  Returns:
+    A list of test package names, eg ['factory_Leds', 'login_UserPolicyKeys'].
+    May contain duplicate entries if multiple files within a test directory
+    were modified.
+  """
+  exp = os.path.abspath(autotest_sysroot) + r'/client/site_tests/(.*?)/.*'
+  matches = [re.match(exp, change.absolute_path) for change in change_list]
+  return [match.group(1) for match in matches if match]
+
+
+def ItemizeChangesFromRsyncOutput(rsync_output, destination_path):
+  """Convert the output of an rsync with `-i` to a ItemizedChangeReport object.
+
+  Args:
+    rsync_output: String stdout of rsync command that was run with `-i` option.
+    destination_path: String absolute path of the destination directory for the
+                      rsync operations. This argument is necessary because
+                      rsync's output only gives the relative path of
+                      touched/added files.
+
+  Returns:
+    ItemizedChangeReport object giving the absolute paths of files that were
+    created or modified by rsync.
+  """
+  modified_matches = re.findall(r'([.>]f[^+]{9}) (.*)', rsync_output)
+  new_matches = re.findall(r'(>f\+{9}) (.*)', rsync_output)
+  new_symlink_matches = re.findall(r'(cL\+{9}) (.*) -> .*', rsync_output)
+  new_dir_matches = re.findall(r'(cd\+{9}) (.*)', rsync_output)
+
+  absolute_modified = [ItemizedChange(c, os.path.join(destination_path, f))
+                       for (c, f) in modified_matches]
+
+  # Note: new symlinks are treated as new files.
+  absolute_new = [ItemizedChange(c, os.path.join(destination_path, f))
+                  for (c, f) in new_matches + new_symlink_matches]
+
+  absolute_new_dir = [ItemizedChange(c, os.path.join(destination_path, f))
+                      for (c, f) in new_dir_matches]
+
+  return ItemizedChangeReport(new_files=absolute_new,
+                              modified_files=absolute_modified,
+                              new_directories=absolute_new_dir)
+
+
+def GetPackageAPI(portage_root, package_cp):
+  """Gets portage API handles for the given package.
+
+  Args:
+    portage_root: Root directory of portage tree. Eg '/' or '/build/lumpy'
+    package_cp: A string similar to 'chromeos-base/autotest-tests'.
+
+  Returns:
+    Returns (package, vartree) tuple, where
+      package is of type portage.dbapi.vartree.dblink
+      vartree is of type portage.dbapi.vartree.vartree
+  """
+  if portage_root is None:
+    # pylint: disable=no-member
+    portage_root = portage.root
+  # Ensure that portage_root ends with trailing slash.
+  portage_root = os.path.join(portage_root, '')
+
+  # Create a vartree object corresponding to portage_root.
+  trees = portage.create_trees(portage_root, portage_root)
+  vartree = trees[portage_root]['vartree']
+
+  # List the matching installed packages in cpv format.
+  matching_packages = vartree.dbapi.cp_list(package_cp)
+
+  if not matching_packages:
+    raise PortagePackageAPIError('No matching package for %s in portage_root '
+                                 '%s' % (package_cp, portage_root))
+
+  if len(matching_packages) > 1:
+    raise PortagePackageAPIError('Too many matching packages for %s in '
+                                 'portage_root %s' % (package_cp,
+                                                      portage_root))
+
+  # Convert string match to package dblink.
+  package_cpv = matching_packages[0]
+  package_split = portage_util.SplitCPV(package_cpv)
+  # pylint: disable=no-member
+  package = portage.dblink(package_split.category,
+                           package_split.pv, settings=vartree.settings,
+                           vartree=vartree)
+
+  return package, vartree
+
+
+def DowngradePackageVersion(portage_root, package_cp,
+                            downgrade_to_version='0'):
+  """Downgrade the specified portage package version.
+
+  Args:
+    portage_root: Root directory of portage tree. Eg '/' or '/build/lumpy'
+    package_cp: A string similar to 'chromeos-base/autotest-tests'.
+    downgrade_to_version: String version to downgrade to. Default: '0'
+
+  Returns:
+    True on success. False on failure (nonzero return code from `mv` command).
+  """
+  try:
+    package, _ = GetPackageAPI(portage_root, package_cp)
+  except PortagePackageAPIError:
+    # Unable to fetch a corresponding portage package API for this
+    # package_cp (either there is no such package, or the name is ambiguous).
+    # So, just fail out.
+    return False
+
+  source_directory = package.dbdir
+  destination_path = os.path.join(
+      package.dbroot, package_cp + '-' + downgrade_to_version)
+  if os.path.abspath(source_directory) == os.path.abspath(destination_path):
+    return True
+  command = ['mv', source_directory, destination_path]
+  code = cros_build_lib.SudoRunCommand(command, error_code_ok=True).returncode
+  return code == 0
+
+
+def UpdatePackageContents(change_report, package_cp, portage_root=None):
+  """Add newly created files/directories to package contents.
+
+  Given an ItemizedChangeReport, add the newly created files and directories
+  to the CONTENTS of an installed portage package, such that these files are
+  considered owned by that package.
+
+  Args:
+    change_report: ItemizedChangeReport object for the changes to be
+                   made to the package.
+    package_cp: A string similar to 'chromeos-base/autotest-tests' giving
+                the package category and name of the package to be altered.
+    portage_root: Portage root path, corresponding to the board that
+                  we are working on. Defaults to '/'
+  """
+  package, vartree = GetPackageAPI(portage_root, package_cp)
+
+  # Append new contents to package contents dictionary.
+  contents = package.getcontents().copy()
+  for _, filename in change_report.new_files:
+    contents.setdefault(filename, (u'obj', '0', '0'))
+  for _, dirname in change_report.new_directories:
+    # Strip trailing slashes if present.
+    contents.setdefault(dirname.rstrip('/'), (u'dir',))
+
+  # Write new contents dictionary to file.
+  vartree.dbapi.writeContentsToContentsFile(package, contents)
+
+
+def RemoveBzipPackages(autotest_sysroot):
+  """Remove all bzipped test/dep/profiler packages from sysroot autotest.
+
+  Args:
+    autotest_sysroot: Absolute path of autotest in the sysroot,
+                      e.g. '/build/lumpy/usr/local/build/autotest'
+  """
+  osutils.RmDir(os.path.join(autotest_sysroot, 'packages'),
+                ignore_missing=True)
+  osutils.SafeUnlink(os.path.join(autotest_sysroot, 'packages.checksum'))
+
+
+def RsyncQuickmerge(source_path, sysroot_autotest_path,
+                    include_pattern_file=None, pretend=False,
+                    overwrite=False):
+  """Run rsync quickmerge command, with specified arguments.
+
+  Command will take form `rsync -a [options] --exclude=**.pyc
+                         --exclude=**.pyo
+                         [optional --include-from include_pattern_file]
+                         --exclude=* [source_path] [sysroot_autotest_path]`
+
+  Args:
+    source_path: Directory to rsync from.
+    sysroot_autotest_path: Directory to rsync to.
+    include_pattern_file: Optional pattern of files to include in rsync.
+    pretend: True to use the '-n' option to rsync, to perform dry run.
+    overwrite: True to omit '-u' option, overwrite all files in sysroot,
+               not just older files.
+
+  Returns:
+    The cros_build_lib.CommandResult object resulting from the rsync command.
+  """
+  command = ['rsync', '-a']
+
+  # For existing files, preserve destination permissions. This ensures that
+  # existing files end up with the file permissions set by the ebuilds.
+  # If this script copies over a file that does not exist in the destination
+  # tree, it will set the least restrictive permissions allowed in the
+  # destination tree. This could happen if the file copied is not installed by
+  # *any* ebuild, or if the ebuild that installs the file was never emerged.
+  command += ['--no-p', '--chmod=ugo=rwX']
+
+  if pretend:
+    command += ['-n']
+
+  if not overwrite:
+    command += ['-u']
+
+  command += ['-i']
+
+  command += ['--exclude=**.pyc']
+  command += ['--exclude=**.pyo']
+
+  # Exclude files with a specific substring in their name, because
+  # they create an ambiguous itemized report. (see unit test file for details)
+  command += ['--exclude=** -> *']
+
+  if include_pattern_file:
+    command += ['--include-from=%s' % include_pattern_file]
+
+  command += ['--exclude=*']
+
+  command += [source_path, sysroot_autotest_path]
+
+  return cros_build_lib.SudoRunCommand(command, redirect_stdout=True)
+
+
def ParseArguments(argv):
  """Parse command line arguments

  Returns:
    parsed arguments.
  """
  description = ('Perform a fast approximation to emerge-$board '
                 'autotest-all, by rsyncing source tree to sysroot.')
  parser = argparse.ArgumentParser(description=description)

  default_board = cros_build_lib.GetDefaultBoard()
  board_help = ('Board to perform quickmerge for. Default: ' +
                (default_board or 'Not configured.'))
  parser.add_argument('--board', metavar='BOARD', default=default_board,
                      help=board_help)
  parser.add_argument('--pretend', action='store_true',
                      help='Dry run only, do not modify sysroot autotest.')
  parser.add_argument('--overwrite', action='store_true',
                      help='Overwrite existing files even if newer.')
  # Hidden option (help suppressed); accepted but its use is not visible here.
  parser.add_argument('--force', action='store_true',
                      help=argparse.SUPPRESS)
  parser.add_argument('--verbose', action='store_true',
                      help='Print detailed change report.')

  # Used only if test_that is calling autotest_quickmerge and has detected that
  # the sysroot autotest path is still in usr/local/autotest (ie the build
  # pre-dates https://chromium-review.googlesource.com/#/c/62880/ )
  parser.add_argument('--legacy_path', action='store_true',
                      help=argparse.SUPPRESS)

  return parser.parse_args(argv)
+
+
def main(argv):
  """Entry point: rsync autotest source trees into the board sysroot."""
  cros_build_lib.AssertInsideChroot()

  args = ParseArguments(argv)

  # The rsync and portage-database updates below need root; re-exec this
  # same invocation under sudo and report its exit status.
  if os.geteuid() != 0:
    try:
      cros_build_lib.SudoRunCommand([sys.executable] + sys.argv)
    except cros_build_lib.RunCommandError:
      return 1
    return 0

  if not args.board:
    print('No board specified. Aborting.')
    return 1

  # Locate the autotest source checkout via the repo manifest.
  manifest = git.ManifestCheckout.Cached(constants.SOURCE_ROOT)
  checkout = manifest.FindCheckout(AUTOTEST_PROJECT_NAME)
  brillo_autotest_src_path = os.path.join(checkout.GetPath(absolute=True), '')

  script_path = os.path.dirname(__file__)
  include_pattern_file = os.path.join(script_path, INCLUDE_PATTERNS_FILENAME)

  # TODO: Determine the following string programmatically.
  sysroot_path = os.path.join('/build', args.board, '')
  sysroot_autotest_path = os.path.join(sysroot_path,
                                       constants.AUTOTEST_BUILD_PATH, '')
  # Builds that pre-date crrev.com/c/62880 keep autotest under
  # usr/local/autotest (see the --legacy_path option in ParseArguments).
  if args.legacy_path:
    sysroot_autotest_path = os.path.join(sysroot_path, 'usr/local/autotest',
                                         '')

  # Generate the list of source paths to copy.
  src_paths = {os.path.abspath(brillo_autotest_src_path)}
  # Each readable file under <sysroot autotest>/quickmerge/*/ names an
  # additional source tree to merge in.
  for quickmerge_file in glob.glob(os.path.join(sysroot_autotest_path,
                                                'quickmerge', '*', '*')):
    try:
      path = osutils.ReadFile(quickmerge_file).strip()
      if path and os.path.exists(path):
        src_paths.add(os.path.abspath(path))
    except IOError:
      logging.error('Could not quickmerge for project: %s',
                    os.path.basename(quickmerge_file))

  num_new_files = 0
  num_modified_files = 0
  for src_path in src_paths:
    rsync_output = RsyncQuickmerge(src_path +'/', sysroot_autotest_path,
                                   include_pattern_file, args.pretend,
                                   args.overwrite)

    if args.verbose:
      logging.info(rsync_output.output)
    change_report = ItemizeChangesFromRsyncOutput(rsync_output.output,
                                                  sysroot_autotest_path)
    num_new_files = num_new_files + len(change_report.new_files)
    num_modified_files = num_modified_files + len(change_report.modified_files)
    if not args.pretend:
      logging.info('Updating portage database.')
      UpdatePackageContents(change_report, AUTOTEST_EBUILD, sysroot_path)

  if not args.pretend:
    # Collect package names recorded in packages/*.log into the module-level
    # DOWNGRADE_EBUILDS list, then downgrade each entry's version number.
    for logfile in glob.glob(os.path.join(sysroot_autotest_path, 'packages',
                                          '*.log')):
      try:
        # Open file in a try-except block, for atomicity, instead of
        # doing existence check.
        with open(logfile, 'r') as f:
          package_cp = f.readline().strip()
          DOWNGRADE_EBUILDS.append(package_cp)
      except IOError:
        pass

    for ebuild in DOWNGRADE_EBUILDS:
      if not DowngradePackageVersion(sysroot_path, ebuild):
        logging.warning('Unable to downgrade package %s version number.',
                        ebuild)
    RemoveBzipPackages(sysroot_autotest_path)

    # Marker file recording that a quickmerge took place; presumably
    # consumed by test_that -- verify against its callers.
    sentinel_filename = os.path.join(sysroot_autotest_path,
                                     '.quickmerge_sentinel')
    cros_build_lib.RunCommand(['touch', sentinel_filename])

  if args.pretend:
    logging.info('The following message is pretend only. No filesystem '
                 'changes made.')
  logging.info('Quickmerge complete. Created or modified %s files.',
               num_new_files + num_modified_files)

  return 0
diff --git a/scripts/autotest_quickmerge_unittest b/scripts/autotest_quickmerge_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/autotest_quickmerge_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/autotest_quickmerge_unittest.py b/scripts/autotest_quickmerge_unittest.py
new file mode 100644
index 0000000..47b9b39
--- /dev/null
+++ b/scripts/autotest_quickmerge_unittest.py
@@ -0,0 +1,196 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for autotest_quickmerge."""
+
+from __future__ import print_function
+
+import mock
+import types
+
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.scripts import autotest_quickmerge
+
+
+RSYNC_TEST_OUTPUT = """.d..t...... ./
+>f..t...... touched file with spaces
+>f..t...... touched_file
+>f.st...... modified_contents_file
+.f...p..... modified_permissions_file
+.f....o.... modified_owner_file
+>f+++++++++ new_file
+cL+++++++++ new_symlink -> directory_a/new_file_in_directory
+.d..t...... directory_a/
+>f+++++++++ directory_a/new_file_in_directory
+>f..t...... directory_a/touched_file_in_directory
+cd+++++++++ new_empty_directory/
+.d..t...... touched_empty_directory/"""
+# The output format of rsync's itemized changes has a few unusual cases
+# that are ambiguous. For instance, if the operation involved creating a
+# symbolic link named "a -> b" to a file named "c", the rsync output would be:
+# cL+++++++++ a -> b -> c
+# which is indistinguishable from the output for creating a symbolic link named
+# "a" to a file named "b -> c".
+# Since there is no easy resolution to this ambiguity, and it seems like a case
+# that would rarely or never be encountered in the wild, rsync quickmerge
+# will exclude all files which contain the substring " -> " in their name.
+
+RSYNC_TEST_OUTPUT_FOR_PACKAGE_UPDATE = \
+""">f..t...... client/ardvark.py
+.d..t...... client/site_tests/
+>f+++++++++ client/site_tests/nothing.py
+.d..t...... client/site_tests/factory_Leds/
+>f+++++++++ client/site_tests/factory_Leds/factory_Leds2.py
+>f..tpog... client/site_tests/login_UserPolicyKeys/control
+>f..tpog... client/site_tests/login_UserPolicyKeys/login_UserPolicyKeys.py
+>f..t...... client/site_tests/platform_Cryptohome/platform_Cryptohome.py
+>f..tpog... server/site_tests/security_DbusFuzzServer/control
+>f..t.og... utils/coverage_suite.py
+.d..t...... client/site_tests/power_Thermal/
+cd+++++++++ client/site_tests/power_Thermal/a/
+cd+++++++++ client/site_tests/power_Thermal/a/b/
+cd+++++++++ client/site_tests/power_Thermal/a/b/c/
+>f+++++++++ client/site_tests/power_Thermal/a/b/c/d.py"""
+
+RSYNC_TEST_DESTINATION_PATH = '/foo/bar/'
+
+TEST_PACKAGE_CP = 'a_cute/little_puppy'
+TEST_PACKAGE_CPV = 'a_cute/little_puppy-3.14159'
+TEST_PACKAGE_C = 'a_cute'
+TEST_PACKAGE_PV = 'little_puppy-3.14159'
+TEST_PORTAGE_ROOT = '/bib/bob/'
+TEST_PACKAGE_OLDCONTENTS = {
+    u'/by/the/prickling/of/my/thumbs': (u'obj', '1234', '4321'),
+    u'/something/wicked/this/way/comes': (u'dir',)
+}
+
+
class ItemizeChangesFromRsyncOutput(cros_test_lib.TestCase):
  """Test autotest_quickmerge.ItemizeChangesFromRsyncOutput."""

  def testItemizeChangesFromRsyncOutput(self):
    """Test that rsync output parser returns correct FileMutations."""
    report = autotest_quickmerge.ItemizeChangesFromRsyncOutput(
        RSYNC_TEST_OUTPUT, RSYNC_TEST_DESTINATION_PATH)

    # Newly created files (including the new symlink) carry the
    # destination prefix.
    self.assertEqual(
        {('>f+++++++++', '/foo/bar/new_file'),
         ('>f+++++++++', '/foo/bar/directory_a/new_file_in_directory'),
         ('cL+++++++++', '/foo/bar/new_symlink')},
        set(report.new_files))

    # Every modification flavor: timestamp, contents, permissions, owner.
    self.assertEqual(
        {('>f..t......', '/foo/bar/touched file with spaces'),
         ('>f..t......', '/foo/bar/touched_file'),
         ('>f.st......', '/foo/bar/modified_contents_file'),
         ('.f...p.....', '/foo/bar/modified_permissions_file'),
         ('.f....o....', '/foo/bar/modified_owner_file'),
         ('>f..t......', '/foo/bar/directory_a/touched_file_in_directory')},
        set(report.modified_files))

    # Only brand-new directories are reported, not merely-touched ones.
    self.assertEqual(
        {('cd+++++++++', '/foo/bar/new_empty_directory/')},
        set(report.new_directories))
+
+
class PackageNameParsingTest(cros_test_lib.TestCase):
  """Test autotest_quickmerge.GetStalePackageNames."""

  def testGetStalePackageNames(self):
    sysroot = '/an/arbitrary/path/'
    report = autotest_quickmerge.ItemizeChangesFromRsyncOutput(
        RSYNC_TEST_OUTPUT_FOR_PACKAGE_UPDATE, sysroot)
    matches = autotest_quickmerge.GetStalePackageNames(
        report.modified_files + report.new_files, sysroot)
    # Each touched site_test above should map to exactly one package name.
    self.assertEqual(
        set(matches),
        {'factory_Leds', 'login_UserPolicyKeys', 'platform_Cryptohome',
         'power_Thermal'})
+
+
class RsyncCommandTest(cros_build_lib_unittest.RunCommandTestCase):
  """Test autotest_quickmerge.RsyncQuickmerge."""

  def testRsyncQuickmergeCommand(self):
    """Test that RsyncQuickMerge makes correct call to SudoRunCommand"""
    include_file = 'an_include_file_name'
    src = 'a_source_path'
    dest = 'a_sysroot_path'

    autotest_quickmerge.RsyncQuickmerge(src, dest, include_file,
                                        pretend=True,
                                        overwrite=False)

    # pretend=True adds '-n'; overwrite=False keeps '-u'.
    self.assertCommandContains(
        ['rsync', '-a', '-n', '-u', '-i',
         '--exclude=**.pyc', '--exclude=**.pyo',
         '--exclude=** -> *',
         '--include-from=%s' % include_file,
         '--exclude=*',
         src,
         dest])
+
+
class PortageManipulationsTest(cros_test_lib.MockTestCase):
  """Test usage of autotest_quickmerge.portage."""

  def testUpdatePackageContents(self):
    """Test that UpdatePackageContents makes the correct calls to portage."""
    autotest_quickmerge.portage = mock.MagicMock()
    portage = autotest_quickmerge.portage

    portage.root = TEST_PORTAGE_ROOT

    # Wire up a fake vartree the way autotest_quickmerge expects to find it
    # via portage.create_trees().
    mock_vartree = mock.MagicMock()
    mock_vartree.settings = {'an arbitrary' : 'dictionary'}
    mock_tree = {TEST_PORTAGE_ROOT : {'vartree' : mock_vartree}}
    portage.create_trees.return_value = mock_tree

    mock_vartree.dbapi = mock.MagicMock()
    mock_vartree.dbapi.cp_list.return_value = [TEST_PACKAGE_CPV]

    mock_package = mock.MagicMock()
    portage.dblink.return_value = mock_package  # pylint: disable=no-member
    mock_package.getcontents.return_value = TEST_PACKAGE_OLDCONTENTS

    # Entries UpdatePackageContents should add for the new files/dirs/symlink
    # in RSYNC_TEST_OUTPUT, merged on top of the old contents.
    EXPECTED_NEW_ENTRIES = {
        '/foo/bar/new_empty_directory': (u'dir',),
        '/foo/bar/directory_a/new_file_in_directory': (u'obj', '0', '0'),
        '/foo/bar/new_file': (u'obj', '0', '0'),
        '/foo/bar/new_symlink': (u'obj', '0', '0')
    }
    RESULT_DICTIONARY = TEST_PACKAGE_OLDCONTENTS.copy()
    RESULT_DICTIONARY.update(EXPECTED_NEW_ENTRIES)

    change_report = autotest_quickmerge.ItemizeChangesFromRsyncOutput(
        RSYNC_TEST_OUTPUT, RSYNC_TEST_DESTINATION_PATH)
    autotest_quickmerge.UpdatePackageContents(change_report, TEST_PACKAGE_CP,
                                              TEST_PORTAGE_ROOT)

    # The original test merely *called* writeContentsToContentsFile on the
    # mock (recording a call and asserting nothing). Actually verify that
    # the code under test wrote the merged contents dictionary exactly once.
    mock_vartree.dbapi.writeContentsToContentsFile.assert_called_once_with(
        mock_package, RESULT_DICTIONARY)
+
+
class PortageAPITest(cros_test_lib.TestCase):
  """Ensures that required portage API exists."""

  def runTest(self):
    """Skip outside the chroot; fail if portage lacks the required API."""
    try:
      import portage
    except ImportError:
      self.skipTest('Portage not available in test environment. Re-run test '
                    'in chroot.')
    try:
      # pylint: disable=no-member
      f = portage.vardbapi.writeContentsToContentsFile
    except AttributeError:
      self.fail('Required writeContentsToContentsFile function does '
                'not exist.')

    # NOTE(review): types.UnboundMethodType exists only on Python 2 (methods
    # accessed on a class are plain functions on Python 3). This check
    # assumes a Python 2 runtime -- revisit when migrating to Python 3.
    self.assertIsInstance(f, types.UnboundMethodType,
                          'Required writeContentsToContentsFile is not '
                          'a function.')
diff --git a/scripts/cbuildbot.py b/scripts/cbuildbot.py
new file mode 100644
index 0000000..c1c2dab
--- /dev/null
+++ b/scripts/cbuildbot.py
@@ -0,0 +1,1272 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Main builder code for Chromium OS.
+
+Used by Chromium OS buildbot configuration for all Chromium OS builds including
+full and pre-flight-queue builds.
+"""
+
+from __future__ import print_function
+
+import distutils.version
+import glob
+import json
+import mock
+import optparse
+import os
+import pickle
+import sys
+
+from chromite.cbuildbot import builders
+from chromite.cbuildbot import cbuildbot_run
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot import remote_try
+from chromite.cbuildbot import repository
+from chromite.cbuildbot import tee
+from chromite.cbuildbot import topology
+from chromite.cbuildbot import tree_status
+from chromite.cbuildbot import trybot_patch_pool
+from chromite.cbuildbot.stages import completion_stages
+from chromite.lib import cidb
+from chromite.lib import cgroups
+from chromite.lib import cleanup
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import graphite
+from chromite.lib import gob_util
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import retry_stats
+from chromite.lib import sudo
+from chromite.lib import timeout_util
+
+
# Directory and filename for cbuildbot's own log output.
# NOTE(review): usage sites are outside this view -- confirm before relying.
_DEFAULT_LOG_DIR = 'cbuildbot_logs'
_BUILDBOT_LOG_FILE = 'cbuildbot.log'
# Buildroot directory names used by _DetermineDefaultBuildRoot for external
# and internal checkouts respectively.
_DEFAULT_EXT_BUILDROOT = 'trybot'
_DEFAULT_INT_BUILDROOT = 'trybot-internal'
# Binaries buildbot runs require; presumably validated at startup elsewhere.
_BUILDBOT_REQUIRED_BINARIES = ('pbzip2',)
# Attribute name for the option API version (matches the 'api' kwarg
# consumed by CustomOption.__init__ below).
_API_VERSION_ATTR = 'api_version'
+
+
def _PrintValidConfigs(site_config, display_all=False):
  """Print a list of valid buildbot configs.

  Args:
    site_config: config_lib.SiteConfig containing all config info.
    display_all: Print all configs.  Otherwise, prints only configs with
                 trybot_list=True.
  """
  def _GetSortKey(config_name):
    """Sort trybot-listed configs first, then by description and name."""
    config_dict = site_config[config_name]
    return (not config_dict['trybot_list'], config_dict['description'],
            config_name)

  COLUMN_WIDTH = 45
  if not display_all:
    print('Note: This is the common list; for all configs, use --all.')
  print('config'.ljust(COLUMN_WIDTH), 'description')
  print('------'.ljust(COLUMN_WIDTH), '-----------')
  # Use sorted() rather than list.sort(): on Python 3, dict.keys() returns a
  # view with no .sort() method, so the original keys()/sort() pair breaks.
  for name in sorted(site_config, key=_GetSortKey):
    if display_all or site_config[name]['trybot_list']:
      desc = site_config[name].get('description')
      desc = desc if desc else ''
      print(name.ljust(COLUMN_WIDTH), desc)
+
+
def _ConfirmBuildRoot(buildroot):
  """Confirm with user the inferred buildroot, and mark it as confirmed."""
  logging.warning('Using default directory %s as buildroot', buildroot)
  accepted = cros_build_lib.BooleanPrompt(default=False)
  if not accepted:
    print('Please specify a different buildroot via the --buildroot option.')
    sys.exit(0)

  if not os.path.exists(buildroot):
    os.mkdir(buildroot)

  # Drop a marker so later runs recognize this directory as a trybot
  # buildroot.
  repository.CreateTrybotMarker(buildroot)
+
+
def _ConfirmRemoteBuildbotRun():
  """Confirm user wants to run with --buildbot --remote."""
  logging.warning(
      'You are about to launch a PRODUCTION job!  This is *NOT* a '
      'trybot run! Are you sure?')
  proceed = cros_build_lib.BooleanPrompt(default=False)
  if not proceed:
    print('Please specify --pass-through="--debug".')
    sys.exit(0)
+
+
def _DetermineDefaultBuildRoot(sourceroot, internal_build):
  """Default buildroot to be under the directory that contains current checkout.

  Args:
    sourceroot: Use specified sourceroot.
    internal_build: Whether the build is an internal build
  """
  if not repository.IsARepoRoot(sourceroot):
    cros_build_lib.Die(
        'Could not find root of local checkout at %s.  Please specify '
        'using the --sourceroot option.' % sourceroot)

  # Place trybot buildroot under the directory containing current checkout.
  top_level = os.path.dirname(os.path.realpath(sourceroot))
  subdir = _DEFAULT_INT_BUILDROOT if internal_build else _DEFAULT_EXT_BUILDROOT
  return os.path.join(top_level, subdir)
+
+
def _BackupPreviousLog(log_file, backup_limit=25):
  """Rename previous log.

  Rotates |log_file| to |log_file|.N where N is one greater than the highest
  existing numeric backup suffix, pruning the oldest backup once
  |backup_limit| backups exist.

  Args:
    log_file: The absolute path to the previous log.
    backup_limit: Maximum number of old logs to keep.
  """
  if not os.path.exists(log_file):
    return

  def _Suffix(path):
    """Return the numeric backup suffix of |path|, or None if non-numeric."""
    suffix = path.rpartition('.')[2]
    return int(suffix) if suffix.isdigit() else None

  # Sort numerically by suffix. The original code used
  # distutils.version.LooseVersion (deprecated; distutils was removed in
  # Python 3.12) and raised ValueError if a stray non-numeric ".*" file
  # matched the glob; skipping non-numeric suffixes avoids both problems.
  old_logs = sorted(
      (path for path in glob.glob(log_file + '.*')
       if _Suffix(path) is not None),
      key=_Suffix)

  if len(old_logs) >= backup_limit:
    os.remove(old_logs[0])

  last = _Suffix(old_logs[-1]) if old_logs else 0
  os.rename(log_file, log_file + '.' + str(last + 1))
+
+
def _IsDistributedBuilder(options, chrome_rev, build_config):
  """Determines whether the builder should be a DistributedBuilder.

  Args:
    options: options passed on the commandline.
    chrome_rev: Chrome revision to build.
    build_config: Builder configuration dictionary.

  Returns:
    True if the builder should be a distributed_builder
  """
  # Pre-CQ builds are always distributed.
  if build_config['pre_cq']:
    return True
  # Local (non-buildbot) runs are never distributed.
  if not options.buildbot:
    return False
  # We don't do distributed logic to TOT Chrome PFQ's, nor local
  # chrome roots (e.g. chrome try bots)
  # TODO(davidjames): Update any builders that rely on this logic to use
  # manifest_version=False instead.
  non_distributed_revs = (constants.CHROME_REV_TOT,
                          constants.CHROME_REV_LOCAL,
                          constants.CHROME_REV_SPEC)
  if chrome_rev in non_distributed_revs:
    return False
  if build_config['manifest_version']:
    return True

  return False
+
+
def _RunBuildStagesWrapper(options, site_config, build_config):
  """Helper function that wraps RunBuildStages()."""
  logging.info('cbuildbot was executed with args %s' %
               cros_build_lib.CmdToStr(sys.argv))

  # A --chrome_rev on the commandline overrides the config's value. A TOT
  # rev is resolved to the current tip-of-trunk revision and treated as a
  # SPEC rev from here on.
  chrome_rev = build_config['chrome_rev']
  if options.chrome_rev:
    chrome_rev = options.chrome_rev
  if chrome_rev == constants.CHROME_REV_TOT:
    options.chrome_version = gob_util.GetTipOfTrunkRevision(
        constants.CHROMIUM_GOB_URL)
    options.chrome_rev = constants.CHROME_REV_SPEC

  # If it's likely we'll need to build Chrome, fetch the source.
  if build_config['sync_chrome'] is None:
    options.managed_chrome = (
        chrome_rev != constants.CHROME_REV_LOCAL and
        (not build_config['usepkg_build_packages'] or chrome_rev or
         build_config['profile'] or options.rietveld_patches))
  else:
    options.managed_chrome = build_config['sync_chrome']

  if options.managed_chrome:
    # Tell Chrome to fetch the source locally.
    internal = constants.USE_CHROME_INTERNAL in build_config['useflags']
    chrome_src = 'chrome-src-internal' if internal else 'chrome-src'
    options.chrome_root = os.path.join(options.cache_dir, 'distfiles', 'target',
                                       chrome_src)
  elif options.rietveld_patches:
    # Rietveld patches only make sense when we manage the Chrome checkout.
    cros_build_lib.Die('This builder does not support Rietveld patches.')

  # Optionally restore builder metadata dumped (as JSON) by a previous run.
  metadata_dump_dict = {}
  if options.metadata_dump:
    with open(options.metadata_dump, 'r') as metadata_file:
      metadata_dump_dict = json.loads(metadata_file.read())

  # We are done munging options values, so freeze options object now to avoid
  # further abuse of it.
  # TODO(mtennant): one by one identify each options value override and see if
  # it can be handled another way.  Try to push this freeze closer and closer
  # to the start of the script (e.g. in or after _PostParseCheck).
  options.Freeze()

  with parallel.Manager() as manager:
    builder_run = cbuildbot_run.BuilderRun(
        options, site_config, build_config, manager)
    if metadata_dump_dict:
      builder_run.attrs.metadata.UpdateWithDict(metadata_dump_dict)

    # Configs without an explicit builder class fall back to the simple
    # builders, picking distributed vs. plain via _IsDistributedBuilder.
    if builder_run.config.builder_class_name is None:
      # TODO: This should get relocated to chromeos_config.
      if _IsDistributedBuilder(options, chrome_rev, build_config):
        builder_cls_name = 'simple_builders.DistributedBuilder'
      else:
        builder_cls_name = 'simple_builders.SimpleBuilder'
      builder_cls = builders.GetBuilderClass(builder_cls_name)
      builder = builder_cls(builder_run)
    else:
      builder = builders.Builder(builder_run)

    # A failed build exits non-zero so callers see the failure.
    if not builder.Run():
      sys.exit(1)
+
+
+# Parser related functions
def _CheckLocalPatches(sourceroot, local_patches):
  """Do an early quick check of the passed-in patches.

  If the branch of a project is not specified we append the current branch the
  project is on.

  TODO(davidjames): The project:branch format isn't unique, so this means that
  we can't differentiate what directory the user intended to apply patches to.
  We should references by directory instead.

  Args:
    sourceroot: The checkout where patches are coming from.
    local_patches: List of patches to check in project:branch format.

  Returns:
    A list of patches that have been verified, in project:branch format.
  """
  manifest = git.ManifestCheckout.Cached(sourceroot)
  verified = []
  for patch in local_patches:
    project, _, branch = patch.partition(':')

    checkouts = manifest.FindCheckouts(project, only_patchable=True)
    if not checkouts:
      cros_build_lib.Die('Project %s does not exist.' % (project,))
    if len(checkouts) > 1:
      cros_build_lib.Die(
          'We do not yet support local patching for projects that are checked '
          'out to multiple directories. Try uploading your patch to gerrit '
          'and referencing it via the -g option instead.'
      )

    found_branch = False
    for checkout in checkouts:
      project_dir = checkout.GetPath(absolute=True)

      # Fall back to the project's current branch when none was given.
      local_branch = branch or git.GetCurrentBranch(project_dir)

      if local_branch and git.DoesCommitExistInRepo(project_dir, local_branch):
        verified.append('%s:%s' % (project, local_branch))
        found_branch = True

    if not found_branch:
      if branch:
        cros_build_lib.Die('Project %s does not have branch %s'
                           % (project, branch))
      else:
        cros_build_lib.Die('Project %s is not on a branch!' % (project,))

  return verified
+
+
def _CheckChromeVersionOption(_option, _opt_str, value, parser):
  """Upgrade other options based on chrome_version being passed."""
  version = value.strip()

  # A non-empty version implies a SPEC chrome_rev unless one was already set.
  if version and parser.values.chrome_rev is None:
    parser.values.chrome_rev = constants.CHROME_REV_SPEC

  parser.values.chrome_version = version
+
+
def _CheckChromeRootOption(_option, _opt_str, value, parser):
  """Validate and convert chrome_root to full-path form."""
  parser.values.chrome_root = value
  # Supplying a local chrome root implies a LOCAL chrome_rev unless the
  # caller already chose one.
  if parser.values.chrome_rev is None:
    parser.values.chrome_rev = constants.CHROME_REV_LOCAL
+
+
def _CheckChromeRevOption(_option, _opt_str, value, parser):
  """Validate the chrome_rev option."""
  rev = value.strip()
  # Reject anything outside the known set of chrome revision types.
  if rev not in constants.VALID_CHROME_REVISIONS:
    raise optparse.OptionValueError('Invalid chrome rev specified')

  parser.values.chrome_rev = rev
+
+
def FindCacheDir(_parser, _options):
  """Return None as the cache dir for the parser's caching hook.

  NOTE(review): the None-handling fallback lives in commandline's caching
  support -- confirm its behavior there before relying on it.
  """
  return None
+
+
class CustomGroup(optparse.OptionGroup):
  """Custom option group which supports arguments passed-through to trybot."""

  def add_remote_option(self, *args, **kwargs):
    """For arguments that are passed-through to remote trybot."""
    # remote_pass_through is consumed by CustomOption.__init__, not by
    # optparse itself; this only works with the CustomOption option class.
    return optparse.OptionGroup.add_option(self, *args,
                                           remote_pass_through=True,
                                           **kwargs)
+
+
class CustomOption(commandline.FilteringOption):
  """Subclass FilteringOption class to implement pass-through and api."""

  # Register the custom 'extend' action in every optparse action table so
  # repeated or space-separated option values accumulate into a list.
  ACTIONS = commandline.FilteringOption.ACTIONS + ('extend',)
  STORE_ACTIONS = commandline.FilteringOption.STORE_ACTIONS + ('extend',)
  TYPED_ACTIONS = commandline.FilteringOption.TYPED_ACTIONS + ('extend',)
  ALWAYS_TYPED_ACTIONS = (commandline.FilteringOption.ALWAYS_TYPED_ACTIONS +
                          ('extend',))

  def __init__(self, *args, **kwargs):
    # The remote_pass_through argument specifies whether we should directly
    # pass the argument (with its value) onto the remote trybot.
    self.pass_through = kwargs.pop('remote_pass_through', False)
    # Optional 'api' kwarg records the minimum remote API version;
    # pop both before optparse sees the unknown keywords.
    self.api_version = int(kwargs.pop('api', '0'))
    commandline.FilteringOption.__init__(self, *args, **kwargs)

  def take_action(self, action, dest, opt, value, values, parser):
    """Handle the custom 'extend' action; defer the rest to the base class."""
    if action == 'extend':
      # If there is extra spaces between each argument, we get '' which later
      # code barfs on, so skip those.  e.g. We see this with the forms:
      #  cbuildbot -p 'proj:branch ' ...
      #  cbuildbot -p ' proj:branch' ...
      #  cbuildbot -p 'proj:branch  proj2:branch' ...
      lvalue = value.split()
      values.ensure_value(dest, []).extend(lvalue)

    commandline.FilteringOption.take_action(
        self, action, dest, opt, value, values, parser)
+
+
class CustomParser(commandline.FilteringParser):
  """Custom option parser which supports arguments passed-through to trybot."""

  # Options created by this parser default to CustomOption, which understands
  # the remote_pass_through/api keywords used below.
  DEFAULT_OPTION_CLASS = CustomOption

  def add_remote_option(self, *args, **kwargs):
    """For arguments that are passed-through to remote trybot."""
    return self.add_option(*args, remote_pass_through=True, **kwargs)
+
+
+def _CreateParser():
+  """Generate and return the parser with all the options."""
+  # Parse options
+  usage = 'usage: %prog [options] buildbot_config [buildbot_config ...]'
+  parser = CustomParser(usage=usage, caching=FindCacheDir)
+
+  # Main options
+  parser.add_option('-l', '--list', action='store_true', dest='list',
+                    default=False,
+                    help='List the suggested trybot configs to use (see --all)')
+  parser.add_option('-a', '--all', action='store_true', dest='print_all',
+                    default=False,
+                    help='List all of the buildbot configs available w/--list')
+
+  parser.add_option('--local', default=False, action='store_true',
+                    help=('Specifies that this tryjob should be run locally. '
+                          'Implies --debug.'))
+  parser.add_option('--remote', default=False, action='store_true',
+                    help='Specifies that this tryjob should be run remotely.')
+
+  parser.add_remote_option('-b', '--branch',
+                           help=('The manifest branch to test.  The branch to '
+                                 'check the buildroot out to.'))
+  parser.add_option('-r', '--buildroot', dest='buildroot', type='path',
+                    help=('Root directory where source is checked out to, and '
+                          'where the build occurs. For external build configs, '
+                          "defaults to 'trybot' directory at top level of your "
+                          'repo-managed checkout.'))
+  parser.add_option('--bootstrap-dir', type='path', default=None,
+                    help='Bootstrapping cbuildbot may involve checking out '
+                         'multiple copies of chromite. All these checkouts '
+                         'will be contained in the directory specified here. '
+                         'Default:%s' % osutils.GetGlobalTempDir())
+  parser.add_remote_option('--chrome_rev', default=None, type='string',
+                           action='callback', dest='chrome_rev',
+                           callback=_CheckChromeRevOption,
+                           help=('Revision of Chrome to use, of type [%s]'
+                                 % '|'.join(constants.VALID_CHROME_REVISIONS)))
+  parser.add_remote_option('--profile', default=None, type='string',
+                           action='store', dest='profile',
+                           help='Name of profile to sub-specify board variant.')
+  parser.add_option('-c', '--config_repo',
+                    help=('Cloneable path to the git repository containing '
+                          'the site configuration to use.'))
+
+  #
+  # Patch selection options.
+  #
+
+  group = CustomGroup(
+      parser,
+      'Patch Options')
+
+  group.add_remote_option('-g', '--gerrit-patches', action='extend',
+                          default=[], type='string',
+                          metavar="'Id1 *int_Id2...IdN'",
+                          help=('Space-separated list of short-form Gerrit '
+                                "Change-Id's or change numbers to patch. "
+                                "Please prepend '*' to internal Change-Id's"))
+  group.add_remote_option('-G', '--rietveld-patches', action='extend',
+                          default=[], type='string',
+                          metavar="'id1[:subdir1]...idN[:subdirN]'",
+                          help=('Space-separated list of short-form Rietveld '
+                                'issue numbers to patch. If no subdir is '
+                                'specified, the src directory is used.'))
+  group.add_option('-p', '--local-patches', action='extend', default=[],
+                   metavar="'<project1>[:<branch1>]...<projectN>[:<branchN>]'",
+                   help=('Space-separated list of project branches with '
+                         'patches to apply.  Projects are specified by name. '
+                         'If no branch is specified the current branch of the '
+                         'project will be used.'))
+
+  parser.add_option_group(group)
+
+  #
+  # Remote trybot options.
+  #
+
+  group = CustomGroup(
+      parser,
+      'Remote Trybot Options (--remote)')
+
+  group.add_remote_option('--hwtest', dest='hwtest', action='store_true',
+                          default=False,
+                          help='Run the HWTest stage (tests on real hardware)')
+  group.add_option('--remote-description', default=None,
+                   help=('Attach an optional description to a --remote run '
+                         'to make it easier to identify the results when it '
+                         'finishes'))
+  group.add_option('--slaves', action='extend', default=[],
+                   help=('Specify specific remote tryslaves to run on (e.g. '
+                         'build149-m2); if the bot is busy, it will be queued'))
+  group.add_remote_option('--channel', dest='channels', action='extend',
+                          default=[],
+                          help=('Specify a channel for a payloads trybot. Can '
+                                'be specified multiple times. No valid for '
+                                'non-payloads configs.'))
+  group.add_option('--test-tryjob', action='store_true',
+                   default=False,
+                   help=('Submit a tryjob to the test repository.  Will not '
+                         'show up on the production trybot waterfall.'))
+
+  parser.add_option_group(group)
+
+  #
+  # Branch creation options.
+  #
+
+  group = CustomGroup(
+      parser,
+      'Branch Creation Options (used with branch-util)')
+
+  group.add_remote_option('--branch-name',
+                          help='The branch to create or delete.')
+  group.add_remote_option('--delete-branch', default=False, action='store_true',
+                          help='Delete the branch specified in --branch-name.')
+  group.add_remote_option('--rename-to', type='string',
+                          help='Rename a branch to the specified name.')
+  group.add_remote_option('--force-create', default=False, action='store_true',
+                          help='Overwrites an existing branch.')
+  group.add_remote_option('--skip-remote-push', default=False,
+                          action='store_true',
+                          help='Do not actually push to remote git repos.  '
+                               'Used for end-to-end testing branching.')
+
+  parser.add_option_group(group)
+
+  #
+  # Advanced options.
+  #
+
+  group = CustomGroup(
+      parser,
+      'Advanced Options',
+      'Caution: use these options at your own risk.')
+
+  group.add_remote_option('--bootstrap-args', action='append', default=[],
+                          help=('Args passed directly to the bootstrap re-exec '
+                                'to skip verification by the bootstrap code'))
+  group.add_remote_option('--buildbot', dest='buildbot', action='store_true',
+                          default=False, help='This is running on a buildbot')
+  group.add_remote_option('--no-buildbot-tags', action='store_false',
+                          dest='enable_buildbot_tags', default=True,
+                          help='Suppress buildbot specific tags from log '
+                               'output. This is used to hide recursive '
+                               'cbuilbot runs on the waterfall.')
+  group.add_remote_option('--buildnumber', help='build number', type='int',
+                          default=0)
+  group.add_option('--chrome_root', default=None, type='path',
+                   action='callback', callback=_CheckChromeRootOption,
+                   dest='chrome_root', help='Local checkout of Chrome to use.')
+  group.add_remote_option('--chrome_version', default=None, type='string',
+                          action='callback', dest='chrome_version',
+                          callback=_CheckChromeVersionOption,
+                          help=('Used with SPEC logic to force a particular '
+                                'git revision of chrome rather than the '
+                                'latest.'))
+  group.add_remote_option('--clobber', action='store_true', dest='clobber',
+                          default=False,
+                          help='Clears an old checkout before syncing')
+  group.add_remote_option('--latest-toolchain', action='store_true',
+                          default=False,
+                          help='Use the latest toolchain.')
+  parser.add_option('--log_dir', dest='log_dir', type='path',
+                    help=('Directory where logs are stored.'))
+  group.add_remote_option('--maxarchives', dest='max_archive_builds',
+                          default=3, type='int',
+                          help='Change the local saved build count limit.')
+  parser.add_remote_option('--manifest-repo-url',
+                           help=('Overrides the default manifest repo url.'))
+  group.add_remote_option('--compilecheck', action='store_true', default=False,
+                          help='Only verify compilation and unit tests.')
+  group.add_remote_option('--noarchive', action='store_false', dest='archive',
+                          default=True, help="Don't run archive stage.")
+  group.add_remote_option('--nobootstrap', action='store_false',
+                          dest='bootstrap', default=True,
+                          help=("Don't checkout and run from a standalone "
+                                'chromite repo.'))
+  group.add_remote_option('--nobuild', action='store_false', dest='build',
+                          default=True,
+                          help="Don't actually build (for cbuildbot dev)")
+  group.add_remote_option('--noclean', action='store_false', dest='clean',
+                          default=True, help="Don't clean the buildroot")
+  group.add_remote_option('--nocgroups', action='store_false', dest='cgroups',
+                          default=True,
+                          help='Disable cbuildbots usage of cgroups.')
+  group.add_remote_option('--nochromesdk', action='store_false',
+                          dest='chrome_sdk', default=True,
+                          help=("Don't run the ChromeSDK stage which builds "
+                                'Chrome outside of the chroot.'))
+  group.add_remote_option('--noprebuilts', action='store_false',
+                          dest='prebuilts', default=True,
+                          help="Don't upload prebuilts.")
+  group.add_remote_option('--nopatch', action='store_false',
+                          dest='postsync_patch', default=True,
+                          help=("Don't run PatchChanges stage.  This does not "
+                                'disable patching in of chromite patches '
+                                'during BootstrapStage.'))
+  group.add_remote_option('--nopaygen', action='store_false',
+                          dest='paygen', default=True,
+                          help="Don't generate payloads.")
+  group.add_remote_option('--noreexec', action='store_false',
+                          dest='postsync_reexec', default=True,
+                          help="Don't reexec into the buildroot after syncing.")
+  group.add_remote_option('--nosdk', action='store_true',
+                          default=False,
+                          help='Re-create the SDK from scratch.')
+  group.add_remote_option('--nosync', action='store_false', dest='sync',
+                          default=True, help="Don't sync before building.")
+  group.add_remote_option('--notests', action='store_false', dest='tests',
+                          default=True,
+                          help=('Override values from buildconfig and run no '
+                                'tests.'))
+  group.add_remote_option('--noimagetests', action='store_false',
+                          dest='image_test', default=True,
+                          help=('Override values from buildconfig and run no '
+                                'image tests.'))
+  group.add_remote_option('--nouprev', action='store_false', dest='uprev',
+                          default=True,
+                          help=('Override values from buildconfig and never '
+                                'uprev.'))
+  group.add_option('--reference-repo', action='store', default=None,
+                   dest='reference_repo',
+                   help=('Reuse git data stored in an existing repo '
+                         'checkout. This can drastically reduce the network '
+                         'time spent setting up the trybot checkout.  By '
+                         "default, if this option isn't given but cbuildbot "
+                         'is invoked from a repo checkout, cbuildbot will '
+                         'use the repo root.'))
+  group.add_option('--resume', action='store_true', default=False,
+                   help='Skip stages already successfully completed.')
+  group.add_remote_option('--timeout', action='store', type='int', default=0,
+                          help=('Specify the maximum amount of time this job '
+                                'can run for, at which point the build will be '
+                                'aborted.  If set to zero, then there is no '
+                                'timeout.'))
+  group.add_remote_option('--version', dest='force_version', default=None,
+                          help=('Used with manifest logic.  Forces use of this '
+                                'version rather than create or get latest. '
+                                'Examples: 4815.0.0-rc1, 4815.1.2'))
+
+  parser.add_option_group(group)
+
+  #
+  # Internal options.
+  #
+
+  group = CustomGroup(
+      parser,
+      'Internal Chromium OS Build Team Options',
+      'Caution: these are for meant for the Chromium OS build team only')
+
+  group.add_remote_option('--archive-base', type='gs_path',
+                          help=('Base GS URL (gs://<bucket_name>/<path>) to '
+                                'upload archive artifacts to'))
+  group.add_remote_option(
+      '--cq-gerrit-query', dest='cq_gerrit_override', default=None,
+      help=('If given, this gerrit query will be used to find what patches to '
+            "test, rather than the normal 'CommitQueue>=1 AND Verified=1 AND "
+            "CodeReview=2' query it defaults to.  Use with care- note "
+            'additionally this setting only has an effect if the buildbot '
+            "target is a cq target, and we're in buildbot mode."))
+  group.add_option('--pass-through', dest='pass_through_args', action='append',
+                   type='string', default=[])
+  group.add_option('--reexec-api-version', dest='output_api_version',
+                   action='store_true', default=False,
+                   help=('Used for handling forwards/backwards compatibility '
+                         'with --resume and --bootstrap'))
+  group.add_option('--remote-trybot', dest='remote_trybot',
+                   action='store_true', default=False,
+                   help='Indicates this is running on a remote trybot machine')
+  group.add_remote_option('--remote-patches', action='extend', default=[],
+                          help=('Patches uploaded by the trybot client when '
+                                'run using the -p option'))
+  # Note the default here needs to be hardcoded to 3; that is the last version
+  # that lacked this functionality.
+  group.add_option('--remote-version', default=3, type=int, action='store',
+                   help=('Used for compatibility checks w/tryjobs running in '
+                         'older chromite instances'))
+  group.add_option('--sourceroot', type='path', default=constants.SOURCE_ROOT)
+  group.add_remote_option('--test-bootstrap', action='store_true',
+                          default=False,
+                          help=('Causes cbuildbot to bootstrap itself twice, '
+                                'in the sequence A->B->C: A(unpatched) patches '
+                                'and bootstraps B; B patches and bootstraps C'))
+  group.add_remote_option('--validation_pool', default=None,
+                          help=('Path to a pickled validation pool. Intended '
+                                'for use only with the commit queue.'))
+  group.add_remote_option('--metadata_dump', default=None,
+                          help=('Path to a json dumped metadata file. This '
+                                'will be used as the initial metadata.'))
+  group.add_remote_option('--master-build-id', default=None, type=int,
+                          api=constants.REEXEC_API_MASTER_BUILD_ID,
+                          help=('cidb build id of the master build to this '
+                                'slave build.'))
+  group.add_remote_option('--mock-tree-status', dest='mock_tree_status',
+                          default=None, action='store',
+                          help=('Override the tree status value that would be '
+                                'returned from the the actual tree. Example '
+                                'values: open, closed, throttled. When used '
+                                'in conjunction with --debug, the tree status '
+                                'will not be ignored as it usually is in a '
+                                '--debug run.'))
+  group.add_remote_option(
+      '--mock-slave-status', dest='mock_slave_status', default=None,
+      action='store', metavar='MOCK_SLAVE_STATUS_PICKLE_FILE',
+      help=('Override the result of the _FetchSlaveStatuses method of '
+            'MasterSlaveSyncCompletionStage, by specifying a file with a '
+            'pickle of the result to be returned.'))
+
+  parser.add_option_group(group)
+
+  #
+  # Debug options
+  #
+  # Temporary hack; in place till --dry-run replaces --debug.
+  # pylint: disable=W0212
+  group = parser.debug_group
+  debug = [x for x in group.option_list if x._long_opts == ['--debug']][0]
+  debug.help += '  Currently functions as --dry-run in addition.'
+  debug.pass_through = True
+  group.add_option('--notee', action='store_false', dest='tee', default=True,
+                   help=('Disable logging and internal tee process.  Primarily '
+                         'used for debugging cbuildbot itself.'))
+  return parser
+
+
def _FinishParsing(options, args):
  """Perform some parsing tasks that need to take place after optparse.

  This function needs to be easily testable!  Keep it free of
  environment-dependent code.  Put more detailed usage validation in
  _PostParseCheck().

  Args:
    options: The options object returned by optparse
    args: The args object returned by optparse
  """
  # Populate options.pass_through_args with the raw arguments that were
  # flagged as pass-through by their option definitions.
  accepted, _ = commandline.FilteringParser.FilterArgs(
      options.parsed_args, lambda x: x.opt_inst.pass_through)
  options.pass_through_args.extend(accepted)

  # --chrome_root and --chrome_rev=local must be used together.
  if options.chrome_root:
    if options.chrome_rev != constants.CHROME_REV_LOCAL:
      cros_build_lib.Die('Chrome rev must be %s if chrome_root is set.' %
                         constants.CHROME_REV_LOCAL)
  elif options.chrome_rev == constants.CHROME_REV_LOCAL:
    cros_build_lib.Die('Chrome root must be set if chrome_rev is %s.' %
                       constants.CHROME_REV_LOCAL)

  # Likewise, --chrome_version and --chrome_rev=spec must be used together.
  if options.chrome_version:
    if options.chrome_rev != constants.CHROME_REV_SPEC:
      cros_build_lib.Die('Chrome rev must be %s if chrome_version is set.' %
                         constants.CHROME_REV_SPEC)
  elif options.chrome_rev == constants.CHROME_REV_SPEC:
    cros_build_lib.Die(
        'Chrome rev must not be %s if chrome_version is not set.'
        % constants.CHROME_REV_SPEC)

  patches = bool(options.gerrit_patches or options.local_patches or
                 options.rietveld_patches)
  if options.remote:
    if options.local:
      cros_build_lib.Die('Cannot specify both --remote and --local')

    # options.channels is a convenient way to detect payloads builds.
    if (not options.list and not options.buildbot and not options.channels and
        not patches):
      prompt = ('No patches were provided; are you sure you want to just '
                'run a remote build of %s?' % (
                    options.branch if options.branch else 'ToT'))
      if not cros_build_lib.BooleanPrompt(prompt=prompt, default=False):
        cros_build_lib.Die('Must provide patches when running with --remote.')

    # --debug needs to be explicitly passed through for remote invocations.
    release_mode_with_patches = (options.buildbot and patches and
                                 '--debug' not in options.pass_through_args)
  else:
    if len(args) > 1:
      cros_build_lib.Die('Multiple configs not supported if not running with '
                         '--remote.  Got %r', args)

    if options.slaves:
      cros_build_lib.Die('Cannot use --slaves if not running with --remote.')

    release_mode_with_patches = (options.buildbot and patches and
                                 not options.debug)

  # When running in release mode, make sure we are running with checked-in code.
  # We want checked-in cbuildbot/scripts to prevent errors, and we want to build
  # a release image with checked-in code for CrOS packages.
  if release_mode_with_patches:
    cros_build_lib.Die(
        'Cannot provide patches when running with --buildbot!')

  if options.buildbot and options.remote_trybot:
    cros_build_lib.Die(
        '--buildbot and --remote-trybot cannot be used together.')

  # Record whether --debug was set explicitly vs. it was inferred.
  options.debug_forced = bool(options.debug)
  if not options.debug:
    # We don't set debug by default for
    # 1. --buildbot invocations.
    # 2. --remote invocations, because it needs to push changes to the tryjob
    #    repo.
    options.debug = not options.buildbot and not options.remote

  # Record the configs targeted.
  options.build_targets = args[:]

  if constants.BRANCH_UTIL_CONFIG in options.build_targets:
    if options.remote:
      cros_build_lib.Die(
          'Running %s as a remote tryjob is not yet supported.',
          constants.BRANCH_UTIL_CONFIG)
    if len(options.build_targets) > 1:
      cros_build_lib.Die(
          'Cannot run %s with any other configs.',
          constants.BRANCH_UTIL_CONFIG)
    if not options.branch_name:
      cros_build_lib.Die(
          'Must specify --branch-name with the %s config.',
          constants.BRANCH_UTIL_CONFIG)
    if options.branch and options.branch != options.branch_name:
      cros_build_lib.Die(
          'If --branch is specified with the %s config, it must'
          ' have the same value as --branch-name.',
          constants.BRANCH_UTIL_CONFIG)

    # Exactly one of these mutually-exclusive branch-util modes must be chosen.
    exclusive_opts = {'--version': options.force_version,
                      '--delete-branch': options.delete_branch,
                      '--rename-to': options.rename_to}
    if sum(1 for x in exclusive_opts.values() if x) != 1:
      cros_build_lib.Die('When using the %s config, you must'
                         ' specify one and only one of the following'
                         ' options: %s.', constants.BRANCH_UTIL_CONFIG,
                         ', '.join(exclusive_opts.keys()))

    # When deleting or renaming a branch, the --branch and --nobootstrap
    # options are implied.
    if options.delete_branch or options.rename_to:
      if not options.branch:
        logging.info('Automatically enabling sync to branch %s for this %s '
                     'flow.', options.branch_name,
                     constants.BRANCH_UTIL_CONFIG)
        options.branch = options.branch_name
      if options.bootstrap:
        logging.info('Automatically disabling bootstrap step for this %s flow.',
                     constants.BRANCH_UTIL_CONFIG)
        options.bootstrap = False

  elif any([options.delete_branch, options.rename_to, options.branch_name]):
    cros_build_lib.Die(
        'Cannot specify --delete-branch, --rename-to or --branch-name when not '
        'running the %s config', constants.BRANCH_UTIL_CONFIG)
+
+
# pylint: disable=W0613
def _PostParseCheck(parser, options, args, site_config):
  """Perform some usage validation after we've parsed the arguments

  Args:
    parser: Option parser that was used to parse arguments.
    options: The options returned by optparse.
    args: The args returned by optparse.
    site_config: config_lib.SiteConfig containing all config info.
  """
  if not args:
    parser.error('Invalid usage: no configuration targets provided. '
                 'Use -h to see usage.  Use -l to list supported configs.')

  if not options.branch:
    options.branch = git.GetChromiteTrackingBranch()

  # Local patches only make sense when we can find the repo checkout
  # they are supposed to come from.
  if not repository.IsARepoRoot(options.sourceroot):
    if options.local_patches:
      raise Exception('Could not find repo checkout at %s!'
                      % options.sourceroot)

  # Because the default cache dir depends on other options, FindCacheDir
  # always returns None, and we setup the default here.
  if options.cache_dir is None:
    # Note, options.sourceroot is set regardless of the path
    # actually existing.
    if options.buildroot is not None:
      options.cache_dir = os.path.join(options.buildroot, '.cache')
    elif os.path.exists(options.sourceroot):
      options.cache_dir = os.path.join(options.sourceroot, '.cache')
    else:
      options.cache_dir = parser.FindCacheDir(parser, options)
    options.cache_dir = os.path.abspath(options.cache_dir)
    parser.ConfigureCacheDir(options.cache_dir)

  osutils.SafeMakedirsNonRoot(options.cache_dir)

  if options.local_patches:
    options.local_patches = _CheckLocalPatches(
        options.sourceroot, options.local_patches)

  # Honor CBUILDBOT_DEFAULT_MODE only when no explicit mode flag was given.
  default = os.environ.get('CBUILDBOT_DEFAULT_MODE')
  if (default and not any([options.local, options.buildbot,
                           options.remote, options.remote_trybot])):
    logging.info('CBUILDBOT_DEFAULT_MODE=%s env var detected, using it.',
                 default)
    default = default.lower()
    if default == 'local':
      options.local = True
    elif default == 'remote':
      options.remote = True
    elif default == 'buildbot':
      options.buildbot = True
    else:
      cros_build_lib.Die("CBUILDBOT_DEFAULT_MODE value %s isn't supported. "
                         % default)

  # Ensure that all args are legitimate config targets.
  invalid_targets = []
  for arg in args:
    if arg not in site_config:
      invalid_targets.append(arg)
      logging.error('No such configuration target: "%s".', arg)
      continue

    build_config = site_config[arg]

    is_payloads_build = build_config.build_type == constants.PAYLOADS_TYPE

    if options.channels and not is_payloads_build:
      cros_build_lib.Die('--channel must only be used with a payload config,'
                         ' not target (%s).' % arg)

    if not options.channels and is_payloads_build:
      cros_build_lib.Die('payload configs (%s) require --channel to do anything'
                         ' useful.' % arg)

    # The --version option is not compatible with an external target unless the
    # --buildbot option is specified.  More correctly, only "paladin versions"
    # will work with external targets, and those are only used with --buildbot.
    # If --buildbot is specified, then user should know what they are doing and
    # only specify a version that will work.  See crbug.com/311648.
    if (options.force_version and
        not (options.buildbot or build_config.internal)):
      cros_build_lib.Die('Cannot specify --version without --buildbot for an'
                         ' external target (%s).' % arg)

  if invalid_targets:
    cros_build_lib.Die('One or more invalid configuration targets specified. '
                       'You can check the available configs by running '
                       '`cbuildbot --list --all`')
+
+
def _ParseCommandLine(parser, argv):
  """Completely parse the commandline arguments.

  Parses argv with the given parser, drops empty positional arguments,
  handles the --reexec-api-version early exit, and runs _FinishParsing.
  """
  options, args = parser.parse_args(argv)

  # Strip out null arguments.
  # TODO(rcui): Remove when buildbot is fixed
  args = list(filter(None, args))

  # --reexec-api-version only prints the version and exits immediately.
  if options.output_api_version:
    print(constants.REEXEC_API_VERSION)
    sys.exit(0)

  _FinishParsing(options, args)
  return options, args
+
+
# Run environments returned by _GetRunEnvironment; they control which
# CIDB and graphite services _SetupConnections brings up.
_ENVIRONMENT_PROD = 'prod'
_ENVIRONMENT_DEBUG = 'debug'
_ENVIRONMENT_STANDALONE = 'standalone'
+
+
def _GetRunEnvironment(options, build_config):
  """Determine whether this is a prod/debug/standalone run.

  Args:
    options: Command line options structure.
    build_config: Config object for this build.

  Returns:
    One of _ENVIRONMENT_PROD, _ENVIRONMENT_DEBUG or _ENVIRONMENT_STANDALONE.
  """
  # TODO(akeshet): This is a temporary workaround to make sure that the cidb
  # is not used on waterfalls that the db schema does not support (in particular
  # the chromeos.chrome waterfall).
  # See crbug.com/406940
  waterfall = os.environ.get('BUILDBOT_MASTERNAME', '')
  if waterfall not in constants.CIDB_KNOWN_WATERFALLS:
    return _ENVIRONMENT_STANDALONE

  # TODO(akeshet): Clean up this code once we have better defined flags to
  # specify on-or-off waterfall and on-or-off production runs of cbuildbot.
  # See crbug.com/331417

  # --buildbot runs should use the production services, unless the --debug flag
  # is also present.
  if options.buildbot:
    if options.debug:
      return _ENVIRONMENT_DEBUG
    else:
      return _ENVIRONMENT_PROD

  # --remote-trybot runs should use the debug services, with the exception of
  # pre-cq builds, which should use the production services.
  if options.remote_trybot:
    if build_config['pre_cq']:
      return _ENVIRONMENT_PROD
    else:
      return _ENVIRONMENT_DEBUG

  # If neither --buildbot nor --remote-trybot flag was used, don't use external
  # services.
  return _ENVIRONMENT_STANDALONE
+
+
def _SetupConnections(options, build_config):
  """Set up CIDB and graphite connections using the appropriate Setup call.

  Args:
    options: Command line options structure.
    build_config: Config object for this build.
  """
  # Outline:
  # 1) Based on options and build_config, decide whether we are a production
  # run, debug run, or standalone run.
  # 2) Set up cidb instance accordingly.
  # 3) Update topology info from cidb, so that any other service set up can use
  # topology.
  # 4) Set up any other services.
  run_type = _GetRunEnvironment(options, build_config)

  # Anything other than prod/debug falls back to the no-cidb setup.
  cidb_setups = {
      _ENVIRONMENT_PROD: cidb.CIDBConnectionFactory.SetupProdCidb,
      _ENVIRONMENT_DEBUG: cidb.CIDBConnectionFactory.SetupDebugCidb,
  }
  cidb_setups.get(run_type, cidb.CIDBConnectionFactory.SetupNoCidb)()

  db = cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()
  topology.FetchTopologyFromCIDB(db)

  # Graphite setup mirrors the cidb decision; non-prod/debug runs get the
  # read-only metadata service and mock stats.
  graphite_setups = {
      _ENVIRONMENT_PROD: (graphite.ESMetadataFactory.SetupProd,
                          graphite.StatsFactory.SetupProd),
      _ENVIRONMENT_DEBUG: (graphite.ESMetadataFactory.SetupReadOnly,
                           graphite.StatsFactory.SetupDebug),
  }
  default_setups = (graphite.ESMetadataFactory.SetupReadOnly,
                    graphite.StatsFactory.SetupMock)
  for setup in graphite_setups.get(run_type, default_setups):
    setup()
+
+
def _FetchInitialBootstrapConfigRepo(repo_url, branch_name):
  """Fetch the TOT site config repo, if necessary to start bootstrap.

  Args:
    repo_url: Cloneable URL of the git repository holding the site config.
    branch_name: Optional branch to check out after cloning; may be falsy.
  """
  # Nothing to do when we are inside a full repo checkout (the manifest
  # stages control the site config there), or when the config directory
  # already exists (the bootstrap stage has already done the right thing;
  # leave the directory alone).
  if (git.FindRepoDir(constants.SOURCE_ROOT) or
      os.path.exists(constants.SITE_CONFIG_DIR)):
    return

  # We are part of a clean bootstrap chromite checkout (buildbot always
  # cleans chromite before launching us), so create the initial site config
  # checkout.
  logging.info('Fetching Config Repo: %s', repo_url)
  git.Clone(constants.SITE_CONFIG_DIR, repo_url)

  if branch_name:
    git.RunGit(constants.SITE_CONFIG_DIR, ['checkout', branch_name])

  # Clear the cached SiteConfig, if there was one.
  config_lib.ClearConfigCache()
+
+# TODO(build): This function is too damn long.
+def main(argv):
+  # Turn on strict sudo checks.
+  cros_build_lib.STRICT_SUDO = True
+
+  # Set umask to 022 so files created by buildbot are readable.
+  os.umask(0o22)
+
+  parser = _CreateParser()
+  options, args = _ParseCommandLine(parser, argv)
+
+  if options.buildbot and options.config_repo:
+    _FetchInitialBootstrapConfigRepo(options.config_repo, options.branch)
+
+  if options.config_repo:
+    # Ensure expected config file is present.
+    if not os.path.exists(constants.SITE_CONFIG_FILE):
+      cros_build_lib.Die('Unabled to find: %s', constants.SITE_CONFIG_FILE)
+
+  # Fetch our site_config now, because we need it to do anything else.
+  site_config = config_lib.GetConfig()
+
+  if options.list:
+    _PrintValidConfigs(site_config, options.print_all)
+    sys.exit(0)
+
+  _PostParseCheck(parser, options, args, site_config)
+
+  cros_build_lib.AssertOutsideChroot()
+
+  if options.enable_buildbot_tags:
+    logging.EnableBuildbotMarkers()
+  if options.remote:
+    logging.getLogger().setLevel(logging.WARNING)
+
+    # Verify configs are valid.
+    # If hwtest flag is enabled, post a warning that HWTest step may fail if the
+    # specified board is not a released platform or it is a generic overlay.
+    for bot in args:
+      build_config = site_config[bot]
+      if options.hwtest:
+        logging.warning(
+            'If %s is not a released platform or it is a generic overlay, '
+            'the HWTest step will most likely not run; please ask the lab '
+            'team for help if this is unexpected.' % build_config['boards'])
+
+    # Verify gerrit patches are valid.
+    print('Verifying patches...')
+    patch_pool = trybot_patch_pool.TrybotPatchPool.FromOptions(
+        gerrit_patches=options.gerrit_patches,
+        local_patches=options.local_patches,
+        sourceroot=options.sourceroot,
+        remote_patches=options.remote_patches)
+
+    # --debug need to be explicitly passed through for remote invocations.
+    if options.buildbot and '--debug' not in options.pass_through_args:
+      _ConfirmRemoteBuildbotRun()
+
+    print('Submitting tryjob...')
+    tryjob = remote_try.RemoteTryJob(options, args, patch_pool.local_patches)
+    tryjob.Submit(testjob=options.test_tryjob, dryrun=False)
+    print('Tryjob submitted!')
+    print(('Go to %s to view the status of your job.'
+           % tryjob.GetTrybotWaterfallLink()))
+    sys.exit(0)
+
+  elif (not options.buildbot and not options.remote_trybot
+        and not options.resume and not options.local):
+    cros_build_lib.Die('Please use --remote or --local to run trybots')
+
+  # Only one config arg is allowed in this mode, which was confirmed earlier.
+  bot_id = args[-1]
+  build_config = site_config[bot_id]
+
+  # TODO: Re-enable this block when reference_repo support handles this
+  #       properly. (see chromium:330775)
+  # if options.reference_repo is None:
+  #   repo_path = os.path.join(options.sourceroot, '.repo')
+  #   # If we're being run from a repo checkout, reuse the repo's git pool to
+  #   # cut down on sync time.
+  #   if os.path.exists(repo_path):
+  #     options.reference_repo = options.sourceroot
+
+  if options.reference_repo:
+    if not os.path.exists(options.reference_repo):
+      parser.error('Reference path %s does not exist'
+                   % (options.reference_repo,))
+    elif not os.path.exists(os.path.join(options.reference_repo, '.repo')):
+      parser.error('Reference path %s does not look to be the base of a '
+                   'repo checkout; no .repo exists in the root.'
+                   % (options.reference_repo,))
+
+  if (options.buildbot or options.remote_trybot) and not options.resume:
+    if not options.cgroups:
+      parser.error('Options --buildbot/--remote-trybot and --nocgroups cannot '
+                   'be used together.  Cgroup support is required for '
+                   'buildbot/remote-trybot mode.')
+    if not cgroups.Cgroup.IsSupported():
+      parser.error('Option --buildbot/--remote-trybot was given, but this '
+                   'system does not support cgroups.  Failing.')
+
+    missing = osutils.FindMissingBinaries(_BUILDBOT_REQUIRED_BINARIES)
+    if missing:
+      parser.error('Option --buildbot/--remote-trybot requires the following '
+                   "binaries which couldn't be found in $PATH: %s"
+                   % (', '.join(missing)))
+
+  if options.reference_repo:
+    options.reference_repo = os.path.abspath(options.reference_repo)
+
+  if not options.buildroot:
+    if options.buildbot:
+      parser.error('Please specify a buildroot with the --buildroot option.')
+
+    options.buildroot = _DetermineDefaultBuildRoot(options.sourceroot,
+                                                   build_config['internal'])
+    # We use a marker file in the buildroot to indicate the user has
+    # consented to using this directory.
+    if not os.path.exists(repository.GetTrybotMarkerPath(options.buildroot)):
+      _ConfirmBuildRoot(options.buildroot)
+
+  # Sanity check of buildroot- specifically that it's not pointing into the
+  # midst of an existing repo since git-repo doesn't support nesting.
+  if (not repository.IsARepoRoot(options.buildroot) and
+      git.FindRepoDir(options.buildroot)):
+    parser.error('Configured buildroot %s points into a repository checkout, '
+                 'rather than the root of it.  This is not supported.'
+                 % options.buildroot)
+
+  if not options.log_dir:
+    options.log_dir = os.path.join(options.buildroot, _DEFAULT_LOG_DIR)
+
+  log_file = None
+  if options.tee:
+    log_file = os.path.join(options.log_dir, _BUILDBOT_LOG_FILE)
+    osutils.SafeMakedirs(options.log_dir)
+    _BackupPreviousLog(log_file)
+
+  with cros_build_lib.ContextManagerStack() as stack:
+    # TODO(ferringb): update this once
+    # https://chromium-review.googlesource.com/25359
+    # is landed- it's sensitive to the manifest-versions cache path.
+    options.preserve_paths = set(['manifest-versions', '.cache',
+                                  'manifest-versions-internal'])
+    if log_file is not None:
+      # We don't want the critical section to try to clean up the tee process,
+      # so we run Tee (forked off) outside of it. This prevents a deadlock
+      # because the Tee process only exits when its pipe is closed, and the
+      # critical section accidentally holds on to that file handle.
+      stack.Add(tee.Tee, log_file)
+      options.preserve_paths.add(_DEFAULT_LOG_DIR)
+
+    critical_section = stack.Add(cleanup.EnforcedCleanupSection)
+    stack.Add(sudo.SudoKeepAlive)
+
+    if not options.resume:
+      # If we're in resume mode, use our parent's tempdir rather than
+      # nesting another layer.
+      stack.Add(osutils.TempDir, prefix='cbuildbot-tmp', set_global=True)
+      logging.debug('Cbuildbot tempdir is %r.', os.environ.get('TMP'))
+
+    if options.cgroups:
+      stack.Add(cgroups.SimpleContainChildren, 'cbuildbot')
+
+    # Mark everything between EnforcedCleanupSection and here as having to
+    # be rolled back via the contextmanager cleanup handlers.  This
+    # ensures that sudo bits cannot outlive cbuildbot, that anything
+    # cgroups would kill gets killed, etc.
+    stack.Add(critical_section.ForkWatchdog)
+
+    if not options.buildbot:
+      build_config = config_lib.OverrideConfigForTrybot(
+          build_config, options)
+
+    if options.mock_tree_status is not None:
+      stack.Add(mock.patch.object, tree_status, '_GetStatus',
+                return_value=options.mock_tree_status)
+
+    if options.mock_slave_status is not None:
+      with open(options.mock_slave_status, 'r') as f:
+        mock_statuses = pickle.load(f)
+        for key, value in mock_statuses.iteritems():
+          mock_statuses[key] = manifest_version.BuilderStatus(**value)
+      stack.Add(mock.patch.object,
+                completion_stages.MasterSlaveSyncCompletionStage,
+                '_FetchSlaveStatuses',
+                return_value=mock_statuses)
+
+    _SetupConnections(options, build_config)
+    retry_stats.SetupStats()
+
+    # For master-slave builds: Update slave's timeout using master's published
+    # deadline.
+    if options.buildbot and options.master_build_id is not None:
+      slave_timeout = None
+      if cidb.CIDBConnectionFactory.IsCIDBSetup():
+        cidb_handle = cidb.CIDBConnectionFactory.GetCIDBConnectionForBuilder()
+        if cidb_handle:
+          slave_timeout = cidb_handle.GetTimeToDeadline(options.master_build_id)
+
+      if slave_timeout is not None:
+        # Cut me some slack. We artificially add a small time here to the
+        # slave_timeout because '0' is handled specially, and because we don't
+        # want to timeout while trying to set things up.
+        slave_timeout = slave_timeout + 20
+        if options.timeout == 0 or slave_timeout < options.timeout:
+          logging.info('Updating slave build timeout to %d seconds enforced '
+                       'by the master', slave_timeout)
+          options.timeout = slave_timeout
+      else:
+        logging.warning('Could not get master deadline for master-slave build. '
+                        'Can not set slave timeout.')
+
+    if options.timeout > 0:
+      stack.Add(timeout_util.FatalTimeout, options.timeout)
+
+    _RunBuildStagesWrapper(options, site_config, build_config)
diff --git a/scripts/cbuildbot_unittest b/scripts/cbuildbot_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/cbuildbot_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/cbuildbot_unittest.py b/scripts/cbuildbot_unittest.py
new file mode 100644
index 0000000..4210154
--- /dev/null
+++ b/scripts/cbuildbot_unittest.py
@@ -0,0 +1,103 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the cbuildbot program"""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+from chromite.scripts import cbuildbot
+
+
+# pylint: disable=protected-access
+
+
+class IsDistributedBuilderTest(cros_test_lib.TestCase):
+  """Test for cbuildbot._IsDistributedBuilder."""
+
+  # pylint: disable=W0212
+  def testIsDistributedBuilder(self):
+    """Tests for _IsDistributedBuilder() under various configurations."""
+    parser = cbuildbot._CreateParser()
+    argv = ['x86-generic-paladin']
+    (options, _) = cbuildbot._ParseCommandLine(parser, argv)
+    options.buildbot = False
+
+    build_config = dict(pre_cq=False,
+                        manifest_version=False)
+    chrome_rev = None
+
+    def _TestConfig(expected):
+      self.assertEquals(expected,
+                        cbuildbot._IsDistributedBuilder(
+                            options=options,
+                            chrome_rev=chrome_rev,
+                            build_config=build_config))
+
+    # Default options.
+    _TestConfig(False)
+
+    build_config['pre_cq'] = True
+    _TestConfig(True)
+
+    build_config['pre_cq'] = False
+    build_config['manifest_version'] = True
+    # Not running in buildbot mode even though manifest_version=True.
+    _TestConfig(False)
+    options.buildbot = True
+    _TestConfig(True)
+
+    for chrome_rev in (constants.CHROME_REV_TOT,
+                       constants.CHROME_REV_LOCAL,
+                       constants.CHROME_REV_SPEC):
+      _TestConfig(False)
+
+
+class FetchInitialBootstrapConfigRepoTest(cros_test_lib.MockTempDirTestCase):
+  """Test for cbuildbot._FetchInitialBootstrapConfig."""
+
+
+  def setUp(self):
+    self.config_dir = os.path.join(self.tempdir, 'config')
+
+    self.PatchObject(constants, "SOURCE_ROOT", self.tempdir)
+    self.PatchObject(constants, "SITE_CONFIG_DIR", self.config_dir)
+    self.mockGit = self.PatchObject(git, "RunGit")
+
+  def testDoesClone(self):
+    # Test
+    cbuildbot._FetchInitialBootstrapConfigRepo('repo_url', None)
+    # Verify
+    self.mockGit.assert_called_once_with(
+        self.config_dir, ['clone', 'repo_url', self.config_dir])
+
+  def testDoesCloneBranch(self):
+    # Test
+    cbuildbot._FetchInitialBootstrapConfigRepo('repo_url', 'test_branch')
+    # Verify
+    self.assertEqual(
+        self.mockGit.mock_calls,
+        [mock.call(self.config_dir, ['clone', 'repo_url', self.config_dir]),
+         mock.call(self.config_dir, ['checkout', 'test_branch'])])
+
+  def testNoCloneForRepo(self):
+    # Setup
+    os.mkdir(os.path.join(self.tempdir, '.repo'))
+    # Test
+    cbuildbot._FetchInitialBootstrapConfigRepo('repo_url', None)
+    # Verify
+    self.assertEqual(self.mockGit.call_count, 0)
+
+  def testNoCloneIfExists(self):
+    # Setup
+    os.mkdir(os.path.join(self.tempdir, 'config'))
+    # Test
+    cbuildbot._FetchInitialBootstrapConfigRepo('repo_url', None)
+    # Verify
+    self.assertEqual(self.mockGit.call_count, 0)
diff --git a/scripts/cbuildbot_view_config.py b/scripts/cbuildbot_view_config.py
new file mode 100644
index 0000000..c4502d5
--- /dev/null
+++ b/scripts/cbuildbot_view_config.py
@@ -0,0 +1,37 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script for dumping build config contents."""
+
+from __future__ import print_function
+
+import sys
+
+from chromite.cbuildbot import chromeos_config
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+
+def GetParser():
+  """Creates the argparse parser."""
+  parser = commandline.ArgumentParser(description=__doc__)
+
+  parser.add_argument('-f', '--full', action='store_true', default=False,
+                      help='Dump fully expanded configs.')
+  parser.add_argument('-u', '--update_config', action='store_true',
+                      default=False, help='Update the site config json dump.')
+
+  return parser
+
+def main(argv):
+  parser = GetParser()
+  options = parser.parse_args(argv)
+
+  site_config = chromeos_config.GetConfig()
+
+  with (open(constants.CHROMEOS_CONFIG_FILE,
+             'w') if options.update_config else sys.stdout) as filehandle:
+    if options.full:
+      filehandle.write(site_config.DumpExpandedConfigToString())
+    else:
+      filehandle.write(site_config.SaveConfigToString())
diff --git a/scripts/check_gdata_token.py b/scripts/check_gdata_token.py
new file mode 100644
index 0000000..04845c6
--- /dev/null
+++ b/scripts/check_gdata_token.py
@@ -0,0 +1,233 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Validate or replace the standard gdata authorization token.
+
+Run outside of chroot to validate the gdata token file at ~/.gdata_token or
+update it if it has expired.
+To update the token file, there must be a valid credentials file at
+~/.gdata_cred.txt.
+
+If run inside chroot the updated token file is still valid but will not be
+preserved if chroot is deleted.
+"""
+
+from __future__ import print_function
+
+import filecmp
+import os
+import shutil
+
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib as build_lib
+from chromite.lib import operation
+
+
+MODULE = os.path.splitext(os.path.basename(__file__))[0]
+oper = operation.Operation(MODULE)
+
+TOKEN_FILE = os.path.expanduser('~/.gdata_token')
+CRED_FILE = os.path.expanduser('~/.gdata_cred.txt')
+
+
+def _ChrootPathToExternalPath(path):
+  """Translate |path| inside chroot to external path to same location."""
+  if path:
+    return os.path.join(constants.SOURCE_ROOT,
+                        constants.DEFAULT_CHROOT_DIR,
+                        path.lstrip('/'))
+
+  return None
+
+
+class OutsideChroot(object):
+  """Class for managing functionality when run outside chroot."""
+
+  def __init__(self, args):
+    self.args = args
+
+  def Run(self):
+    """Re-start |args| inside chroot and copy out auth file."""
+
+    # Note that enter_chroot (cros_sdk) will automatically copy both
+    # the token file and the cred file into the chroot, so no need
+    # to do that here.
+
+    # Rerun the same command that launched this run inside the chroot.
+    cmd = [MODULE] + self.args
+    result = build_lib.RunCommand(cmd, enter_chroot=True,
+                                  print_cmd=False, error_code_ok=True)
+    if result.returncode != 0:
+      oper.Die('Token validation failed, exit code was %r.' %
+               result.returncode)
+
+    # Copy the token file back from chroot if different.
+    chroot_token_file = _ChrootPathToExternalPath(TOKEN_FILE)
+    if not os.path.exists(chroot_token_file):
+      oper.Die('No token file generated inside chroot.')
+    elif (not os.path.exists(TOKEN_FILE) or not
+          filecmp.cmp(TOKEN_FILE, chroot_token_file)):
+      oper.Notice('Copying new token file from chroot to %r' % TOKEN_FILE)
+      shutil.copy2(chroot_token_file, TOKEN_FILE)
+    else:
+      oper.Notice('No change in token file.')
+
+
+class InsideChroot(object):
+  """Class for managing functionality when run inside chroot.
+
+  Note that some additional imports happen within code in this class
+  because those imports are only available inside the chroot.
+  """
+
+  def __init__(self):
+    self.creds = None     # gdata_lib.Creds object.
+    self.gd_client = None  # For interacting with Google Docs.
+    self.it_client = None  # For interacting with Issue Tracker.
+
+  def _LoadTokenFile(self):
+    """Load existing auth token file."""
+    if not os.path.exists(TOKEN_FILE):
+      oper.Warning('No current token file at %r.' % TOKEN_FILE)
+      return False
+
+    # Load token file, if it exists.
+    self.creds.LoadAuthToken(TOKEN_FILE)
+    return True
+
+  def _SaveTokenFile(self):
+    """Save to auth token file if anything changed."""
+    self.creds.StoreAuthTokenIfNeeded(TOKEN_FILE)
+
+  def _ValidateDocsToken(self):
+    """Validate the existing Docs token."""
+    import gdata.service
+
+    if not self.creds.docs_auth_token:
+      return False
+
+    oper.Notice('Attempting to log into Docs using auth token.')
+    self.gd_client.source = 'Package Status'
+    self.gd_client.SetClientLoginToken(self.creds.docs_auth_token)
+
+    try:
+      # Try to access generic spreadsheets feed, which will check access.
+      self.gd_client.GetSpreadsheetsFeed()
+
+      # Token accepted.  We're done here.
+      oper.Notice('Docs token validated.')
+      return True
+    except gdata.service.RequestError as ex:
+      reason = ex[0]['reason']
+      if reason == 'Token expired':
+        return False
+
+      raise
+
+  def _GenerateDocsToken(self):
+    """Generate a new Docs token from credentials."""
+    import gdata.service
+
+    oper.Warning('Docs token not valid.  Will try to generate a new one.')
+    self.creds.LoadCreds(CRED_FILE)
+    self.gd_client.email = self.creds.user
+    self.gd_client.password = self.creds.password
+
+    try:
+      self.gd_client.ProgrammaticLogin()
+      self.creds.SetDocsAuthToken(self.gd_client.GetClientLoginToken())
+
+      oper.Notice('New Docs token generated.')
+      return True
+    except gdata.service.BadAuthentication:
+      oper.Error('Credentials from %r not accepted.'
+                 '  Unable to generate new Docs token.' % CRED_FILE)
+      return False
+
+  def _ValidateTrackerToken(self):
+    """Validate the existing Tracker token."""
+    import gdata.gauth
+    import gdata.projecthosting.client
+
+    if not self.creds.tracker_auth_token:
+      return False
+
+    oper.Notice('Attempting to log into Tracker using auth token.')
+    self.it_client.source = 'Package Status'
+    self.it_client.auth_token = gdata.gauth.ClientLoginToken(
+        self.creds.tracker_auth_token)
+
+    try:
+      # Try to access Tracker Issue #1, which will check access.
+      query = gdata.projecthosting.client.Query(issue_id='1')
+      self.it_client.get_issues('chromium-os', query=query)
+
+      # Token accepted.  We're done here.
+      oper.Notice('Tracker token validated.')
+      return True
+    except gdata.client.Error:
+      # Exception is gdata.client.Unauthorized in the case of bad token, but
+      # I do not know what the error is for an expired token so I do not
+      # want to limit the catching here.  All the errors for gdata.client
+      # functionality extend gdata.client.Error (I do not see one that is
+      # obviously about an expired token).
+      return False
+
+  def _GenerateTrackerToken(self):
+    """Generate a new Tracker token from credentials."""
+    import gdata.client
+
+    oper.Warning('Tracker token not valid.  Will try to generate a new one.')
+    self.creds.LoadCreds(CRED_FILE)
+
+    try:
+      self.it_client.ClientLogin(self.creds.user, self.creds.password,
+                                 source='Package Status', service='code',
+                                 account_type='GOOGLE')
+      self.creds.SetTrackerAuthToken(self.it_client.auth_token.token_string)
+
+      oper.Notice('New Tracker token generated.')
+      return True
+    except gdata.client.BadAuthentication:
+      oper.Error('Credentials from %r not accepted.'
+                 '  Unable to generate new Tracker token.' % CRED_FILE)
+      return False
+
+  def Run(self):
+    """Validate existing auth token or generate new one from credentials."""
+    import chromite.lib.gdata_lib as gdata_lib
+    import gdata.spreadsheet.service
+
+    self.creds = gdata_lib.Creds()
+    self.gd_client = gdata.spreadsheet.service.SpreadsheetsService()
+    self.it_client = gdata.projecthosting.client.ProjectHostingClient()
+
+    self._LoadTokenFile()
+
+    if not self._ValidateTrackerToken():
+      if not self._GenerateTrackerToken():
+        oper.Die('Failed to validate or generate Tracker token.')
+
+    if not self._ValidateDocsToken():
+      if not self._GenerateDocsToken():
+        oper.Die('Failed to validate or generate Docs token.')
+
+    self._SaveTokenFile()
+
+
+def GetParser():
+  return commandline.ArgumentParser(description=__doc__)
+
+
+def main(argv):
+  """Main function."""
+  # No actual options used, but --help is still supported.
+  parser = GetParser()
+  _opts = parser.parse_args(argv)
+
+  if build_lib.IsInsideChroot():
+    InsideChroot().Run()
+  else:
+    OutsideChroot(argv).Run()
diff --git a/scripts/check_gdata_token_unittest b/scripts/check_gdata_token_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/check_gdata_token_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/check_gdata_token_unittest.py b/scripts/check_gdata_token_unittest.py
new file mode 100644
index 0000000..d3f18aa
--- /dev/null
+++ b/scripts/check_gdata_token_unittest.py
@@ -0,0 +1,529 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for check_gdata_token.py."""
+
+from __future__ import print_function
+
+import filecmp
+import mox
+import os
+import shutil
+
+from chromite.lib import cros_build_lib as build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import gdata_lib
+from chromite.scripts import check_gdata_token as cgt
+
+import gdata.service
+from gdata.projecthosting import client as gdata_ph_client
+from gdata.spreadsheet import service as gdata_ss_service
+
+
+# pylint: disable=protected-access
+
+
+class MainTest(cros_test_lib.MoxOutputTestCase):
+  """Test argument handling at the main method level."""
+
+  def testHelp(self):
+    """Test that --help is functioning"""
+    argv = ['--help']
+
+    with self.OutputCapturer() as output:
+      # Running with --help should exit with code==0.
+      self.AssertFuncSystemExitZero(cgt.main, argv)
+
+    # Verify that a message beginning with "usage: " was printed.
+    stdout = output.GetStdout()
+    self.assertTrue(stdout.startswith('usage: '))
+
+  def testMainOutsideChroot(self):
+    """Test flow outside chroot"""
+    argv = []
+    mocked_outsidechroot = self.mox.CreateMock(cgt.OutsideChroot)
+
+    # Create replay script.
+    self.mox.StubOutWithMock(build_lib, 'IsInsideChroot')
+    self.mox.StubOutWithMock(cgt.OutsideChroot, '__new__')
+
+    build_lib.IsInsideChroot().AndReturn(False)
+    cgt.OutsideChroot.__new__(cgt.OutsideChroot, argv).AndReturn(
+        mocked_outsidechroot)
+    mocked_outsidechroot.Run()
+    self.mox.ReplayAll()
+
+    # Run test verification.
+    with self.OutputCapturer():
+      cgt.main(argv)
+    self.mox.VerifyAll()
+
+  def testMainInsideChroot(self):
+    """Test flow inside chroot"""
+    argv = []
+    mocked_insidechroot = self.mox.CreateMock(cgt.InsideChroot)
+
+    # Create replay script.
+    self.mox.StubOutWithMock(build_lib, 'IsInsideChroot')
+    self.mox.StubOutWithMock(cgt.InsideChroot, '__new__')
+
+    build_lib.IsInsideChroot().AndReturn(True)
+    cgt.InsideChroot.__new__(cgt.InsideChroot).AndReturn(mocked_insidechroot)
+    mocked_insidechroot.Run()
+    self.mox.ReplayAll()
+
+    # Run test verification.
+    with self.OutputCapturer():
+      cgt.main(argv)
+    self.mox.VerifyAll()
+
+
+class OutsideChrootTest(cros_test_lib.MoxOutputTestCase):
+  """Test flow when run outside chroot."""
+
+  def _MockOutsideChroot(self, *args):
+    """Prepare mocked OutsideChroot object with |args|."""
+    mocked_outsidechroot = self.mox.CreateMock(cgt.OutsideChroot)
+
+    mocked_outsidechroot.args = list(args) if args else []
+
+    return mocked_outsidechroot
+
+  def testOutsideChrootRestartFail(self):
+    mocked_outsidechroot = self._MockOutsideChroot()
+
+    self.mox.StubOutWithMock(build_lib, 'RunCommand')
+    cmd = ['check_gdata_token']
+    run_result = cros_test_lib.EasyAttr(returncode=1)
+
+    # Create replay script.
+    build_lib.RunCommand(cmd, enter_chroot=True,
+                         print_cmd=False,
+                         error_code_ok=True).AndReturn(run_result)
+    self.mox.ReplayAll()
+
+    # Run test verification.
+    with self.OutputCapturer():
+      # Test should exit with failure.
+      self.AssertFuncSystemExitNonZero(cgt.OutsideChroot.Run,
+                                       mocked_outsidechroot)
+
+    self.mox.VerifyAll()
+
+    self.AssertOutputContainsError()
+
+  def testOutsideChrootNoTokenFile(self):
+    mocked_outsidechroot = self._MockOutsideChroot('foo')
+
+    self.mox.StubOutWithMock(cgt, '_ChrootPathToExternalPath')
+    self.mox.StubOutWithMock(os.path, 'exists')
+    self.mox.StubOutWithMock(build_lib, 'RunCommand')
+    cmd = ['check_gdata_token', 'foo']
+    run_result = cros_test_lib.EasyAttr(returncode=0)
+
+    # Create replay script.
+    build_lib.RunCommand(cmd, enter_chroot=True,
+                         print_cmd=False,
+                         error_code_ok=True).AndReturn(run_result)
+    cgt._ChrootPathToExternalPath(cgt.TOKEN_FILE).AndReturn('chr-tok')
+    os.path.exists('chr-tok').AndReturn(False)
+    self.mox.ReplayAll()
+
+    # Run test verification.
+    with self.OutputCapturer():
+      # Test should exit with failure.
+      self.AssertFuncSystemExitNonZero(cgt.OutsideChroot.Run,
+                                       mocked_outsidechroot)
+
+    self.mox.VerifyAll()
+
+    self.AssertOutputContainsError()
+
+  def testOutsideChrootNewTokenFile(self):
+    mocked_outsidechroot = self._MockOutsideChroot('foo')
+
+    self.mox.StubOutWithMock(cgt, '_ChrootPathToExternalPath')
+    self.mox.StubOutWithMock(os.path, 'exists')
+    self.mox.StubOutWithMock(shutil, 'copy2')
+    self.mox.StubOutWithMock(build_lib, 'RunCommand')
+    cmd = ['check_gdata_token', 'foo']
+    run_result = cros_test_lib.EasyAttr(returncode=0)
+
+    # Create replay script.
+    build_lib.RunCommand(cmd, enter_chroot=True,
+                         print_cmd=False,
+                         error_code_ok=True).AndReturn(run_result)
+    cgt._ChrootPathToExternalPath(cgt.TOKEN_FILE).AndReturn('chr-tok')
+    os.path.exists('chr-tok').AndReturn(True)
+    os.path.exists(cgt.TOKEN_FILE).AndReturn(False)
+    shutil.copy2('chr-tok', cgt.TOKEN_FILE)
+    self.mox.ReplayAll()
+
+    # Run test verification.
+    with self.OutputCapturer():
+      cgt.OutsideChroot.Run(mocked_outsidechroot)
+    self.mox.VerifyAll()
+
+  def testOutsideChrootDifferentTokenFile(self):
+    mocked_outsidechroot = self._MockOutsideChroot('foo')
+
+    self.mox.StubOutWithMock(cgt, '_ChrootPathToExternalPath')
+    self.mox.StubOutWithMock(os.path, 'exists')
+    self.mox.StubOutWithMock(shutil, 'copy2')
+    self.mox.StubOutWithMock(filecmp, 'cmp')
+    self.mox.StubOutWithMock(build_lib, 'RunCommand')
+    cmd = ['check_gdata_token', 'foo']
+    run_result = cros_test_lib.EasyAttr(returncode=0)
+
+    # Create replay script.
+    build_lib.RunCommand(cmd, enter_chroot=True,
+                         print_cmd=False,
+                         error_code_ok=True).AndReturn(run_result)
+    cgt._ChrootPathToExternalPath(cgt.TOKEN_FILE).AndReturn('chr-tok')
+    os.path.exists('chr-tok').AndReturn(True)
+    os.path.exists(cgt.TOKEN_FILE).AndReturn(True)
+    filecmp.cmp(cgt.TOKEN_FILE, 'chr-tok').AndReturn(False)
+    shutil.copy2('chr-tok', cgt.TOKEN_FILE)
+    self.mox.ReplayAll()
+
+    # Run test verification.
+    with self.OutputCapturer():
+      cgt.OutsideChroot.Run(mocked_outsidechroot)
+    self.mox.VerifyAll()
+
+  def testOutsideChrootNoChangeInTokenFile(self):
+    mocked_outsidechroot = self._MockOutsideChroot('foo')
+
+    self.mox.StubOutWithMock(cgt, '_ChrootPathToExternalPath')
+    self.mox.StubOutWithMock(os.path, 'exists')
+    self.mox.StubOutWithMock(filecmp, 'cmp')
+    self.mox.StubOutWithMock(build_lib, 'RunCommand')
+    cmd = ['check_gdata_token', 'foo']
+    run_result = cros_test_lib.EasyAttr(returncode=0)
+
+    # Create replay script.
+    build_lib.RunCommand(cmd, enter_chroot=True,
+                         print_cmd=False,
+                         error_code_ok=True).AndReturn(run_result)
+    cgt._ChrootPathToExternalPath(cgt.TOKEN_FILE).AndReturn('chr-tok')
+    os.path.exists('chr-tok').AndReturn(True)
+    os.path.exists(cgt.TOKEN_FILE).AndReturn(True)
+    filecmp.cmp(cgt.TOKEN_FILE, 'chr-tok').AndReturn(True)
+    self.mox.ReplayAll()
+
+    # Run test verification.
+    with self.OutputCapturer():
+      cgt.OutsideChroot.Run(mocked_outsidechroot)
+    self.mox.VerifyAll()
+
+
+class InsideChrootTest(cros_test_lib.MoxOutputTestCase):
+  """Test flow when run inside chroot."""
+
+  def _MockInsideChroot(self):
+    """Prepare mocked InsideChroot object."""
+    mic = self.mox.CreateMock(cgt.InsideChroot)
+
+    mic.creds = self.mox.CreateMock(gdata_lib.Creds)
+    mic.gd_client = self.mox.CreateMock(gdata_ss_service.SpreadsheetsService)
+    mic.it_client = self.mox.CreateMock(gdata_ph_client.ProjectHostingClient)
+
+    return mic
+
+  def testLoadTokenFile(self):
+    mocked_insidechroot = self._MockInsideChroot()
+
+    self.mox.StubOutWithMock(os.path, 'exists')
+
+    # Create replay script
+    os.path.exists(cgt.TOKEN_FILE).AndReturn(True)
+    mocked_insidechroot.creds.LoadAuthToken(cgt.TOKEN_FILE)
+    self.mox.ReplayAll()
+
+    # Run test verification.
+    with self.OutputCapturer():
+      result = cgt.InsideChroot._LoadTokenFile(mocked_insidechroot)
+    self.mox.VerifyAll()
+    self.assertTrue(result)
+
+  def testSaveTokenFile(self):
+    mocked_insidechroot = self._MockInsideChroot()
+
+    # Create replay script.
+    mocked_insidechroot.creds.StoreAuthTokenIfNeeded(cgt.TOKEN_FILE)
+    self.mox.ReplayAll()
+
+    # Run test verification.
+    with self.OutputCapturer():
+      cgt.InsideChroot._SaveTokenFile(mocked_insidechroot)
+    self.mox.VerifyAll()
+
+  def testLoadTokenFileMissing(self):
+    mocked_insidechroot = self._MockInsideChroot()
+
+    self.mox.StubOutWithMock(os.path, 'exists')
+
+    # Create replay script
+    os.path.exists(cgt.TOKEN_FILE).AndReturn(False)
+    self.mox.ReplayAll()
+
+    # Run test verification.
+    with self.OutputCapturer():
+      result = cgt.InsideChroot._LoadTokenFile(mocked_insidechroot)
+    self.mox.VerifyAll()
+    self.assertFalse(result)
+
+  def testInsideChrootValidateOK(self):
+    mocked_insidechroot = self._MockInsideChroot()
+
+    # Create replay script.
+    mocked_insidechroot._LoadTokenFile()
+    mocked_insidechroot._ValidateTrackerToken().AndReturn(True)
+    mocked_insidechroot._ValidateDocsToken().AndReturn(True)
+    mocked_insidechroot._SaveTokenFile()
+    self.mox.ReplayAll()
+
+    # Run test verification.
+    with self.OutputCapturer():
+      cgt.InsideChroot.Run(mocked_insidechroot)
+    self.mox.VerifyAll()
+
+  def testInsideChrootTrackerValidateFailGenerateOK(self):
+    mocked_insidechroot = self._MockInsideChroot()
+
+    # Create replay script.
+    mocked_insidechroot._LoadTokenFile()
+    mocked_insidechroot._ValidateTrackerToken().AndReturn(True)
+    mocked_insidechroot._ValidateDocsToken().AndReturn(False)
+    mocked_insidechroot._GenerateDocsToken().AndReturn(True)
+    mocked_insidechroot._SaveTokenFile()
+    self.mox.ReplayAll()
+
+    # Run test verification.
+    with self.OutputCapturer():
+      cgt.InsideChroot.Run(mocked_insidechroot)
+    self.mox.VerifyAll()
+
+  def testInsideChrootDocsValidateFailGenerateOK(self):
+    mocked_insidechroot = self._MockInsideChroot()
+
+    # Create replay script.
+    mocked_insidechroot._LoadTokenFile()
+    mocked_insidechroot._ValidateTrackerToken().AndReturn(False)
+    mocked_insidechroot._GenerateTrackerToken().AndReturn(True)
+    mocked_insidechroot._ValidateDocsToken().AndReturn(True)
+    mocked_insidechroot._SaveTokenFile()
+    self.mox.ReplayAll()
+
+    # Run test verification.
+    with self.OutputCapturer():
+      cgt.InsideChroot.Run(mocked_insidechroot)
+    self.mox.VerifyAll()
+
+  def testInsideChrootTrackerValidateFailGenerateFail(self):
+    mocked_insidechroot = self._MockInsideChroot()
+
+    # Create replay script.
+    mocked_insidechroot._LoadTokenFile()
+    mocked_insidechroot._ValidateTrackerToken().AndReturn(False)
+    mocked_insidechroot._GenerateTrackerToken().AndReturn(False)
+    self.mox.ReplayAll()
+
+    # Run test verification.
+    with self.OutputCapturer():
+      # Test should exit with failure.
+      self.AssertFuncSystemExitNonZero(cgt.InsideChroot.Run,
+                                       mocked_insidechroot)
+    self.mox.VerifyAll()
+
+    self.AssertOutputContainsError()
+
  def testInsideChrootDocsValidateFailGenerateFail(self):
    """Run() exits non-zero when the Docs token cannot be regenerated."""
    mocked_insidechroot = self._MockInsideChroot()

    # Create replay script.  Tracker token is fine; Docs validation and
    # regeneration both fail, so Run() must abort without saving the file.
    mocked_insidechroot._LoadTokenFile()
    mocked_insidechroot._ValidateTrackerToken().AndReturn(True)
    mocked_insidechroot._ValidateDocsToken().AndReturn(False)
    mocked_insidechroot._GenerateDocsToken().AndReturn(False)
    self.mox.ReplayAll()

    # Run test verification.
    with self.OutputCapturer():
      # Test should exit with failure.
      self.AssertFuncSystemExitNonZero(cgt.InsideChroot.Run,
                                       mocked_insidechroot)
    self.mox.VerifyAll()

    self.AssertOutputContainsError()
+
  def testGenerateTrackerTokenOK(self):
    """_GenerateTrackerToken returns True when ClientLogin succeeds."""
    mocked_insidechroot = self._MockInsideChroot()

    # Create replay script.
    mocked_creds = mocked_insidechroot.creds
    mocked_itclient = mocked_insidechroot.it_client
    mocked_creds.user = 'joe@chromium.org'
    mocked_creds.password = 'shhh'
    auth_token = 'SomeToken'
    # After ClientLogin, the token is read off it_client.auth_token.
    mocked_itclient.auth_token = cros_test_lib.EasyAttr(token_string=auth_token)

    mocked_creds.LoadCreds(cgt.CRED_FILE)
    mocked_itclient.ClientLogin(mocked_creds.user, mocked_creds.password,
                                source='Package Status', service='code',
                                account_type='GOOGLE')
    # The freshly fetched token must be stored back on the creds object.
    mocked_creds.SetTrackerAuthToken(auth_token)
    self.mox.ReplayAll()

    # Run test verification.
    with self.OutputCapturer():
      result = cgt.InsideChroot._GenerateTrackerToken(mocked_insidechroot)
      self.assertTrue(result, '_GenerateTrackerToken should have passed')
    self.mox.VerifyAll()
+
  def testGenerateTrackerTokenFail(self):
    """_GenerateTrackerToken returns False when ClientLogin raises."""
    mocked_insidechroot = self._MockInsideChroot()

    # Create replay script.
    mocked_creds = mocked_insidechroot.creds
    mocked_itclient = mocked_insidechroot.it_client
    mocked_creds.user = 'joe@chromium.org'
    mocked_creds.password = 'shhh'

    mocked_creds.LoadCreds(cgt.CRED_FILE)
    # Simulate a bad-credentials failure from the Tracker login endpoint.
    mocked_itclient.ClientLogin(mocked_creds.user, mocked_creds.password,
                                source='Package Status', service='code',
                                account_type='GOOGLE').AndRaise(
                                    gdata.client.BadAuthentication())
    self.mox.ReplayAll()

    # Run test verification.
    with self.OutputCapturer():
      result = cgt.InsideChroot._GenerateTrackerToken(mocked_insidechroot)
      self.assertFalse(result, '_GenerateTrackerToken should have failed')
    self.mox.VerifyAll()

    self.AssertOutputContainsError()
+
  def testValidateTrackerTokenOK(self):
    """_ValidateTrackerToken returns True when a probe query succeeds."""
    mocked_insidechroot = self._MockInsideChroot()
    mocked_itclient = mocked_insidechroot.it_client

    # Stub token construction so we can verify the stored token is used.
    self.mox.StubOutWithMock(gdata.gauth.ClientLoginToken, '__new__')

    # Create replay script.
    auth_token = 'SomeToken'
    mocked_insidechroot.creds.tracker_auth_token = auth_token

    gdata.gauth.ClientLoginToken.__new__(gdata.gauth.ClientLoginToken,
                                         auth_token).AndReturn('TokenObj')
    # A successful (non-raising) query means the token is still valid.
    mocked_itclient.get_issues('chromium-os', query=mox.IgnoreArg())
    self.mox.ReplayAll()

    # Run test verification.
    with self.OutputCapturer():
      result = cgt.InsideChroot._ValidateTrackerToken(mocked_insidechroot)
    self.mox.VerifyAll()
    self.assertTrue(result, '_ValidateTrackerToken should have passed')
+
  def testValidateTrackerTokenFail(self):
    """_ValidateTrackerToken returns False when the probe query raises."""
    mocked_insidechroot = self._MockInsideChroot()
    mocked_itclient = mocked_insidechroot.it_client

    # Stub token construction so we can verify the stored token is used.
    self.mox.StubOutWithMock(gdata.gauth.ClientLoginToken, '__new__')

    # Create replay script.
    auth_token = 'SomeToken'
    mocked_insidechroot.creds.tracker_auth_token = auth_token

    gdata.gauth.ClientLoginToken.__new__(gdata.gauth.ClientLoginToken,
                                         auth_token).AndReturn('TokenObj')
    # Any gdata error from the probe query marks the token as invalid.
    mocked_itclient.get_issues('chromium-os', query=mox.IgnoreArg()).AndRaise(
        gdata.client.Error())
    self.mox.ReplayAll()

    # Run test verification.
    # NOTE(review): unlike the other failure tests, this one does not call
    # AssertOutputContainsError() -- confirm whether that is intentional.
    with self.OutputCapturer():
      result = cgt.InsideChroot._ValidateTrackerToken(mocked_insidechroot)
      self.assertFalse(result, '_ValidateTrackerToken should have failed')
    self.mox.VerifyAll()
+
  def testGenerateDocsTokenOK(self):
    """_GenerateDocsToken returns True when ProgrammaticLogin succeeds."""
    mocked_insidechroot = self._MockInsideChroot()

    # Create replay script.
    mocked_creds = mocked_insidechroot.creds
    mocked_gdclient = mocked_insidechroot.gd_client
    mocked_creds.user = 'joe@chromium.org'
    mocked_creds.password = 'shhh'
    auth_token = 'SomeToken'

    mocked_creds.LoadCreds(cgt.CRED_FILE)
    mocked_gdclient.ProgrammaticLogin()
    mocked_gdclient.GetClientLoginToken().AndReturn(auth_token)
    # The freshly fetched token must be stored back on the creds object.
    mocked_creds.SetDocsAuthToken(auth_token)
    self.mox.ReplayAll()

    # Run test verification.
    with self.OutputCapturer():
      result = cgt.InsideChroot._GenerateDocsToken(mocked_insidechroot)
      self.assertTrue(result, '_GenerateDocsToken should have passed')
    self.mox.VerifyAll()
+
+  def testGenerateDocsTokenFail(self):
+    mocked_insidechroot = self._MockInsideChroot()
+
+    # Create replay script.
+    mocked_creds = mocked_insidechroot.creds
+    mocked_gdclient = mocked_insidechroot.gd_client
+    mocked_creds.user = 'joe@chromium.org'
+    mocked_creds.password = 'shhh'
+
+    mocked_creds.LoadCreds(cgt.CRED_FILE)
+    mocked_gdclient.ProgrammaticLogin().AndRaise(
+        gdata.service.BadAuthentication())
+    self.mox.ReplayAll()
+
+    # Run test verification.
+    with self.OutputCapturer():
+      result = cgt.InsideChroot._GenerateDocsToken(mocked_insidechroot)
+      self.assertFalse(result, '_GenerateTrackerToken should have failed')
+    self.mox.VerifyAll()
+
+    self.AssertOutputContainsError()
+
  def testValidateDocsTokenOK(self):
    """_ValidateDocsToken returns True when the feed fetch succeeds."""
    mocked_insidechroot = self._MockInsideChroot()

    # Create replay script.
    auth_token = 'SomeToken'
    mocked_insidechroot.creds.docs_auth_token = auth_token

    mocked_insidechroot.gd_client.SetClientLoginToken(auth_token)
    # A successful (non-raising) feed fetch means the token is still valid.
    mocked_insidechroot.gd_client.GetSpreadsheetsFeed()
    self.mox.ReplayAll()

    # Run test verification.
    with self.OutputCapturer():
      result = cgt.InsideChroot._ValidateDocsToken(mocked_insidechroot)
      self.assertTrue(result, '_ValidateDocsToken should have passed')
    self.mox.VerifyAll()
+
  def testValidateDocsTokenFail(self):
    """_ValidateDocsToken returns False when the token has expired."""
    mocked_insidechroot = self._MockInsideChroot()

    # Create replay script.
    auth_token = 'SomeToken'
    mocked_insidechroot.creds.docs_auth_token = auth_token

    mocked_insidechroot.gd_client.SetClientLoginToken(auth_token)
    # Simulate the server rejecting the stale token on the feed fetch.
    expired_error = gdata.service.RequestError({'reason': 'Token expired'})
    mocked_insidechroot.gd_client.GetSpreadsheetsFeed().AndRaise(expired_error)
    self.mox.ReplayAll()

    # Run test verification.
    with self.OutputCapturer():
      result = cgt.InsideChroot._ValidateDocsToken(mocked_insidechroot)
      self.assertFalse(result, '_ValidateDocsToken should have failed')
    self.mox.VerifyAll()
diff --git a/scripts/chrome_update_extension_cache.py b/scripts/chrome_update_extension_cache.py
new file mode 100644
index 0000000..02f1048
--- /dev/null
+++ b/scripts/chrome_update_extension_cache.py
@@ -0,0 +1,191 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate and upload tarballs for default apps cache.
+
+Run inside the 'files' dir containing 'external_extensions.json' file:
+$ chromite/bin/chrome_update_extension_cache --create --upload \\
+    chromeos-default-apps-1.0.0
+
+Always increment the version when you update an existing package.
+If no new files are added, increment the third version number.
+  e.g. 1.0.0 -> 1.0.1
+If you change list of default extensions, increment the second version number.
+  e.g. 1.0.0 -> 1.1.0
+
+Also you need to regenerate the Manifest with the new tarball digest.
+Run inside the chroot:
+$ ebuild chromeos-default-apps-1.0.0.ebuild manifest --force
+"""
+
+from __future__ import print_function
+
+import json
+import os
+import urllib
+import xml.dom.minidom
+
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import osutils
+
+
# Google Storage location where the default-apps cache tarballs are uploaded.
UPLOAD_URL_BASE = 'gs://chromeos-localmirror-private/distfiles'
+
+
def DownloadCrx(ext, extension, crxdir):
  """Download .crx file from WebStore and update entry.

  Queries the WebStore update endpoint for |ext|, then downloads the
  advertised .crx into |crxdir|/extensions/<ext>-<version>.crx.

  Args:
    ext: Extension ID (the WebStore item identifier).
    extension: Dict describing the extension; must contain 'name' and
      'external_update_url' entries.
    crxdir: Directory whose 'extensions' subdirectory receives the file.

  Returns:
    True if the extension was downloaded, False on any failure.
  """
  logging.info('Extension "%s"(%s)...', extension['name'], ext)

  # The 'x' query argument is itself URL-encoded, hence the escaped
  # '%%3D' ('=') and '%%26' ('&') in the format string.
  update_url = ('%s?x=prodversion%%3D35.1.1.1%%26id%%3D%s%%26uc' %
                (extension['external_update_url'], ext))
  response = urllib.urlopen(update_url)
  try:
    if response.getcode() != 200:
      logging.error('Cannot get update response, URL: %s, error: %d',
                    update_url, response.getcode())
      return False

    dom = xml.dom.minidom.parse(response)
  finally:
    # Close the connection even on early return (was previously leaked).
    response.close()

  status = dom.getElementsByTagName('app')[0].getAttribute('status')
  if status != 'ok':
    logging.error('Cannot fetch extension, status: %s', status)
    return False

  # The <updatecheck> element carries the actual download URL and version.
  node = dom.getElementsByTagName('updatecheck')[0]
  url = node.getAttribute('codebase')
  version = node.getAttribute('version')
  filename = '%s-%s.crx' % (ext, version)
  response = urllib.urlopen(url)
  try:
    if response.getcode() != 200:
      logging.error('Cannot download extension, URL: %s, error: %d', url,
                    response.getcode())
      return False

    osutils.WriteFile(os.path.join(crxdir, 'extensions', filename),
                      response.read())
  finally:
    response.close()

  # Keep external_update_url in json file, ExternalCache will take care about
  # replacing it with proper external_crx path and version.

  logging.info('Downloaded, current version %s', version)
  return True
+
+
def CreateValidationFiles(validationdir, crxdir, identifier):
  """Create validationfiles for all extensions in |crxdir|.

  Walks |crxdir|/extensions, hashes every file found there with sha256sum,
  and writes the hash list to |validationdir|/<identifier>.validation.
  """
  # Collect every cached extension file with a path relative to |crxdir|
  # (JSON descriptors live in a sibling tree and are not picked up here).
  verified_files = []
  for directory, _, filenames in os.walk(os.path.join(crxdir, 'extensions')):
    # Strip the leading '<crxdir>/' so the hashes are verifiable from crxdir.
    relative_dir = directory[len(crxdir) + 1:]
    verified_files.extend(
        os.path.join(relative_dir, name) for name in filenames)

  validation_file = os.path.join(validationdir, '%s.validation' % identifier)

  osutils.SafeMakedirs(validationdir)
  cros_build_lib.RunCommand(['sha256sum'] + verified_files,
                            log_stdout_to_file=validation_file,
                            cwd=crxdir, print_cmd=False)
  logging.info('Hashes created.')
+
+
def _WriteJson(json_file, data):
  """Serialize |data| to |json_file| in a stable, human-readable format."""
  # Use a context manager so the handle is closed (was previously leaked).
  with open(json_file, 'w') as f:
    json.dump(data, f, sort_keys=True, indent=2, separators=(',', ': '))


def CreateCacheTarball(extensions, outputdir, identifier, tarball):
  """Cache |extensions| in |outputdir| and pack them in |tarball|.

  Args:
    extensions: Dict mapping extension IDs to their descriptor dicts (as
      parsed from external_extensions.json). Modified in place: the
      'cache_crx' and 'managed_users' control fields are stripped.
    outputdir: Scratch directory in which the cache layout is assembled.
    identifier: Package name/version, used for the validation file name.
    tarball: Path of the tarball to create.

  Raises:
    SystemExit: via cros_build_lib.Die() on download failure or an
      unknown 'cache_crx' value.
  """
  crxdir = os.path.join(outputdir, 'crx')
  jsondir = os.path.join(outputdir, 'json')
  validationdir = os.path.join(outputdir, 'validation')

  osutils.SafeMakedirs(os.path.join(crxdir, 'extensions', 'managed_users'))
  osutils.SafeMakedirs(os.path.join(jsondir, 'extensions', 'managed_users'))
  was_errors = False
  for ext in extensions:
    managed_users = extensions[ext].get('managed_users', 'no')
    cache_crx = extensions[ext].get('cache_crx', 'yes')

    # Remove fields that shouldn't be in the output file.
    for key in ('cache_crx', 'managed_users'):
      extensions[ext].pop(key, None)

    if cache_crx == 'yes':
      # Keep going on failure so all broken extensions are reported at once.
      if not DownloadCrx(ext, extensions[ext], crxdir):
        was_errors = True
    elif cache_crx == 'no':
      pass
    else:
      cros_build_lib.Die('Unknown value for "cache_crx" %s for %s',
                         cache_crx, ext)

    if managed_users == 'yes':
      _WriteJson(os.path.join(jsondir,
                              'extensions/managed_users/%s.json' % ext),
                 extensions[ext])

    if managed_users != 'only':
      _WriteJson(os.path.join(jsondir, 'extensions/%s.json' % ext),
                 extensions[ext])

  if was_errors:
    cros_build_lib.Die('FAIL to download some extensions')

  CreateValidationFiles(validationdir, crxdir, identifier)
  cros_build_lib.CreateTarball(tarball, outputdir)
  logging.info('Tarball created %s', tarball)
+
+
def main(argv):
  """Parse arguments, then create and/or upload the cache tarball."""
  parser = commandline.ArgumentParser(
      '%%(prog)s [options] <version>\n\n%s' % __doc__, caching=True)
  parser.add_argument('version', nargs=1)
  parser.add_argument('--path', default=None, type='path',
                      help='Path of files dir with external_extensions.json')
  parser.add_argument('--create', default=False, action='store_true',
                      help='Create cache tarball with specified name')
  parser.add_argument('--upload', default=False, action='store_true',
                      help='Upload cache tarball with specified name')
  options = parser.parse_args(argv)

  if options.path:
    os.chdir(options.path)

  if not (options.create or options.upload):
    cros_build_lib.Die('Need at least --create or --upload args')

  if not os.path.exists('external_extensions.json'):
    cros_build_lib.Die('No external_extensions.json in %s. Did you forget the '
                       '--path option?', os.getcwd())

  identifier = options.version[0]
  tarball = '%s.tar.xz' % identifier
  if options.create:
    # Use a context manager so the handle is closed (was previously leaked).
    with open('external_extensions.json', 'r') as f:
      extensions = json.load(f)
    with osutils.TempDir() as tempdir:
      CreateCacheTarball(extensions, tempdir, identifier,
                         os.path.abspath(tarball))

  if options.upload:
    ctx = gs.GSContext()
    url = os.path.join(UPLOAD_URL_BASE, tarball)
    # Refuse to overwrite: ebuild Manifests pin the digest of the uploaded
    # tarball, so replacing it would break existing builds.
    if ctx.Exists(url):
      cros_build_lib.Die('This version already exists on Google Storage (%s)!\n'
                         'NEVER REWRITE EXISTING FILE. IT WILL BREAK CHROME OS '
                         'BUILD!!!', url)
    ctx.Copy(os.path.abspath(tarball), url, acl='project-private')
    logging.info('Tarball uploaded %s', url)
    osutils.SafeUnlink(os.path.abspath(tarball))
diff --git a/scripts/cidb_admin.py b/scripts/cidb_admin.py
new file mode 100644
index 0000000..7bf6e57
--- /dev/null
+++ b/scripts/cidb_admin.py
@@ -0,0 +1,83 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script for administering the Continuous Integration Database."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import cidb
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+
+
# Subcommand names accepted on the command line.
MIGRATE = 'migrate'  # Apply schema migrations to the database.
WIPE = 'wipe'        # Delete the entire contents of the database.

COMMANDS = [MIGRATE, WIPE]
+
+
def GetParser():
  """Creates the argparse parser."""
  parser = commandline.ArgumentParser(description=__doc__)

  # Put options that control the mode of script into mutually exclusive group.

  parser.add_argument(
      'command', action='store', choices=COMMANDS,
      help='The action to execute.')
  parser.add_argument(
      'cred_dir', action='store', metavar='CIDB_CREDENTIALS_DIR',
      help='Database credentials directory with certificates '
           'and other connection information.')
  parser.add_argument(
      '--migrate-version', action='store', default=None,
      help='Maximum schema version to migrate to.')

  return parser
+
+
def main(argv):
  """Confirm interactively, then migrate or wipe the CIDB database."""
  parser = GetParser()
  options = parser.parse_args(argv)

  # Surface the SQL statements sqlalchemy runs so the operator can see
  # exactly what is being executed against the database.
  logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)

  # Each destructive command requires the operator to type back a distinct
  # confirmation phrase before anything is touched.
  if options.command == MIGRATE:
    positive_confirmation = 'please modify my database'
    warn = ('This option will apply schema changes to your existing database. '
            'You should not run this against the production database unless '
            'your changes are thoroughly tested, and those tests included '
            'in cidb_integration_test.py (including tests that old data is '
            'sanely migrated forward). Database corruption could otherwise '
            'result. Are you sure you want to proceed? If so, type "%s" '
            'now.\n') % positive_confirmation
  elif options.command == WIPE:
    positive_confirmation = 'please delete my data'
    warn = ('This operation will wipe (i.e. DELETE!) the entire contents of '
            'the database pointed at by %s. Are you sure you want to proceed? '
            'If so, type "%s" now.\n') % (
                os.path.join(options.cred_dir, 'host.txt'),
                positive_confirmation)
  else:
    # Unreachable in practice: argparse 'choices' already rejects anything
    # outside COMMANDS.
    print('No command or unsupported command. Exiting.')
    exit()

  print(warn)
  conf_string = cros_build_lib.GetInput('(%s)?: ' % positive_confirmation)
  if conf_string != positive_confirmation:
    # NOTE(review): bare exit() relies on the site builtin; sys.exit() would
    # be more conventional -- confirm before changing.
    print('You changed your mind. Aborting.')
    exit()

  if options.command == MIGRATE:
    print('OK, applying migrations...')
    db = cidb.CIDBConnection(options.cred_dir)
    db.ApplySchemaMigrations(maxVersion=options.migrate_version)
  elif options.command == WIPE:
    print('OK, wiping database...')
    db = cidb.CIDBConnection(options.cred_dir)
    db.DropDatabase()
    print('Done.')
+
+
diff --git a/scripts/cros.py b/scripts/cros.py
new file mode 100644
index 0000000..5ecb413
--- /dev/null
+++ b/scripts/cros.py
@@ -0,0 +1,96 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This implements the entry point for the `cros` CLI toolset.
+
+This script is invoked by chromite/bin/cros, which sets up the
+proper execution environment and calls this module's main() function.
+
+In turn, this script looks for a subcommand based on how it was invoked. For
+example, `cros lint` will use the cli/cros/cros_lint.py subcommand.
+
+See cli/ for actual command implementations.
+"""
+
+from __future__ import print_function
+
+import sys
+
+from chromite.cli import command
+from chromite.lib import commandline
+from chromite.lib import cros_logging as logging
+from chromite.lib import stats
+
+
def GetOptions(my_commands):
  """Returns the parser to use for commandline parsing.

  Args:
    my_commands: A dictionary mapping subcommand names to classes.

  Returns:
    A commandline.ArgumentParser object.
  """
  parser = commandline.ArgumentParser(caching=True, default_log_level='notice')

  # With no registered subcommands there is nothing more to wire up.
  if not my_commands:
    return parser

  subparsers = parser.add_subparsers(title='Subcommands')
  for cmd_name, class_def in sorted(my_commands.items()):
    epilog = getattr(class_def, 'EPILOG', None)
    sub_parser = subparsers.add_parser(
        cmd_name, description=class_def.__doc__, epilog=epilog,
        caching=class_def.use_caching_options,
        formatter_class=commandline.argparse.RawDescriptionHelpFormatter)
    class_def.AddParser(sub_parser)

  return parser
+
+
+def _RunSubCommand(subcommand):
+  """Helper function for testing purposes."""
+  return subcommand.Run()
+
+
def main(argv):
  """Entry point: dispatch to the chosen `cros` subcommand."""
  try:
    parser = GetOptions(command.ListCommands())
    # Cros currently does nothing without a subcmd. Print help if no args are
    # specified.
    if not argv:
      parser.print_help()
      return 1

    namespace = parser.parse_args(argv)
    subcommand = namespace.command_class(namespace)
    # Stats upload runs alongside the subcommand via the queue; the context
    # manager waits for the upload on exit.
    with stats.UploadContext() as queue:
      if subcommand.upload_stats:
        cmd_base = subcommand.options.command_class.command_name
        cmd_stats = stats.Stats.SafeInit(cmd_line=sys.argv, cmd_base=cmd_base)
        if cmd_stats:
          queue.put([cmd_stats, stats.StatsUploader.URL,
                     subcommand.upload_stats_timeout])
      # TODO: to make command completion faster, send an interrupt signal to the
      # stats uploader task after the subcommand completes.
      try:
        code = _RunSubCommand(subcommand)
      except (commandline.ChrootRequiredError, commandline.ExecRequiredError):
        # The higher levels want these passed back, so oblige.
        raise
      except Exception as e:
        # Any other failure is reported; the full traceback only in --debug.
        code = 1
        logging.error('cros %s failed before completing.',
                      subcommand.command_name)
        if namespace.debug:
          raise
        else:
          logging.error(e)

      # A subcommand returning None falls through to the success path.
      if code is not None:
        return code

    return 0
  except KeyboardInterrupt:
    logging.debug('Aborted due to keyboard interrupt.')
    return 1
diff --git a/scripts/cros_best_revision.py b/scripts/cros_best_revision.py
new file mode 100644
index 0000000..8416721
--- /dev/null
+++ b/scripts/cros_best_revision.py
@@ -0,0 +1,235 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Examine success/fail history for Chrome/ium OS builds.
+
+Used to check in a LKGM version for Chrome OS for other consumers.
+"""
+
+from __future__ import print_function
+
+import distutils.version
+import os
+
+from chromite.cbuildbot import archive_lib
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot import tree_status
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gclient
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import parallel
+
+
class LKGMNotFound(Exception):
  """Raised if a newer valid LKGM could not be found."""
+
+
class LKGMNotCommitted(Exception):
  """Raised if we could not submit a new LKGM to the Chromium tree."""
+
+
class ChromeCommitter(object):
  """Committer object responsible for obtaining a new LKGM and committing it.

  Expected call sequence (see main()): CheckoutChromeLKGM(), optionally
  UpdateLatestFiles(), FindNewLKGM(), then CommitNewLKGM().
  """

  _COMMIT_MSG = ('Automated Commit: Committing new LKGM version %(version)s '
                 'for chromeos.')
  # Number of most-recent canary versions to score when picking an LKGM.
  _CANDIDATES_TO_CONSIDER = 10

  # Poll interval and overall timeout (seconds) when waiting for the
  # Chromium tree to open before landing the commit.
  _SLEEP_TIMEOUT = 30
  _TREE_TIMEOUT = 7200

  def __init__(self, checkout_dir, dryrun):
    """Initialize the committer.

    Args:
      checkout_dir: Directory holding (or to receive) the chromium/src
        checkout; created/synced by CheckoutChromeLKGM().
      dryrun: If True, do everything except landing the commit and copying
        LATEST files.
    """
    self._checkout_dir = checkout_dir
    self._dryrun = dryrun
    self._lkgm = None      # New LKGM version; set by FindNewLKGM().
    self._old_lkgm = None  # Committed LKGM; read by CheckoutChromeLKGM().
    self.site_config = config_lib.LoadConfigFromFile()

  def CheckoutChromeLKGM(self):
    """Checkout chromeos LKGM file for chrome into tmp checkout dir."""
    # Clone on first use; otherwise sync the existing checkout to a fresh
    # origin/master.
    if not os.path.exists(self._checkout_dir):
      cros_build_lib.RunCommand(
          ['git', 'clone', constants.CHROMIUM_GOB_URL,
           self._checkout_dir])
    else:
      cros_build_lib.RunCommand(
          ['git', 'fetch', 'origin'], cwd=self._checkout_dir)
      cros_build_lib.RunCommand(
          ['git', 'checkout', '-f', 'origin/master'], cwd=self._checkout_dir)

    # Recreate the working branch from scratch; the delete is best-effort
    # since the branch may not exist yet.
    cros_build_lib.RunCommand(
        ['git', 'branch', '-D', 'lkgm-roll'], cwd=self._checkout_dir,
        error_code_ok=True)
    cros_build_lib.RunCommand(
        ['git', 'checkout', '-b', 'lkgm-roll', 'origin/master'],
        cwd=self._checkout_dir)

    self._old_lkgm = osutils.ReadFile(
        os.path.join(self._checkout_dir, constants.PATH_TO_CHROME_LKGM))

  @cros_build_lib.MemoizedSingleCall
  def _GetLatestCanaryVersions(self):
    """Returns the latest CANDIDATES_TO_CONSIDER canary versions."""
    gs_handle = gs.GSContext()
    version_paths = gs_handle.LS(manifest_version.BUILD_STATUS_URL)

    # Strip gs://<path> prefix and trailing /'s.
    versions = [os.path.basename(v.rstrip('/')) for v in version_paths]

    lv = distutils.version.LooseVersion
    # We only care about canary versions which always end in 0.0, and only
    # those strictly newer than the currently committed LKGM.
    canary_versions = [v for v in versions if v.endswith('.0.0')]
    new_canary_versions = [v for v in canary_versions
                           if lv(v) > lv(self._old_lkgm)]
    return sorted(new_canary_versions, key=lv,
                  reverse=True)[0:self._CANDIDATES_TO_CONSIDER]

  def GetCanariesForChromeLKGM(self):
    """Grabs a list of builders that are important for the Chrome LKGM."""
    builders = []
    for build_name, conf in self.site_config.iteritems():
      # Only top-level (no child configs) canaries that are marked critical
      # for Chrome contribute to the score.
      if (conf['build_type'] == constants.CANARY_TYPE and
          conf['critical_for_chrome'] and not conf['child_configs']):
        builders.append(build_name)

    return builders

  def FindNewLKGM(self):
    """Finds a new LKGM for chrome from previous chromeos releases."""
    versions = self._GetLatestCanaryVersions()
    if not versions:
      raise LKGMNotFound('No valid LKGM found newer than the old LKGM.')

    canaries = self.GetCanariesForChromeLKGM()
    logging.info('Considering the following versions: %s', ' '.join(versions))
    logging.info('Using scores from the following canaries: %s',
                 ' '.join(canaries))

    # Scores are based on passing builders.
    version_scores = {}
    for version in versions:
      for builder in canaries:
        status = manifest_version.BuildSpecsManager.GetBuildStatus(
            builder, version, retries=0)
        if status:
          if status.Passed():
            version_scores[version] = version_scores.get(version, 0) + 1
          elif status.Failed():
            # We don't consider builds with any reporting failures.
            version_scores[version] = 0
            break

      logging.info('Version %s had score %d', version,
                   version_scores.get(version, 0))

    # We want to get the version with the highest score. In case of a tie, we
    # want to choose the highest version.
    # NOTE(review): the tie-break compares raw version strings, not
    # LooseVersions -- correct for equal-width versions; confirm otherwise.
    lkgm = max((v, k) for k, v in version_scores.iteritems())[1]
    if not version_scores[lkgm] > 0:
      raise LKGMNotFound('No valid LKGM found. Scores are too low.')

    self._lkgm = lkgm

  def CommitNewLKGM(self):
    """Commits the new LKGM file using our template commit message.

    Raises:
      LKGMNotFound: If FindNewLKGM has not produced a usable version.
      LKGMNotCommitted: If the commit could not be created or landed.
    """
    lv = distutils.version.LooseVersion
    # Refuse to commit when no LKGM was found or it would roll backwards.
    # (The previous 'and'-joined condition short-circuited whenever _lkgm
    # was set, so the version comparison never actually ran.)
    if not self._lkgm or lv(self._lkgm) < lv(self._old_lkgm):
      raise LKGMNotFound('No valid LKGM found. Did you run FindNewLKGM?')
    commit_msg = self._COMMIT_MSG % dict(version=self._lkgm)

    try:
      # Add the new versioned file.
      osutils.WriteFile(
          os.path.join(self._checkout_dir, constants.PATH_TO_CHROME_LKGM),
          self._lkgm)
      cros_build_lib.RunCommand(
          ['git', 'add', constants.PATH_TO_CHROME_LKGM], cwd=self._checkout_dir)

      # Commit it!
      cros_build_lib.RunCommand(
          ['git', 'commit', '-m', commit_msg],
          cwd=self._checkout_dir)
    except cros_build_lib.RunCommandError as e:
      raise LKGMNotCommitted(
          'Could not create git commit with new LKGM: %r' % e)

    # Wait (up to _TREE_TIMEOUT) for the Chromium tree to open before landing.
    if not tree_status.IsTreeOpen(status_url=gclient.STATUS_URL,
                                  period=self._SLEEP_TIMEOUT,
                                  timeout=self._TREE_TIMEOUT):
      raise LKGMNotCommitted('Chromium Tree is closed')

    if not self._dryrun:
      try:
        cros_build_lib.RunCommand(
            ['git', 'cl', 'land', '-f', '--bypass-hooks', '-m', commit_msg],
            cwd=self._checkout_dir)
      except cros_build_lib.RunCommandError as e:
        raise LKGMNotCommitted('Could not submit LKGM: %r' % e)

  def UpdateLatestFilesForBot(self, config, versions):
    """Update the LATEST files, for a given bot, in Google Storage.

    Args:
      config: The builder config to update.
      versions: Versions of ChromeOS to look at, sorted in descending order.
    """
    base_url = archive_lib.GetBaseUploadURI(config)
    acl = archive_lib.GetUploadACL(config)
    latest_url = None
    # gs.GSContext skips over all commands (including read-only checks)
    # when dry_run is True, so we have to create two context objects.
    # TODO(davidjames): Fix this.
    gs_ctx = gs.GSContext()
    copy_ctx = gs.GSContext(dry_run=self._dryrun)
    for version in reversed(versions):
      url = os.path.join(base_url, 'LATEST-%s' % version)
      found = gs_ctx.Exists(url, print_cmd=False)
      if not found and latest_url:
        try:
          # version=0 makes the copy conditional on the file being absent,
          # so a concurrent writer trips GSContextPreconditionFailed instead
          # of being clobbered.
          copy_ctx.Copy(latest_url, url, version=0, acl=acl)
          logging.info('Copied %s -> %s', latest_url, url)
        except gs.GSContextPreconditionFailed:
          found = True

      if found:
        logging.info('Found %s', url)
        latest_url = url

  def UpdateLatestFiles(self):
    """Update the LATEST files since LKGM, in Google Storage."""
    ext_cfgs, int_cfgs = self.site_config.FindFullConfigsForBoard(board=None)
    versions = self._GetLatestCanaryVersions() + [self._old_lkgm]
    tasks = [[cfg, versions] for cfg in ext_cfgs + int_cfgs]
    parallel.RunTasksInProcessPool(self.UpdateLatestFilesForBot, tasks,
                                   processes=100)
+
+
def _GetParser():
  """Returns the parser to use for this module."""
  parser = commandline.ArgumentParser(usage=__doc__, caching=True)
  parser.add_argument(
      '--dryrun', action='store_true', default=False,
      help="Find the next LKGM but don't commit it.")
  parser.add_argument(
      '--workdir', default=os.path.join(os.getcwd(), 'src'),
      help=('Path to a checkout of chromium/src. '
            'Defaults to PWD/src'))
  return parser
+
+
def main(argv):
  """Pick the best recent canary version and commit it as the Chrome LKGM."""
  options = _GetParser().parse_args(argv)

  # Sync the checkout, refresh LATEST markers, then find and land the LKGM.
  committer = ChromeCommitter(options.workdir, dryrun=options.dryrun)
  committer.CheckoutChromeLKGM()
  committer.UpdateLatestFiles()
  committer.FindNewLKGM()
  committer.CommitNewLKGM()
  return 0
diff --git a/scripts/cros_best_revision_unittest b/scripts/cros_best_revision_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/cros_best_revision_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/cros_best_revision_unittest.py b/scripts/cros_best_revision_unittest.py
new file mode 100644
index 0000000..43db8f3
--- /dev/null
+++ b/scripts/cros_best_revision_unittest.py
@@ -0,0 +1,118 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the cros_best_revision program."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import manifest_version
+from chromite.cbuildbot import tree_status
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import gs_unittest
+from chromite.lib import osutils
+from chromite.lib import partial_mock
+from chromite.scripts import cros_best_revision
+
+
class BaseChromeCommitterTest(cros_test_lib.MockTempDirTestCase):
  """Shared fixture for tests of cros_best_revision.ChromeCommitter."""

  def setUp(self):
    """Create a committer plus canned pass/fail builder statuses."""
    self.committer = cros_best_revision.ChromeCommitter(self.tempdir, False)
    self.lkgm_file = os.path.join(self.tempdir, constants.PATH_TO_CHROME_LKGM)
    self.pass_status = manifest_version.BuilderStatus(
        constants.BUILDER_STATUS_PASSED, None)
    self.fail_status = manifest_version.BuilderStatus(
        constants.BUILDER_STATUS_FAILED, None)
+
+
# pylint: disable=W0212
class ChromeGSTest(BaseChromeCommitterTest):
  """Exercise cros_best_revision.ChromeCommitter version filtering."""

  def testGetLatestCanaryVersions(self):
    """Non-canary and too-old versions must be filtered out."""
    listing = '\n'.join(['2910.0.1', '2900.0.0', '2908.0.0', '2909.0.0',
                         '2910.0.0'])
    # Cap consideration at the two newest canary results.
    cros_best_revision.ChromeCommitter._CANDIDATES_TO_CONSIDER = 2
    self.committer._old_lkgm = '2905.0.0'
    with gs_unittest.GSContextMock() as gs_mock:
      gs_mock.AddCmdResult(partial_mock.In('ls'), output=listing)
      versions = self.committer._GetLatestCanaryVersions()
    self.assertEqual(versions, ['2910.0.0', '2909.0.0'])
+
+
class ChromeCommitterTester(cros_build_lib_unittest.RunCommandTestCase,
                            BaseChromeCommitterTest):
  """Test cros_best_revision.Committer."""

  canaries = ['a-release', 'b-release', 'c-release']
  versions = ['4.0.0', '3.0.0']

  def testCheckoutChromeLKGM(self):
    """Tests that we can read/obtain the old LKGM from mocked out SVN."""
    # Write out an old lkgm file as if we got it from svn update.
    old_lkgm = '2098.0.0'
    osutils.SafeMakedirs(os.path.dirname(self.lkgm_file))
    osutils.WriteFile(self.lkgm_file, old_lkgm)
    self.committer.CheckoutChromeLKGM()
    # assertTrue(a, b) treats |b| as a failure *message* and never compares
    # the two values, so the previous check could not catch a wrong LKGM.
    self.assertEqual(self.committer._old_lkgm, old_lkgm)

  def _TestFindNewLKGM(self, all_results, lkgm):
    """Stubs out methods used by FindNewLKGM and checks the chosen LKGM.

    Args:
      all_results: Per-canary list of per-version BuilderStatus (or None).
      lkgm: The version FindNewLKGM is expected to select.
    """
    expected = {}
    for canary, results in zip(self.canaries, all_results):
      for version, status in zip(self.versions, results):
        expected[(canary, version)] = status
    def _GetBuildStatus(canary, version, **_):
      return expected[(canary, version)]
    self.PatchObject(self.committer, '_GetLatestCanaryVersions',
                     return_value=self.versions)
    self.PatchObject(self.committer, 'GetCanariesForChromeLKGM',
                     return_value=self.canaries)
    self.PatchObject(manifest_version.BuildSpecsManager, 'GetBuildStatus',
                     side_effect=_GetBuildStatus)
    self.committer.FindNewLKGM()
    # As above: assertTrue silently passed for any truthy value; compare.
    self.assertEqual(self.committer._lkgm, lkgm)

  def testFindNewLKGMBasic(self):
    """Tests that we return the highest version if all versions are good."""
    self._TestFindNewLKGM([[self.pass_status] * 2] * 3, '4.0.0')

  def testFindNewLKGMAdvanced(self):
    """Tests that we return the only version with passing canaries."""
    self._TestFindNewLKGM([[self.fail_status, self.pass_status]] * 3, '3.0.0')

  def testFindNewLKGMWithFailures(self):
    """Ensure we reject versions with failed builds.

    This test case is a bit more complex than the two above and tests the logic
    where we want to reject versions with failed builds.

    In this example both versions have 2 passing builds. The older version
    is missing a score from one builder where the newer version reports
    a failure. In this instance, our scoring mechanism should choose the older
    version.
    """
    all_results = [[self.pass_status] * 2] * 2 + [[self.fail_status, None]]
    self._TestFindNewLKGM(all_results, '3.0.0')

  def testCommitNewLKGM(self):
    """Tests that we can commit a new LKGM file."""
    osutils.SafeMakedirs(os.path.dirname(self.lkgm_file))
    self.committer._lkgm = '4.0.0'
    self.PatchObject(tree_status, 'IsTreeOpen', return_value=True)
    self.committer.CommitNewLKGM()

    # Check the file was actually written out correctly.
    self.assertEqual(osutils.ReadFile(self.lkgm_file), self.committer._lkgm)
    self.assertCommandContains(['git', 'cl', 'land'])
diff --git a/scripts/cros_brick_utils.py b/scripts/cros_brick_utils.py
new file mode 100644
index 0000000..102d7a7
--- /dev/null
+++ b/scripts/cros_brick_utils.py
@@ -0,0 +1,40 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Collection of tools used in scripts while we migrate to bricks."""
+
+from __future__ import print_function
+
+from chromite.lib import brick_lib
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+
+
def ParseArgs(argv):
  """Parse command-line arguments.

  Args:
    argv: array of arguments passed to the script.

  Returns:
    The parsed (and frozen) options namespace.
  """
  parser = commandline.ArgumentParser(description=__doc__)
  parser.add_argument('brick')
  parser.add_argument(
      '--friendly-name', action='store_true', dest='friendlyname',
      help='Returns the friendly name for a given brick. This name is used in '
      'the sysroot path and as "board name" in our legacy tools.')
  opts = parser.parse_args(argv)
  opts.Freeze()
  return opts
+
+
def main(argv):
  """Script entry point: resolve the brick and print requested info."""
  opts = ParseArgs(argv)

  try:
    brick = brick_lib.Brick(opts.brick, allow_legacy=False)
  except brick_lib.BrickNotFound:
    # Die() terminates the process with an error message.
    cros_build_lib.Die('Brick %s not found.' % opts.brick)

  if opts.friendlyname:
    print(brick.FriendlyName())
diff --git a/scripts/cros_check_patches.py b/scripts/cros_check_patches.py
new file mode 100644
index 0000000..b0e62c0
--- /dev/null
+++ b/scripts/cros_check_patches.py
@@ -0,0 +1,240 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Command to list patches applied to a repository."""
+
+from __future__ import print_function
+
+import functools
+import json
+import os
+import parallel_emerge
+import portage  # pylint: disable=import-error
+import re
+import shutil
+import sys
+import tempfile
+
+from chromite.lib import cros_build_lib
+from chromite.lib import osutils
+
+
class PatchReporter(object):
  """Help discover patches being applied by ebuilds.

  The patches can be compared to a set of expected patches.  They can also be
  sorted into categories like 'needs_upstreaming', etc.  Use of this can help
  ensure that critical (e.g. security) patches are not inadvertently dropped,
  and help surface forgotten-about patches that are yet-to-be upstreamed.
  """

  # Categories a known patch may be filed under in the config file.
  PATCH_TYPES = ('upstreamed', 'needs_upstreaming', 'not_for_upstream',
                 'uncategorized')

  def __init__(self, config, overlay_dir, ebuild_cmd, equery_cmd, sudo=False):
    """Initialize.

    The 'config' dictionary should look like this:
    {
      "ignored_packages": ["chromeos-base/chromeos-chrome"],
      "upstreamed": [],
      "needs_upstreaming": [],
      "not_for_upstream": [],
      "uncategorized": [
        "net-misc/htpdate htpdate-1.0.4-checkagainstbuildtime.patch",
        "net-misc/htpdate htpdate-1.0.4-errorcheckhttpresp.patch"
      ]
    }
    """
    # Realpath so the commonprefix check in _ObservePatches compares
    # canonical paths.
    self.overlay_dir = os.path.realpath(overlay_dir)
    self.ebuild_cmd = ebuild_cmd
    self.equery_cmd = equery_cmd
    self._invoke_command = cros_build_lib.RunCommand
    if sudo:
      self._invoke_command = functools.partial(cros_build_lib.SudoRunCommand,
                                               strict=False)
    self.ignored_packages = config['ignored_packages']
    # Number of ebuilds actually analyzed; incremented in _ObservePatches.
    self.package_count = 0
    # The config format is stored as category: [ list of patches ]
    # for ease of maintenance. But it's actually more useful to us
    # in the code if kept as a map of patch:patch_type.
    self.patches = {}
    for cat in self.PATCH_TYPES:
      for patch in config[cat]:
        self.patches[patch] = cat

  def Ignored(self, package_name):
    """See if |package_name| should be ignored.

    Args:
      package_name: A package name (e.g. 'chromeos-base/chromeos-chrome')

    Returns:
       True if this package should be skipped in the analysis. False otherwise.
    """
    return package_name in self.ignored_packages

  def ObservePatches(self, deps_map):
    """Observe the patches being applied by ebuilds in |deps_map|.

    Temporarily points PORT_LOGDIR at a scratch directory so portage logs
    land where _ObservePatches() can harvest them, then restores (or
    removes) the variable and deletes the scratch space.

    Args:
      deps_map: The packages to analyze.

    Returns:
       A list of patches being applied.
    """
    original = os.environ.get('PORT_LOGDIR', None)
    temp_space = None
    try:
      temp_space = tempfile.mkdtemp(prefix='check_patches')
      os.environ['PORT_LOGDIR'] = temp_space
      return self._ObservePatches(temp_space, deps_map)
    finally:
      if temp_space:
        shutil.rmtree(os.environ['PORT_LOGDIR'])
      if original:
        os.environ['PORT_LOGDIR'] = original
      else:
        # NOTE(review): a pre-existing-but-empty PORT_LOGDIR ('') takes this
        # branch and is removed rather than restored -- confirm acceptable.
        os.environ.pop('PORT_LOGDIR')

  def _ObservePatches(self, temp_space, deps_map):
    """Run 'ebuild ... clean prepare clean' per package and scrape the logs.

    Args:
      temp_space: Scratch directory that PORT_LOGDIR points at.
      deps_map: The packages to analyze.

    Returns:
      A list of 'category/package patch-name' strings observed.
    """
    for cpv in deps_map:
      cat, name, _, _ = portage.versions.catpkgsplit(cpv)
      if self.Ignored('%s/%s' % (cat, name)):
        continue
      # 'equery which <cpv>' prints the path to the ebuild file.
      cmd = self.equery_cmd[:]
      cmd.extend(['which', cpv])
      ebuild_path = self._invoke_command(cmd, print_cmd=False,
                                         redirect_stdout=True).output.rstrip()
      # Some of these packages will be from other portdirs. Since we are
      # only interested in extracting the patches from one particular
      # overlay, we skip ebuilds not from that overlay.
      if self.overlay_dir != os.path.commonprefix([self.overlay_dir,
                                                   ebuild_path]):
        continue

      # By running 'ebuild blah.ebuild prepare', we get logs in PORT_LOGDIR
      # of what patches were applied. We clean first, to ensure we get a
      # complete log, and clean again afterwards to avoid leaving a mess.
      cmd = self.ebuild_cmd[:]
      cmd.extend([ebuild_path, 'clean', 'prepare', 'clean'])
      self._invoke_command(cmd, print_cmd=False, redirect_stdout=True)
      self.package_count += 1

    # Done with ebuild. Now just harvest the logs and we're finished.
    # This regex is tuned intentionally to ignore a few unhelpful cases.
    # E.g. elibtoolize repetitively applies a set of sed/portage related
    # patches. And media-libs/jpeg says it is applying
    # "various patches (bugfixes/updates)", which isn't very useful for us.
    # So, if you noticed these omissions, it was intentional, not a bug. :-)
    patch_regex = r'^ [*] Applying ([^ ]*) [.][.][.].*'
    output = cros_build_lib.RunCommand(
        ['egrep', '-r', patch_regex, temp_space], print_cmd=False,
        redirect_stdout=True).output
    lines = output.splitlines()
    patches = []
    patch_regex = re.compile(patch_regex)
    for line in lines:
      # egrep -r prints '<logfile-path>:<matched line>'.
      # NOTE(review): this 4-way unpack presumably relies on the portage log
      # path containing exactly two ':' separators (category:pkg-ver:stamp);
      # it raises ValueError otherwise -- confirm log naming.
      cat, pkg, _, patchmsg = line.split(':')
      cat = os.path.basename(cat)
      # Reuse catpkgsplit with a dummy category to strip the version suffix.
      _, pkg, _, _ = portage.versions.catpkgsplit('x-x/%s' % pkg)
      patch_name = re.sub(patch_regex, r'\1', patchmsg)
      patches.append('%s/%s %s' % (cat, pkg, patch_name))

    return patches

  def ReportDiffs(self, observed_patches):
    """Prints a report on any differences to stdout.

    Args:
      observed_patches: List of patches returned by ObservePatches().

    Returns:
      An int representing the total number of discrepancies found.
    """
    expected_patches = set(self.patches.keys())
    observed_patches = set(observed_patches)
    missing_patches = sorted(list(expected_patches - observed_patches))
    unexpected_patches = sorted(list(observed_patches - expected_patches))

    if missing_patches:
      print('Missing Patches:')
      for p in missing_patches:
        print('%s (%s)' % (p, self.patches[p]))

    if unexpected_patches:
      print('Unexpected Patches:')
      print('\n'.join(unexpected_patches))

    return len(missing_patches) + len(unexpected_patches)
+
+
def Usage():
  """Print command-line usage help to stdout."""
  usage_text = """Usage:
cros_check_patches [--board=BOARD] [emerge args] package overlay-dir config.json

Given a package name (e.g. 'virtual/target-os') and an overlay directory
(e.g. /usr/local/portage/chromiumos), outputs a list of patches
applied by that overlay, in the course of building the specified
package and all its dependencies. Additional configuration options are
specified in the JSON-format config file named on the command line.

First run? Try this for a starter config:
{
  "ignored_packages": ["chromeos-base/chromeos-chrome"],
  "upstreamed": [],
  "needs_upstreaming": [],
  "not_for_upstream": [],
  "uncategorized": []
}
"""
  print(usage_text)
+
+
def main(argv):
  """Entry point.

  Parses the trailing overlay-dir and config-file arguments itself, forwards
  the rest of argv to DepGraphGenerator/emerge, then reports patch
  discrepancies and exits with their count (0 means everything matched).
  """
  if len(argv) < 4:
    Usage()
    sys.exit(1)

  # Avoid parsing most of argv because most of it is destined for
  # DepGraphGenerator/emerge rather than us. Extract what we need
  # without disturbing the rest.
  config_path = argv.pop()  # Last argument: JSON config file path.
  config = json.loads(osutils.ReadFile(config_path))
  overlay_dir = argv.pop()  # Second-to-last argument: overlay directory.
  # NOTE(review): substring match, so any argument merely *containing*
  # '--board=' is picked up -- confirm that is intended over startswith().
  board = [x.split('=')[1] for x in argv if x.find('--board=') != -1]
  if board:
    ebuild_cmd = ['ebuild-%s' % board[0]]
    equery_cmd = ['equery-%s' % board[0]]
  else:
    ebuild_cmd = ['ebuild']
    equery_cmd = ['equery']

  # Without a board, run host ebuild/equery via sudo (see PatchReporter).
  use_sudo = not board

  # We want the toolchain to be quiet to avoid interfering with our output.
  depgraph_argv = ['--quiet', '--pretend', '--emptytree']

  # Defaults to rdeps, but allow command-line override.
  default_rootdeps_arg = ['--root-deps=rdeps']
  for arg in argv:
    if arg.startswith('--root-deps'):
      default_rootdeps_arg = []

  # Now, assemble the overall argv as the concatenation of the
  # default list + possible rootdeps-default + actual command line.
  depgraph_argv.extend(default_rootdeps_arg)
  depgraph_argv.extend(argv)

  deps = parallel_emerge.DepGraphGenerator()
  deps.Initialize(depgraph_argv)
  deps_tree, deps_info = deps.GenDependencyTree()
  deps_map = deps.GenDependencyGraph(deps_tree, deps_info)

  reporter = PatchReporter(config, overlay_dir, ebuild_cmd, equery_cmd,
                           sudo=use_sudo)
  observed = reporter.ObservePatches(deps_map)
  diff_count = reporter.ReportDiffs(observed)

  print('Packages analyzed: %d' % reporter.package_count)
  print('Patches observed: %d' % len(observed))
  print('Patches expected: %d' % len(reporter.patches.keys()))
  # Exit status carries the number of discrepancies (0 == success).
  sys.exit(diff_count)
diff --git a/scripts/cros_deps_diff.py b/scripts/cros_deps_diff.py
new file mode 100644
index 0000000..f56ca65
--- /dev/null
+++ b/scripts/cros_deps_diff.py
@@ -0,0 +1,183 @@
+# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates dependency graph diffs.
+
+As an input it takes 2 or more dependency graphs output from cros_extract_deps
+and it finds all divergent packages (packages whose versions differ between
+some of these dependency graphs) and outputs graphs that trace the divergence
+in the dependency trees until common packages are found.
+"""
+
+from __future__ import print_function
+
+import json
+import os
+
+from chromite.lib import commandline
+from chromite.lib import dot_helper
+
NORMAL_COLOR = 'black'
BASE_COLORS = ['red', 'green', 'blue']


def UnversionedName(dep):
  """Returns the 'category/name' of the package, omitting the version."""
  return '%s/%s' % (dep['category'], dep['name'])


def GetColor(index):
  """Maps |index| to a color name or a generated '#rrggbb' value.

  The first len(BASE_COLORS) indices use the named colors; beyond that a
  high-contrast color is synthesized by splicing the index bits across the
  three RGB channels.

  Args:
    index: Non-negative input-graph index.

  Returns:
    A dot color string, e.g. 'red' or '#800000'.
  """
  try:
    return BASE_COLORS[index]
  except IndexError:
    # Generate a color by splicing the bits to generate high contrast colors.
    index -= len(BASE_COLORS) - 1
    chars = [0] * 3
    # range() instead of Python-2-only xrange(), and explicit floor division
    # ('//') so the shift amount stays integral under Python 3 as well.
    for bit in range(0, 24):
      chars[bit % 3] |= ((index >> bit) & 0x1) << (7 - bit // 3)
    return "#%02x%02x%02x" % tuple(chars)
+
+
def GetReverseDependencyClosure(full_name, deps_map, divergent_set):
  """Gets the closure of the reverse dependencies of a node.

  Walks every reverse-dependency path from |full_name|, collecting each
  visited node whose unversioned name is in |divergent_set|.
  """
  closure = set()

  def _Visit(name):
    node = deps_map[name]
    unversioned = '%s/%s' % (node['category'], node['name'])
    if unversioned not in divergent_set:
      return
    closure.add(name)
    for rev_dep in node['rev_deps']:
      if rev_dep not in closure:
        _Visit(rev_dep)

  _Visit(full_name)
  return closure
+
+
def GetVersionMap(input_deps):
  """Creates the version map for the input data.

  The version map maps an unversioned package name to its corresponding
  versioned name depending on the input dependency graph.

  For every package, it maps the input data index to the full name (versioned)
  of the package in that input data. E.g.
  map['x11-base/xorg-server'] = {0:'x11-base/xorg-server-1.6.5-r203',
                                 1:'x11-base/xorg-server-1.7.6-r8'}

  Args:
    input_deps: List of flattened dependency graphs, one per input file.

  Returns:
    The version map described above.
  """
  version_map = {}
  # enumerate() replaces the hand-maintained counter; items() (not the
  # Python-2-only iteritems()) keeps this Python-3 compatible.
  for i, deps_map in enumerate(input_deps):
    for full_name, dep in deps_map.items():
      version_map.setdefault(UnversionedName(dep), {})[i] = full_name
  return version_map
+
+
def GetDivergentSet(version_map, count):
  """Gets the set of divergent packages.

  Divergent packages are those that have a different version among the input
  dependency graphs (or missing version altogether).

  Args:
    version_map: Map from unversioned name to {input_index: full_name}, as
        produced by GetVersionMap().
    count: Number of input dependency graphs.

  Returns:
    The set of unversioned names of divergent packages.
  """
  divergent_set = set()
  # items() (not Python-2-only iteritems()) keeps this Python-3 compatible;
  # len(value) replaces the redundant len(value.keys()).
  for pkg, value in version_map.items():
    if len(value) != count or len(set(value.values())) > 1:
      # The package doesn't exist for at least one of the inputs, or there
      # is more than one distinct versioned name.
      divergent_set.add(pkg)
  return divergent_set
+
+
def BuildDependencyGraph(pkg, input_deps, version_map, divergent_set):
  """Builds the divergence graph for one divergent package.

  Args:
    pkg: Unversioned name of the divergent package to graph.
    input_deps: List of flattened dependency graphs (one per input).
    version_map: Map from unversioned name to {input_index: full_name}.
    divergent_set: Set of unversioned names of divergent packages.

  Returns:
    A dot_helper.Graph tracing the divergence back to common packages.
  """
  graph = dot_helper.Graph(pkg)

  # A subgraph for the divergent package we're considering. Add all its
  # versions as a sink.
  pkg_subgraph = graph.AddNewSubgraph('sink')

  # The outer packages are those that aren't divergent but depend on a
  # divergent package. Add them in their own subgraph, as sources.
  outer_subgraph = graph.AddNewSubgraph('source')

  emitted = set()
  # range() rather than Python-2-only xrange() keeps this Python-3 ready.
  for i in range(0, len(input_deps)):
    try:
      pkg_name = version_map[pkg][i]
    except KeyError:
      # This package has no version in input |i|.
      continue

    color = GetColor(i)

    if pkg_name not in emitted:
      pkg_subgraph.AddNode(pkg_name, pkg_name, color, None)
      emitted.add(pkg_name)

    # Add one subgraph per version for generally better layout.
    subgraph = graph.AddNewSubgraph()

    nodes = GetReverseDependencyClosure(pkg_name, input_deps[i], divergent_set)
    for node_name in nodes:
      if node_name not in emitted:
        subgraph.AddNode(node_name, node_name, color, None)
        emitted.add(node_name)

      # Add outer packages, and all the arcs.
      for dep in input_deps[i][node_name]['rev_deps']:
        dep_node = input_deps[i][dep]
        if (UnversionedName(dep_node) not in divergent_set
            and dep not in emitted):
          outer_subgraph.AddNode(dep, dep, NORMAL_COLOR, None)
          emitted.add(dep)
        graph.AddArc(dep, node_name)

  return graph
+
+
def main(argv):
  """Entry point: diff the input dependency graphs and render images."""
  parser = commandline.ArgumentParser(description=__doc__)
  parser.add_argument('-f', '--format', default='svg',
                      help='Dot output format (png, svg, etc.).')
  parser.add_argument('-o', '--output-dir', default='.',
                      help='Output directory.')
  parser.add_argument('-s', '--save-dot', action='store_true',
                      help='Save dot files.')
  parser.add_argument('inputs', nargs='+')
  options = parser.parse_args(argv)

  input_deps = []
  for input_path in options.inputs:
    with open(input_path) as handle:
      input_deps.append(json.load(handle))

  version_map = GetVersionMap(input_deps)
  divergent_set = GetDivergentSet(version_map, len(input_deps))

  # Make sure an output directory exists for every package category.
  for subdir in set(os.path.dirname(pkg) for pkg in divergent_set):
    try:
      os.makedirs(os.path.join(options.output_dir, subdir))
    except OSError:
      # The directory already exists.
      pass

  for pkg in divergent_set:
    filename = os.path.join(options.output_dir, pkg) + '.' + options.format
    save_dot_filename = filename + '.dot' if options.save_dot else None
    graph = BuildDependencyGraph(pkg, input_deps, version_map, divergent_set)
    dot_helper.GenerateImage(graph.Gen(), filename, options.format,
                             save_dot_filename)
diff --git a/scripts/cros_env_whitelist b/scripts/cros_env_whitelist
new file mode 120000
index 0000000..72196ce
--- /dev/null
+++ b/scripts/cros_env_whitelist
@@ -0,0 +1 @@
+../scripts/wrapper.py
\ No newline at end of file
diff --git a/scripts/cros_env_whitelist.py b/scripts/cros_env_whitelist.py
new file mode 100644
index 0000000..83c329e
--- /dev/null
+++ b/scripts/cros_env_whitelist.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Print the environment whitelist."""
+
+from __future__ import print_function
+
+from chromite.cbuildbot import constants
+
+
def main(_argv):
  """Print the chroot environment whitelist, space-separated."""
  whitelist = constants.CHROOT_ENVIRONMENT_WHITELIST
  print(' '.join(whitelist))
diff --git a/scripts/cros_extract_deps.py b/scripts/cros_extract_deps.py
new file mode 100644
index 0000000..c4a4a99
--- /dev/null
+++ b/scripts/cros_extract_deps.py
@@ -0,0 +1,186 @@
+# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Command to extract the dependency tree for a given package."""
+
+from __future__ import print_function
+
+import json
+import portage  # pylint: disable=F0401
+
+from parallel_emerge import DepGraphGenerator
+
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+
def FlattenDepTree(deptree, pkgtable=None, parentcpv=None, get_cpe=False):
  """Simplify dependency json.

Turn something like this (the parallel_emerge DepsTree format):
{
  "app-admin/eselect-1.2.9": {
    "action": "merge",
    "deps": {
      "sys-apps/coreutils-7.5-r1": {
        "action": "merge",
        "deps": {},
        "deptype": "runtime"
      },
      ...
    }
  }
}
  ...into something like this (the cros_extract_deps format):
{
  "app-admin/eselect-1.2.9": {
    "deps": ["coreutils-7.5-r1"],
    "rev_deps": [],
    "name": "eselect",
    "category": "app-admin",
    "version": "1.2.9",
    "full_name": "app-admin/eselect-1.2.9",
    "action": "merge"
  },
  "sys-apps/coreutils-7.5-r1": {
    "deps": [],
    "rev_deps": ["app-admin/eselect-1.2.9"],
    "name": "coreutils",
    "category": "sys-apps",
    "version": "7.5-r1",
    "full_name": "sys-apps/coreutils-7.5-r1",
    "action": "merge"
  }
}

  Args:
    deptree: The dependency tree.
    pkgtable: The package table to update. If None, create a new one.
    parentcpv: The parent CPV.
    get_cpe: If set True, include CPE in the flattened dependency tree.

  Returns:
    A flattened dependency tree.
  """
  if pkgtable is None:
    pkgtable = {}
  # items() rather than the Python-2-only iteritems() keeps this code
  # runnable under Python 3 as well.
  for cpv, record in deptree.items():
    if cpv not in pkgtable:
      cat, nam, ver, rev = portage.versions.catpkgsplit(cpv)
      pkgtable[cpv] = {'deps': [],
                       'rev_deps': [],
                       'name': nam,
                       'category': cat,
                       'version': '%s-%s' % (ver, rev),
                       'full_name': cpv,
                       'cpes': [],
                       'action': record['action']}
      if get_cpe:
        pkgtable[cpv]['cpes'].extend(GetCPEFromCPV(cat, nam, ver))

    # If we have a parent, that is a rev_dep for the current package.
    if parentcpv:
      pkgtable[cpv]['rev_deps'].append(parentcpv)
    # Record this package's direct deps (the keys of the 'deps' subtree).
    pkgtable[cpv]['deps'].extend(record['deps'])
    # Visit the subtree recursively as well.
    FlattenDepTree(record['deps'], pkgtable=pkgtable, parentcpv=cpv,
                   get_cpe=get_cpe)
  return pkgtable
+
+
def GetCPEFromCPV(category, package, version):
  """Look up the CPE for a specified Portage package.

  Args:
    category: The Portage package's category, e.g. "net-misc"
    package: The Portage package's name, e.g. "curl"
    version: The Portage version, e.g. "7.30.0"

  Returns:
    A list of CPE Name strings, e.g.
    ["cpe:/a:curl:curl:7.30.0", "cpe:/a:curl:libcurl:7.30.0"]
  """
  equery_cmd = ['equery', 'm', '-U', '%s/%s' % (category, package)]
  result = cros_build_lib.RunCommand(equery_cmd, error_code_ok=True,
                                     print_cmd=False,
                                     redirect_stdout=True)
  # Look for lines like "Remote-ID:   cpe:/a:kernel:linux-pam ID: cpe" and
  # extract the cpe URI (second whitespace-separated field).
  cpes = ['%s:%s' % (line.split()[1], version.replace('_', ''))
          for line in result.output.splitlines()
          if 'ID: cpe' in line]
  # Note that we're assuming we can combine the root of the CPE, taken
  # from metadata.xml, and tack on the version number as used by
  # Portage, and come up with a legitimate CPE. This works so long as
  # Portage and CPE agree on the precise formatting of the version
  # number, which they almost always do. The major exception we've
  # identified thus far is that our ebuilds have a pattern of inserting
  # underscores prior to patchlevels, that neither upstream nor CPE
  # use. For example, our code will decide we have
  # cpe:/a:todd_miller:sudo:1.8.6_p7 yet the advisories use a format
  # like cpe:/a:todd_miller:sudo:1.8.6p7, without the underscore. (CPE
  # is "right" in this example, in that it matches www.sudo.ws.)
  #
  # Removing underscores seems to improve our chances of correctly
  # arriving at the CPE used by NVD. However, at the end of the day,
  # ebuild version numbers are rev'd by people who don't have "try to
  # match NVD" as one of their goals, and there is always going to be
  # some risk of minor formatting disagreements at the version number
  # level, if not from stray underscores then from something else.
  #
  # This is livable so long as you do some fuzzy version number
  # comparison in your vulnerability monitoring, between what-we-have
  # and what-the-advisory-says-is-affected.
  return cpes
+
+
def ExtractCPEList(deps_list):
  """Convert a flattened dependency table into a CPE dump.

  Args:
    deps_list: Flattened dependency table from FlattenDepTree() with CPEs
        populated (get_cpe=True).

  Returns:
    A list of {'ComponentName', 'Repository', 'Targets'} dicts, sorted by
    component name. Packages without CPE entries are logged and skipped.
  """
  cpe_dump = []
  # items() (not Python-2-only iteritems()) keeps this Python-3 compatible.
  for cpv, record in deps_list.items():
    if record['cpes']:
      name = '%s/%s' % (record['category'], record['name'])
      cpe_dump.append({'ComponentName': name,
                       'Repository': 'cros',
                       'Targets': sorted(record['cpes'])})
    else:
      logging.warning('No CPE entry for %s', cpv)
  return sorted(cpe_dump, key=lambda k: k['ComponentName'])
+
+
def main(argv):
  """Extract the dependency tree and emit it as JSON."""
  parser = commandline.ArgumentParser(description="""
This extracts the dependency tree for the specified package, and outputs it
to stdout, in a serialized JSON format.""")
  parser.add_argument('--board', default=None,
                      help='The board to use when computing deps.')
  parser.add_argument('--format', default='deps',
                      choices=['deps', 'cpe'],
                      help='Output either traditional deps or CPE-only JSON.')
  parser.add_argument('--output-path', default=None,
                      help='Write output to the given path.')
  known_args, unknown_args = parser.parse_known_args(argv)

  # --board first, then our fixed flags, then everything the parser did not
  # recognize (destined for the emerge machinery).
  lib_argv = []
  if known_args.board:
    lib_argv.append('--board=%s' % known_args.board)
  lib_argv += ['--quiet', '--pretend', '--emptytree']
  lib_argv.extend(unknown_args)

  generator = DepGraphGenerator()
  generator.Initialize(lib_argv)
  deps_tree, _deps_info = generator.GenDependencyTree()
  deps_list = FlattenDepTree(deps_tree, get_cpe=(known_args.format == 'cpe'))
  if known_args.format == 'cpe':
    deps_list = ExtractCPEList(deps_list)

  deps_output = json.dumps(deps_list, sort_keys=True, indent=2)
  if known_args.output_path:
    with open(known_args.output_path, 'w') as f:
      f.write(deps_output)
  else:
    print(deps_output)
diff --git a/scripts/cros_gdb.py b/scripts/cros_gdb.py
new file mode 100644
index 0000000..75316b2
--- /dev/null
+++ b/scripts/cros_gdb.py
@@ -0,0 +1,615 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper for running gdb.
+
+This handles the fun details like running against the right sysroot, via
+qemu, bind mounts, etc...
+"""
+
+from __future__ import print_function
+
+import argparse
+import contextlib
+import errno
+import os
+import sys
+import tempfile
+
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import namespaces
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import qemu
+from chromite.lib import remote_access
+from chromite.lib import retry_util
+from chromite.lib import timeout_util
+from chromite.lib import toolchain
+
class GdbException(Exception):
  """Base exception for this module.

  All errors raised by this script derive from this class, so main() can
  catch them with a single except clause.
  """
+
+
class GdbBadRemoteDeviceError(GdbException):
  """Raised when the remote device does not exist or is not responding."""
+
+
class GdbMissingSysrootError(GdbException):
  """Raised when the path to the board sysroot cannot be found in chroot."""
+
+
class GdbMissingInferiorError(GdbException):
  """Raised when the binary to be debugged cannot be found."""
+
+
class GdbMissingDebuggerError(GdbException):
  """Raised when the correct version of the debugger cannot be found."""
+
+
class GdbCannotFindRemoteProcessError(GdbException):
  """Raised when the requested process cannot be found on the remote device."""
+
+
class GdbUnableToStartGdbserverError(GdbException):
  """Raised when an error occurs starting gdbserver on the remote device."""
+
+
class GdbTooManyPidsError(GdbException):
  """Raised when more than one matching pid is found running on the device."""
+
+
class GdbEarlyExitError(GdbException):
  """Raised when the user requests to exit early (e.g. at a prompt)."""
+
+
class GdbCannotDetectBoardError(GdbException):
  """Raised when board isn't specified and can't be automatically determined."""
+
+
class BoardSpecificGdb(object):
  """Framework for running gdb against a board sysroot.

  Supports two modes: local debugging (chroot into the sysroot, optionally
  under qemu) via Run(), and remote debugging (gdbserver on the device, a
  cross-gdb locally) via RunRemote().
  """

  # Paths bind-mounted into the sysroot before chroot()ing for local runs.
  _BIND_MOUNT_PATHS = ('dev', 'dev/pts', 'proc', 'mnt/host/source', 'sys')
  _GDB = '/usr/bin/gdb'
  _EXTRA_SSH_SETTINGS = {'CheckHostIP': 'no',
                         'BatchMode': 'yes'}
  _MISSING_DEBUG_INFO_MSG = """
%(inf_cmd)s is stripped and %(debug_file)s does not exist on your local machine.
  The debug symbols for that package may not be installed.  To install the debug
 symbols for %(package)s only, run:

   cros_install_debug_syms --board=%(board)s %(package)s

To install the debug symbols for all available packages, run:

   cros_install_debug_syms --board=%(board)s --all"""

  def __init__(self, board, gdb_args, inf_cmd, inf_args, remote, pid,
               remote_process_name, cgdb_flag, ping):
    self.board = board
    self.sysroot = None
    self.prompt = '(gdb) '
    self.inf_cmd = inf_cmd
    self.run_as_root = False
    self.gdb_args = gdb_args
    self.inf_args = inf_args
    self.remote = remote.hostname if remote else None
    self.pid = pid
    self.remote_process_name = remote_process_name
    # Port used for sending ssh commands to DUT.
    self.remote_port = remote.port if remote else None
    # Port for communicating between gdb & gdbserver.
    self.gdbserver_port = remote_access.GetUnusedPort()
    self.ssh_settings = remote_access.CompileSSHConnectSettings(
        **self._EXTRA_SSH_SETTINGS)
    self.cgdb = cgdb_flag
    self.framework = 'auto'
    self.qemu = None
    self.device = None
    self.cross_gdb = None
    self.ping = ping

  def VerifyAndFinishInitialization(self, device):
    """Verify files/processes exist and flags are correct."""
    if not self.board:
      if self.remote:
        self.board = cros_build_lib.GetBoard(device_board=device.board,
                                             override_board=self.board)
      else:
        raise GdbCannotDetectBoardError('Cannot determine which board to use. '
                                        'Please specify it with the --board '
                                        'flag.')

    self.sysroot = cros_build_lib.GetSysroot(board=self.board)
    self.prompt = '(%s-gdb) ' % self.board
    self.inf_cmd = self.RemoveSysrootPrefix(self.inf_cmd)
    self.cross_gdb = self.GetCrossGdb()

    if self.remote:

      # If given remote process name, find pid & inf_cmd on remote device.
      if self.remote_process_name or self.pid:
        self._FindRemoteProcess(device)

      # Verify that sysroot is valid (exists).
      if not os.path.isdir(self.sysroot):
        raise GdbMissingSysrootError('Sysroot does not exist: %s' %
                                     self.sysroot)

    self.device = device
    sysroot_inf_cmd = ''
    if self.inf_cmd:
      sysroot_inf_cmd = os.path.join(self.sysroot,
                                     self.inf_cmd.lstrip('/'))

    # Verify that inf_cmd, if given, exists.
    if sysroot_inf_cmd and not os.path.exists(sysroot_inf_cmd):
      raise GdbMissingInferiorError('Cannot find file %s (in sysroot).' %
                                    sysroot_inf_cmd)

    # Check to see if inf_cmd is stripped, and if so, check to see if debug file
    # exists.  If not, tell user and give them the option of quitting & getting
    # the debug info.
    if sysroot_inf_cmd:
      stripped_info = cros_build_lib.RunCommand(['file', sysroot_inf_cmd],
                                                capture_output=True).output
      if ' not stripped' not in stripped_info:
        debug_file = os.path.join(self.sysroot, 'usr/lib/debug',
                                  self.inf_cmd.lstrip('/'))
        debug_file += '.debug'
        if not os.path.exists(debug_file):
          equery = 'equery-%s' % self.board
          package = cros_build_lib.RunCommand([equery, '-q', 'b',
                                               self.inf_cmd],
                                              capture_output=True).output
          logging.info(self._MISSING_DEBUG_INFO_MSG % {
              'board': self.board,
              'inf_cmd': self.inf_cmd,
              'package': package,
              'debug_file': debug_file})
          answer = cros_build_lib.BooleanPrompt()
          if not answer:
            raise GdbEarlyExitError('Exiting early, at user request.')

    # Set up qemu, if appropriate.
    qemu_arch = qemu.Qemu.DetectArch(self._GDB, self.sysroot)
    if qemu_arch is None:
      self.framework = 'ldso'
    else:
      self.framework = 'qemu'
      self.qemu = qemu.Qemu(self.sysroot, arch=qemu_arch)

    if self.remote:
      # Verify cgdb flag info.
      if self.cgdb:
        if osutils.Which('cgdb') is None:
          raise GdbMissingDebuggerError('Cannot find cgdb.  Please install '
                                        'cgdb first.')

  def RemoveSysrootPrefix(self, path):
    """Returns the given path with any sysroot prefix removed."""
    # If the sysroot is /, then the paths are already normalized.
    if self.sysroot != '/' and path.startswith(self.sysroot):
      path = path.replace(self.sysroot, '', 1)
    return path

  @staticmethod
  def GetNonRootAccount():
    """Return details about the non-root account we want to use.

    Returns:
      A tuple of (username, uid, gid, home).
    """
    return (
        os.environ.get('SUDO_USER', 'nobody'),
        int(os.environ.get('SUDO_UID', '65534')),
        int(os.environ.get('SUDO_GID', '65534')),
        # Should we find a better home?
        '/tmp/portage',
    )

  @staticmethod
  @contextlib.contextmanager
  def LockDb(db):
    """Lock an account database.

    We use the same algorithm as shadow/user.eclass.  This way we don't race
    and corrupt things in parallel.
    """
    lock = '%s.lock' % db
    _, tmplock = tempfile.mkstemp(prefix='%s.platform.' % lock)

    # First try forever to grab the lock.
    retry = lambda e: e.errno == errno.EEXIST
    # Retry quickly at first, but slow down over time.
    try:
      retry_util.GenericRetry(retry, 60, os.link, tmplock, lock, sleep=0.1)
    except Exception as e:
      raise Exception('Could not grab lock %s. %s' % (lock, e))

    # Yield while holding the lock, but try to clean it no matter what.
    try:
      os.unlink(tmplock)
      yield lock
    finally:
      os.unlink(lock)

  def SetupUser(self):
    """Propagate the user name<->id mapping from outside the chroot.

    Some unittests use getpwnam($USER), as does bash.  If the account
    is not registered in the sysroot, they get back errors.
    """
    MAGIC_GECOS = 'Added by your friendly platform test helper; do not modify'
    # This is kept in sync with what sdk_lib/make_chroot.sh generates.
    SDK_GECOS = 'ChromeOS Developer'

    user, uid, gid, home = self.GetNonRootAccount()
    if user == 'nobody':
      return

    passwd_db = os.path.join(self.sysroot, 'etc', 'passwd')
    with self.LockDb(passwd_db):
      data = osutils.ReadFile(passwd_db)
      accts = data.splitlines()
      for acct in accts:
        passwd = acct.split(':')
        if passwd[0] == user:
          # Did the sdk make this account?
          if passwd[4] == SDK_GECOS:
            # Don't modify it (see below) since we didn't create it.
            return

          # Did we make this account?
          if passwd[4] != MAGIC_GECOS:
            raise RuntimeError('your passwd db (%s) has unmanaged acct %s' %
                               (passwd_db, user))

          # Maybe we should see if it needs to be updated?  Like if they
          # changed UIDs?  But we don't really check that elsewhere ...
          return

      acct = '%(name)s:x:%(uid)s:%(gid)s:%(gecos)s:%(homedir)s:%(shell)s' % {
          'name': user,
          'uid': uid,
          'gid': gid,
          'gecos': MAGIC_GECOS,
          'homedir': home,
          'shell': '/bin/bash',
      }
      with open(passwd_db, 'a') as f:
        if data[-1] != '\n':
          f.write('\n')
        f.write('%s\n' % acct)

  def _FindRemoteProcess(self, device):
    """Find a named process (or a pid) running on a remote device."""
    if not self.remote_process_name and not self.pid:
      return

    if self.remote_process_name:
      # Look for a process with the specified name on the remote device; if
      # found, get its pid.
      pname = self.remote_process_name
      if pname == 'browser':
        # The main browser process is a chrome binary that is neither the
        # sandbox nor one of the "type=..." subprocesses (renderer/gpu/etc.).
        all_chrome_pids = set(device.GetRunningPids(
            '/opt/google/chrome/chrome'))
        sandbox_pids = set(device.GetRunningPids(
            '/opt/google/chrome/chrome-sandbox'))
        non_main_chrome_pids = set(device.GetRunningPids('type='))
        pids = list(all_chrome_pids - sandbox_pids - non_main_chrome_pids)
      elif pname == 'renderer' or pname == 'gpu-process':
        pids = device.GetRunningPids('type=%s'% pname)
      else:
        pids = device.GetRunningPids(pname)

      if pids:
        if len(pids) == 1:
          self.pid = pids[0]
        else:
          raise GdbTooManyPidsError('Multiple pids found for %s process: %s. '
                                    'You must specify the correct pid.'
                                    % (pname, repr(pids)))
      else:
        raise GdbCannotFindRemoteProcessError('Cannot find pid for "%s" on %s' %
                                              (pname, self.remote))

    # Find full path for process, from pid (and verify pid).
    command = [
        'readlink',
        '-e', '/proc/%s/exe' % self.pid,
    ]
    try:
      res = device.RunCommand(command, capture_output=True)
      if res.returncode == 0:
        self.inf_cmd = res.output.rstrip('\n')
    except cros_build_lib.RunCommandError:
      raise GdbCannotFindRemoteProcessError('Unable to find name of process '
                                            'with pid %s on %s' %
                                            (self.pid, self.remote))

  def GetCrossGdb(self):
    """Find the appropriate cross-version of gdb for the board."""
    toolchains = toolchain.GetToolchainsForBoard(self.board)
    # Wrap in list() so this keeps working on Python 3, where .keys()
    # returns a non-indexable view.
    tc = list(toolchain.FilterToolchains(toolchains, 'default', True).keys())
    cross_gdb = tc[0] + '-gdb'
    if not osutils.Which(cross_gdb):
      raise GdbMissingDebuggerError('Cannot find %s; do you need to run '
                                    'setup_board?' % cross_gdb)
    return cross_gdb

  def StartGdbserver(self, inf_cmd, device):
    """Set up and start gdbserver running on remote."""

    # Generate appropriate gdbserver command.
    command = ['gdbserver']
    if self.pid:
      # Attach to an existing process.
      command += [
          '--attach',
          'localhost:%s' % self.gdbserver_port,
          '%s' % self.pid,
      ]
    elif inf_cmd:
      # Start executing a new process.
      command += ['localhost:%s' % self.gdbserver_port, inf_cmd] + self.inf_args

    # Tunnel the gdbserver port back over the ssh connection so the local
    # gdb can reach it at localhost:<port>.
    self.ssh_settings.append('-n')
    self.ssh_settings.append('-L%s:localhost:%s' %
                             (self.gdbserver_port, self.gdbserver_port))
    return device.RunCommand(command,
                             connect_settings=self.ssh_settings,
                             input=open('/dev/null')).returncode

  def GetGdbInitCommands(self, inferior_cmd):
    """Generate list of commands with which to initialize the gdb session."""
    gdb_init_commands = []

    if self.remote:
      sysroot_var = self.sysroot
    else:
      # Run() has already chroot()ed into the sysroot, so paths are plain.
      sysroot_var = '/'

    gdb_init_commands = [
        'set sysroot %s' % sysroot_var,
        'set solib-absolute-prefix %s' % sysroot_var,
        'set solib-search-path %s' % sysroot_var,
        'set debug-file-directory %s/usr/lib/debug' % sysroot_var,
        'set prompt %s' % self.prompt,
    ]

    if self.remote:
      if inferior_cmd and not inferior_cmd.startswith(self.sysroot):
        inferior_cmd = os.path.join(self.sysroot, inferior_cmd.lstrip('/'))

      if inferior_cmd:
        gdb_init_commands.append('file %s' % inferior_cmd)
      gdb_init_commands.append('target remote localhost:%s' %
                               self.gdbserver_port)
    else:
      if inferior_cmd:
        gdb_init_commands.append('file %s ' % inferior_cmd)
        gdb_init_commands.append('set args %s' % ' '.join(self.inf_args))

    return gdb_init_commands

  def RunRemote(self):
    """Handle remote debugging, via gdbserver & cross debugger."""
    device = None
    try:
      device = remote_access.ChromiumOSDeviceHandler(
          self.remote,
          port=self.remote_port,
          connect_settings=self.ssh_settings,
          ping=self.ping).device
    except remote_access.DeviceNotPingableError:
      raise GdbBadRemoteDeviceError('Remote device %s is not responding to '
                                    'ping.' % self.remote)

    self.VerifyAndFinishInitialization(device)
    gdb_cmd = self.cross_gdb

    gdb_commands = self.GetGdbInitCommands(self.inf_cmd)
    gdb_args = [gdb_cmd, '--quiet'] + ['--eval-command=%s' % x
                                       for x in gdb_commands]
    if self.cgdb:
      gdb_args = ['cgdb'] + gdb_args

    with parallel.BackgroundTaskRunner(self.StartGdbserver,
                                       self.inf_cmd,
                                       device) as task:
      task.put([])
      # Verify that gdbserver finished launching.
      # NOTE(review): the success predicate here is len(x) == 0, i.e. this
      # waits until *no* 'gdbserver' pids are reported on the device.  That
      # looks inverted for "did gdbserver start" -- confirm intent before
      # relying on this check.
      try:
        timeout_util.WaitForSuccess(
            lambda x: len(x) == 0, self.device.GetRunningPids,
            4, func_args=('gdbserver',))
      except timeout_util.TimeoutError:
        raise GdbUnableToStartGdbserverError('gdbserver did not start on'
                                             ' remote device.')
      cros_build_lib.RunCommand(gdb_args)

  def Run(self):
    """Runs the debugger in a proper environment (e.g. qemu)."""

    self.VerifyAndFinishInitialization(None)
    self.SetupUser()
    if self.framework == 'qemu':
      self.qemu.Install(self.sysroot)
      self.qemu.RegisterBinfmt()

    for mount in self._BIND_MOUNT_PATHS:
      path = os.path.join(self.sysroot, mount)
      osutils.SafeMakedirs(path)
      osutils.Mount('/' + mount, path, 'none', osutils.MS_BIND)

    gdb_cmd = self._GDB
    inferior_cmd = self.inf_cmd

    gdb_argv = self.gdb_args[:]
    if gdb_argv:
      gdb_argv[0] = self.RemoveSysrootPrefix(gdb_argv[0])
    # Some programs expect to find data files via $CWD, so doing a chroot
    # and dropping them into / would make them fail.
    cwd = self.RemoveSysrootPrefix(os.getcwd())

    os.chroot(self.sysroot)
    os.chdir(cwd)
    # The TERM the user is leveraging might not exist in the sysroot.
    # Force a sane default that supports standard color sequences.
    os.environ['TERM'] = 'ansi'
    # Some progs want this like bash else they get super confused.
    os.environ['PWD'] = cwd
    if not self.run_as_root:
      _, uid, gid, home = self.GetNonRootAccount()
      os.setgid(gid)
      os.setuid(uid)
      os.environ['HOME'] = home

    gdb_commands = self.GetGdbInitCommands(inferior_cmd)

    gdb_args = [gdb_cmd, '--quiet'] + ['--eval-command=%s' % x
                                       for x in gdb_commands]
    gdb_args += self.gdb_args

    # execvp() replaces this process; sys.exit is only reached on failure.
    sys.exit(os.execvp(gdb_cmd, gdb_args))
+
+
def _ReExecuteIfNeeded(argv, ns_net=False, ns_pid=False):
  """Ensure we are running as root, re-executing under sudo if needed.

  Root is required for things like chroot() (for a proper library
  environment) and bind mounts.  Once running as root, unshare the mount
  namespace so bind mounts made for tests do not leak into the normal
  chroot, and the UTS namespace so `hostname` changes cannot affect the
  host.  Optionally unshare net/pid namespaces as requested.
  """
  if os.geteuid() == 0:
    namespaces.SimpleUnshare(net=ns_net, pid=ns_pid)
  else:
    # Not root yet: replace this process with a sudo'ed copy of ourselves.
    os.execvp('sudo', ['sudo', '-E', '--'] + argv)
+
+
def FindInferior(arg_list):
  """Look for the name of the inferior (to be debugged) in arg list.

  Args:
    arg_list: List of command line argument strings.

  Returns:
    A (program_name, new_list) tuple: program_name is the single non-flag
    entry found in |arg_list| ('' if none), new_list holds the remaining
    (flag) arguments in order.

  Raises:
    RuntimeError: If more than one non-flag argument is found.
  """
  program_name = ''
  new_list = []
  for item in arg_list:
    # Use startswith() rather than item[0] so an empty-string argument
    # cannot raise IndexError.
    if item.startswith('-'):
      new_list.append(item)
    elif not program_name:
      program_name = item
    else:
      raise RuntimeError('Found multiple program names: %s  %s'
                         % (program_name, item))

  return program_name, new_list
+
+
def main(argv):
  """Entry point: parse flags, sanity-check them, and launch gdb."""

  parser = commandline.ArgumentParser(description=__doc__)

  parser.add_argument('--board', default=None,
                      help='board to debug for')
  parser.add_argument('-g', '--gdb_args', action='append', default=[],
                      help='Arguments to gdb itself.  If multiple arguments are'
                      ' passed, each argument needs a separate \'-g\' flag.')
  parser.add_argument(
      '--remote', default=None,
      type=commandline.DeviceParser(commandline.DEVICE_SCHEME_SSH),
      help='Remote device on which to run the binary. Use'
      ' "--remote=localhost:9222" to debug in a ChromeOS image in an'
      ' already running local virtual machine.')
  parser.add_argument('--pid', default='',
                      help='Process ID of the (already) running process on the'
                      ' remote device to which to attach.')
  parser.add_argument('--remote_pid', dest='pid', default='',
                      help='Deprecated alias for --pid.')
  parser.add_argument('--no-ping', dest='ping', default=True,
                      action='store_false',
                      help='Do not ping remote before attempting to connect.')
  parser.add_argument('--attach', dest='attach_name', default='',
                      help='Name of existing process to which to attach, on'
                      ' remote device (remote debugging only). "--attach'
                      ' browser" will find the main chrome browser process;'
                      ' "--attach renderer" will find a chrome renderer'
                      ' process; "--attach gpu-process" will find the chrome'
                      ' gpu process.')
  parser.add_argument('--cgdb', default=False,
                      action='store_true',
                      help='Use cgdb curses interface rather than plain gdb.'
                      'This option is only valid for remote debugging.')
  parser.add_argument('inf_args', nargs=argparse.REMAINDER,
                      help='Arguments for gdb to pass to the program being'
                      ' debugged. These are positional and must come at the end'
                      ' of the command line.  This will not work if attaching'
                      ' to an already running program.')

  options = parser.parse_args(argv)
  options.Freeze()

  gdb_args = []
  inf_args = []
  inf_cmd = ''

  # First positional arg is the program to debug; the rest are its args.
  if options.inf_args:
    inf_cmd = options.inf_args[0]
    inf_args = options.inf_args[1:]

  if options.gdb_args:
    gdb_args = options.gdb_args

  if inf_cmd:
    fname = os.path.join(cros_build_lib.GetSysroot(options.board),
                         inf_cmd.lstrip('/'))
    if not os.path.exists(fname):
      cros_build_lib.Die('Cannot find program %s.' % fname)
  else:
    if inf_args:
      parser.error('Cannot specify arguments without a program.')

  # Name the real flags (--pid / --attach); there is no --remote-pid flag.
  if inf_args and (options.pid or options.attach_name):
    parser.error('Cannot pass arguments to an already'
                 ' running process (--pid or --attach).')

  if options.remote:
    if not options.pid and not inf_cmd and not options.attach_name:
      parser.error('Must specify a program to start or a pid to attach '
                   'to on the remote device.')
    if options.attach_name and options.attach_name == 'browser':
      inf_cmd = '/opt/google/chrome/chrome'
  else:
    if options.cgdb:
      parser.error('--cgdb option can only be used with remote debugging.')
    if options.pid:
      parser.error('Must specify a remote device (--remote) if you want '
                   'to attach to a remote pid.')
    if options.attach_name:
      parser.error('Must specify remote device (--remote) when using'
                   ' --attach option.')

  # Once we've finished sanity checking args, make sure we're root.
  if not options.remote:
    _ReExecuteIfNeeded([sys.argv[0]] + argv)

  gdb = BoardSpecificGdb(options.board, gdb_args, inf_cmd, inf_args,
                         options.remote, options.pid, options.attach_name,
                         options.cgdb, options.ping)

  try:
    if options.remote:
      gdb.RunRemote()
    else:
      gdb.Run()

  except GdbException as e:
    if options.debug:
      raise
    else:
      # Die() exits the process; the `raise` is never reached but keeps
      # control-flow analysis happy.
      raise cros_build_lib.Die(str(e))
diff --git a/scripts/cros_generate_breakpad_symbols.py b/scripts/cros_generate_breakpad_symbols.py
new file mode 100644
index 0000000..2c7606b
--- /dev/null
+++ b/scripts/cros_generate_breakpad_symbols.py
@@ -0,0 +1,337 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate minidump symbols for use by the Crash server.
+
+Note: This should be run inside the chroot.
+
+This produces files in the breakpad format required by minidump_stackwalk and
+the crash server to dump stack information.
+
+Basically it scans all the split .debug files in /build/$BOARD/usr/lib/debug/
+and converts them over using the `dump_syms` programs.  Those plain text .sym
+files are then stored in /build/$BOARD/usr/lib/debug/breakpad/.
+
+If you want to actually upload things, see upload_symbols.py.
+"""
+
+from __future__ import print_function
+
+import collections
+import ctypes
+import multiprocessing
+import os
+import tempfile
+
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import signals
+
+
# Parsed fields of the MODULE record on the first line of a breakpad symbol
# file, e.g. "MODULE Linux arm F4F6FA6CCBDEF455039C8DE869C8A2F40 blkid"
# (see ReadSymsHeader below).
SymbolHeader = collections.namedtuple('SymbolHeader',
                                      ('cpu', 'id', 'name', 'os',))
+
+
def ReadSymsHeader(sym_file):
  """Parse the header of the symbol file

  The first line of the syms file will read like:
    MODULE Linux arm F4F6FA6CCBDEF455039C8DE869C8A2F40 blkid

  https://code.google.com/p/google-breakpad/wiki/SymbolFiles

  Args:
    sym_file: The symbol file to parse

  Returns:
    A SymbolHeader object

  Raises:
    ValueError if the first line of |sym_file| is invalid
  """
  with cros_build_lib.Open(sym_file) as f:
    header = f.readline().split()

  # Check the field count before indexing so an empty or short first line
  # raises the documented ValueError instead of an IndexError.
  if len(header) != 5 or header[0] != 'MODULE':
    raise ValueError('header of sym file is invalid')

  return SymbolHeader(os=header[1], cpu=header[2], id=header[3], name=header[4])
+
+
def GenerateBreakpadSymbol(elf_file, debug_file=None, breakpad_dir=None,
                           board=None, strip_cfi=False, num_errors=None):
  """Generate the symbols for |elf_file| using |debug_file|

  Args:
    elf_file: The file to dump symbols for
    debug_file: Split debug file to use for symbol information
    breakpad_dir: The dir to store the output symbol file in
    board: If |breakpad_dir| is not specified, use |board| to find it
    strip_cfi: Do not generate CFI data
    num_errors: An object to update with the error count (needs a .value member)

  Returns:
    The number of errors that were encountered.
  """
  if breakpad_dir is None:
    breakpad_dir = FindBreakpadDir(board)
  if num_errors is None:
    # Local counter with the same .value interface as multiprocessing.Value.
    num_errors = ctypes.c_int()

  cmd_base = ['dump_syms']
  if strip_cfi:
    cmd_base += ['-c']
  # Some files will not be readable by non-root (e.g. set*id /bin/su).
  needs_sudo = not os.access(elf_file, os.R_OK)

  def _DumpIt(cmd_args):
    # Runs dump_syms with stdout redirected into |temp| (the named temp
    # file created in the `with` block below; this closure is only called
    # inside that block, so |temp| is always bound by then).
    if needs_sudo:
      run_command = cros_build_lib.SudoRunCommand
    else:
      run_command = cros_build_lib.RunCommand
    return run_command(
        cmd_base + cmd_args, redirect_stderr=True, log_stdout_to_file=temp.name,
        error_code_ok=True, debug_level=logging.DEBUG)

  def _CrashCheck(ret, msg):
    # A negative returncode means dump_syms was killed by that signal.
    if ret < 0:
      logging.PrintBuildbotStepWarnings()
      logging.warning('dump_syms crashed with %s; %s',
                      signals.StrSignal(-ret), msg)

  osutils.SafeMakedirs(breakpad_dir)
  # bufsize=0 keeps the temp file unbuffered (Python 2 NamedTemporaryFile
  # argument) so ReadSymsHeader below sees the flushed output.
  with tempfile.NamedTemporaryFile(dir=breakpad_dir, bufsize=0) as temp:
    if debug_file:
      # Try to dump the symbols using the debug file like normal.
      cmd_args = [elf_file, os.path.dirname(debug_file)]
      result = _DumpIt(cmd_args)

      if result.returncode:
        # Sometimes dump_syms can crash because there's too much info.
        # Try dumping and stripping the extended stuff out.  At least
        # this way we'll get the extended symbols.  http://crbug.com/266064
        _CrashCheck(result.returncode, 'retrying w/out CFI')
        cmd_args = ['-c', '-r'] + cmd_args
        result = _DumpIt(cmd_args)
        _CrashCheck(result.returncode, 'retrying w/out debug')

      # Nonzero returncode here means fall through to the basic dump below.
      basic_dump = result.returncode
    else:
      basic_dump = True

    if basic_dump:
      # If that didn't work (no debug, or dump_syms still failed), try
      # dumping just the file itself directly.
      result = _DumpIt([elf_file])
      if result.returncode:
        # A lot of files (like kernel files) contain no debug information,
        # do not consider such occurrences as errors.
        logging.PrintBuildbotStepWarnings()
        _CrashCheck(result.returncode, 'giving up entirely')
        if 'file contains no debugging information' in result.error:
          logging.warning('no symbols found for %s', elf_file)
        else:
          num_errors.value += 1
          logging.error('dumping symbols for %s failed:\n%s', elf_file,
                        result.error)
        return num_errors.value

    # Move the dumped symbol file to the right place:
    # /build/$BOARD/usr/lib/debug/breakpad/<module-name>/<id>/<module-name>.sym
    header = ReadSymsHeader(temp)
    logging.info('Dumped %s as %s : %s', elf_file, header.name, header.id)
    sym_file = os.path.join(breakpad_dir, header.name, header.id,
                            header.name + '.sym')
    osutils.SafeMakedirs(os.path.dirname(sym_file))
    os.rename(temp.name, sym_file)
    os.chmod(sym_file, 0o644)
    # The file was renamed away; stop NamedTemporaryFile from deleting it
    # (and erroring) when the `with` block exits.
    temp.delete = False

  return num_errors.value
+
+
def GenerateBreakpadSymbols(board, breakpad_dir=None, strip_cfi=False,
                            generate_count=None, sysroot=None,
                            num_processes=None, clean_breakpad=False,
                            exclude_dirs=(), file_list=None):
  """Generate symbols for this board.

  If |file_list| is None, symbols are generated for all executables, otherwise
  only for the files included in |file_list|.

  TODO(build):
  This should be merged with buildbot_commands.GenerateBreakpadSymbols()
  once we rewrite cros_generate_breakpad_symbols in python.

  Args:
    board: The board whose symbols we wish to generate
    breakpad_dir: The full path to the breakpad directory where symbols live
    strip_cfi: Do not generate CFI data
    generate_count: If set, only generate this many symbols (meant for testing)
    sysroot: The root where to find the corresponding ELFs
    num_processes: Number of jobs to run in parallel
    clean_breakpad: Should we `rm -rf` the breakpad output dir first; note: we
      do not do any locking, so do not run more than one in parallel when True
    exclude_dirs: List of dirs (relative to |sysroot|) to not search
    file_list: Only generate symbols for files in this list. Each file must be a
      full path (including |sysroot| prefix).
      TODO(build): Support paths w/o |sysroot|.

  Returns:
    The number of errors that were encountered.
  """
  if breakpad_dir is None:
    breakpad_dir = FindBreakpadDir(board)
  if sysroot is None:
    sysroot = cros_build_lib.GetSysroot(board=board)
  if clean_breakpad:
    logging.info('cleaning out %s first', breakpad_dir)
    osutils.RmDir(breakpad_dir, ignore_missing=True, sudo=True)
  # Make sure non-root can write out symbols as needed.
  osutils.SafeMakedirs(breakpad_dir, sudo=True)
  if not os.access(breakpad_dir, os.W_OK):
    cros_build_lib.SudoRunCommand(['chown', '-R', str(os.getuid()),
                                   breakpad_dir])
  debug_dir = FindDebugDir(board)
  exclude_paths = [os.path.join(debug_dir, x) for x in exclude_dirs]
  if file_list is None:
    file_list = []
  # Map of requested file -> whether we actually found it on disk.
  file_filter = dict.fromkeys([os.path.normpath(x) for x in file_list], False)

  logging.info('generating breakpad symbols using %s', debug_dir)

  # Let's locate all the debug_files and elfs first along with the debug file
  # sizes.  This way we can start processing the largest files first in parallel
  # with the small ones.
  # If |file_list| was given, ignore all other files.
  targets = []
  for root, dirs, files in os.walk(debug_dir):
    if root in exclude_paths:
      logging.info('Skipping excluded dir %s', root)
      del dirs[:]
      continue

    for debug_file in files:
      debug_file = os.path.join(root, debug_file)
      # Turn /build/$BOARD/usr/lib/debug/sbin/foo.debug into
      # /build/$BOARD/sbin/foo.
      elf_file = os.path.join(sysroot, debug_file[len(debug_dir) + 1:-6])

      if file_filter:
        if elf_file in file_filter:
          file_filter[elf_file] = True
        elif debug_file in file_filter:
          file_filter[debug_file] = True
        else:
          continue

      # Filter out files based on common issues with the debug file.
      if not debug_file.endswith('.debug'):
        continue

      elif debug_file.endswith('.ko.debug'):
        logging.debug('Skipping kernel module %s', debug_file)
        continue

      elif os.path.islink(debug_file):
        # The build-id stuff is common enough to filter out by default.
        if '/.build-id/' in debug_file:
          msg = logging.debug
        else:
          msg = logging.warning
        msg('Skipping symbolic link %s', debug_file)
        continue

      # Filter out files based on common issues with the elf file.
      if not os.path.exists(elf_file):
        # Sometimes we filter out programs from /usr/bin but leave behind
        # the .debug file.
        logging.warning('Skipping missing %s', elf_file)
        continue

      targets.append((os.path.getsize(debug_file), elf_file, debug_file))

  bg_errors = multiprocessing.Value('i')
  if file_filter:
    # items() (not the Python-2-only iteritems()) so this also runs on py3.
    files_not_found = [x for x, found in file_filter.items() if not found]
    bg_errors.value += len(files_not_found)
    if files_not_found:
      logging.error('Failed to find requested files: %s', files_not_found)

  # Now start generating symbols for the discovered elfs.
  with parallel.BackgroundTaskRunner(GenerateBreakpadSymbol,
                                     breakpad_dir=breakpad_dir, board=board,
                                     strip_cfi=strip_cfi,
                                     num_errors=bg_errors,
                                     processes=num_processes) as queue:
    # Largest debug files first (tuples sort on the size element).
    for _, elf_file, debug_file in sorted(targets, reverse=True):
      if generate_count == 0:
        break

      queue.put([elf_file, debug_file])
      if generate_count is not None:
        generate_count -= 1
        if generate_count == 0:
          break

  return bg_errors.value
+
+
+def FindDebugDir(board):
+  """Given a |board|, return the path to the split debug dir for it"""
+  sysroot = cros_build_lib.GetSysroot(board=board)
+  return os.path.join(sysroot, 'usr', 'lib', 'debug')
+
+
+def FindBreakpadDir(board):
+  """Given a |board|, return the path to the breakpad dir for it"""
+  return os.path.join(FindDebugDir(board), 'breakpad')
+
+
+def main(argv):
+  parser = commandline.ArgumentParser(description=__doc__)
+
+  parser.add_argument('--board', default=None,
+                      help='board to generate symbols for')
+  parser.add_argument('--breakpad_root', type='path', default=None,
+                      help='root directory for breakpad symbols')
+  parser.add_argument('--exclude-dir', type=str, action='append',
+                      default=[],
+                      help='directory (relative to |board| root) to not search')
+  parser.add_argument('--generate-count', type=int, default=None,
+                      help='only generate # number of symbols')
+  parser.add_argument('--noclean', dest='clean', action='store_false',
+                      default=True,
+                      help='do not clean out breakpad dir before running')
+  parser.add_argument('--jobs', type=int, default=None,
+                      help='limit number of parallel jobs')
+  parser.add_argument('--strip_cfi', action='store_true', default=False,
+                      help='do not generate CFI data (pass -c to dump_syms)')
+  parser.add_argument('file_list', nargs='*', default=None,
+                      help='generate symbols for only these files '
+                           '(e.g. /build/$BOARD/usr/bin/foo)')
+
+  opts = parser.parse_args(argv)
+  opts.Freeze()
+
+  if opts.board is None:
+    cros_build_lib.Die('--board is required')
+
+  ret = GenerateBreakpadSymbols(opts.board, breakpad_dir=opts.breakpad_root,
+                                strip_cfi=opts.strip_cfi,
+                                generate_count=opts.generate_count,
+                                num_processes=opts.jobs,
+                                clean_breakpad=opts.clean,
+                                exclude_dirs=opts.exclude_dir,
+                                file_list=opts.file_list)
+  if ret:
+    logging.error('encountered %i problem(s)', ret)
+    # Since exit(status) gets masked, clamp it to 1 so we don't inadvertently
+    # return 0 in case we are a multiple of the mask.
+    ret = 1
+
+  return ret
diff --git a/scripts/cros_generate_breakpad_symbols_unittest b/scripts/cros_generate_breakpad_symbols_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/cros_generate_breakpad_symbols_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/cros_generate_breakpad_symbols_unittest.py b/scripts/cros_generate_breakpad_symbols_unittest.py
new file mode 100644
index 0000000..9c27c50
--- /dev/null
+++ b/scripts/cros_generate_breakpad_symbols_unittest.py
@@ -0,0 +1,383 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test cros_generate_breakpad_symbols."""
+
+from __future__ import print_function
+
+import ctypes
+import mock
+import os
+import StringIO
+
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import parallel_unittest
+from chromite.lib import partial_mock
+from chromite.scripts import cros_generate_breakpad_symbols
+
+
+class FindDebugDirMock(partial_mock.PartialMock):
+  """Mock out the DebugDir helper so we can point it to a tempdir."""
+
+  TARGET = 'chromite.scripts.cros_generate_breakpad_symbols'
+  ATTRS = ('FindDebugDir',)
+  DEFAULT_ATTR = 'FindDebugDir'
+
+  def __init__(self, path, *args, **kwargs):
+    self.path = path
+    super(FindDebugDirMock, self).__init__(*args, **kwargs)
+
+  def FindDebugDir(self, _board):
+    return self.path
+
+
+@mock.patch('chromite.scripts.cros_generate_breakpad_symbols.'
+            'GenerateBreakpadSymbol')
+class GenerateSymbolsTest(cros_test_lib.MockTempDirTestCase):
+  """Test GenerateBreakpadSymbols."""
+
+  def setUp(self):
+    self.board = 'monkey-board'
+    self.board_dir = os.path.join(self.tempdir, 'build', self.board)
+    self.debug_dir = os.path.join(self.board_dir, 'usr', 'lib', 'debug')
+    self.breakpad_dir = os.path.join(self.debug_dir, 'breakpad')
+
+    # Generate a tree of files which we'll scan through.
+    elf_files = [
+        'bin/elf',
+        'iii/large-elf',
+        # Need some kernel modules (with & without matching .debug).
+        'lib/modules/3.10/module.ko',
+        'lib/modules/3.10/module-no-debug.ko',
+        # Need a file which has an ELF only, but not a .debug.
+        'usr/bin/elf-only',
+        'usr/sbin/elf',
+    ]
+    debug_files = [
+        'bin/bad-file',
+        'bin/elf.debug',
+        'iii/large-elf.debug',
+        'lib/modules/3.10/module.ko.debug',
+        # Need a file which has a .debug only, but not an ELF.
+        'sbin/debug-only.debug',
+        'usr/sbin/elf.debug',
+    ]
+    for f in ([os.path.join(self.board_dir, x) for x in elf_files] +
+              [os.path.join(self.debug_dir, x) for x in debug_files]):
+      osutils.Touch(f, makedirs=True)
+
+    # Set up random build dirs and symlinks.
+    buildid = os.path.join(self.debug_dir, '.build-id', '00')
+    osutils.SafeMakedirs(buildid)
+    os.symlink('/asdf', os.path.join(buildid, 'foo'))
+    os.symlink('/bin/sh', os.path.join(buildid, 'foo.debug'))
+    os.symlink('/bin/sh', os.path.join(self.debug_dir, 'file.debug'))
+    osutils.WriteFile(os.path.join(self.debug_dir, 'iii', 'large-elf.debug'),
+                      'just some content')
+
+    self.StartPatcher(FindDebugDirMock(self.debug_dir))
+
+  def testNormal(self, gen_mock):
+    """Verify all the files we expect to get generated do"""
+    with parallel_unittest.ParallelMock():
+      ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbols(
+          self.board, sysroot=self.board_dir)
+      self.assertEquals(ret, 0)
+      self.assertEquals(gen_mock.call_count, 3)
+
+      # The largest ELF should be processed first.
+      call1 = (os.path.join(self.board_dir, 'iii/large-elf'),
+               os.path.join(self.debug_dir, 'iii/large-elf.debug'))
+      self.assertEquals(gen_mock.call_args_list[0][0], call1)
+
+      # The other ELFs can be called in any order.
+      call2 = (os.path.join(self.board_dir, 'bin/elf'),
+               os.path.join(self.debug_dir, 'bin/elf.debug'))
+      call3 = (os.path.join(self.board_dir, 'usr/sbin/elf'),
+               os.path.join(self.debug_dir, 'usr/sbin/elf.debug'))
+      exp_calls = set((call2, call3))
+      actual_calls = set((gen_mock.call_args_list[1][0],
+                          gen_mock.call_args_list[2][0]))
+      self.assertEquals(exp_calls, actual_calls)
+
+  def testFileList(self, gen_mock):
+    """Verify that file_list restricts the symbols generated"""
+    with parallel_unittest.ParallelMock():
+      call1 = (os.path.join(self.board_dir, 'usr/sbin/elf'),
+               os.path.join(self.debug_dir, 'usr/sbin/elf.debug'))
+
+      # Filter with elf path.
+      ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbols(
+          self.board, sysroot=self.board_dir, breakpad_dir=self.breakpad_dir,
+          file_list=[os.path.join(self.board_dir, 'usr', 'sbin', 'elf')])
+      self.assertEquals(ret, 0)
+      self.assertEquals(gen_mock.call_count, 1)
+      self.assertEquals(gen_mock.call_args_list[0][0], call1)
+
+      # Filter with debug symbols file path.
+      gen_mock.reset_mock()
+      ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbols(
+          self.board, sysroot=self.board_dir, breakpad_dir=self.breakpad_dir,
+          file_list=[os.path.join(self.debug_dir, 'usr', 'sbin', 'elf.debug')])
+      self.assertEquals(ret, 0)
+      self.assertEquals(gen_mock.call_count, 1)
+      self.assertEquals(gen_mock.call_args_list[0][0], call1)
+
+
+  def testGenLimit(self, gen_mock):
+    """Verify generate_count arg works"""
+    with parallel_unittest.ParallelMock():
+      # Generate nothing!
+      ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbols(
+          self.board, sysroot=self.board_dir, breakpad_dir=self.breakpad_dir,
+          generate_count=0)
+      self.assertEquals(ret, 0)
+      self.assertEquals(gen_mock.call_count, 0)
+
+      # Generate just one.
+      ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbols(
+          self.board, sysroot=self.board_dir, breakpad_dir=self.breakpad_dir,
+          generate_count=1)
+      self.assertEquals(ret, 0)
+      self.assertEquals(gen_mock.call_count, 1)
+
+      # The largest ELF should be processed first.
+      call1 = (os.path.join(self.board_dir, 'iii/large-elf'),
+               os.path.join(self.debug_dir, 'iii/large-elf.debug'))
+      self.assertEquals(gen_mock.call_args_list[0][0], call1)
+
+  def testGenErrors(self, gen_mock):
+    """Verify we handle errors from generation correctly"""
+    def _SetError(*_args, **kwargs):
+      kwargs['num_errors'].value += 1
+      return 1
+    gen_mock.side_effect = _SetError
+    with parallel_unittest.ParallelMock():
+      ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbols(
+          self.board, sysroot=self.board_dir)
+      self.assertEquals(ret, 3)
+      self.assertEquals(gen_mock.call_count, 3)
+
+  def testCleaningTrue(self, gen_mock):
+    """Verify behavior of clean_breakpad=True"""
+    with parallel_unittest.ParallelMock():
+      # Dir does not exist, and then does.
+      self.assertNotExists(self.breakpad_dir)
+      ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbols(
+          self.board, sysroot=self.board_dir, generate_count=1,
+          clean_breakpad=True)
+      self.assertEquals(ret, 0)
+      self.assertEquals(gen_mock.call_count, 1)
+      self.assertExists(self.breakpad_dir)
+
+      # Dir exists before & after.
+      # File exists, but then doesn't.
+      dummy_file = os.path.join(self.breakpad_dir, 'fooooooooo')
+      osutils.Touch(dummy_file)
+      ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbols(
+          self.board, sysroot=self.board_dir, generate_count=1,
+          clean_breakpad=True)
+      self.assertEquals(ret, 0)
+      self.assertEquals(gen_mock.call_count, 2)
+      self.assertNotExists(dummy_file)
+
+  def testCleaningFalse(self, gen_mock):
+    """Verify behavior of clean_breakpad=False"""
+    with parallel_unittest.ParallelMock():
+      # Dir does not exist, and then does.
+      self.assertNotExists(self.breakpad_dir)
+      ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbols(
+          self.board, sysroot=self.board_dir, generate_count=1,
+          clean_breakpad=False)
+      self.assertEquals(ret, 0)
+      self.assertEquals(gen_mock.call_count, 1)
+      self.assertExists(self.breakpad_dir)
+
+      # Dir exists before & after.
+      # File exists before & after.
+      dummy_file = os.path.join(self.breakpad_dir, 'fooooooooo')
+      osutils.Touch(dummy_file)
+      ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbols(
+          self.board, sysroot=self.board_dir, generate_count=1,
+          clean_breakpad=False)
+      self.assertEquals(ret, 0)
+      self.assertEquals(gen_mock.call_count, 2)
+      self.assertExists(dummy_file)
+
+  def testExclusionList(self, gen_mock):
+    """Verify files in directories of the exclusion list are excluded"""
+    exclude_dirs = ['bin', 'usr', 'fake/dir/fake']
+    with parallel_unittest.ParallelMock():
+      ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbols(
+          self.board, sysroot=self.board_dir, exclude_dirs=exclude_dirs)
+      self.assertEquals(ret, 0)
+      self.assertEquals(gen_mock.call_count, 1)
+
+class GenerateSymbolTest(cros_test_lib.MockTempDirTestCase):
+  """Test GenerateBreakpadSymbol."""
+
+  def setUp(self):
+    self.elf_file = os.path.join(self.tempdir, 'elf')
+    osutils.Touch(self.elf_file)
+    self.debug_dir = os.path.join(self.tempdir, 'debug')
+    self.debug_file = os.path.join(self.debug_dir, 'elf.debug')
+    osutils.Touch(self.debug_file, makedirs=True)
+    # Not needed as the code itself should create it as needed.
+    self.breakpad_dir = os.path.join(self.debug_dir, 'breakpad')
+
+    self.rc_mock = self.StartPatcher(cros_build_lib_unittest.RunCommandMock())
+    self.rc_mock.SetDefaultCmdResult(output='MODULE OS CPU ID NAME')
+    self.assertCommandContains = self.rc_mock.assertCommandContains
+    self.sym_file = os.path.join(self.breakpad_dir, 'NAME/ID/NAME.sym')
+
+    self.StartPatcher(FindDebugDirMock(self.debug_dir))
+
+  def assertCommandArgs(self, i, args):
+    """Helper for looking at the args of the |i|th call"""
+    self.assertEqual(self.rc_mock.call_args_list[i][0][0], args)
+
+  def testNormal(self):
+    """Normal run -- given an ELF and a debug file"""
+    ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(
+        self.elf_file, self.debug_file, breakpad_dir=self.breakpad_dir)
+    self.assertEqual(ret, 0)
+    self.assertEqual(self.rc_mock.call_count, 1)
+    self.assertCommandArgs(0, ['dump_syms', self.elf_file, self.debug_dir])
+    self.assertExists(self.sym_file)
+
+  def testNormalBoard(self):
+    """Normal run w/board info but not breakpad dir"""
+    ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(
+        self.elf_file, board='foo')
+    self.assertEqual(ret, 0)
+    self.assertCommandArgs(0, ['dump_syms', self.elf_file])
+    self.assertEqual(self.rc_mock.call_count, 1)
+    self.assertExists(self.sym_file)
+
+  def testNormalNoCfi(self):
+    """Normal run w/out CFI"""
+    # Make sure the num_errors flag works too.
+    num_errors = ctypes.c_int()
+    ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(
+        self.elf_file, strip_cfi=True, num_errors=num_errors)
+    self.assertEqual(ret, 0)
+    self.assertEqual(num_errors.value, 0)
+    self.assertCommandArgs(0, ['dump_syms', '-c', self.elf_file])
+    self.assertEqual(self.rc_mock.call_count, 1)
+    self.assertExists(self.sym_file)
+
+  def testNormalElfOnly(self):
+    """Normal run -- given just an ELF"""
+    ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(self.elf_file)
+    self.assertEqual(ret, 0)
+    self.assertCommandArgs(0, ['dump_syms', self.elf_file])
+    self.assertEqual(self.rc_mock.call_count, 1)
+    self.assertExists(self.sym_file)
+
+  def testNormalSudo(self):
+    """Normal run where ELF is readable only by root"""
+    with mock.patch.object(os, 'access') as mock_access:
+      mock_access.return_value = False
+      ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(self.elf_file)
+      self.assertEqual(ret, 0)
+      self.assertCommandArgs(0, ['sudo', '--', 'dump_syms', self.elf_file])
+
+  def testLargeDebugFail(self):
+    """Running w/large .debug failed, but retry worked"""
+    self.rc_mock.AddCmdResult(['dump_syms', self.elf_file, self.debug_dir],
+                              returncode=1)
+    ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(
+        self.elf_file, self.debug_file)
+    self.assertEqual(ret, 0)
+    self.assertEqual(self.rc_mock.call_count, 2)
+    self.assertCommandArgs(0, ['dump_syms', self.elf_file, self.debug_dir])
+    self.assertCommandArgs(
+        1, ['dump_syms', '-c', '-r', self.elf_file, self.debug_dir])
+    self.assertExists(self.sym_file)
+
+  def testDebugFail(self):
+    """Running w/.debug always failed, but works w/out"""
+    self.rc_mock.AddCmdResult(['dump_syms', self.elf_file, self.debug_dir],
+                              returncode=1)
+    self.rc_mock.AddCmdResult(['dump_syms', '-c', '-r', self.elf_file,
+                               self.debug_dir],
+                              returncode=1)
+    ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(
+        self.elf_file, self.debug_file)
+    self.assertEqual(ret, 0)
+    self.assertEqual(self.rc_mock.call_count, 3)
+    self.assertCommandArgs(0, ['dump_syms', self.elf_file, self.debug_dir])
+    self.assertCommandArgs(
+        1, ['dump_syms', '-c', '-r', self.elf_file, self.debug_dir])
+    self.assertCommandArgs(2, ['dump_syms', self.elf_file])
+    self.assertExists(self.sym_file)
+
+  def testCompleteFail(self):
+    """Running dump_syms always fails"""
+    self.rc_mock.SetDefaultCmdResult(returncode=1)
+    ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(self.elf_file)
+    self.assertEqual(ret, 1)
+    # Make sure the num_errors flag works too.
+    num_errors = ctypes.c_int()
+    ret = cros_generate_breakpad_symbols.GenerateBreakpadSymbol(
+        self.elf_file, num_errors=num_errors)
+    self.assertEqual(ret, 1)
+    self.assertEqual(num_errors.value, 1)
+
+
+class UtilsTestDir(cros_test_lib.TempDirTestCase):
+  """Tests ReadSymsHeader."""
+
+  def testReadSymsHeaderGoodFile(self):
+    """Make sure ReadSymsHeader can parse sym files"""
+    sym_file = os.path.join(self.tempdir, 'sym')
+    osutils.WriteFile(sym_file, 'MODULE Linux x86 s0m31D chrooome')
+    result = cros_generate_breakpad_symbols.ReadSymsHeader(sym_file)
+    self.assertEquals(result.cpu, 'x86')
+    self.assertEquals(result.id, 's0m31D')
+    self.assertEquals(result.name, 'chrooome')
+    self.assertEquals(result.os, 'Linux')
+
+
+class UtilsTest(cros_test_lib.TestCase):
+  """Tests ReadSymsHeader."""
+
+  def testReadSymsHeaderGoodBuffer(self):
+    """Make sure ReadSymsHeader can parse sym file handles"""
+    result = cros_generate_breakpad_symbols.ReadSymsHeader(
+        StringIO.StringIO('MODULE Linux arm MY-ID-HERE blkid'))
+    self.assertEquals(result.cpu, 'arm')
+    self.assertEquals(result.id, 'MY-ID-HERE')
+    self.assertEquals(result.name, 'blkid')
+    self.assertEquals(result.os, 'Linux')
+
+  def testReadSymsHeaderBadd(self):
+    """Make sure ReadSymsHeader throws on bad sym files"""
+    self.assertRaises(ValueError, cros_generate_breakpad_symbols.ReadSymsHeader,
+                      StringIO.StringIO('asdf'))
+
+  def testBreakpadDir(self):
+    """Make sure board->breakpad path expansion works"""
+    expected = '/build/blah/usr/lib/debug/breakpad'
+    result = cros_generate_breakpad_symbols.FindBreakpadDir('blah')
+    self.assertEquals(expected, result)
+
+  def testDebugDir(self):
+    """Make sure board->debug path expansion works"""
+    expected = '/build/blah/usr/lib/debug'
+    result = cros_generate_breakpad_symbols.FindDebugDir('blah')
+    self.assertEquals(expected, result)
+
+
+def main(_argv):
+  # pylint: disable=W0212
+  # Set timeouts small so that if the unit test hangs, it won't hang for long.
+  parallel._BackgroundTask.STARTUP_TIMEOUT = 5
+  parallel._BackgroundTask.EXIT_TIMEOUT = 5
+
+  # Run the tests.
+  cros_test_lib.main(level='info', module=__name__)
diff --git a/scripts/cros_generate_deps_graphs.py b/scripts/cros_generate_deps_graphs.py
new file mode 100644
index 0000000..17244bd
--- /dev/null
+++ b/scripts/cros_generate_deps_graphs.py
@@ -0,0 +1,153 @@
+# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates pretty dependency graphs for Chrome OS packages."""
+
+from __future__ import print_function
+
+import json
+import optparse
+import os
+import sys
+
+from chromite.lib import dot_helper
+
+
+NORMAL_COLOR = 'black'
+TARGET_COLOR = 'red'
+SEED_COLOR = 'green'
+CHILD_COLOR = 'grey'
+
+
+def GetReverseDependencyClosure(full_name, deps_map):
+  """Gets the closure of the reverse dependencies of a node.
+
+  Walks the tree along all the reverse dependency paths to find all the nodes
+  that transitively depend on the input node.
+  """
+  s = set()
+  def GetClosure(name):
+    s.add(name)
+    node = deps_map[name]
+    for dep in node['rev_deps']:
+      if dep in s:
+        continue
+      GetClosure(dep)
+
+  GetClosure(full_name)
+  return s
+
+
+def GetOutputBaseName(node, options):
+  """Gets the basename of the output file for a node."""
+  return '%s_%s-%s.%s' % (node['category'], node['name'], node['version'],
+                          options.format)
+
+
+def AddNodeToSubgraph(subgraph, node, options, color):
+  """Gets the dot definition for a node."""
+  name = node['full_name']
+  href = None
+  if options.link:
+    filename = GetOutputBaseName(node, options)
+    href = '%s%s' % (options.base_url, filename)
+  subgraph.AddNode(name, name, color, href)
+
+
+
+def GenerateDotGraph(package, deps_map, options):
+  """Generates the dot source for the dependency graph leading to a node.
+
+  The output is a list of lines.
+  """
+  deps = GetReverseDependencyClosure(package, deps_map)
+  node = deps_map[package]
+
+  # Keep track of all the emitted nodes so that we don't issue multiple
+  # definitions
+  emitted = set()
+
+  graph = dot_helper.Graph(package)
+
+  # Add all the children if we want them, all of them in their own subgraph,
+  # as a sink. Keep the arcs outside of the subgraph though (it generates
+  # better layout).
+  children_subgraph = None
+  if options.children and node['deps']:
+    children_subgraph = graph.AddNewSubgraph('sink')
+    for child in node['deps']:
+      child_node = deps_map[child]
+      AddNodeToSubgraph(children_subgraph, child_node, options, CHILD_COLOR)
+      emitted.add(child)
+      graph.AddArc(package, child)
+
+  # Add the package in its own subgraph. If we didn't have children, make it
+  # a sink
+  if children_subgraph:
+    rank = 'same'
+  else:
+    rank = 'sink'
+  package_subgraph = graph.AddNewSubgraph(rank)
+  AddNodeToSubgraph(package_subgraph, node, options, TARGET_COLOR)
+  emitted.add(package)
+
+  # Add all the other nodes, as well as all the arcs.
+  for dep in deps:
+    dep_node = deps_map[dep]
+    if not dep in emitted:
+      color = NORMAL_COLOR
+      if dep_node['action'] == 'seed':
+        color = SEED_COLOR
+      AddNodeToSubgraph(graph, dep_node, options, color)
+    for j in dep_node['rev_deps']:
+      graph.AddArc(j, dep)
+
+  return graph.Gen()
+
+
+def GenerateImages(data, options):
+  """Generate the output images for all the nodes in the input."""
+  deps_map = json.loads(data)
+
+  for package in deps_map:
+    lines = GenerateDotGraph(package, deps_map, options)
+
+    filename = os.path.join(options.output_dir,
+                            GetOutputBaseName(deps_map[package], options))
+
+    save_dot_filename = None
+    if options.save_dot:
+      save_dot_filename = filename + '.dot'
+
+    dot_helper.GenerateImage(lines, filename, options.format, save_dot_filename)
+
+
+def main(argv):
+  parser = optparse.OptionParser(usage='usage: %prog [options] input')
+  parser.add_option('-f', '--format', default='svg',
+                    help='Dot output format (png, svg, etc.).')
+  parser.add_option('-o', '--output-dir', default='.',
+                    help='Output directory.')
+  parser.add_option('-c', '--children', action='store_true',
+                    help='Also add children.')
+  parser.add_option('-l', '--link', action='store_true',
+                    help='Embed links.')
+  parser.add_option('-b', '--base-url', default='',
+                    help='Base url for links.')
+  parser.add_option('-s', '--save-dot', action='store_true',
+                    help='Save dot files.')
+  (options, inputs) = parser.parse_args(argv)
+
+  try:
+    os.makedirs(options.output_dir)
+  except OSError:
+    # The directory already exists.
+    pass
+
+  if not inputs:
+    GenerateImages(sys.stdin.read(), options)
+  else:
+    for i in inputs:
+      with open(i) as handle:
+        GenerateImages(handle.read(), options)
diff --git a/scripts/cros_generate_local_binhosts.py b/scripts/cros_generate_local_binhosts.py
new file mode 100644
index 0000000..3bed5d2
--- /dev/null
+++ b/scripts/cros_generate_local_binhosts.py
@@ -0,0 +1,81 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script for calculating compatible binhosts.
+
+Generates a file that sets the specified board's binhosts to include all of the
+other compatible boards in this buildroot.
+"""
+
+from __future__ import print_function
+
+import collections
+import glob
+import optparse
+import os
+import sys
+
+from chromite.lib import cros_build_lib
+
+
+def FindCandidateBoards():
+  """Find candidate local boards to grab prebuilts from."""
+  portageq_prefix = "/usr/local/bin/portageq-"
+  for path in sorted(glob.glob("%s*" % portageq_prefix)):
+    # Strip off the portageq prefix, leaving only the board.
+    yield path.replace(portageq_prefix, "")
+
+
+def SummarizeCompatibility(board):
+  """Returns a string that will be the same for compatible boards."""
+  cmd = ["portageq-%s" % board, "envvar", "ARCH", "CFLAGS"]
+  summary = cros_build_lib.RunCommand(cmd, redirect_stdout=True,
+                                      print_cmd=False).output.rstrip()
+  # We will add -clang-syntax to the falco and nyan boards. So we need to
+  # filter out -clang-syntax so that the flags from the PFQ are the same as
+  # those of the release board. See crbug.com/499115
+  # TODO(yunlian): Remove this when all the boards are build with -clang-syntax
+  return summary.replace(" -clang-syntax", "")
+
+
+def GenerateBinhostLine(build_root, compatible_boards):
+  """Generate a binhost line pulling binaries from the specified boards."""
+  # TODO(davidjames): Prioritize binhosts with more matching use flags.
+  local_binhosts = " ".join([
+      "file://localhost" + os.path.join(build_root, x, "packages")
+      for x in sorted(compatible_boards)])
+  return "LOCAL_BINHOST='%s'" % local_binhosts
+
+
+def main(argv):
+  parser = optparse.OptionParser(usage="USAGE: ./%prog --board=board [options]")
+
+  parser.add_option("--build_root", default="/build",
+                    dest="build_root",
+                    help="Location of boards (normally /build)")
+  parser.add_option("--board", default=None,
+                    dest="board",
+                    help="Board name (required).")
+
+  flags, remaining_arguments = parser.parse_args(argv)
+
+  if remaining_arguments or not flags.board:
+    parser.print_help()
+    sys.exit(1)
+
+  by_compatibility = collections.defaultdict(set)
+  compatible_boards = None
+  for other_board in FindCandidateBoards():
+    compat_id = SummarizeCompatibility(other_board)
+    if other_board == flags.board:
+      compatible_boards = by_compatibility[compat_id]
+    else:
+      by_compatibility[compat_id].add(other_board)
+
+  if compatible_boards is None:
+    print('Missing portageq wrapper for %s' % flags.board, file=sys.stderr)
+    sys.exit(1)
+
+  print('# Generated by cros_generate_local_binhosts.')
+  print(GenerateBinhostLine(flags.build_root, compatible_boards))
diff --git a/scripts/cros_generate_sysroot.py b/scripts/cros_generate_sysroot.py
new file mode 100644
index 0000000..569dd9c
--- /dev/null
+++ b/scripts/cros_generate_sysroot.py
@@ -0,0 +1,132 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates a sysroot tarball for building a specific package.
+
+Meant for use after setup_board and build_packages have been run.
+"""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import commandline
+from chromite.lib import osutils
+from chromite.lib import sudo
+from chromite.lib import sysroot_lib
+
+DEFAULT_NAME = 'sysroot_%(package)s.tar.xz'
+PACKAGE_SEPARATOR = '/'
+SYSROOT = 'sysroot'
+
+
+def ParseCommandLine(argv):
+  """Parse args, and run environment-independent checks."""
+  parser = commandline.ArgumentParser(description=__doc__)
+  parser.add_argument('--board', required=True,
+                      help=('The board to generate the sysroot for.'))
+  parser.add_argument('--package', required=True,
+                      help=('The package to generate the sysroot for.'))
+  parser.add_argument('--out-dir', type='path', required=True,
+                      help='Directory to place the generated tarball.')
+  parser.add_argument('--out-file',
+                      help=('The name to give to the tarball.  Defaults to %r.'
+                            % DEFAULT_NAME))
+  options = parser.parse_args(argv)
+
+  if not options.out_file:
+    options.out_file = DEFAULT_NAME % {
+        'package': options.package.replace(PACKAGE_SEPARATOR, '_')
+    }
+
+  return options
+
+
+class GenerateSysroot(object):
+  """Wrapper for generation functionality."""
+
+  PARALLEL_EMERGE = os.path.join(constants.CHROMITE_BIN_DIR, 'parallel_emerge')
+
+  def __init__(self, sysroot, options):
+    """Initialize
+
+    Args:
+      sysroot: Path to sysroot.
+      options: Parsed options.
+    """
+    self.sysroot = sysroot
+    self.options = options
+    self.extra_env = {'ROOT': self.sysroot, 'USE': os.environ.get('USE', '')}
+
+  def _Emerge(self, *args, **kwargs):
+    """Emerge the given packages using parallel_emerge."""
+    cmd = [self.PARALLEL_EMERGE, '--board=%s' % self.options.board,
+           '--usepkgonly', '--noreplace'] + list(args)
+    kwargs.setdefault('extra_env', self.extra_env)
+    cros_build_lib.SudoRunCommand(cmd, **kwargs)
+
+  def _InstallToolchain(self):
+    # Create the sysroot's config.
+    sysroot = sysroot_lib.Sysroot(self.sysroot)
+    sysroot.WriteConfig(sysroot.GenerateBoardConfig(self.options.board))
+    cros_build_lib.RunCommand(
+        [os.path.join(constants.CROSUTILS_DIR, 'install_toolchain'),
+         '--noconfigure', '--sysroot', self.sysroot])
+
+  def _InstallKernelHeaders(self):
+    self._Emerge('sys-kernel/linux-headers')
+
+  def _InstallBuildDependencies(self):
+    # Calculate buildtime deps that are not runtime deps.
+    raw_sysroot = cros_build_lib.GetSysroot(board=self.options.board)
+    cmd = ['qdepends', '-q', '-C', self.options.package]
+    output = cros_build_lib.RunCommand(
+        cmd, extra_env={'ROOT': raw_sysroot}, capture_output=True).output
+
+    if output.count('\n') > 1:
+      raise AssertionError('Too many packages matched given package pattern')
+
+    # qdepend outputs "package: deps", so only grab the deps.
+    atoms = output.partition(':')[2].split()
+
+    # Install the buildtime deps.
+    if atoms:
+      self._Emerge(*atoms)
+
+  def _CreateTarball(self):
+    target = os.path.join(self.options.out_dir, self.options.out_file)
+    cros_build_lib.CreateTarball(target, self.sysroot, sudo=True)
+
+  def Perform(self):
+    """Generate the sysroot."""
+    self._InstallToolchain()
+    self._InstallKernelHeaders()
+    self._InstallBuildDependencies()
+    self._CreateTarball()
+
+
+def FinishParsing(options):
+  """Run environment dependent checks on parsed args."""
+  target = os.path.join(options.out_dir, options.out_file)
+  if os.path.exists(target):
+    cros_build_lib.Die('Output file %r already exists.' % target)
+
+  if not os.path.isdir(options.out_dir):
+    cros_build_lib.Die(
+        'Non-existent directory %r specified for --out-dir' % options.out_dir)
+
+
+def main(argv):
+  options = ParseCommandLine(argv)
+  FinishParsing(options)
+
+  cros_build_lib.AssertInsideChroot()
+
+  with sudo.SudoKeepAlive(ttyless_sudo=False):
+    with osutils.TempDir(set_global=True, sudo_rm=True) as tempdir:
+      sysroot = os.path.join(tempdir, SYSROOT)
+      os.mkdir(sysroot)
+      GenerateSysroot(sysroot, options).Perform()
diff --git a/scripts/cros_generate_sysroot_unittest b/scripts/cros_generate_sysroot_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/cros_generate_sysroot_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/cros_generate_sysroot_unittest.py b/scripts/cros_generate_sysroot_unittest.py
new file mode 100644
index 0000000..fcb5e30
--- /dev/null
+++ b/scripts/cros_generate_sysroot_unittest.py
@@ -0,0 +1,98 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for cros_generate_sysroot."""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.scripts import cros_generate_sysroot as cros_gen
+from chromite.lib import osutils
+from chromite.lib import partial_mock
+
+
+Dir = cros_test_lib.Directory
+
+
+class CrosGenMock(partial_mock.PartialMock):
+  """Helper class to Mock out cros_generate_sysroot.GenerateSysroot."""
+
+  TARGET = 'chromite.scripts.cros_generate_sysroot.GenerateSysroot'
+  ATTRS = ('_InstallToolchain', '_InstallKernelHeaders',
+           '_InstallBuildDependencies')
+
+  TOOLCHAIN = 'toolchain'
+  KERNEL_HEADERS = 'kernel_headers'
+  BUILD_DEPS = 'build-deps'
+
+  def _InstallToolchain(self, inst):
+    osutils.Touch(os.path.join(inst.sysroot, self.TOOLCHAIN))
+
+  def _InstallKernelHeaders(self, inst):
+    osutils.Touch(os.path.join(inst.sysroot, self.KERNEL_HEADERS))
+
+  def _InstallBuildDependencies(self, inst):
+    osutils.Touch(os.path.join(inst.sysroot, self.BUILD_DEPS))
+
+  def VerifyTarball(self, tarball):
+    dir_struct = [Dir('.', []), self.TOOLCHAIN, self.KERNEL_HEADERS,
+                  self.BUILD_DEPS]
+    cros_test_lib.VerifyTarball(tarball, dir_struct)
+
+
+BOARD = 'lumpy'
+TAR_NAME = 'test.tar.xz'
+
+
+class OverallTest(cros_test_lib.MockTempDirTestCase):
+  """Tests for cros_generate_sysroot."""
+
+  def setUp(self):
+    self.cg_mock = self.StartPatcher(CrosGenMock())
+
+  def testTarballGeneration(self):
+    """End-to-end test of tarball generation."""
+    with mock.patch.object(cros_build_lib, 'IsInsideChroot'):
+      cros_build_lib.IsInsideChroot.returnvalue = True
+      cros_gen.main(
+          ['--board', BOARD, '--out-dir', self.tempdir,
+           '--out-file', TAR_NAME, '--package', constants.CHROME_CP])
+      self.cg_mock.VerifyTarball(os.path.join(self.tempdir, TAR_NAME))
+
+
+class InterfaceTest(cros_test_lib.TempDirTestCase):
+  """Test Parsing and error checking functionality."""
+
+  BAD_TARGET_DIR = '/path/to/nowhere'
+
+  def _Parse(self, extra_args):
+    return cros_gen.ParseCommandLine(
+        ['--board', BOARD, '--out-dir', self.tempdir,
+         '--package', constants.CHROME_CP] + extra_args)
+
+  def testDefaultTargetName(self):
+    """We are getting the right default target name."""
+    options = self._Parse([])
+    self.assertEquals(
+        options.out_file, 'sysroot_chromeos-base_chromeos-chrome.tar.xz')
+
+  def testExistingTarget(self):
+    """Erroring out on pre-existing target."""
+    options = self._Parse(['--out-file', TAR_NAME])
+    osutils.Touch(os.path.join(self.tempdir, TAR_NAME))
+    self.assertRaises(cros_build_lib.DieSystemExit,
+                      cros_gen.FinishParsing, options)
+
+  def testNonExisting(self):
+    """Erroring out on non-existent output dir."""
+    options = cros_gen.ParseCommandLine(
+        ['--board', BOARD, '--out-dir', self.BAD_TARGET_DIR, '--package',
+         constants.CHROME_CP])
+    self.assertRaises(cros_build_lib.DieSystemExit,
+                      cros_gen.FinishParsing, options)
diff --git a/scripts/cros_install_debug_syms.py b/scripts/cros_install_debug_syms.py
new file mode 100644
index 0000000..97d1cf2
--- /dev/null
+++ b/scripts/cros_install_debug_syms.py
@@ -0,0 +1,331 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Install debug symbols for specified packages.
+
+Only reinstall the debug symbols if they are not already installed to save time.
+
+The debug symbols are packaged outside of the prebuilt package in a
+.debug.tbz2 archive when FEATURES=separatedebug is set (by default on
+builders). On local machines, separatedebug is not set and the debug symbols
+are part of the prebuilt package.
+"""
+
+from __future__ import print_function
+
+import argparse
+import os
+import pickle
+import sys
+import tempfile
+import urlparse
+
+from chromite.lib import binpkg
+from chromite.lib import cache
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import path_util
+from chromite.lib import gs
+
+if cros_build_lib.IsInsideChroot():
+  from portage import create_trees
+
+
+DEBUG_SYMS_EXT = '.debug.tbz2'
+
+
+# We cache the package indexes. When the format of what we store changes,
+# bump the cache version to avoid problems.
+CACHE_VERSION = '1'
+
+
+class DebugSymbolsInstaller(object):
+  """Container for environment objects, needed to make multiprocessing work.
+
+  This also redirects stdout to null when stdout_to_null=True to avoid
+  polluting the output with portage QA warnings.
+  """
+  _old_stdout = None
+  _null = None
+
+  def __init__(self, vartree, gs_context, sysroot, stdout_to_null):
+    self._vartree = vartree
+    self._gs_context = gs_context
+    self._sysroot = sysroot
+    self._stdout_to_null = stdout_to_null
+
+  def __enter__(self):
+    if self._stdout_to_null:
+      self._old_stdout = sys.stdout
+      self._null = open(os.devnull, 'w')
+      sys.stdout = self._null
+    return self
+
+  def __exit__(self, _exc_type, _exc_val, _exc_tb):
+    if self._stdout_to_null:
+      sys.stdout = self._old_stdout
+      self._null.close()
+
+  def Install(self, cpv, url):
+    """Install the debug symbols for |cpv|.
+
+    This will install the debug symbols tarball in PKGDIR so that it can be
+    used later.
+
+    Args:
+      cpv: the cpv of the package to build. This assumes that the cpv is
+        installed in the sysroot.
+      url: url of the debug symbols archive. This could be a Google Storage url
+        or a local path.
+    """
+    archive = os.path.join(self._vartree.settings['PKGDIR'],
+                           cpv + DEBUG_SYMS_EXT)
+    # GsContext does not understand file:// scheme so we need to extract the
+    # path ourselves.
+    parsed_url = urlparse.urlsplit(url)
+    if not parsed_url.scheme or parsed_url.scheme == 'file':
+      url = parsed_url.path
+
+    if not os.path.isfile(archive):
+      self._gs_context.Copy(url, archive, debug_level=logging.DEBUG)
+
+    with osutils.TempDir(sudo_rm=True) as tempdir:
+      cros_build_lib.SudoRunCommand(['tar', '-I', 'bzip2 -q', '-xf', archive,
+                                     '-C', tempdir], quiet=True)
+
+      with open(self._vartree.getpath(cpv, filename='CONTENTS'),
+                'a') as content_file:
+        # Merge the content of the temporary dir into the sysroot.
+        # pylint: disable=protected-access
+        link = self._vartree.dbapi._dblink(cpv)
+        link.mergeme(tempdir, self._sysroot, content_file, None, '', {}, None)
+
+
+def ParseArgs(argv):
+  """Parse arguments and initialize field.
+
+  Args:
+    argv: arguments passed to the script.
+  """
+  parser = commandline.ArgumentParser(description=__doc__)
+  parser.add_argument('--board', help='Board name (required).', required=True)
+  parser.add_argument('--all', dest='all', action='store_true',
+                      help='Install the debug symbols for all installed '
+                      'packages', default=False)
+  parser.add_argument('packages', nargs=argparse.REMAINDER,
+                      help='list of packages that need the debug symbols.')
+
+  advanced = parser.add_argument_group('Advanced options')
+  advanced.add_argument('--nocachebinhost', dest='cachebinhost', default=True,
+                        action='store_false', help="Don't cache the list of"
+                        " files contained in binhosts. (Default: cache)")
+  advanced.add_argument('--clearcache', dest='clearcache', action='store_true',
+                        default=False, help='Clear the binhost cache.')
+  advanced.add_argument('--jobs', default=None, type=int,
+                        help='Number of processes to run in parallel.')
+
+  options = parser.parse_args(argv)
+  options.Freeze()
+
+  if options.all and options.packages:
+    cros_build_lib.Die('Cannot use --all with a list of packages')
+  return options
+
+
+def ShouldGetSymbols(cpv, vardb, remote_symbols):
+  """Return True if the symbols for cpv are available and are not installed.
+
+  We try to check if the symbols are installed before checking availability as
+  a GS request is more expensive than checking locally.
+
+  Args:
+    cpv: cpv of the package
+    vardb: a vartree dbapi
+    remote_symbols: a mapping from cpv to debug symbols url
+
+  Returns:
+    True if |cpv|'s debug symbols are not installed and are available
+  """
+  features, contents = vardb.aux_get(cpv, ['FEATURES', 'CONTENTS'])
+
+  return ('separatedebug' in features and not '/usr/lib/debug/' in contents
+          and cpv in remote_symbols)
+
+
+def RemoteSymbols(vartree, binhost_cache=None):
+  """Get the cpv to debug symbols mapping.
+
+  If several binhosts contain debug symbols for the same cpv, keep only the
+  highest priority one.
+
+  Args:
+    vartree: a vartree
+    binhost_cache: a cache containing the cpv to debug symbols url for all
+      known binhosts. None if we are not caching binhosts.
+
+  Returns:
+    a dictionary mapping the cpv to a remote debug symbols gsurl.
+  """
+  symbols_mapping = {}
+  for binhost in vartree.settings['PORTAGE_BINHOST'].split():
+    if binhost:
+      symbols_mapping.update(ListBinhost(binhost, binhost_cache))
+  return symbols_mapping
+
+
+def GetPackageIndex(binhost, binhost_cache=None):
+  """Get the packages index for |binhost|.
+
+  If a cache is provided, use it to cache the remote packages index.
+
+  Args:
+    binhost: a portage binhost, local, google storage or http.
+    binhost_cache: a cache for the remote packages index.
+
+  Returns:
+    A PackageIndex object.
+  """
+  key = binhost.split('://')[-1]
+  key = key.rstrip('/').split('/')
+
+  if binhost_cache and binhost_cache.Lookup(key).Exists():
+    with open(binhost_cache.Lookup(key).path) as f:
+      return pickle.load(f)
+
+  pkgindex = binpkg.GrabRemotePackageIndex(binhost)
+  if pkgindex and binhost_cache:
+    # Only cache remote binhosts as local binhosts can change.
+    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
+      pickle.dump(pkgindex, temp_file)
+      temp_file.file.close()
+      binhost_cache.Lookup(key).Assign(temp_file.name)
+  elif pkgindex is None:
+    urlparts = urlparse.urlsplit(binhost)
+    if urlparts.scheme not in ('file', ''):
+      # Don't fail the build on network errors. Print a warning message and
+      # continue.
+      logging.warning('Could not get package index %s' % binhost)
+      return None
+
+    binhost = urlparts.path
+    if not os.path.isdir(binhost):
+      raise ValueError('unrecognized binhost format for %s.')
+    pkgindex = binpkg.GrabLocalPackageIndex(binhost)
+
+  return pkgindex
+
+
+def ListBinhost(binhost, binhost_cache=None):
+  """Return the cpv to debug symbols mapping for a given binhost.
+
+  List the content of the binhost to extract the cpv to debug symbols
+  mapping. If --cachebinhost is set, we cache the result to avoid the
+  cost of gsutil every time.
+
+  Args:
+    binhost: a portage binhost, local or on google storage.
+    binhost_cache: a cache containing mappings cpv to debug symbols url for a
+      given binhost (None if we don't want to cache).
+
+  Returns:
+    A cpv to debug symbols url mapping.
+  """
+
+  symbols = {}
+  pkgindex = GetPackageIndex(binhost, binhost_cache)
+  if pkgindex is None:
+    return symbols
+
+  for p in pkgindex.packages:
+    if p.get('DEBUG_SYMBOLS') == 'yes':
+      path = p.get('PATH', p['CPV'] + '.tbz2')
+      base_url = pkgindex.header.get('URI', binhost)
+      symbols[p['CPV']] = os.path.join(base_url,
+                                       path.replace('.tbz2', DEBUG_SYMS_EXT))
+
+  return symbols
+
+
+def GetMatchingCPV(package, vardb):
+  """Return the cpv of the installed package matching |package|.
+
+  Args:
+    package: package name
+    vardb: a vartree dbapi
+
+  Returns:
+    The cpv of the installed package whose name matches |package|.
+  """
+  matches = vardb.match(package)
+  if not matches:
+    cros_build_lib.Die('Could not find package %s' % package)
+  if len(matches) != 1:
+    cros_build_lib.Die('Ambiguous package name: %s.\n'
+                       'Matching: %s' % (package, ' '.join(matches)))
+  return matches[0]
+
+
+def main(argv):
+  options = ParseArgs(argv)
+
+  if not cros_build_lib.IsInsideChroot():
+    raise commandline.ChrootRequiredError()
+
+  if os.geteuid() != 0:
+    cros_build_lib.SudoRunCommand(sys.argv)
+    return
+
+  # sysroot must have a trailing / as the tree dictionary produced by
+  # create_trees is indexed with a trailing /.
+  sysroot = cros_build_lib.GetSysroot(options.board) + '/'
+  trees = create_trees(target_root=sysroot, config_root=sysroot)
+
+  vartree = trees[sysroot]['vartree']
+
+  cache_dir = os.path.join(path_util.FindCacheDir(),
+                           'cros_install_debug_syms-v' + CACHE_VERSION)
+
+  if options.clearcache:
+    osutils.RmDir(cache_dir, ignore_missing=True)
+
+  binhost_cache = None
+  if options.cachebinhost:
+    binhost_cache = cache.DiskCache(cache_dir)
+
+  boto_file = vartree.settings['BOTO_CONFIG']
+  if boto_file:
+    os.environ['BOTO_CONFIG'] = boto_file
+
+  gs_context = gs.GSContext()
+  symbols_mapping = RemoteSymbols(vartree, binhost_cache)
+
+  if options.all:
+    to_install = vartree.dbapi.cpv_all()
+  else:
+    to_install = [GetMatchingCPV(p, vartree.dbapi) for p in options.packages]
+
+  to_install = [p for p in to_install
+                if ShouldGetSymbols(p, vartree.dbapi, symbols_mapping)]
+
+  if not to_install:
+    logging.info('nothing to do, exit')
+    return
+
+  with DebugSymbolsInstaller(vartree, gs_context, sysroot,
+                             not options.debug) as installer:
+    args = [(p, symbols_mapping[p]) for p in to_install]
+    parallel.RunTasksInProcessPool(installer.Install, args,
+                                   processes=options.jobs)
+
+  logging.debug('installation done, updating packages index file')
+  packages_dir = os.path.join(sysroot, 'packages')
+  packages_file = os.path.join(packages_dir, 'Packages')
+  # binpkg will set DEBUG_SYMBOLS automatically if it detects the debug symbols
+  # in the packages dir.
+  pkgindex = binpkg.GrabLocalPackageIndex(packages_dir)
+  with open(packages_file, 'w') as p:
+    pkgindex.Write(p)
diff --git a/scripts/cros_install_debug_syms_unittest b/scripts/cros_install_debug_syms_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/cros_install_debug_syms_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/cros_install_debug_syms_unittest.py b/scripts/cros_install_debug_syms_unittest.py
new file mode 100644
index 0000000..e993f13
--- /dev/null
+++ b/scripts/cros_install_debug_syms_unittest.py
@@ -0,0 +1,79 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for cros_install_debug_syms.py"""
+
+from __future__ import print_function
+
+from collections import namedtuple
+import os
+
+from chromite.lib import cros_test_lib
+from chromite.scripts import cros_install_debug_syms
+
+
+SimpleIndex = namedtuple('SimpleIndex', 'header packages')
+
+
+class InstallDebugSymsTest(cros_test_lib.MockTestCase):
+  """Test the parsing of package index"""
+
+  def setUp(self):
+    self.local_binhosts = ['/build/something/packages/',
+                           'file:///build/somethingelse/packages',
+                           'file://localhost/build/another/packages']
+
+    self.remote_binhosts = ['http://domain.com/binhost',
+                            'gs://chromeos-stuff/binhost']
+
+  def testGetLocalPackageIndex(self):
+    """Check that local binhosts are fetched correctly."""
+    self.PatchObject(cros_install_debug_syms.binpkg, "GrabLocalPackageIndex",
+                     return_value=SimpleIndex({}, {}))
+    self.PatchObject(cros_install_debug_syms.os.path, 'isdir',
+                     return_value=True)
+    for binhost in self.local_binhosts:
+      cros_install_debug_syms.GetPackageIndex(binhost)
+
+  def testGetRemotePackageIndex(self):
+    """Check that remote binhosts are fetched correctly."""
+    self.PatchObject(cros_install_debug_syms.binpkg, "GrabRemotePackageIndex",
+                     return_value=SimpleIndex({}, {}))
+    for binhost in self.remote_binhosts:
+      cros_install_debug_syms.GetPackageIndex(binhost)
+
+  def testListRemoteBinhost(self):
+    """Check that urls are generated correctly for remote binhosts."""
+    chaps_cpv = 'chromeos-base/chaps-0-r2'
+    metrics_cpv = 'chromeos-base/metrics-0-r4'
+
+    index = SimpleIndex({}, [{'CPV': 'chromeos-base/shill-0-r1'},
+                             {'CPV': chaps_cpv,
+                              'DEBUG_SYMBOLS': 'yes'},
+                             {'CPV': metrics_cpv,
+                              'DEBUG_SYMBOLS': 'yes',
+                              'PATH': 'path/to/binpkg.tbz2'}])
+    self.PatchObject(cros_install_debug_syms, 'GetPackageIndex',
+                     return_value=index)
+
+    for binhost in self.remote_binhosts:
+      expected = {chaps_cpv: os.path.join(binhost, chaps_cpv + '.debug.tbz2'),
+                  metrics_cpv: os.path.join(binhost,
+                                            'path/to/binpkg.debug.tbz2')}
+      self.assertEquals(cros_install_debug_syms.ListBinhost(binhost), expected)
+
+  def testListRemoteBinhostWithURI(self):
+    """Check that urls are generated correctly when URI is defined."""
+    index = SimpleIndex({'URI': 'gs://chromeos-prebuilts'},
+                        [{'CPV': 'chromeos-base/shill-0-r1',
+                          'DEBUG_SYMBOLS': 'yes',
+                          'PATH': 'amd64-generic/paladin1234/shill-0-r1.tbz2'}])
+    self.PatchObject(cros_install_debug_syms, 'GetPackageIndex',
+                     return_value=index)
+
+    binhost = 'gs://chromeos-prebuilts/gizmo-paladin/'
+    debug_symbols_url = ('gs://chromeos-prebuilts/amd64-generic'
+                         '/paladin1234/shill-0-r1.debug.tbz2')
+    self.assertEquals(cros_install_debug_syms.ListBinhost(binhost),
+                      {'chromeos-base/shill-0-r1': debug_symbols_url})
diff --git a/scripts/cros_list_compatible_boards b/scripts/cros_list_compatible_boards
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/cros_list_compatible_boards
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/cros_list_compatible_boards.py b/scripts/cros_list_compatible_boards.py
new file mode 100644
index 0000000..9ddeea4
--- /dev/null
+++ b/scripts/cros_list_compatible_boards.py
@@ -0,0 +1,39 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""List compatible boards that we can pull prebuilts from."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import binhost
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+
+
+def _ParseArguments(argv):
+  parser = commandline.ArgumentParser(description=__doc__)
+  parser.add_argument('--buildroot', default=constants.SOURCE_ROOT,
+                      help='Root directory where source is checked out to.')
+  parser.add_argument('--prebuilt-type', required=True,
+                      help='Type of prebuilt we want to look at.')
+  parser.add_argument('--board', required=True,
+                      help='Board to request prebuilts for.')
+  opts = parser.parse_args(argv)
+  opts.Freeze()
+  return opts
+
+
+def main(argv):
+  cros_build_lib.AssertInsideChroot()
+  opts = _ParseArguments(argv)
+  filename = binhost.PrebuiltMapping.GetFilename(opts.buildroot,
+                                                 opts.prebuilt_type)
+  pfq_configs = binhost.PrebuiltMapping.Load(filename)
+  extra_useflags = os.environ.get('USE', '').split()
+  compat_id = binhost.CalculateCompatId(opts.board, extra_useflags)
+  for key in pfq_configs.GetPrebuilts(compat_id):
+    print(key.board)
diff --git a/scripts/cros_list_modified_packages.py b/scripts/cros_list_modified_packages.py
new file mode 100644
index 0000000..b5327e9
--- /dev/null
+++ b/scripts/cros_list_modified_packages.py
@@ -0,0 +1,261 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Calculate what workon packages have changed since the last build.
+
+A workon package is treated as changed if any of the below are true:
+  1) The package is not installed.
+  2) A file exists in the associated repository which has a newer modification
+     time than the installed package.
+  3) The source ebuild has a newer modification time than the installed package.
+
+Some caveats:
+  - We do not look at eclasses. This replicates the existing behavior of the
+    commit queue, which also does not look at eclass changes.
+  - We do not try to fallback to the non-workon package if the local tree is
+    unmodified. This is probably a good thing, since developers who are
+    "working on" a package want to compile it locally.
+  - Portage only stores the time that a package finished building, so we
+    aren't able to detect when users modify source code during builds.
+"""
+
+from __future__ import print_function
+
+import errno
+import multiprocessing
+import os
+try:
+  import Queue
+except ImportError:
+  # Python-3 renamed to "queue".  We still use Queue to avoid collisions
+  # with naming variables as "queue".  Maybe we'll transition at some point.
+  # pylint: disable=F0401
+  import queue as Queue
+
+from chromite.cbuildbot import constants
+from chromite.lib import brick_lib
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import portage_util
+from chromite.lib import sysroot_lib
+from chromite.lib import workon_helper
+
+
+class ModificationTimeMonitor(object):
+  """Base class for monitoring last modification time of paths.
+
+  This takes a list of (keys, path) pairs and finds the latest mtime of an
+  object within each of the path's subtrees, populating a map from keys to
+  mtimes. Note that a key may be associated with multiple paths, in which case
+  the latest mtime among them will be returned.
+
+  Members:
+    _tasks: A list of (key, path) pairs to check.
+    _result_queue: A queue populated with corresponding (key, mtime) pairs.
+  """
+
+  def __init__(self, key_path_pairs):
+    self._tasks = list(key_path_pairs)
+    self._result_queue = multiprocessing.Queue(len(self._tasks))
+
+  def _EnqueueModificationTime(self, key, path):
+    """Calculate the last modification time of |path| and enqueue it."""
+    if os.path.isdir(path):
+      self._result_queue.put((key, self._LastModificationTime(path)))
+
+  def _LastModificationTime(self, path):
+    """Returns the latest modification time for anything under |path|."""
+    cmd = 'find . -name .git -prune -o -printf "%T@\n" | sort -nr | head -n1'
+    ret = cros_build_lib.RunCommand(cmd, cwd=path, shell=True, print_cmd=False,
+                                    capture_output=True)
+    return float(ret.output) if ret.output else 0
+
+  def GetModificationTimes(self):
+    """Get the latest modification time for each of the queued keys."""
+    parallel.RunTasksInProcessPool(self._EnqueueModificationTime, self._tasks)
+    mtimes = {}
+    try:
+      while True:
+        key, mtime = self._result_queue.get_nowait()
+        mtimes[key] = max((mtimes.get(key, 0), mtime))
+    except Queue.Empty:
+      return mtimes
+
+
+class WorkonPackageInfo(object):
+  """Class for getting information about workon packages.
+
+  Members:
+    cp: The package name (e.g. chromeos-base/power_manager).
+    mtime: The modification time of the installed package.
+    projects: The project(s) associated with the package.
+    full_srcpaths: The brick source path(s) associated with the package.
+    src_ebuild_mtime: The modification time of the source ebuild.
+  """
+
+  def __init__(self, cp, mtime, projects, full_srcpaths, src_ebuild_mtime):
+    self.cp = cp
+    self.pkg_mtime = int(mtime)
+    self.projects = projects
+    self.full_srcpaths = full_srcpaths
+    self.src_ebuild_mtime = src_ebuild_mtime
+
+
+def ListWorkonPackages(sysroot, all_opt=False):
+  """List the packages that are currently being worked on.
+
+  Args:
+    sysroot: sysroot_lib.Sysroot object.
+    all_opt: Pass --all to cros_workon. For testing purposes.
+  """
+  helper = workon_helper.WorkonHelper(sysroot.path)
+  return helper.ListAtoms(use_all=all_opt)
+
+
+def ListWorkonPackagesInfo(sysroot):
+  """Find the specified workon packages for the specified board.
+
+  Args:
+    sysroot: sysroot_lib.Sysroot object.
+
+  Returns:
+    A list of WorkonPackageInfo objects for unique packages being worked on.
+  """
+  # Import portage late so that this script can be imported outside the chroot.
+  # pylint: disable=F0401
+  import portage.const
+  packages = ListWorkonPackages(sysroot)
+  if not packages:
+    return []
+  results = {}
+
+  if sysroot.path == '/':
+    overlays = portage_util.FindOverlays(constants.BOTH_OVERLAYS, None)
+  else:
+    overlays = sysroot.GetStandardField('PORTDIR_OVERLAY').splitlines()
+
+  vdb_path = os.path.join(sysroot.path, portage.const.VDB_PATH)
+
+  for overlay in overlays:
+    # Is this a brick overlay? Get its source base directory.
+    brick_srcbase = ''
+    brick = brick_lib.FindBrickInPath(overlay)
+    if brick and brick.OverlayDir() == overlay.rstrip(os.path.sep):
+      brick_srcbase = brick.SourceDir()
+
+    for filename, projects, srcpaths in portage_util.GetWorkonProjectMap(
+        overlay, packages):
+      # chromeos-base/power_manager/power_manager-9999
+      # cp = chromeos-base/power_manager
+      # cpv = chromeos-base/power_manager-9999
+      category, pn, p = portage_util.SplitEbuildPath(filename)
+      cp = '%s/%s' % (category, pn)
+      cpv = '%s/%s' % (category, p)
+
+      # Get the time the package finished building. TODO(build): Teach Portage
+      # to store the time the package started building and use that here.
+      pkg_mtime_file = os.path.join(vdb_path, cpv, 'BUILD_TIME')
+      try:
+        pkg_mtime = int(osutils.ReadFile(pkg_mtime_file))
+      except EnvironmentError as ex:
+        if ex.errno != errno.ENOENT:
+          raise
+        pkg_mtime = 0
+
+      # Get the modification time of the ebuild in the overlay.
+      src_ebuild_mtime = os.lstat(os.path.join(overlay, filename)).st_mtime
+
+      # Translate relative srcpath values into their absolute counterparts.
+      full_srcpaths = [os.path.join(brick_srcbase, s) for s in srcpaths]
+
+      # Write info into the results dictionary, overwriting any previous
+      # values. This ensures that overlays override appropriately.
+      results[cp] = WorkonPackageInfo(cp, pkg_mtime, projects, full_srcpaths,
+                                      src_ebuild_mtime)
+
+  return results.values()
+
+
+def WorkonProjectsMonitor(projects):
+  """Returns a monitor for project modification times."""
+  # TODO(garnold) In order for the mtime monitor to be as accurate as
+  # possible, this only needs to enqueue the checkout(s) relevant for the
+  # task at hand, e.g. the specific ebuild we want to emerge. In general, the
+  # CROS_WORKON_LOCALNAME variable in workon ebuilds defines the source path
+  # uniquely and can be used for this purpose.
+  project_path_pairs = []
+  manifest = git.ManifestCheckout.Cached(constants.SOURCE_ROOT)
+  for project in set(projects).intersection(manifest.checkouts_by_name):
+    for checkout in manifest.FindCheckouts(project):
+      project_path_pairs.append((project, checkout.GetPath(absolute=True)))
+
+  return ModificationTimeMonitor(project_path_pairs)
+
+
+def WorkonSrcpathsMonitor(srcpaths):
+  """Returns a monitor for srcpath modification times."""
+  return ModificationTimeMonitor(zip(srcpaths, srcpaths))
+
+
+def ListModifiedWorkonPackages(sysroot):
+  """List the workon packages that need to be rebuilt.
+
+  Args:
+    sysroot: sysroot_lib.Sysroot object.
+  """
+  packages = ListWorkonPackagesInfo(sysroot)
+  if not packages:
+    return
+
+  # Get mtimes for all projects and source paths associated with our packages.
+  all_projects = [p for info in packages for p in info.projects]
+  project_mtimes = WorkonProjectsMonitor(all_projects).GetModificationTimes()
+  all_srcpaths = [s for info in packages for s in info.full_srcpaths]
+  srcpath_mtimes = WorkonSrcpathsMonitor(all_srcpaths).GetModificationTimes()
+
+  for info in packages:
+    mtime = int(max([project_mtimes.get(p, 0) for p in info.projects] +
+                    [srcpath_mtimes.get(s, 0) for s in info.full_srcpaths] +
+                    [info.src_ebuild_mtime]))
+    if mtime >= info.pkg_mtime:
+      yield info.cp
+
+
+def _ParseArguments(argv):
+  parser = commandline.ArgumentParser(description=__doc__)
+
+  target = parser.add_mutually_exclusive_group(required=True)
+  target.add_argument('--board', help='Board name')
+  target.add_argument('--brick', help='Brick locator')
+  target.add_argument('--host', default=False, action='store_true',
+                      help='Look at host packages instead of board packages')
+  target.add_argument('--sysroot', help='Sysroot path.')
+
+  flags = parser.parse_args(argv)
+  flags.Freeze()
+  return flags
+
+
+def main(argv):
+  logging.getLogger().setLevel(logging.INFO)
+  flags = _ParseArguments(argv)
+  sysroot = None
+  if flags.brick:
+    try:
+      sysroot = cros_build_lib.GetSysroot(brick_lib.Brick(flags.brick))
+    except brick_lib.BrickNotFound:
+      cros_build_lib.Die('Could not load brick %s.' % flags.brick)
+  elif flags.board:
+    sysroot = cros_build_lib.GetSysroot(flags.board)
+  elif flags.host:
+    sysroot = '/'
+  else:
+    sysroot = flags.sysroot
+
+  modified = ListModifiedWorkonPackages(sysroot_lib.Sysroot(sysroot))
+  print(' '.join(sorted(modified)))
diff --git a/scripts/cros_list_modified_packages_unittest b/scripts/cros_list_modified_packages_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/cros_list_modified_packages_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/cros_list_modified_packages_unittest.py b/scripts/cros_list_modified_packages_unittest.py
new file mode 100644
index 0000000..23fca59
--- /dev/null
+++ b/scripts/cros_list_modified_packages_unittest.py
@@ -0,0 +1,34 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the cros_list_modified_packages program"""
+
+from __future__ import print_function
+
+import functools
+
+from chromite.lib import cros_test_lib
+from chromite.lib import sysroot_lib
+from chromite.scripts import cros_list_modified_packages
+
+
+class ListModifiedWorkonPackagesTest(cros_test_lib.MockTestCase):
+  """Test for cros_list_modified_packages.ListModifiedWorkonPackages."""
+
+  def testListModifiedWorkonPackages(self):
+    """Test that no ebuild breaks cros_list_modified_packages"""
+
+    # A hook to set the "all_opt" parameter when calling ListWorkonPackages
+    _ListWorkonPackagesPatch = \
+      functools.partial(cros_list_modified_packages.ListWorkonPackages,
+                        all_opt=True)
+
+    with self.PatchObject(cros_list_modified_packages, 'ListWorkonPackages',
+                          side_effect=_ListWorkonPackagesPatch):
+      # ListModifiedWorkonPackages returns a generator object and doesn't
+      # actually do any work automatically. We have to extract the elements
+      # from it to get it to exercise the code, and we can do that by turning
+      # it into a list.
+      list(cros_list_modified_packages.ListModifiedWorkonPackages(
+          sysroot=sysroot_lib.Sysroot('/')))
diff --git a/scripts/cros_list_overlays.py b/scripts/cros_list_overlays.py
new file mode 100644
index 0000000..2cead1b
--- /dev/null
+++ b/scripts/cros_list_overlays.py
@@ -0,0 +1,78 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Calculate what overlays are needed for a particular board."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import brick_lib
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import portage_util
+
+
+def _ParseArguments(argv):
+  parser = commandline.ArgumentParser(description=__doc__)
+
+  parser.add_argument('--board', default=None, help='Board name')
+  parser.add_argument('--board_overlay', default=None,
+                      help='Location of the board overlay. Used by '
+                           './setup_board to allow developers to add custom '
+                           'overlays.')
+  parser.add_argument('--primary_only', default=False, action='store_true',
+                      help='Only return the path to the primary overlay. This '
+                           'only makes sense when --board is specified.')
+  parser.add_argument('-a', '--all', default=False, action='store_true',
+                      help='Show all overlays (even common ones).')
+  parser.add_argument('--brick', help='Main brick to use')
+
+  opts = parser.parse_args(argv)
+  opts.Freeze()
+
+  if opts.primary_only and opts.board is None:
+    parser.error('--board is required when --primary_only is supplied.')
+
+  if opts.brick:
+    if opts.board:
+      parser.error('--board and --brick are incompatible.')
+
+    if opts.all:
+      parser.error('Cannot list all overlays with --brick')
+
+  return opts
+
+
+def main(argv):
+  opts = _ParseArguments(argv)
+  args = (constants.BOTH_OVERLAYS, opts.board)
+
+  if opts.brick:
+    main_brick = brick_lib.Brick(opts.brick)
+    overlays = [b.OverlayDir() for b in main_brick.BrickStack()]
+  else:
+    # Verify that a primary overlay exists.
+    try:
+      primary_overlay = portage_util.FindPrimaryOverlay(*args)
+    except portage_util.MissingOverlayException as ex:
+      cros_build_lib.Die(str(ex))
+
+    # Get the overlays to print.
+    if opts.primary_only:
+      overlays = [primary_overlay]
+    else:
+      overlays = portage_util.FindOverlays(*args)
+
+  # Exclude any overlays in src/third_party, for backwards compatibility with
+  # scripts that expected these to not be listed.
+  if not opts.all:
+    ignore_prefix = os.path.join(constants.SOURCE_ROOT, 'src', 'third_party')
+    overlays = [o for o in overlays if not o.startswith(ignore_prefix)]
+
+  if opts.board_overlay and os.path.isdir(opts.board_overlay):
+    overlays.append(os.path.abspath(opts.board_overlay))
+
+  print('\n'.join(overlays))
diff --git a/scripts/cros_list_overlays_unittest b/scripts/cros_list_overlays_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/cros_list_overlays_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/cros_list_overlays_unittest.py b/scripts/cros_list_overlays_unittest.py
new file mode 100644
index 0000000..6d65823
--- /dev/null
+++ b/scripts/cros_list_overlays_unittest.py
@@ -0,0 +1,31 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for cros_list_overlays.py"""
+
+from __future__ import print_function
+
+from chromite.lib import cros_test_lib
+from chromite.lib import portage_util
+from chromite.scripts import cros_list_overlays
+
+
+class ListOverlaysTest(cros_test_lib.MockTestCase):
+  """Tests for main()"""
+
+  def setUp(self):
+    self.pfind_mock = self.PatchObject(portage_util, 'FindPrimaryOverlay')
+    self.find_mock = self.PatchObject(portage_util, 'FindOverlays')
+
+  def testSmoke(self):
+    """Basic sanity check"""
+    cros_list_overlays.main([])
+
+  def testPrimary(self):
+    """Basic primary check"""
+    cros_list_overlays.main(['--primary_only', '--board', 'foo'])
+
+
+def main(_argv):
+  cros_test_lib.main(level='info', module=__name__)
diff --git a/scripts/cros_mark_as_stable.py b/scripts/cros_mark_as_stable.py
new file mode 100644
index 0000000..531f652
--- /dev/null
+++ b/scripts/cros_mark_as_stable.py
@@ -0,0 +1,315 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module uprevs a given package's ebuild to the next revision."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import portage_util
+
+# Commit message subject for uprevving Portage packages.
+GIT_COMMIT_SUBJECT = 'Marking set of ebuilds as stable'
+
+# Commit message for uprevving Portage packages.
+_GIT_COMMIT_MESSAGE = 'Marking 9999 ebuild for %s as stable.'
+
+# Dictionary of valid commands with usage information.
+COMMAND_DICTIONARY = {
+    'commit': 'Marks given ebuilds as stable locally',
+    'push': 'Pushes previous marking of ebuilds to remote repo',
+}
+
+
+# ======================= Global Helper Functions ========================
+
+
+def CleanStalePackages(srcroot, boards, package_atoms):
+  """Cleans up stale package info from a previous build.
+
+  Args:
+    srcroot: Root directory of the source tree.
+    boards: Boards to clean the packages from.
+    package_atoms: A list of package atoms to unmerge.
+  """
+  if package_atoms:
+    logging.info('Cleaning up stale packages %s.' % package_atoms)
+
+  # First unmerge all the packages for a board, then eclean it.
+  # We need these two steps to run in order (unmerge/eclean),
+  # but we can let all the boards run in parallel.
+  def _CleanStalePackages(board):
+    if board:
+      suffix = '-' + board
+      runcmd = cros_build_lib.RunCommand
+    else:
+      suffix = ''
+      runcmd = cros_build_lib.SudoRunCommand
+
+    emerge, eclean = 'emerge' + suffix, 'eclean' + suffix
+    if not osutils.FindMissingBinaries([emerge, eclean]):
+      if package_atoms:
+        # If nothing was found to be unmerged, emerge will exit(1).
+        result = runcmd([emerge, '-q', '--unmerge'] + package_atoms,
+                        enter_chroot=True, extra_env={'CLEAN_DELAY': '0'},
+                        error_code_ok=True, cwd=srcroot)
+        if not result.returncode in (0, 1):
+          raise cros_build_lib.RunCommandError('unexpected error', result)
+      runcmd([eclean, '-d', 'packages'],
+             cwd=srcroot, enter_chroot=True,
+             redirect_stdout=True, redirect_stderr=True)
+
+  tasks = []
+  for board in boards:
+    tasks.append([board])
+  tasks.append([None])
+
+  parallel.RunTasksInProcessPool(_CleanStalePackages, tasks)
+
+
+# TODO(build): This code needs to be gutted and rebased to cros_build_lib.
+def _DoWeHaveLocalCommits(stable_branch, tracking_branch, cwd):
+  """Returns true if there are local commits."""
+  current_branch = git.GetCurrentBranch(cwd)
+
+  if current_branch != stable_branch:
+    return False
+  output = git.RunGit(
+      cwd, ['rev-parse', 'HEAD', tracking_branch]).output.split()
+  return output[0] != output[1]
+
+
+# ======================= End Global Helper Functions ========================
+
+
+def PushChange(stable_branch, tracking_branch, dryrun, cwd):
+  """Pushes commits in the stable_branch to the remote git repository.
+
+  Pushes local commits from calls to CommitChange to the remote git
+  repository specified by current working directory. If changes are
+  found to commit, they will be merged to the merge branch and pushed.
+  In that case, the local repository will be left on the merge branch.
+
+  Args:
+    stable_branch: The local branch with commits we want to push.
+    tracking_branch: The tracking branch of the local branch.
+    dryrun: Use git push --dryrun to emulate a push.
+    cwd: The directory to run commands in.
+
+  Raises:
+    OSError: Error occurred while pushing.
+  """
+  if not _DoWeHaveLocalCommits(stable_branch, tracking_branch, cwd):
+    logging.info('No work found to push in %s.  Exiting', cwd)
+    return
+
+  # For the commit queue, our local branch may contain commits that were
+  # just tested and pushed during the CommitQueueCompletion stage. Sync
+  # and rebase our local branch on top of the remote commits.
+  remote_ref = git.GetTrackingBranch(cwd, for_push=True)
+  git.SyncPushBranch(cwd, remote_ref.remote, remote_ref.ref)
+
+  # Check whether any local changes remain after the sync.
+  if not _DoWeHaveLocalCommits(stable_branch, remote_ref.ref, cwd):
+    logging.info('All changes already pushed for %s. Exiting', cwd)
+    return
+
+  # Add a failsafe check here.  Only CLs from the 'chrome-bot' user should
+  # be involved here.  If any other CLs are found then complain.
+  # In dryruns extra CLs are normal, though, and can be ignored.
+  bad_cl_cmd = ['log', '--format=short', '--perl-regexp',
+                '--author', '^(?!chrome-bot)', '%s..%s' % (
+                    remote_ref.ref, stable_branch)]
+  bad_cls = git.RunGit(cwd, bad_cl_cmd).output
+  if bad_cls.strip() and not dryrun:
+    logging.error('The Uprev stage found changes from users other than '
+                  'chrome-bot:\n\n%s', bad_cls)
+    raise AssertionError('Unexpected CLs found during uprev stage.')
+
+  description = git.RunGit(
+      cwd,
+      ['log', '--format=format:%s%n%n%b',
+       '%s..%s' % (remote_ref.ref, stable_branch)]).output
+  description = '%s\n\n%s' % (GIT_COMMIT_SUBJECT, description)
+  logging.info('For %s, using description %s', cwd, description)
+  git.CreatePushBranch(constants.MERGE_BRANCH, cwd)
+  git.RunGit(cwd, ['merge', '--squash', stable_branch])
+  git.RunGit(cwd, ['commit', '-m', description])
+  git.RunGit(cwd, ['config', 'push.default', 'tracking'])
+  git.PushWithRetry(constants.MERGE_BRANCH, cwd, dryrun=dryrun)
+
+
+class GitBranch(object):
+  """Wrapper class for a git branch."""
+
+  def __init__(self, branch_name, tracking_branch, cwd):
+    """Sets up variables but does not create the branch.
+
+    Args:
+      branch_name: The name of the branch.
+      tracking_branch: The associated tracking branch.
+      cwd: The git repository to work in.
+    """
+    self.branch_name = branch_name
+    self.tracking_branch = tracking_branch
+    self.cwd = cwd
+
+  def CreateBranch(self):
+    self.Checkout()
+
+  def Checkout(self, branch=None):
+    """Function used to check out to another GitBranch."""
+    if not branch:
+      branch = self.branch_name
+    if branch == self.tracking_branch or self.Exists(branch):
+      git_cmd = ['git', 'checkout', '-f', branch]
+    else:
+      git_cmd = ['repo', 'start', branch, '.']
+    cros_build_lib.RunCommand(git_cmd, print_cmd=False, cwd=self.cwd,
+                              capture_output=True)
+
+  def Exists(self, branch=None):
+    """Returns True if the branch exists."""
+    if not branch:
+      branch = self.branch_name
+    branches = git.RunGit(self.cwd, ['branch']).output
+    return branch in branches.split()
+
+
+def GetParser():
+  """Creates the argparse parser."""
+  parser = commandline.ArgumentParser()
+  parser.add_argument('--all', action='store_true',
+                      help='Mark all packages as stable.')
+  parser.add_argument('-b', '--boards', default='',
+                      help='Colon-separated list of boards.')
+  parser.add_argument('--drop_file',
+                      help='File to list packages that were revved.')
+  parser.add_argument('--dryrun', action='store_true',
+                      help='Passes dry-run to git push if pushing a change.')
+  parser.add_argument('--force', action='store_true',
+                      help='Force the stabilization of blacklisted packages. '
+                      '(only compatible with -p)')
+  parser.add_argument('-o', '--overlays',
+                      help='Colon-separated list of overlays to modify.')
+  parser.add_argument('-p', '--packages',
+                      help='Colon separated list of packages to rev.')
+  parser.add_argument('-r', '--srcroot', type='path',
+                      default=os.path.join(constants.SOURCE_ROOT, 'src'),
+                      help='Path to root src directory.')
+  parser.add_argument('--verbose', action='store_true',
+                      help='Prints out debug info.')
+  parser.add_argument('command', choices=COMMAND_DICTIONARY.keys(),
+                      help='Command to run.')
+  return parser
+
+
+def main(argv):
+  parser = GetParser()
+  options = parser.parse_args(argv)
+  options.Freeze()
+
+  if options.command == 'commit':
+    if not options.packages and not options.all:
+      parser.error('Please specify at least one package (--packages)')
+    if options.force and options.all:
+      parser.error('Cannot use --force with --all. You must specify a list of '
+                   'packages you want to force uprev.')
+
+  if not os.path.isdir(options.srcroot):
+    parser.error('srcroot is not a valid path: %s' % options.srcroot)
+
+  portage_util.EBuild.VERBOSE = options.verbose
+
+  package_list = None
+  if options.packages:
+    package_list = options.packages.split(':')
+
+  if options.overlays:
+    overlays = {}
+    for path in options.overlays.split(':'):
+      if not os.path.isdir(path):
+        cros_build_lib.Die('Cannot find overlay: %s' % path)
+      overlays[path] = []
+  else:
+    logging.warning('Missing --overlays argument')
+    overlays = {
+        '%s/private-overlays/chromeos-overlay' % options.srcroot: [],
+        '%s/third_party/chromiumos-overlay' % options.srcroot: [],
+    }
+
+  manifest = git.ManifestCheckout.Cached(options.srcroot)
+
+  if options.command == 'commit':
+    portage_util.BuildEBuildDictionary(overlays, options.all, package_list,
+                                       allow_blacklisted=options.force)
+
+  # Contains the array of packages we actually revved.
+  revved_packages = []
+  new_package_atoms = []
+
+  for overlay in overlays:
+    ebuilds = overlays[overlay]
+    if not os.path.isdir(overlay):
+      logging.warning('Skipping %s' % overlay)
+      continue
+
+    # Note we intentionally work from the non push tracking branch;
+    # everything built thus far has been against it (meaning, http mirrors),
+    # thus we should honor that.  During the actual push, the code switches
+    # to the correct urls, and does an appropriate rebasing.
+    tracking_branch = git.GetTrackingBranchViaManifest(
+        overlay, manifest=manifest).ref
+
+    if options.command == 'push':
+      PushChange(constants.STABLE_EBUILD_BRANCH, tracking_branch,
+                 options.dryrun, cwd=overlay)
+    elif options.command == 'commit':
+      existing_commit = git.GetGitRepoRevision(overlay)
+      work_branch = GitBranch(constants.STABLE_EBUILD_BRANCH, tracking_branch,
+                              cwd=overlay)
+      work_branch.CreateBranch()
+      if not work_branch.Exists():
+        cros_build_lib.Die('Unable to create stabilizing branch in %s' %
+                           overlay)
+
+      # In the case of uprevving overlays that have patches applied to them,
+      # include the patched changes in the stabilizing branch.
+      git.RunGit(overlay, ['rebase', existing_commit])
+
+      messages = []
+      for ebuild in ebuilds:
+        if options.verbose:
+          logging.info('Working on %s', ebuild.package)
+        try:
+          new_package = ebuild.RevWorkOnEBuild(options.srcroot, manifest)
+          if new_package:
+            revved_packages.append(ebuild.package)
+            new_package_atoms.append('=%s' % new_package)
+            messages.append(_GIT_COMMIT_MESSAGE % ebuild.package)
+        except (OSError, IOError):
+          logging.warning(
+              'Cannot rev %s\n'
+              'Note you will have to go into %s '
+              'and reset the git repo yourself.' % (ebuild.package, overlay))
+          raise
+
+      if messages:
+        portage_util.EBuild.CommitChange('\n\n'.join(messages), overlay)
+
+  if options.command == 'commit':
+    chroot_path = os.path.join(options.srcroot, constants.DEFAULT_CHROOT_DIR)
+    if os.path.exists(chroot_path):
+      CleanStalePackages(options.srcroot, options.boards.split(':'),
+                         new_package_atoms)
+    if options.drop_file:
+      osutils.WriteFile(options.drop_file, ' '.join(revved_packages))
diff --git a/scripts/cros_mark_as_stable_unittest b/scripts/cros_mark_as_stable_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/cros_mark_as_stable_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/cros_mark_as_stable_unittest.py b/scripts/cros_mark_as_stable_unittest.py
new file mode 100644
index 0000000..9d87e1b
--- /dev/null
+++ b/scripts/cros_mark_as_stable_unittest.py
@@ -0,0 +1,152 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for cros_mark_as_stable.py."""
+
+from __future__ import print_function
+
+import mock
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_build_lib_unittest
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import parallel_unittest
+from chromite.lib import partial_mock
+from chromite.scripts import cros_mark_as_stable
+
+
+class RunGitMock(partial_mock.PartialCmdMock):
+  """Partial mock for git.RunMock."""
+  TARGET = 'chromite.lib.git'
+  ATTRS = ('RunGit',)
+  DEFAULT_ATTR = 'RunGit'
+
+  def RunGit(self, _git_repo, cmd, _retry=True, **kwargs):
+    return self._results['RunGit'].LookupResult(
+        (cmd,), hook_args=(cmd,), hook_kwargs=kwargs)
+
+
+class NonClassTests(cros_test_lib.MockTestCase):
+  """Test the flow for pushing a change."""
+
+  def setUp(self):
+    self._branch = 'test_branch'
+    self._target_manifest_branch = 'cros/master'
+
+  def _TestPushChange(self, bad_cls):
+    side_effect = Exception('unittest says this should not be called')
+
+    git_log = 'Marking test_one as stable\nMarking test_two as stable\n'
+    fake_description = 'Marking set of ebuilds as stable\n\n%s' % git_log
+    self.PatchObject(cros_mark_as_stable, '_DoWeHaveLocalCommits',
+                     return_value=True)
+    self.PatchObject(cros_mark_as_stable.GitBranch, 'CreateBranch',
+                     side_effect=side_effect)
+    self.PatchObject(cros_mark_as_stable.GitBranch, 'Exists',
+                     side_effect=side_effect)
+
+    push_mock = self.PatchObject(git, 'PushWithRetry')
+    self.PatchObject(
+        git, 'GetTrackingBranch',
+        return_value=git.RemoteRef('gerrit', 'refs/remotes/gerrit/master'))
+    sync_mock = self.PatchObject(git, 'SyncPushBranch')
+    create_mock = self.PatchObject(git, 'CreatePushBranch')
+    git_mock = self.StartPatcher(RunGitMock())
+
+    cmd = ['log', '--format=short', '--perl-regexp', '--author',
+           '^(?!chrome-bot)', 'refs/remotes/gerrit/master..%s' % self._branch]
+
+    if bad_cls:
+      push_mock.side_effect = side_effect
+      create_mock.side_effect = side_effect
+      git_mock.AddCmdResult(cmd, output='Found bad stuff')
+    else:
+      git_mock.AddCmdResult(cmd, output='\n')
+      cmd = ['log', '--format=format:%s%n%n%b',
+             'refs/remotes/gerrit/master..%s' % self._branch]
+      git_mock.AddCmdResult(cmd, output=git_log)
+      git_mock.AddCmdResult(['merge', '--squash', self._branch])
+      git_mock.AddCmdResult(['commit', '-m', fake_description])
+      git_mock.AddCmdResult(['config', 'push.default', 'tracking'])
+
+    try:
+      cros_mark_as_stable.PushChange(self._branch, self._target_manifest_branch,
+                                     False, '.')
+    finally:
+      sync_mock.assert_called_with('.', 'gerrit', 'refs/remotes/gerrit/master')
+      if not bad_cls:
+        push_mock.assert_called_with('merge_branch', '.', dryrun=False)
+        create_mock.assert_called_with('merge_branch', '.')
+
+  def testPushChange(self):
+    """Verify pushing changes works."""
+    self._TestPushChange(bad_cls=False)
+
+  def testPushChangeBadCls(self):
+    """Verify we do not push bad CLs."""
+    self.assertRaises(AssertionError, self._TestPushChange, bad_cls=True)
+
+
+class CleanStalePackagesTest(cros_build_lib_unittest.RunCommandTestCase):
+  """Tests for cros_mark_as_stable.CleanStalePackages."""
+
+  def setUp(self):
+    self.PatchObject(osutils, 'FindMissingBinaries', return_value=[])
+
+  def testNormalClean(self):
+    """Clean up boards/packages with normal success"""
+    cros_mark_as_stable.CleanStalePackages('.', ('board1', 'board2'),
+                                           ['cow', 'car'])
+
+  def testNothingToUnmerge(self):
+    """Clean up packages that don't exist (portage will exit 1)"""
+    self.rc.AddCmdResult(partial_mock.In('emerge'), returncode=1)
+    cros_mark_as_stable.CleanStalePackages('.', (), ['no/pkg'])
+
+  def testUnmergeError(self):
+    """Make sure random exit errors are not ignored"""
+    self.rc.AddCmdResult(partial_mock.In('emerge'), returncode=123)
+    with parallel_unittest.ParallelMock():
+      self.assertRaises(cros_build_lib.RunCommandError,
+                        cros_mark_as_stable.CleanStalePackages,
+                        '.', (), ['no/pkg'])
+
+
+class GitBranchTest(cros_test_lib.MockTestCase):
+  """Tests for cros_mark_as_stable.GitBranch."""
+
+  def setUp(self):
+    # Always stub RunCommand out as we use it in every method.
+    self.rc_mock = self.PatchObject(cros_build_lib, 'RunCommand')
+
+    self._branch_name = 'test_branch'
+    self._target_manifest_branch = 'cros/test'
+    self._branch = cros_mark_as_stable.GitBranch(
+        branch_name=self._branch_name,
+        tracking_branch=self._target_manifest_branch,
+        cwd='.')
+
+  def testCheckoutCreate(self):
+    """Test init with no previous branch existing."""
+    self.PatchObject(self._branch, 'Exists', return_value=False)
+    cros_mark_as_stable.GitBranch.Checkout(self._branch)
+    self.rc_mock.assert_call(mock.call(
+        ['repo', 'start', self._branch_name, '.'],
+        print_cmd=False, cwd='.', capture_output=True))
+
+  def testCheckoutNoCreate(self):
+    """Test init with previous branch existing."""
+    self.PatchObject(self._branch, 'Exists', return_value=True)
+    cros_mark_as_stable.GitBranch.Checkout(self._branch)
+    self.rc_mock.assert_call(mock.call(
+        ['git', 'checkout', '-f', self._branch_name],
+        print_cmd=False, cwd='.', capture_output=True))
+
+  def testExists(self):
+    """Test if branch exists that is created."""
+    result = cros_build_lib.CommandResult(output=self._branch_name + '\n')
+    self.PatchObject(git, 'RunGit', return_value=result)
+    self.assertTrue(self._branch.Exists())
diff --git a/scripts/cros_mark_chrome_as_stable.py b/scripts/cros_mark_chrome_as_stable.py
new file mode 100644
index 0000000..d5cebf5
--- /dev/null
+++ b/scripts/cros_mark_chrome_as_stable.py
@@ -0,0 +1,555 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module uprevs Chrome for cbuildbot.
+
+After calling, it prints out CHROME_VERSION_ATOM=(version atom string).  A
+caller could then use this atom with emerge to build the newly uprevved version
+of Chrome e.g.
+
+./cros_mark_chrome_as_stable tot
+Returns chrome-base/chromeos-chrome-8.0.552.0_alpha_r1
+
+emerge-x86-generic =chrome-base/chromeos-chrome-8.0.552.0_alpha_r1
+"""
+
+from __future__ import print_function
+
+import base64
+import distutils.version
+import filecmp
+import optparse
+import os
+import re
+import sys
+import urlparse
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import gob_util
+from chromite.lib import portage_util
+from chromite.lib import timeout_util
+from chromite.scripts import cros_mark_as_stable
+
+
+# Helper regex's for finding ebuilds.
+_CHROME_VERSION_REGEX = r'\d+\.\d+\.\d+\.\d+'
+_NON_STICKY_REGEX = r'%s[(_rc.*)|(_alpha.*)]+' % _CHROME_VERSION_REGEX
+
+# Dir where all the action happens.
+_OVERLAY_DIR = '%(srcroot)s/third_party/chromiumos-overlay/'
+
+_GIT_COMMIT_MESSAGE = ('Marking %(chrome_rev)s for %(chrome_pn)s ebuild '
+                       'with version %(chrome_version)s as stable.')
+
+# URLs that print lists of chrome revisions between two versions of the browser.
+_CHROME_VERSION_URL = ('http://omahaproxy.appspot.com/changelog?'
+                       'old_version=%(old)s&new_version=%(new)s')
+
+# Only print links when we rev these types.
+_REV_TYPES_FOR_LINKS = [constants.CHROME_REV_LATEST,
+                        constants.CHROME_REV_STICKY]
+
+# TODO(szager): This is inaccurate, but is it safe to change?  I have no idea.
+_CHROME_SVN_TAG = 'CROS_SVN_COMMIT'
+
+
+def _GetVersionContents(chrome_version_info):
+  """Returns the current Chromium version, from the contents of a VERSION file.
+
+  Args:
+    chrome_version_info: The contents of a chromium VERSION file.
+  """
+  chrome_version_array = []
+  for line in chrome_version_info.splitlines():
+    chrome_version_array.append(line.rpartition('=')[2])
+
+  return '.'.join(chrome_version_array)
+
+
+def _GetSpecificVersionUrl(git_url, revision, time_to_wait=600):
+  """Returns the Chromium version, from a repository URL and version.
+
+  Args:
+    git_url: Repository URL for chromium.
+    revision: the git revision we want to use.
+    time_to_wait: the minimum period before abandoning our wait for the
+      desired revision to be present.
+  """
+  parsed_url = urlparse.urlparse(git_url)
+  host = parsed_url[1]
+  path = parsed_url[2].rstrip('/') + (
+      '/+/%s/chrome/VERSION?format=text' % revision)
+
+  # Allow for git repository replication lag with sleep/retry loop.
+  def _fetch():
+    fh = gob_util.FetchUrl(host, path, ignore_404=True)
+    return fh.read() if fh else None
+
+  def _wait_msg(_remaining):
+    logging.info('Repository does not yet have revision %s.  Sleeping...',
+                 revision)
+
+  content = timeout_util.WaitForSuccess(
+      retry_check=lambda x: not bool(x),
+      func=_fetch,
+      timeout=time_to_wait,
+      period=30,
+      side_effect_func=_wait_msg)
+  return _GetVersionContents(base64.b64decode(content))
+
+
+def _GetTipOfTrunkVersionFile(root):
+  """Returns the current Chromium version, from a file in a checkout.
+
+  Args:
+    root: path to the root of the chromium checkout.
+  """
+  version_file = os.path.join(root, 'src', 'chrome', 'VERSION')
+  chrome_version_info = cros_build_lib.RunCommand(
+      ['cat', version_file],
+      redirect_stdout=True,
+      error_message='Could not read version file at %s.' % version_file).output
+
+  return _GetVersionContents(chrome_version_info)
+
+
+def CheckIfChromeRightForOS(deps_content):
+  """Checks if DEPS is right for Chrome OS.
+
+  This function checks for a variable called 'buildspec_platforms' to
+find out if it's 'chromeos' or 'all'. If it has either of those values,
+  then it chooses that DEPS.
+
+  Args:
+    deps_content: Content of release buildspec DEPS file.
+
+  Returns:
+    True if DEPS is the right Chrome for Chrome OS.
+  """
+  platforms_search = re.search(r'buildspec_platforms.*\s.*\s', deps_content)
+
+  if platforms_search:
+    platforms = platforms_search.group()
+    if 'chromeos' in platforms or 'all' in platforms:
+      return True
+
+  return False
+
+
+def GetLatestRelease(git_url, branch=None):
+  """Gets the latest release version from the release tags in the repository.
+
+  Args:
+    git_url: URL of git repository.
+    branch: If set, gets the latest release for branch, otherwise latest
+      release.
+
+  Returns:
+    Latest version string.
+  """
+  # TODO(szager): This only works for public release buildspecs in the chromium
+  # src repository.  Internal buildspecs are tracked differently.  At the time
+  # of writing, I can't find any callers that use this method to scan for
+  # internal buildspecs.  But there may be something lurking...
+
+  parsed_url = urlparse.urlparse(git_url)
+  path = parsed_url[2].rstrip('/') + '/+refs/tags?format=JSON'
+  j = gob_util.FetchUrlJson(parsed_url[1], path, ignore_404=False)
+  if branch:
+    chrome_version_re = re.compile(r'^%s\.\d+.*' % branch)
+  else:
+    chrome_version_re = re.compile(r'^[0-9]+\..*')
+  matching_versions = [key for key in j.keys() if chrome_version_re.match(key)]
+  matching_versions.sort(key=distutils.version.LooseVersion)
+  for chrome_version in reversed(matching_versions):
+    path = parsed_url[2].rstrip() + (
+        '/+/refs/tags/%s/DEPS?format=text' % chrome_version)
+    fh = gob_util.FetchUrl(parsed_url[1], path, ignore_404=False)
+    content = fh.read() if fh else None
+    if content:
+      deps_content = base64.b64decode(content)
+      if CheckIfChromeRightForOS(deps_content):
+        return chrome_version
+
+  return None
+
+
+def _GetStickyEBuild(stable_ebuilds):
+  """Returns the sticky ebuild."""
+  sticky_ebuilds = []
+  non_sticky_re = re.compile(_NON_STICKY_REGEX)
+  for ebuild in stable_ebuilds:
+    if not non_sticky_re.match(ebuild.version):
+      sticky_ebuilds.append(ebuild)
+
+  if not sticky_ebuilds:
+    raise Exception('No sticky ebuilds found')
+  elif len(sticky_ebuilds) > 1:
+    logging.warning('More than one sticky ebuild found')
+
+  return portage_util.BestEBuild(sticky_ebuilds)
+
+
+class ChromeEBuild(portage_util.EBuild):
+  """Thin sub-class of EBuild that adds a chrome_version field."""
+  # Matches either a real Chrome version or the literal '9999' (the
+  # unstable/workon ebuild) embedded in an ebuild path.
+  chrome_version_re = re.compile(r'.*-(%s|9999).*' % (
+      _CHROME_VERSION_REGEX))
+  # Chrome version parsed from the ebuild path; '' when the path
+  # did not match chrome_version_re.
+  chrome_version = ''
+
+  def __init__(self, path):
+    """Initializes from the ebuild at |path|, extracting chrome_version."""
+    portage_util.EBuild.__init__(self, path)
+    re_match = self.chrome_version_re.match(self.ebuild_path_no_revision)
+    if re_match:
+      self.chrome_version = re_match.group(1)
+
+  def __str__(self):
+    return self.ebuild_path
+
+
+def FindChromeCandidates(package_dir):
+  """Return a tuple of chrome's unstable ebuild and stable ebuilds.
+
+  Args:
+    package_dir: The path to where the package ebuild is stored.
+
+  Returns:
+    Tuple [unstable_ebuild, stable_ebuilds].
+
+  Raises:
+    Exception: if no unstable ebuild exists for Chrome.
+  """
+  stable_ebuilds = []
+  unstable_ebuilds = []
+  for path in [
+      os.path.join(package_dir, entry) for entry in os.listdir(package_dir)]:
+    if path.endswith('.ebuild'):
+      ebuild = ChromeEBuild(path)
+      if not ebuild.chrome_version:
+        logging.warning('Poorly formatted ebuild found at %s' % path)
+      else:
+        if '9999' in ebuild.version:
+          unstable_ebuilds.append(ebuild)
+        else:
+          stable_ebuilds.append(ebuild)
+
+  # Apply some sanity checks.
+  if not unstable_ebuilds:
+    raise Exception('Missing 9999 ebuild for %s' % package_dir)
+  if not stable_ebuilds:
+    logging.warning('Missing stable ebuild for %s' % package_dir)
+
+  return portage_util.BestEBuild(unstable_ebuilds), stable_ebuilds
+
+
+def FindChromeUprevCandidate(stable_ebuilds, chrome_rev, sticky_branch):
+  """Finds the Chrome uprev candidate for the given chrome_rev.
+
+  Using the pre-flight logic, this means the stable ebuild you are uprevving
+  from.  The difference here is that the version could be different and in
+  that case we want to find it to delete it.
+
+  Args:
+    stable_ebuilds: A list of stable ebuilds.
+    chrome_rev: The chrome_rev designating which candidate to find.
+    sticky_branch: The branch that is currently sticky with Major/Minor
+      components.  For example: 9.0.553. Can be None but not if chrome_rev
+      is CHROME_REV_STICKY.
+
+  Returns:
+    The EBuild, otherwise None if none found.
+  """
+  candidates = []
+  if chrome_rev in [constants.CHROME_REV_LOCAL, constants.CHROME_REV_TOT,
+                    constants.CHROME_REV_SPEC]:
+    # These are labelled alpha, for historic reasons,
+    # not just for the fun of confusion.
+    chrome_branch_re = re.compile(r'%s.*_alpha.*' % _CHROME_VERSION_REGEX)
+    for ebuild in stable_ebuilds:
+      if chrome_branch_re.search(ebuild.version):
+        candidates.append(ebuild)
+
+  elif chrome_rev == constants.CHROME_REV_STICKY:
+    # Sticky revs only consider ebuilds on the sticky branch itself.
+    assert sticky_branch is not None
+    chrome_branch_re = re.compile(r'%s\..*' % sticky_branch)
+    for ebuild in stable_ebuilds:
+      if chrome_branch_re.search(ebuild.version):
+        candidates.append(ebuild)
+
+  else:
+    # Remaining rev types (e.g. latest release) use the _rc suffix.
+    chrome_branch_re = re.compile(r'%s.*_rc.*' % _CHROME_VERSION_REGEX)
+    for ebuild in stable_ebuilds:
+      if chrome_branch_re.search(ebuild.version):
+        candidates.append(ebuild)
+
+  if candidates:
+    return portage_util.BestEBuild(candidates)
+  else:
+    return None
+
+
+def _AnnotateAndPrint(text, url):
+  """Add buildbot trappings to print <a href='url'>text</a> in the waterfall.
+
+  Args:
+    text: Anchor text for the link
+    url: the URL to which to link
+  """
+  # @@@STEP_LINK@...@@@ is buildbot annotator syntax; emitted on stderr so
+  # it does not mix with the script's stdout output.
+  print('\n@@@STEP_LINK@%(text)s@%(url)s@@@' % {'text': text, 'url': url},
+        file=sys.stderr)
+
+
+def GetChromeRevisionLinkFromVersions(old_chrome_version, chrome_version):
+  """Return appropriately formatted link to revision info, given versions.
+
+  Given two chrome version strings (e.g. 9.0.533.0), generate a link to a
+  page that prints the Chromium revisions between those two versions.
+
+  Args:
+    old_chrome_version: version to diff from
+    chrome_version: version to which to diff
+
+  Returns:
+    The desired URL.
+  """
+  # _CHROME_VERSION_URL is a module-level template with 'old'/'new' slots.
+  return _CHROME_VERSION_URL % {'old': old_chrome_version,
+                                'new': chrome_version}
+
+
+def GetChromeRevisionListLink(old_chrome, new_chrome, chrome_rev):
+  """Returns a link to the list of revisions between two Chromium versions.
+
+  Given two ChromeEBuilds and the kind of rev we're doing, generate a
+  link to a page that prints the Chromium changes between those two
+  revisions, inclusive.
+
+  Args:
+    old_chrome: ebuild for the version to diff from
+    new_chrome: ebuild for the version to which to diff
+    chrome_rev: one of constants.VALID_CHROME_REVISIONS
+
+  Returns:
+    The desired URL.
+  """
+  # Only rev types in _REV_TYPES_FOR_LINKS have meaningful revision lists.
+  assert chrome_rev in _REV_TYPES_FOR_LINKS
+  return GetChromeRevisionLinkFromVersions(old_chrome.chrome_version,
+                                           new_chrome.chrome_version)
+
+
+def MarkChromeEBuildAsStable(stable_candidate, unstable_ebuild, chrome_pn,
+                             chrome_rev, chrome_version, commit, package_dir):
+  r"""Uprevs the chrome ebuild specified by chrome_rev.
+
+  This is the main function that uprevs the chrome_rev from a stable candidate
+  to its new version.
+
+  Args:
+    stable_candidate: ebuild that corresponds to the stable ebuild we are
+      revving from.  If None, builds the a new ebuild given the version
+      and logic for chrome_rev type with revision set to 1.
+    unstable_ebuild: ebuild corresponding to the unstable ebuild for chrome.
+    chrome_pn: package name.
+    chrome_rev: one of constants.VALID_CHROME_REVISIONS or LOCAL
+      constants.CHROME_REV_SPEC -  Requires commit value.  Revs the ebuild for
+        the specified version and uses the portage suffix of _alpha.
+      constants.CHROME_REV_TOT -  Requires commit value.  Revs the ebuild for
+        the TOT version and uses the portage suffix of _alpha.
+      constants.CHROME_REV_LOCAL - Requires a chrome_root. Revs the ebuild for
+        the local version and uses the portage suffix of _alpha.
+      constants.CHROME_REV_LATEST - This uses the portage suffix of _rc as they
+        are release candidates for the next sticky version.
+      constants.CHROME_REV_STICKY -  Revs the sticky version.
+    chrome_version: The \d.\d.\d.\d version of Chrome.
+    commit: Used with constants.CHROME_REV_TOT.  The git revision of chrome.
+    package_dir: Path to the chromeos-chrome package dir.
+
+  Returns:
+    Full portage version atom (including rc's, etc) that was revved.
+  """
+  def IsTheNewEBuildRedundant(new_ebuild, stable_ebuild):
+    """Returns True if the new ebuild is redundant.
+
+    This is True if there if the current stable ebuild is the exact same copy
+    of the new one.
+    """
+    if not stable_ebuild:
+      return False
+
+    if stable_candidate.chrome_version == new_ebuild.chrome_version:
+      return filecmp.cmp(
+          new_ebuild.ebuild_path, stable_ebuild.ebuild_path, shallow=False)
+
+  # Mark latest release and sticky branches as stable.
+  mark_stable = chrome_rev not in [constants.CHROME_REV_TOT,
+                                   constants.CHROME_REV_SPEC,
+                                   constants.CHROME_REV_LOCAL]
+
+  # Case where we have the last stable candidate with same version just rev.
+  if stable_candidate and stable_candidate.chrome_version == chrome_version:
+    new_ebuild_path = '%s-r%d.ebuild' % (
+        stable_candidate.ebuild_path_no_revision,
+        stable_candidate.current_revision + 1)
+  else:
+    suffix = 'rc' if mark_stable else 'alpha'
+    pf = '%s-%s_%s-r1' % (chrome_pn, chrome_version, suffix)
+    new_ebuild_path = os.path.join(package_dir, '%s.ebuild' % pf)
+
+  chrome_variables = dict()
+  if commit:
+    chrome_variables[_CHROME_SVN_TAG] = commit
+
+  portage_util.EBuild.MarkAsStable(
+      unstable_ebuild.ebuild_path, new_ebuild_path,
+      chrome_variables, make_stable=mark_stable)
+  new_ebuild = ChromeEBuild(new_ebuild_path)
+
+  # Determine whether this is ebuild is redundant.
+  if IsTheNewEBuildRedundant(new_ebuild, stable_candidate):
+    msg = 'Previous ebuild with same version found and ebuild is redundant.'
+    logging.info(msg)
+    os.unlink(new_ebuild_path)
+    return None
+
+  if stable_candidate and chrome_rev in _REV_TYPES_FOR_LINKS:
+    _AnnotateAndPrint('Chromium revisions',
+                      GetChromeRevisionListLink(stable_candidate,
+                                                new_ebuild,
+                                                chrome_rev))
+
+  git.RunGit(package_dir, ['add', new_ebuild_path])
+  if stable_candidate and not stable_candidate.IsSticky():
+    git.RunGit(package_dir, ['rm', stable_candidate.ebuild_path])
+
+  portage_util.EBuild.CommitChange(
+      _GIT_COMMIT_MESSAGE % {'chrome_pn': chrome_pn,
+                             'chrome_rev': chrome_rev,
+                             'chrome_version': chrome_version},
+      package_dir)
+
+  return '%s-%s' % (new_ebuild.package, new_ebuild.version)
+
+
+def main(_argv):
+  """Entry point: parse options and uprev Chrome (and related) ebuilds."""
+  usage_options = '|'.join(constants.VALID_CHROME_REVISIONS)
+  usage = '%s OPTIONS [%s]' % (__file__, usage_options)
+  parser = optparse.OptionParser(usage)
+  parser.add_option('-b', '--boards', default=None)
+  parser.add_option('-c', '--chrome_url',
+                    default=constants.CHROMIUM_GOB_URL)
+  parser.add_option('-f', '--force_version', default=None,
+                    help='Chrome version or git revision hash to use')
+  parser.add_option('-s', '--srcroot', default=os.path.join(os.environ['HOME'],
+                                                            'trunk', 'src'),
+                    help='Path to the src directory')
+  parser.add_option('-t', '--tracking_branch', default='cros/master',
+                    help='Branch we are tracking changes against')
+  (options, args) = parser.parse_args()
+
+  # Exactly one positional arg: the chrome_rev type.
+  if len(args) != 1 or args[0] not in constants.VALID_CHROME_REVISIONS:
+    parser.error('Commit requires arg set to one of %s.'
+                 % constants.VALID_CHROME_REVISIONS)
+
+  # --force_version only makes sense for SPEC and LATEST rev types.
+  if options.force_version and args[0] not in (constants.CHROME_REV_SPEC,
+                                               constants.CHROME_REV_LATEST):
+    parser.error('--force_version is not compatible with the %r '
+                 'option.' % (args[0],))
+
+  overlay_dir = os.path.abspath(_OVERLAY_DIR % {'srcroot': options.srcroot})
+  chrome_package_dir = os.path.join(overlay_dir, constants.CHROME_CP)
+  chrome_rev = args[0]
+  version_to_uprev = None
+  commit_to_use = None
+  sticky_branch = None
+
+  (unstable_ebuild, stable_ebuilds) = FindChromeCandidates(chrome_package_dir)
+
+  # Resolve the version (and possibly commit) to uprev to, per rev type.
+  if chrome_rev == constants.CHROME_REV_LOCAL:
+    if 'CHROME_ROOT' in os.environ:
+      chrome_root = os.environ['CHROME_ROOT']
+    else:
+      chrome_root = os.path.join(os.environ['HOME'], 'chrome_root')
+
+    version_to_uprev = _GetTipOfTrunkVersionFile(chrome_root)
+    commit_to_use = 'Unknown'
+    logging.info('Using local source, versioning is untrustworthy.')
+  elif chrome_rev == constants.CHROME_REV_SPEC:
+    # A dotted value is a version; anything else is a commit-ish
+    # (possibly suffixed with '@<rev>').
+    if '.' in options.force_version:
+      version_to_uprev = options.force_version
+    else:
+      commit_to_use = options.force_version
+      if '@' in commit_to_use:
+        commit_to_use = commit_to_use.rpartition('@')[2]
+      version_to_uprev = _GetSpecificVersionUrl(options.chrome_url,
+                                                commit_to_use)
+  elif chrome_rev == constants.CHROME_REV_TOT:
+    commit_to_use = gob_util.GetTipOfTrunkRevision(options.chrome_url)
+    version_to_uprev = _GetSpecificVersionUrl(options.chrome_url,
+                                              commit_to_use)
+  elif chrome_rev == constants.CHROME_REV_LATEST:
+    if options.force_version:
+      if '.' not in options.force_version:
+        parser.error('%s only accepts released Chrome versions, not SVN or '
+                     'Git revisions.' % (chrome_rev,))
+      version_to_uprev = options.force_version
+    else:
+      version_to_uprev = GetLatestRelease(options.chrome_url)
+  else:
+    # CHROME_REV_STICKY: derive the sticky branch from the sticky ebuild.
+    sticky_ebuild = _GetStickyEBuild(stable_ebuilds)
+    sticky_version = sticky_ebuild.chrome_version
+    sticky_branch = sticky_version.rpartition('.')[0]
+    version_to_uprev = GetLatestRelease(options.chrome_url, sticky_branch)
+
+  stable_candidate = FindChromeUprevCandidate(stable_ebuilds, chrome_rev,
+                                              sticky_branch)
+
+  if stable_candidate:
+    logging.info('Stable candidate found %s' % stable_candidate)
+  else:
+    logging.info('No stable candidate found.')
+
+  tracking_branch = 'remotes/m/%s' % os.path.basename(options.tracking_branch)
+  existing_branch = git.GetCurrentBranch(chrome_package_dir)
+  work_branch = cros_mark_as_stable.GitBranch(constants.STABLE_EBUILD_BRANCH,
+                                              tracking_branch,
+                                              chrome_package_dir)
+  work_branch.CreateBranch()
+
+  # In the case of uprevving overlays that have patches applied to them,
+  # include the patched changes in the stabilizing branch.
+  if existing_branch:
+    git.RunGit(chrome_package_dir, ['rebase', existing_branch])
+
+  chrome_version_atom = MarkChromeEBuildAsStable(
+      stable_candidate, unstable_ebuild, 'chromeos-chrome', chrome_rev,
+      version_to_uprev, commit_to_use, chrome_package_dir)
+  if chrome_version_atom:
+    if options.boards:
+      cros_mark_as_stable.CleanStalePackages(options.srcroot,
+                                             options.boards.split(':'),
+                                             [chrome_version_atom])
+
+    # If we did rev Chrome, now is a good time to uprev other packages.
+    for other_ebuild in constants.OTHER_CHROME_PACKAGES:
+      other_ebuild_name = os.path.basename(other_ebuild)
+      other_package_dir = os.path.join(overlay_dir, other_ebuild)
+      (other_unstable_ebuild, other_stable_ebuilds) = FindChromeCandidates(
+          other_package_dir)
+      other_stable_candidate = FindChromeUprevCandidate(other_stable_ebuilds,
+                                                        chrome_rev,
+                                                        sticky_branch)
+      revved_atom = MarkChromeEBuildAsStable(other_stable_candidate,
+                                             other_unstable_ebuild,
+                                             other_ebuild_name,
+                                             chrome_rev, version_to_uprev,
+                                             commit_to_use, other_package_dir)
+      if revved_atom and options.boards:
+        cros_mark_as_stable.CleanStalePackages(options.srcroot,
+                                               options.boards.split(':'),
+                                               [revved_atom])
+
+  # Explicit print to communicate to caller.
+  if chrome_version_atom:
+    print('CHROME_VERSION_ATOM=%s' % chrome_version_atom)
diff --git a/scripts/cros_mark_chrome_as_stable_unittest b/scripts/cros_mark_chrome_as_stable_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/cros_mark_chrome_as_stable_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/cros_mark_chrome_as_stable_unittest.py b/scripts/cros_mark_chrome_as_stable_unittest.py
new file mode 100644
index 0000000..6580c1d
--- /dev/null
+++ b/scripts/cros_mark_chrome_as_stable_unittest.py
@@ -0,0 +1,283 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for cros_mark_chrome_as_stable.py."""
+
+from __future__ import print_function
+
+import base64
+import cStringIO
+import mock
+import os
+from textwrap import dedent
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+from chromite.lib import gob_util
+from chromite.lib import osutils
+from chromite.lib import portage_util
+from chromite.scripts import cros_mark_chrome_as_stable
+
+
+unstable_data = 'KEYWORDS=~x86 ~arm'
+stable_data = 'KEYWORDS=x86 arm'
+fake_svn_rev = '12345'
+new_fake_svn_rev = '23456'
+
+
+class _StubCommandResult(object):
+  """Helper for mocking RunCommand results."""
+  def __init__(self, msg):
+    # Mimics the .output attribute of a real command result object.
+    self.output = msg
+
+class CrosMarkChromeAsStable(cros_test_lib.MockTempDirTestCase):
+  """Tests for cros_mark_chrome_as_stable."""
+
+  def setUp(self):
+    """Setup vars and create mock dir."""
+    self.tmp_overlay = os.path.join(self.tempdir, 'chromiumos-overlay')
+    self.mock_chrome_dir = os.path.join(self.tmp_overlay, constants.CHROME_CP)
+    os.makedirs(self.mock_chrome_dir)
+
+    # Template for building ebuild file names; '%s' is the version part.
+    ebuild = os.path.join(self.mock_chrome_dir,
+                          constants.CHROME_PN + '-%s.ebuild')
+    self.unstable = ebuild % '9999'
+    self.sticky_branch = '8.0.224'
+    self.sticky_version = '%s.503' % self.sticky_branch
+    self.sticky = ebuild % self.sticky_version
+    self.sticky_rc_version = '%s.504' % self.sticky_branch
+    self.sticky_rc = ebuild % (self.sticky_rc_version + '_rc-r1')
+    self.latest_stable_version = '8.0.300.1'
+    self.latest_stable = ebuild % (self.latest_stable_version + '_rc-r2')
+    self.tot_stable_version = '9.0.305.0'
+    self.tot_stable = ebuild % (self.tot_stable_version + '_alpha-r1')
+
+    # "New" versions used by the MarkAsStable tests below.
+    self.sticky_new_rc_version = '%s.520' % self.sticky_branch
+    self.sticky_new_rc = ebuild % (self.sticky_new_rc_version + '_rc-r1')
+    self.latest_new_version = '9.0.305.1'
+    self.latest_new = ebuild % (self.latest_new_version + '_rc-r1')
+    self.tot_new_version = '9.0.306.0'
+    self.tot_new = ebuild % (self.tot_new_version + '_alpha-r1')
+
+    osutils.WriteFile(self.unstable, unstable_data)
+    osutils.WriteFile(self.sticky, stable_data)
+    osutils.WriteFile(self.sticky_rc, stable_data)
+    osutils.WriteFile(self.latest_stable, stable_data)
+    # pylint: disable=protected-access
+    osutils.WriteFile(
+        self.tot_stable,
+        '\n'.join((stable_data,
+                   '%s=%s' % (cros_mark_chrome_as_stable._CHROME_SVN_TAG,
+                              fake_svn_rev))))
+
+  def testFindChromeCandidates(self):
+    """Test creation of stable ebuilds from mock dir."""
+    unstable, stable_ebuilds = cros_mark_chrome_as_stable.FindChromeCandidates(
+        self.mock_chrome_dir)
+
+    stable_ebuild_paths = [x.ebuild_path for x in stable_ebuilds]
+    self.assertEqual(unstable.ebuild_path, self.unstable)
+    self.assertEqual(len(stable_ebuilds), 4)
+    self.assertTrue(self.sticky in stable_ebuild_paths)
+    self.assertTrue(self.sticky_rc in stable_ebuild_paths)
+    self.assertTrue(self.latest_stable in stable_ebuild_paths)
+    self.assertTrue(self.tot_stable in stable_ebuild_paths)
+
+  def _GetStableEBuilds(self):
+    """Common helper to create a list of stable ebuilds."""
+    return [
+        cros_mark_chrome_as_stable.ChromeEBuild(self.sticky),
+        cros_mark_chrome_as_stable.ChromeEBuild(self.sticky_rc),
+        cros_mark_chrome_as_stable.ChromeEBuild(self.latest_stable),
+        cros_mark_chrome_as_stable.ChromeEBuild(self.tot_stable),
+    ]
+
+  def testTOTFindChromeUprevCandidate(self):
+    """Tests if we can find tot uprev candidate from our mock dir data."""
+    stable_ebuilds = self._GetStableEBuilds()
+
+    candidate = cros_mark_chrome_as_stable.FindChromeUprevCandidate(
+        stable_ebuilds, constants.CHROME_REV_TOT,
+        self.sticky_branch)
+
+    # TOT candidates carry the _alpha suffix; only tot_stable matches.
+    self.assertEqual(candidate.ebuild_path, self.tot_stable)
+
+  def testLatestFindChromeUprevCandidate(self):
+    """Tests if we can find latest uprev candidate from our mock dir data."""
+    stable_ebuilds = self._GetStableEBuilds()
+
+    candidate = cros_mark_chrome_as_stable.FindChromeUprevCandidate(
+        stable_ebuilds, constants.CHROME_REV_LATEST,
+        self.sticky_branch)
+
+    # The highest-versioned _rc ebuild should win.
+    self.assertEqual(candidate.ebuild_path, self.latest_stable)
+
+  def testStickyFindChromeUprevCandidate(self):
+    """Tests if we can find sticky uprev candidate from our mock dir data."""
+    stable_ebuilds = self._GetStableEBuilds()
+
+    candidate = cros_mark_chrome_as_stable.FindChromeUprevCandidate(
+        stable_ebuilds, constants.CHROME_REV_STICKY,
+        self.sticky_branch)
+
+    # Sticky candidates must be on the sticky branch (8.0.224.*).
+    self.assertEqual(candidate.ebuild_path, self.sticky_rc)
+
+  def testGetTipOfTrunkRevision(self):
+    """Tests if we can get the latest svn revision from TOT."""
+    A_URL = 'dorf://mink/delaane/forkat/sertiunu.ortg./desk'
+    result = {'log': [{'commit': 'deadbeef' * 5}]}
+    self.PatchObject(gob_util, 'FetchUrlJson', return_value=result)
+    revision = gob_util.GetTipOfTrunkRevision(A_URL)
+    self.assertEquals(revision, 'deadbeef' * 5)
+
+  def testGetTipOfTrunkVersion(self):
+    """Tests if we get the latest version from TOT."""
+    TEST_URL = 'proto://host.org/path/to/repo'
+    TEST_VERSION_CONTENTS = dedent('''\
+        A=8
+        B=0
+        C=256
+        D=0''')
+    result = cStringIO.StringIO(base64.b64encode(TEST_VERSION_CONTENTS))
+    self.PatchObject(gob_util, 'FetchUrl', return_value=result)
+    # pylint: disable=protected-access
+    version = cros_mark_chrome_as_stable._GetSpecificVersionUrl(
+        TEST_URL, 'test-revision')
+    self.assertEquals(version, '8.0.256.0')
+
+  def testCheckIfChromeRightForOS(self):
+    """Tests if we can find the chromeos build from our mock DEPS."""
+    # DEPS snippets: one targeting chromeos, one not.
+    test_data1 = "buildspec_platforms:\n    'chromeos,',\n"
+    test_data2 = "buildspec_platforms:\n    'android,',\n"
+    expected_deps = cros_mark_chrome_as_stable.CheckIfChromeRightForOS(
+        test_data1)
+    unexpected_deps = cros_mark_chrome_as_stable.CheckIfChromeRightForOS(
+        test_data2)
+    self.assertTrue(expected_deps)
+    self.assertFalse(unexpected_deps)
+
+  def testGetLatestRelease(self):
+    """Tests if we can find the latest release from our mock url data."""
+    TEST_HOST = 'sores.chromium.org'
+    TEST_URL = 'phthp://%s/tqs' % TEST_HOST
+    # Non-version tags ('foo', 'bar-...') must be filtered out.
+    TEST_TAGS = ['7.0.224.1', '7.0.224', '8.0.365.5', 'foo', 'bar-12.13.14.15']
+    TEST_REFS_JSON = dict((tag, None) for tag in TEST_TAGS)
+    TEST_BAD_DEPS_CONTENT = dedent('''\
+        buildspec_platforms: 'TRS-80,',
+        ''')
+    TEST_GOOD_DEPS_CONTENT = dedent('''\
+        buildspec_platforms: 'chromeos,',
+        ''')
+
+    # First (highest) candidate has bad DEPS, so GetLatestRelease must
+    # fall through to the next one with a chromeos buildspec.
+    self.PatchObject(gob_util, 'FetchUrl', side_effect=(
+        cStringIO.StringIO(base64.b64encode(TEST_BAD_DEPS_CONTENT)),
+        cStringIO.StringIO(base64.b64encode(TEST_GOOD_DEPS_CONTENT)),
+    ))
+    self.PatchObject(gob_util, 'FetchUrlJson', side_effect=(TEST_REFS_JSON,))
+    release = cros_mark_chrome_as_stable.GetLatestRelease(TEST_URL)
+    self.assertEqual('7.0.224.1', release)
+
+  def testGetLatestStickyRelease(self):
+    """Tests if we can find the latest sticky release from our mock url data."""
+    TEST_HOST = 'sores.chromium.org'
+    TEST_URL = 'phthp://%s/tqs' % TEST_HOST
+    TEST_TAGS = ['7.0.224.2', '7.0.224', '7.0.365.5', 'foo', 'bar-12.13.14.15']
+    TEST_REFS_JSON = dict((tag, None) for tag in TEST_TAGS)
+    TEST_DEPS_CONTENT = dedent('''\
+        buildspec_platforms: 'chromeos,',
+        ''')
+
+    self.PatchObject(gob_util, 'FetchUrl', side_effect=(
+        cStringIO.StringIO(base64.b64encode(TEST_DEPS_CONTENT)),
+    ))
+    self.PatchObject(gob_util, 'FetchUrlJson', side_effect=(TEST_REFS_JSON,))
+    # Passing the branch restricts matches to 7.0.224.* tags.
+    release = cros_mark_chrome_as_stable.GetLatestRelease(TEST_URL, '7.0.224')
+    self.assertEqual('7.0.224.2', release)
+
+  def testLatestChromeRevisionListLink(self):
+    """Tests link generation to rev lists.
+
+    Verifies that we can generate a link to the revision list between the
+    latest Chromium release and the last one we successfully built.
+    """
+    osutils.WriteFile(self.latest_new, stable_data)
+    expected = cros_mark_chrome_as_stable.GetChromeRevisionLinkFromVersions(
+        self.latest_stable_version, self.latest_new_version)
+    made = cros_mark_chrome_as_stable.GetChromeRevisionListLink(
+        cros_mark_chrome_as_stable.ChromeEBuild(self.latest_stable),
+        cros_mark_chrome_as_stable.ChromeEBuild(self.latest_new),
+        constants.CHROME_REV_LATEST)
+    self.assertEqual(expected, made)
+
+  def testStickyEBuild(self):
+    """Tests if we can find the sticky ebuild from our mock directories."""
+    # pylint: disable=protected-access
+    stable_ebuilds = self._GetStableEBuilds()
+    sticky_ebuild = cros_mark_chrome_as_stable._GetStickyEBuild(
+        stable_ebuilds)
+    # Only self.sticky lacks an _rc/_alpha suffix in the fixture set.
+    self.assertEqual(sticky_ebuild.chrome_version, self.sticky_version)
+
+  def testChromeEBuildInit(self):
+    """Tests if the chrome_version is set correctly in a ChromeEBuild."""
+    ebuild = cros_mark_chrome_as_stable.ChromeEBuild(self.sticky)
+    self.assertEqual(ebuild.chrome_version, self.sticky_version)
+
+  def _CommonMarkAsStableTest(self, chrome_rev, new_version, old_ebuild_path,
+                              new_ebuild_path, commit_string_indicator):
+    """Common function used for test functions for MarkChromeEBuildAsStable.
+
+    This function stubs out others calls, and runs MarkChromeEBuildAsStable
+    with the specified args.
+
+    Args:
+      chrome_rev: standard chrome_rev argument
+      new_version: version we are revving up to
+      old_ebuild_path: path to the stable ebuild
+      new_ebuild_path: path to the to be created path
+      commit_string_indicator: a string that the commit message must contain
+    """
+    self.PatchObject(cros_build_lib, 'RunCommand',
+                     side_effect=Exception('should not be called'))
+    git_mock = self.PatchObject(git, 'RunGit')
+    commit_mock = self.PatchObject(portage_util.EBuild, 'CommitChange')
+    stable_candidate = cros_mark_chrome_as_stable.ChromeEBuild(old_ebuild_path)
+    unstable_ebuild = cros_mark_chrome_as_stable.ChromeEBuild(self.unstable)
+    chrome_pn = 'chromeos-chrome'
+    chrome_version = new_version
+    commit = None
+    package_dir = self.mock_chrome_dir
+
+    cros_mark_chrome_as_stable.MarkChromeEBuildAsStable(
+        stable_candidate, unstable_ebuild, chrome_pn, chrome_rev,
+        chrome_version, commit, package_dir)
+
+    git_mock.assert_has_calls([
+        mock.call(package_dir, ['add', new_ebuild_path]),
+        mock.call(package_dir, ['rm', old_ebuild_path]),
+    ])
+    commit_mock.assert_call(mock.call(commit_string_indicator, package_dir))
+
+  def testStickyMarkAsStable(self):
+    """Tests to see if we can mark chrome as stable for a new sticky release."""
+    self._CommonMarkAsStableTest(
+        constants.CHROME_REV_STICKY,
+        self.sticky_new_rc_version, self.sticky_rc,
+        self.sticky_new_rc, 'stable_release')
+
+  def testLatestMarkAsStable(self):
+    """Tests to see if we can mark chrome for a latest release."""
+    self._CommonMarkAsStableTest(
+        constants.CHROME_REV_LATEST,
+        self.latest_new_version, self.latest_stable,
+        self.latest_new, 'latest_release')
+
+  def testTotMarkAsStable(self):
+    """Tests to see if we can mark chrome for tot."""
+    # 'tot' is the expected commit-message indicator for TOT revs.
+    self._CommonMarkAsStableTest(
+        constants.CHROME_REV_TOT,
+        self.tot_new_version, self.tot_stable,
+        self.tot_new, 'tot')
diff --git a/scripts/cros_mark_mojo_as_stable.py b/scripts/cros_mark_mojo_as_stable.py
new file mode 100644
index 0000000..adc0051
--- /dev/null
+++ b/scripts/cros_mark_mojo_as_stable.py
@@ -0,0 +1,153 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module uprevs Mojo for cbuildbot.
+
+If a new version of Mojo is found, a stable ebuild for the new version
+is created and older stable ebuilds will be deleted. The changes will be
+contained in a single commit on the "stabilizing_branch" branch which
+will be created if it does not already exist. Additionally, the program
+will print out
+
+ MOJO_VERSION_ATOM="version atom string"
+
+This can be used with emerge to build the newly uprevved version:
+
+ $ cros_mark_mojo_as_stable
+ MOJO_VERSION_ATOM=dev-libs/mojo-0.20141202.181307-r1
+ $ emerge-lumpy =dev-libs/mojo-0.20141202.181307-r1
+
+If no new version of Mojo is found, the program will not print anything.
+"""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+from chromite.lib import git
+from chromite.lib import gob_util
+from chromite.lib import portage_util
+from chromite.scripts import cros_mark_as_stable
+
+
+MOJO_CATEGORY = 'dev-libs'
+MOJO_PN = 'mojo'
+MOJO_CP = MOJO_CATEGORY + '/' + MOJO_PN
+MOJO_EBUILD_PATH = 'third_party/chromiumos-overlay/' + MOJO_CP
+MOJO_REPO_URL = 'https://chromium.googlesource.com/external/mojo'
+
+
+def GetStableEBuilds(ebuild_dir):
+  """Gets all stable ebuilds from the given directory.
+
+  Args:
+    ebuild_dir: Path to the directory to look in.
+
+  Returns:
+    A list of ebuild file names (not full paths) in the given directory,
+    excluding any ending in portage_util.WORKON_EBUILD_SUFFIX (the
+    unstable/workon ebuild).
+  """
+  return [x for x in os.listdir(ebuild_dir)
+          if x.endswith('.ebuild') and not
+          x.endswith(portage_util.WORKON_EBUILD_SUFFIX)]
+
+
+def UprevStableEBuild(ebuild_dir, commit_to_use, date_of_commit,
+                      tracking_branch='cros/master'):
+  """Checks if there already is a stable Mojo ebuild for the given commit.
+
+  If there already is a stable Mojo ebuild for the given commit, this
+  function does nothing and returns None. Otherwise creates a stabilization
+  branch with a single commit that creates a new stable ebuild and deletes
+  all other stable ebuilds.
+
+  Args:
+    ebuild_dir: Path to the directory holding Mojo ebuilds.
+    commit_to_use: The upstream Mojo commit id.
+    date_of_commit: The date of the commit.
+    tracking_branch: The branch that the stabilization branch should track.
+
+  Returns:
+    None or a version atom describing the newly created ebuild.
+  """
+  # There is no version number or other monotonically increasing value
+  # that is suitable to use as a version number except for the point
+  # in time that a commit was added to the repository. So we use that
+  # for now.
+  pvr = date_of_commit.strftime('0.%Y%m%d.%H%M%S-r1')
+  mojo_stable_pn = '%s-%s.ebuild' % (MOJO_PN, pvr)
+
+  # Find existing stable ebuilds and only add a new one if there's a
+  # newer commit.
+  existing_ebuilds = GetStableEBuilds(ebuild_dir)
+  if mojo_stable_pn in existing_ebuilds:
+    return None
+
+  # OK. First create a stabilizing branch.
+  tracking_branch_full = 'remotes/m/%s' % os.path.basename(tracking_branch)
+  existing_branch = git.GetCurrentBranch(ebuild_dir)
+  work_branch = cros_mark_as_stable.GitBranch(constants.STABLE_EBUILD_BRANCH,
+                                              tracking_branch_full,
+                                              ebuild_dir)
+  work_branch.CreateBranch()
+
+  # In the case of uprevving overlays that have patches applied to them,
+  # include the patched changes in the stabilizing branch.
+  if existing_branch:
+    git.RunGit(ebuild_dir, ['rebase', existing_branch])
+
+  # Create a new ebuild.
+  unstable_ebuild_path = os.path.join(ebuild_dir, MOJO_PN +
+                                      portage_util.WORKON_EBUILD_SUFFIX)
+  new_stable_ebuild_path = os.path.join(ebuild_dir, mojo_stable_pn)
+  variables = {'MOJO_REVISION': commit_to_use}
+  portage_util.EBuild.MarkAsStable(unstable_ebuild_path,
+                                   new_stable_ebuild_path,
+                                   variables, make_stable=True)
+
+  # Add it to the repo.
+  git.RunGit(ebuild_dir, ['add', new_stable_ebuild_path])
+
+  # Nuke the now stale older ebuilds.
+  for f in existing_ebuilds:
+    git.RunGit(ebuild_dir, ['rm', '-f', f])
+
+  # ... and finally commit the change.
+  portage_util.EBuild.CommitChange('Updated %s to upstream commit %s.' %
+                                   (MOJO_CP, commit_to_use),
+                                   ebuild_dir)
+  # Return version atom for newly created ebuild.
+  return MOJO_CP + '-' + pvr
+
+
+def main(_argv):
+  """Entry point: resolve the Mojo commit to use and uprev its ebuild."""
+  parser = commandline.ArgumentParser(usage=__doc__)
+  parser.add_argument('--force_version', default=None,
+                      help='git revision hash to use')
+  parser.add_argument('--repo_url', default=MOJO_REPO_URL)
+  parser.add_argument('--srcroot', type='path',
+                      default=os.path.join(os.environ['HOME'], 'trunk', 'src'),
+                      help='Path to the src directory')
+  parser.add_argument('--tracking_branch', default='cros/master',
+                      help='Branch we are tracking changes against')
+  options = parser.parse_args()
+  options.Freeze()
+
+  mojo_version_atom = None
+  ebuild_dir = os.path.join(options.srcroot, MOJO_EBUILD_PATH)
+
+  # Figure out commit to use and its date.
+  if options.force_version:
+    commit_to_use = options.force_version
+  else:
+    commit_to_use = gob_util.GetTipOfTrunkRevision(options.repo_url)
+  date_of_commit = gob_util.GetCommitDate(options.repo_url, commit_to_use)
+
+  # Do the uprev and explicit print version to inform caller if we
+  # made a change.
+  mojo_version_atom = UprevStableEBuild(ebuild_dir, commit_to_use,
+                                        date_of_commit, options.tracking_branch)
+  if mojo_version_atom:
+    print('MOJO_VERSION_ATOM=%s' % mojo_version_atom)
diff --git a/scripts/cros_mark_mojo_as_stable_unittest b/scripts/cros_mark_mojo_as_stable_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/cros_mark_mojo_as_stable_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/cros_mark_mojo_as_stable_unittest.py b/scripts/cros_mark_mojo_as_stable_unittest.py
new file mode 100644
index 0000000..1427ce5
--- /dev/null
+++ b/scripts/cros_mark_mojo_as_stable_unittest.py
@@ -0,0 +1,112 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for cros_mark_mojo_as_stable.py."""
+
+from __future__ import print_function
+
+import datetime
+import mock
+import os
+
+from chromite.lib import cros_test_lib
+from chromite.lib import git
+from chromite.lib import osutils
+from chromite.lib import portage_util
+from chromite.scripts import cros_mark_as_stable
+from chromite.scripts import cros_mark_mojo_as_stable
+
+
+class CrosMarkChromeAsStableTestCase(cros_test_lib.MockTempDirTestCase):
+  """Various test helpers for cros_mark_mojo_as_stable."""
+
+  def setUp(self):
+    """Setup variables and create mock dir."""
+    self.tmp_overlay = os.path.join(self.tempdir, 'chromiumos-overlay')
+    self.fake_ebuild_dir = os.path.join(self.tmp_overlay,
+                                        cros_mark_mojo_as_stable.MOJO_CP)
+    osutils.SafeMakedirs(self.fake_ebuild_dir)
+
+  def WriteFile(self, filename, data=None):
+    """Writes data to a file in the fake ebuild directory.
+
+    Args:
+      filename: Name of file to write to.
+      data: Data to write or None for an empty file.
+    """
+    osutils.WriteFile(os.path.join(self.fake_ebuild_dir, filename), data or '')
+
+
+class GetStableEbuildsTest(CrosMarkChromeAsStableTestCase):
+  """Tests for GetStableEBuilds()."""
+
+  def testBasic(self):
+    """Basic sanity check."""
+    self.WriteFile('foo-9999.ebuild')
+    self.WriteFile('foo-42-r1.ebuild')
+    self.WriteFile('foo-43-r1.ebuild')
+    self.WriteFile('foo-44-r1.ebuild')
+    ebuilds = cros_mark_mojo_as_stable.GetStableEBuilds(self.fake_ebuild_dir)
+    self.assertEqual(len(ebuilds), 3)
+    self.assertIn('foo-42-r1.ebuild', ebuilds)
+    self.assertIn('foo-43-r1.ebuild', ebuilds)
+    self.assertIn('foo-44-r1.ebuild', ebuilds)
+
+
+class UprevStableEBuildTest(CrosMarkChromeAsStableTestCase):
+  """Tests for UprevStableEBuild()."""
+
+  def testSameCommit(self):
+    """Check we do nothing if there is no new commit."""
+    git_mock = self.PatchObject(git, 'RunGit')
+    git_branch_mock = self.PatchObject(cros_mark_as_stable, 'GitBranch')
+    commit_mock = self.PatchObject(portage_util.EBuild, 'CommitChange')
+
+    self.WriteFile('mojo-9999.ebuild')
+    self.WriteFile('mojo-0.20141205.164445-r1.ebuild')
+    commit_date = datetime.datetime(2014, 12, 5, 16, 44, 45)
+    atom = cros_mark_mojo_as_stable.UprevStableEBuild(self.fake_ebuild_dir,
+                                                      '1234ab', commit_date)
+    self.assertEqual(atom, None)
+    self.assertEqual(git_mock.call_count, 0)
+    self.assertEqual(git_branch_mock.call_count, 0)
+    self.assertEqual(commit_mock.call_count, 0)
+
+  def testNewCommit(self):
+    """Check we do the right thing if there is a new commit.
+
+    If there is a new commit, a new stable ebuild should be created and the
+    old ones should be deleted.
+    """
+    git_mock = self.PatchObject(git, 'RunGit')
+    git_branch_mock = self.PatchObject(cros_mark_as_stable, 'GitBranch')
+    commit_mock = self.PatchObject(portage_util.EBuild, 'CommitChange')
+
+    self.WriteFile('mojo-9999.ebuild')
+    self.WriteFile('mojo-0.20141204.120042-r1.ebuild')
+    self.WriteFile('mojo-0.20141205.164445-r1.ebuild')
+    commit_date = datetime.datetime(2014, 12, 6, 5, 0, 0)
+    atom = cros_mark_mojo_as_stable.UprevStableEBuild(self.fake_ebuild_dir,
+                                                      '1234ab', commit_date)
+    self.assertEqual(atom, 'dev-libs/mojo-0.20141206.050000-r1')
+
+    git_branch_mock.assert_called_once_with(
+        'stabilizing_branch', 'remotes/m/master',
+        self.fake_ebuild_dir)
+
+    # The 'git rm -f' calls could be in any order because
+    # GetStableEBuilds() does not guarantee the order.
+    git_mock.assert_has_calls([
+        mock.call(self.fake_ebuild_dir,
+                  ['add', os.path.join(self.fake_ebuild_dir,
+                                       'mojo-0.20141206.050000-r1.ebuild')]),
+        mock.call(self.fake_ebuild_dir,
+                  ['rm', '-f', 'mojo-0.20141204.120042-r1.ebuild']),
+        mock.call(self.fake_ebuild_dir,
+                  ['rm', '-f', 'mojo-0.20141205.164445-r1.ebuild']),
+    ], any_order=True)
+
+    commit_mock.assert_called_once_with(
+        'Updated dev-libs/mojo to upstream commit 1234ab.',
+        self.fake_ebuild_dir)
diff --git a/scripts/cros_merge_to_branch.py b/scripts/cros_merge_to_branch.py
new file mode 100644
index 0000000..4692bb1
--- /dev/null
+++ b/scripts/cros_merge_to_branch.py
@@ -0,0 +1,311 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Developer helper tool for merging CLs from ToT to branches.
+
+This simple program takes changes from gerrit/gerrit-int and creates new
+changes for them on the desired branch using your gerrit/ssh credentials. To
+specify a change on gerrit-int, you must prefix the change with a *.
+
+Note that this script is best used from within an existing checkout of
+Chromium OS that already has the changes you want merged to the branch in it
+i.e. if you want to push changes to crosutils.git, you must have src/scripts
+checked out. If this isn't true e.g. you are running this script from a
+minilayout or trying to upload an internal change from a non internal checkout,
+you must specify some extra options: use the --nomirror option and use -e to
+specify your email address. This tool will then checkout the git repo fresh
+using the credentials for the -e/email you specified and upload the change. Note
+you can always use this method but it's slower than the "mirrored" method and
+requires more typing :(.
+
+Examples:
+  cros_merge_to_branch 32027 32030 32031 release-R22.2723.B
+
+This will create changes for 32027, 32030 and 32031 on the R22 branch. To look
+up the name of a branch, go into a git sub-dir and type 'git branch -a' and then
+find the branch you want to merge to. If you want to upload internal changes
+from gerrit-int, you must prefix the gerrit change number with a * e.g.
+
+  cros_merge_to_branch *26108 release-R22.2723.B
+
+For more information on how to do this yourself you can go here:
+http://dev.chromium.org/chromium-os/how-tos-and-troubleshooting/working-on-a-br\
+anch
+"""
+
+from __future__ import print_function
+
+import errno
+import os
+import re
+import shutil
+import sys
+import tempfile
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import repository
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gerrit
+from chromite.lib import git
+from chromite.lib import patch as cros_patch
+
+
+def _GetParser():
+  """Returns the parser to use for this module."""
+  parser = commandline.ArgumentParser(description=__doc__)
+  parser.add_argument('-d', '--draft', default=False, action='store_true',
+                      help='upload a draft to Gerrit rather than a change')
+  parser.add_argument('-n', '--dry-run', default=False, action='store_true',
+                      dest='dryrun',
+                      help='apply changes locally but do not upload them')
+  parser.add_argument('-e', '--email',
+                      help='use this email instead of the email you would '
+                           'upload changes as; required w/--nomirror')
+  parser.add_argument('--nomirror', default=True, dest='mirror',
+                      action='store_false',
+                      help='checkout git repo directly; requires --email')
+  parser.add_argument('--nowipe', default=True, dest='wipe',
+                      action='store_false',
+                      help='do not wipe the work directory after finishing')
+  parser.add_argument('change', nargs='+', help='CLs to merge')
+  parser.add_argument('branch', help='the branch to merge to')
+  return parser
+
+
+def _UploadChangeToBranch(work_dir, patch, branch, draft, dryrun):
+  """Creates a new change from GerritPatch |patch| to |branch| from |work_dir|.
+
+  Args:
+    patch: Instance of GerritPatch to upload.
+    branch: Branch to upload to.
+    work_dir: Local directory where repository is checked out in.
+    draft: If True, upload to refs/draft/|branch| rather than refs/for/|branch|.
+    dryrun: Don't actually upload a change but go through all the steps up to
+      and including git push --dry-run.
+
+  Returns:
+    A list of all the gerrit URLs found.
+  """
+  upload_type = 'drafts' if draft else 'for'
+  # Download & setup the patch if need be.
+  patch.Fetch(work_dir)
+  # Apply the actual change.
+  patch.CherryPick(work_dir, inflight=True, leave_dirty=True)
+
+  # Get the new sha1 after apply.
+  new_sha1 = git.GetGitRepoRevision(work_dir)
+  reviewers = set()
+
+  # Filter out tags that are added by gerrit and chromite.
+  filter_re = re.compile(
+      r'((Commit|Trybot)-Ready|Commit-Queue|(Reviewed|Submitted|Tested)-by): ')
+
+  # Rewrite the commit message all the time.  Latest gerrit doesn't seem
+  # to like it when you use the same ChangeId on different branches.
+  msg = []
+  for line in patch.commit_message.splitlines():
+    if line.startswith('Reviewed-on: '):
+      line = 'Previous-' + line
+    elif filter_re.match(line):
+      # If the tag is malformed, or the person lacks a name,
+      # then that's just too bad -- throw it away.
+      ele = re.split(r'[<>@]+', line)
+      if len(ele) == 4:
+        reviewers.add('@'.join(ele[-3:-1]))
+      continue
+    msg.append(line)
+  msg += ['(cherry picked from commit %s)' % patch.sha1]
+  git.RunGit(work_dir, ['commit', '--amend', '-F', '-'],
+             input='\n'.join(msg).encode('utf8'))
+
+  # Get the new sha1 after rewriting the commit message.
+  new_sha1 = git.GetGitRepoRevision(work_dir)
+
+  # Create and use a LocalPatch to Upload the change to Gerrit.
+  local_patch = cros_patch.LocalPatch(
+      work_dir, patch.project_url, constants.PATCH_BRANCH,
+      patch.tracking_branch, patch.remote, new_sha1)
+  for reviewers in (reviewers, ()):
+    try:
+      return local_patch.Upload(
+          patch.project_url, 'refs/%s/%s' % (upload_type, branch),
+          carbon_copy=False, dryrun=dryrun, reviewers=reviewers)
+    except cros_build_lib.RunCommandError as e:
+      if (e.result.returncode == 128 and
+          re.search(r'fatal: user ".*?" not found', e.result.error)):
+        logging.warning('Some reviewers were not found (%s); '
+                        'dropping them & retrying upload', ' '.join(reviewers))
+        continue
+      raise
+
+
+def _SetupWorkDirectoryForPatch(work_dir, patch, branch, manifest, email):
+  """Set up local dir for uploading changes to the given patch's project."""
+  logging.notice('Setting up dir %s for uploading changes to %s', work_dir,
+                 patch.project_url)
+
+  # Clone the git repo from reference if we have a pointer to a
+  # ManifestCheckout object.
+  reference = None
+  if manifest:
+    # Get the path to the first checkout associated with this change. Since
+    # all of the checkouts share git objects, it doesn't matter which checkout
+    # we pick.
+    path = manifest.FindCheckouts(patch.project, only_patchable=True)[0]['path']
+
+    reference = os.path.join(constants.SOURCE_ROOT, path)
+    if not os.path.isdir(reference):
+      logging.error('Unable to locate git checkout: %s', reference)
+      logging.error('Did you mean to use --nomirror?')
+      # This will do a "raise OSError" with the right values.
+      os.open(reference, os.O_DIRECTORY)
+    # Use the email if email wasn't specified.
+    if not email:
+      email = git.GetProjectUserEmail(reference)
+
+  repository.CloneGitRepo(work_dir, patch.project_url, reference=reference)
+
+  # Set the git committer.
+  git.RunGit(work_dir, ['config', '--replace-all', 'user.email', email])
+
+  mbranch = git.MatchSingleBranchName(
+      work_dir, branch, namespace='refs/remotes/origin/')
+  if branch != mbranch:
+    logging.notice('Auto resolved branch name "%s" to "%s"', branch, mbranch)
+  branch = mbranch
+
+  # Finally, create a local branch for uploading changes to the given remote
+  # branch.
+  git.CreatePushBranch(
+      constants.PATCH_BRANCH, work_dir, sync=False,
+      remote_push_branch=git.RemoteRef('ignore', 'origin/%s' % branch))
+
+  return branch
+
+
+def _ManifestContainsAllPatches(manifest, patches):
+  """Returns true if the given manifest contains all the patches.
+
+  Args:
+    manifest: an instance of git.Manifest
+    patches: a collection of GerritPatch objects.
+  """
+  for patch in patches:
+    if not manifest.FindCheckouts(patch.project):
+      logging.error('Your manifest does not have the repository %s for '
+                    'change %s. Please re-run with --nomirror and '
+                    '--email set', patch.project, patch.gerrit_number)
+      return False
+
+    return True
+
+
+def main(argv):
+  parser = _GetParser()
+  options = parser.parse_args(argv)
+  changes = options.change
+  branch = options.branch
+
+  try:
+    patches = gerrit.GetGerritPatchInfo(changes)
+  except ValueError as e:
+    logging.error('Invalid patch: %s', e)
+    cros_build_lib.Die('Did you swap the branch/gerrit number?')
+
+  # Suppress all logging info output unless we're running debug.
+  if not options.debug:
+    logging.getLogger().setLevel(logging.NOTICE)
+
+  # Get a pointer to your repo checkout to look up the local project paths for
+  # both email addresses and for using your checkout as a git mirror.
+  manifest = None
+  if options.mirror:
+    try:
+      manifest = git.ManifestCheckout.Cached(constants.SOURCE_ROOT)
+    except OSError as e:
+      if e.errno == errno.ENOENT:
+        logging.error('Unable to locate ChromiumOS checkout: %s',
+                      constants.SOURCE_ROOT)
+        logging.error('Did you mean to use --nomirror?')
+        return 1
+      raise
+    if not _ManifestContainsAllPatches(manifest, patches):
+      return 1
+  else:
+    if not options.email:
+      chromium_email = '%s@chromium.org' % os.environ['USER']
+      logging.notice('--nomirror set without email, using %s', chromium_email)
+      options.email = chromium_email
+
+  index = 0
+  work_dir = None
+  root_work_dir = tempfile.mkdtemp(prefix='cros_merge_to_branch')
+  try:
+    for index, (change, patch) in enumerate(zip(changes, patches)):
+      # We only clone the project and set the committer the first time.
+      work_dir = os.path.join(root_work_dir, patch.project)
+      if not os.path.isdir(work_dir):
+        branch = _SetupWorkDirectoryForPatch(work_dir, patch, branch, manifest,
+                                             options.email)
+
+      # Now that we have the project checked out, let's apply our change and
+      # create a new change on Gerrit.
+      logging.notice('Uploading change %s to branch %s', change, branch)
+      urls = _UploadChangeToBranch(work_dir, patch, branch, options.draft,
+                                   options.dryrun)
+      logging.notice('Successfully uploaded %s to %s', change, branch)
+      for url in urls:
+        if url.endswith('\x1b[K'):
+          # Git will often times emit these escape sequences.
+          url = url[0:-3]
+        logging.notice('  URL: %s', url)
+
+  except (cros_build_lib.RunCommandError, cros_patch.ApplyPatchException,
+          git.AmbiguousBranchName, OSError) as e:
+    # Tell the user how far we got.
+    good_changes = changes[:index]
+    bad_changes = changes[index:]
+
+    logging.warning('############## SOME CHANGES FAILED TO UPLOAD ############')
+
+    if good_changes:
+      logging.notice(
+          'Successfully uploaded change(s) %s', ' '.join(good_changes))
+
+    # Printing out the error here so that we can see exactly what failed. This
+    # is especially useful to debug without using --debug.
+    logging.error('Upload failed with %s', str(e).strip())
+    if not options.wipe:
+      logging.error('Not wiping the directory. You can inspect the failed '
+                    'change at %s; After fixing the change (if trivial) you can'
+                    ' try to upload the change by running:\n'
+                    'git commit -a -c CHERRY_PICK_HEAD\n'
+                    'git push %s HEAD:refs/for/%s', work_dir, patch.project_url,
+                    branch)
+    else:
+      logging.error('--nowipe not set thus deleting the work directory. If you '
+                    'wish to debug this, re-run the script with change(s) '
+                    '%s and --nowipe by running:\n  %s %s %s --nowipe',
+                    ' '.join(bad_changes), sys.argv[0], ' '.join(bad_changes),
+                    branch)
+
+    # Suppress the stack trace if we're not debugging.
+    if options.debug:
+      raise
+    else:
+      return 1
+
+  finally:
+    if options.wipe:
+      shutil.rmtree(root_work_dir)
+
+  if options.dryrun:
+    logging.notice('Success! To actually upload changes, re-run without '
+                   '--dry-run.')
+  else:
+    logging.notice('Successfully uploaded all changes requested.')
+
+  return 0
diff --git a/scripts/cros_mirror b/scripts/cros_mirror
new file mode 100755
index 0000000..63afb24
--- /dev/null
+++ b/scripts/cros_mirror
@@ -0,0 +1,261 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Note:
+# We do *not* source any other files as this is meant to be used as a
+# standalone helper script with no other dependencies.  So please do
+# not try to refactor this to rely on anything else.
+
+ARGV0="cros_mirror"
+
+MANIFEST_URL_EXT='https://chromium.googlesource.com/chromiumos/manifest.git'
+MANIFEST_URL_INT='https://chrome-internal.googlesource.com/chromeos/manifest-internal.git'
+REPO_URL='https://chromium.googlesource.com/external/repo.git'
+# When we make commits to manifest git repos, we want the owner info to be the
+# same, and we want it to map to info that end users are unlikely to use.  That
+# way when the local repo rebase runs, it handles the update gracefully.
+export GIT_AUTHOR_NAME='Elmur Fudsicle' GIT_AUTHOR_EMAIL='<baba@booya.bizzle>'
+
+set -e
+
+#
+# Helper functions.
+#
+info() {
+  if [[ -z ${QUIET} ]]; then
+    echo "info: $*"
+  fi
+}
+
+error() {
+  echo "error: $*" >&2
+}
+
+die() {
+  error "$*"
+  exit 1
+}
+
+_pushd() { pushd "$@" >/dev/null; }
+_popd()  { popd "$@" >/dev/null; }
+
+#
+# Process user flags.
+#
+usage() {
+  cat <<EOF
+Create or maintain a mirror of the ChromiumOS repository.
+
+Usage: ${ARGV0} [options] --root <dir>
+
+Options:
+  --external          Pull the external ChromiumOS manifest
+  --internal          Pull the internal ChromeOS manifest (note: Googler-only)
+  -j, --jobs          Number of fetch jobs to run in parallel (default ${JOBS})
+  -q, --quiet         Be quiet!
+  -r, --root          The directory to hold all mirroring information
+  -u, --url           The URL that people will use to pull from this mirror
+  -m, --manifest      The repo manifest to use as a basis (default default.xml)
+  -h, --help          This!
+
+You must specify the --url option at least once so that all of the paths can
+be rewritten for external users.  After that, it should be automatically saved
+and restored at runtime by this script.
+
+Example:
+# Create a full mirror in \${PWD} and have users fetch from the local machine.
+\$ ${ARGV0} -r . -u git://$(hostname -f)
+# Update the mirror (put this into a cronjob).
+\$ ${ARGV0} -q -r ${ROOT:-${PWD}}
+
+See this page for more information:
+http://dev.chromium.org/chromium-os/how-tos-and-troubleshooting/creating-local-mirrors
+EOF
+  exit ${1:-0}
+}
+
+JOBS=$(getconf _NPROCESSORS_ONLN 2>/dev/null || echo 4)
+MANIFEST_URL=
+MANIFEST_NAME=default.xml
+QUIET=
+ROOT=
+URL=
+while [[ $# -gt 0 ]]; do
+  case $1 in
+  --external)       MANIFEST_URL=${MANIFEST_URL_EXT};;
+  --internal)       MANIFEST_URL=${MANIFEST_URL_INT};;
+  -j|--jobs)        JOBS=$2; shift;;
+  -q|--quiet)       QUIET="-q";;
+  -r|--root)        ROOT=$2; shift;;
+  -u|--url)         URL=$2; shift;;
+  -m|--manifest)    MANIFEST_NAME=$2; shift;;
+  -h|--help)        usage;;
+  -x)               set -x;;
+  *)                usage 1;;
+  esac
+  shift
+done
+
+if [[ -z ${ROOT} ]]; then
+  die "need to specify root dir with --root"
+fi
+if [[ ! -d ${ROOT} ]]; then
+  die "specified root dir does not exist; please run: mkdir '${ROOT}'"
+fi
+
+cd "${ROOT}"
+
+#
+# Make sure our tools are up-to-date.
+#
+if ! which repo >/dev/null; then
+  die "you must have repo installed and in your \$PATH; please see:" \
+      "http://dev.chromium.org/developers/how-tos/install-depot-tools"
+fi
+# This has been tested against 1.7.7.x, so require at least that.
+if ! gver=$(git --version); then
+  die "you must have git installed!"
+fi
+bver="1.7.7"
+ver_to_int() {
+  local v i=0
+  local ver=( $(echo $(IFS=.; echo $*)) )
+  for v in 0 1 2; do
+    : $(( i = (i << 8) | ${ver[v]} ))
+  done
+  echo ${i}
+}
+if [[ $(ver_to_int "${bver}") -gt $(ver_to_int "${gver##* }") ]]; then
+  die "your git version is too old (${gver}); we require at least git ${bver}"
+fi
+
+
+#
+# Initialize the whole tree mirror style.
+#
+repo_init() {
+  repo init -u "${MANIFEST_URL}" -m "${MANIFEST_NAME}" \
+    --repo-url="${REPO_URL}" ${QUIET} "$@"
+}
+if [[ ! -d .repo ]]; then
+  # Default to external manifest.
+  : ${MANIFEST_URL:=${MANIFEST_URL_EXT}}
+  repo_init --mirror
+else
+  # Allow people to change manifests on the fly (internal<->external).
+  CURRENT_URL=$(git --git-dir=.repo/manifests.git config remote.origin.url)
+  # Find the previously defined manifest name by following the symbolic link.
+  CURRENT_NAME=$(readlink .repo/manifest.xml)
+  # Strip off the "manifests/" prefix.
+  CURRENT_NAME=${CURRENT_NAME#manifests/}
+  # If no manifest was selected, default to the current one.
+  : ${MANIFEST_URL:=${CURRENT_URL}}
+  if [[ "${CURRENT_URL}" != "${MANIFEST_URL}" || \
+        "${CURRENT_NAME}" != "${MANIFEST_NAME}" ]]; then
+    info "re-initing due to URL or manifest change: \
+      ${CURRENT_URL} -> ${MANIFEST_URL}, ${CURRENT_NAME} -> ${MANIFEST_NAME}"
+    repo_init
+  fi
+fi
+if [[ ! -e git ]]; then
+  ln -s . git
+fi
+
+#
+# Pull down any updates.
+#
+info "syncing the whole tree"
+repo sync -j${JOBS} ${QUIET}
+
+#
+# Setup our local manifests repo which we'll hack on to point
+# to our local mirror.  We can't modify the repo in place as
+# we want to make sure updates are atomic -- don't want other
+# people to be able to accidentally pull in an unmodified repo.
+#
+update_manifests() {
+  local git_repo=$1
+  local checkout=${git_repo##*/}
+  checkout=${checkout%.git}
+
+  if [[ ${MANIFEST_URL} == "${MANIFEST_URL_INT}" ]]; then
+    # Try to head off leakage of Google data.
+    case ${URL} in
+    ssh://* | file://*) ;;
+    *)
+      die "You *must* use a secure channel like ssh:// or file://" \
+          "when mirroring internal Google repositories."
+      ;;
+    esac
+  fi
+
+  if [[ ! -d ${checkout} ]]; then
+    info "cloning ${checkout}"
+    git clone ${QUIET} ./${git_repo} ${checkout}
+    # Make the path relative so the whole tree can be moved w/out breaking.
+    git --git-dir=${checkout}/.git config remote.origin.url ../${git_repo}
+  fi
+
+  info "updating ${checkout}"
+  _pushd ${checkout}
+  git fetch ${QUIET}
+  if [[ -z ${URL} ]]; then
+    # Extract the local URI if they didn't specify one.
+    URL=$(eval $(grep -h fetch= *.xml); echo ${fetch})
+    if [[ ${URL} == "https://chromium.googlesource.com" ]]; then
+      # Guess they want the current system.
+      URL="git://$(hostname -f)"
+    fi
+  fi
+
+  # Setup the fetch= field of the manifest to point to our local mirror.
+  local b branches=(
+    $(git ls-remote | sed 's:.*/::' | egrep -v '\<(HEAD|^master)$')
+    "master"
+  )
+  info "rewriting ${checkout} branches to ${URL}"
+  for b in "${branches[@]}"; do
+    git checkout -q -f -B ${b} origin/${b} >/dev/null
+    find -name '*.xml' -type f \
+      -exec sed -i "s|fetch=\"[^\"]*\"|fetch=\"${URL}\"|" {} +
+    git commit -q -a -m 'set fetch references to local mirror'
+  done
+
+  # Push out our updates.
+  local pub="../${git_repo%.git}-mirror.git"
+  if [[ ! -e ${pub} ]]; then
+    git --git-dir="${pub}" init ${QUIET}
+  fi
+  git push ${QUIET} -f "${pub}" 'refs/heads/*:refs/heads/*'
+  _popd
+}
+update_manifests chromiumos/manifest.git
+if [[ ${MANIFEST_URL} == "${MANIFEST_URL_INT}" ]]; then
+  update_manifests chromeos/manifest-internal.git
+fi
+
+#
+# All done!
+#
+if [[ -z ${QUIET} ]]; then
+  if [[ ${MANIFEST_URL} == "${MANIFEST_URL_EXT}" ]]; then
+    cat <<EOF
+
+You can now serve this tree with:
+git daemon --base-path=${ROOT} --export-all
+
+Your users can pull from this mirror with:
+repo init -u ${URL}/chromiumos/manifest-mirror.git --repo-url=${URL}/external/repo.git
+EOF
+  else
+    cat <<EOF
+
+You must only serve these resources over encrypted channels like ssh://.
+
+Your users can pull from this mirror with:
+repo init -u ${URL}/chromeos/manifest-internal-mirror.git --repo-url=${URL}/external/repo.git
+EOF
+  fi
+fi
diff --git a/scripts/cros_portage_upgrade.py b/scripts/cros_portage_upgrade.py
new file mode 100644
index 0000000..35f8dbb
--- /dev/null
+++ b/scripts/cros_portage_upgrade.py
@@ -0,0 +1,2000 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Perform various tasks related to updating Portage packages."""
+
+from __future__ import print_function
+
+import filecmp
+import fnmatch
+import os
+import parallel_emerge
+import portage  # pylint: disable=import-error
+import re
+import shutil
+import tempfile
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import osutils
+from chromite.lib import operation
+from chromite.lib import upgrade_table as utable
+from chromite.scripts import merge_package_status as mps
+
+
+site_config = config_lib.GetConfig()
+
+
+oper = operation.Operation('cros_portage_upgrade')
+
+NOT_APPLICABLE = 'N/A'
+WORLD_TARGET = 'world'
+UPGRADED = 'Upgraded'
+
+# Arches we care about -- we actively develop/support/ship.
+STANDARD_BOARD_ARCHS = set(('amd64', 'arm', 'x86'))
+
+# Files we do not include in our upgrades by convention.
+BLACKLISTED_FILES = set(['Manifest', 'ChangeLog*'])
+
+
+# pylint: disable=attribute-defined-outside-init
+
+
+class PInfo(object):
+  """Class to accumulate package info during upgrade process.
+
+  This class is basically a formalized dictionary.
+  """
+
+  __slots__ = (
+      'category',            # Package category only
+      # TODO(mtennant): Rename 'cpv' to 'curr_cpv' or similar.
+      'cpv',                 # Current full cpv (revision included)
+      'cpv_cmp_upstream',    # 0 = current, >0 = outdated, <0 = futuristic
+      'latest_upstream_cpv', # Latest (non-stable ok) upstream cpv
+      'overlay',             # Overlay package currently in
+      'package',             # category/package_name
+      'package_name',        # The 'p' in 'cpv'
+      'package_ver',         # The 'pv' in 'cpv'
+      'slot',                # Current package slot
+      'stable_upstream_cpv', # Latest stable upstream cpv
+      'state',               # One of utable.UpgradeTable.STATE_*
+      'upgraded_cpv',        # If upgraded, it is to this cpv
+      'upgraded_unmasked',   # Boolean. If upgraded_cpv, indicates if unmasked.
+      'upstream_cpv',        # latest/stable upstream cpv according to request
+      'user_arg',            # Original user arg for this pkg, if applicable
+      'version_rev',         # Just revision (e.g. 'r1').  '' if no revision
+  )
+
+  # Any deriving classes must maintain this cumulative attribute list.
+  __attrlist__ = __slots__
+
+  def __init__(self, **kwargs):
+    """Initialize all attributes to None unless specified in |kwargs|."""
+    for attr in self.__attrlist__:
+      setattr(self, attr, kwargs.get(attr))
+
+  def __eq__(self, other):
+    """Equality support.  Used in unittests."""
+
+    if type(self) != type(other):
+      return False
+
+    no_attr = object()
+    for attr in self.__attrlist__:
+      if getattr(self, attr, no_attr) != getattr(other, attr, no_attr):
+        return False
+
+    return True
+
+  def __ne__(self, other):
+    """Inequality support for completeness."""
+    return not self == other
+
+
+class Upgrader(object):
+  """A class to perform various tasks related to updating Portage packages."""
+
+  PORTAGE_GIT_URL = '%s/chromiumos/overlays/portage.git' % (
+      site_config.params.EXTERNAL_GOB_URL)
+  ORIGIN_GENTOO = 'origin/gentoo'
+
+  UPSTREAM_OVERLAY_NAME = 'portage'
+  UPSTREAM_TMP_REPO = os.environ.get(constants.SHARED_CACHE_ENVVAR)
+  if UPSTREAM_TMP_REPO is not None:
+    UPSTREAM_TMP_REPO = '%s/cros_portage_upgrade' % UPSTREAM_TMP_REPO
+  else:
+    UPSTREAM_TMP_REPO = '/tmp'
+  UPSTREAM_TMP_REPO += '/' + UPSTREAM_OVERLAY_NAME
+
+  STABLE_OVERLAY_NAME = 'portage-stable'
+  CROS_OVERLAY_NAME = 'chromiumos-overlay'
+  CATEGORIES_FILE = 'profiles/categories'
+  HOST_BOARD = 'amd64-host'
+  OPT_SLOTS = ('amend', 'csv_file', 'force', 'no_upstream_cache', 'rdeps',
+               'upgrade', 'upgrade_deep', 'upstream', 'unstable_ok', 'verbose',
+               'local_only')
+
  # Portage tool names; these acquire a '-<board>' suffix via
  # _GetBoardCmd() when run against a real board (see BOARD_CMDS).
  EQUERY_CMD = 'equery'
  EMERGE_CMD = 'emerge'
  PORTAGEQ_CMD = 'portageq'
  # The commands that have board-specific wrapper variants.
  BOARD_CMDS = set([EQUERY_CMD, EMERGE_CMD, PORTAGEQ_CMD])

  __slots__ = (
      '_amend',        # Boolean to use --amend with upgrade commit
      '_args',         # Commandline arguments (all portage targets)
      '_curr_arch',    # Architecture for current board run
      '_curr_board',   # Board for current board run
      '_curr_table',   # Package status for current board run
      '_cros_overlay', # Path to chromiumos-overlay repo
      '_csv_file',     # File path for writing csv output
      '_deps_graph',   # Dependency graph from portage
      '_force',        # Force upgrade even when version already exists
      '_local_only',   # Skip network traffic
      '_missing_eclass_re',# Regexp for missing eclass in equery
      '_outdated_eclass_re',# Regexp for outdated eclass in equery
      '_emptydir',     # Path to temporary empty directory
      '_master_archs', # Set. Archs of tables merged into master_table
      '_master_cnt',   # Number of tables merged into master_table
      '_master_table', # Merged table from all board runs
      '_no_upstream_cache', # Boolean.  Delete upstream cache when done
      '_porttree',     # Reference to portage porttree object
      '_rdeps',        # Boolean, if True pass --root-deps=rdeps
      '_stable_repo',  # Path to portage-stable
      '_stable_repo_categories', # Categories from profiles/categories
      '_stable_repo_stashed', # True if portage-stable has a git stash
      '_stable_repo_status', # git status report at start of run
      '_targets',      # Processed list of portage targets
      '_upgrade',      # Boolean indicating upgrade requested
      '_upgrade_cnt',  # Num pkg upgrades in this run (all boards)
      '_upgrade_deep', # Boolean indicating upgrade_deep requested
      '_upstream',     # Path to upstream portage repo
      '_unstable_ok',  # Boolean to allow unstable upstream also
      '_verbose',      # Boolean
  )
+
+  def __init__(self, options):
+    self._args = options.packages
+    self._targets = mps.ProcessTargets(self._args)
+
+    self._master_table = None
+    self._master_cnt = 0
+    self._master_archs = set()
+    self._upgrade_cnt = 0
+
+    self._stable_repo = os.path.join(options.srcroot, 'third_party',
+                                     self.STABLE_OVERLAY_NAME)
+    # This can exist in two spots; the tree, or the cache.
+
+    self._cros_overlay = os.path.join(options.srcroot, 'third_party',
+                                      self.CROS_OVERLAY_NAME)
+
+    # Save options needed later.
+    for opt in self.OPT_SLOTS:
+      setattr(self, '_' + opt, getattr(options, opt, None))
+
+    self._porttree = None
+    self._emptydir = None
+    self._deps_graph = None
+
+    # Pre-compiled regexps for speed.
+    self._missing_eclass_re = re.compile(r'(\S+\.eclass) could not be '
+                                         r'found by inherit')
+    self._outdated_eclass_re = re.compile(r'Call stack:\n'
+                                          r'(?:.*?\s+\S+,\sline.*?\n)*'
+                                          r'.*?\s+(\S+\.eclass),\s+line')
+
+  def _IsInUpgradeMode(self):
+    """Return True if running in upgrade mode."""
+    return self._upgrade or self._upgrade_deep
+
+  def _SaveStatusOnStableRepo(self):
+    """Get the 'git status' for everything in |self._stable_repo|.
+
+    The results are saved in a dict at self._stable_repo_status where each key
+    is a file path rooted at |self._stable_repo|, and the value is the status
+    for that file as returned by 'git status -s'.  (e.g. 'A' for 'Added').
+    """
+    result = self._RunGit(self._stable_repo, ['status', '-s'],
+                          redirect_stdout=True)
+    if result.returncode == 0:
+      statuses = {}
+      for line in result.output.strip().split('\n'):
+        if not line:
+          continue
+
+        linesplit = line.split()
+        (status, path) = linesplit[0], linesplit[1]
+        if status == 'R':
+          # Handle a rename as separate 'D' and 'A' statuses.  Example line:
+          # R path/to/foo-1.ebuild -> path/to/foo-2.ebuild
+          statuses[path] = 'D'
+          statuses[linesplit[3]] = 'A'
+        else:
+          statuses[path] = status
+
+      self._stable_repo_status = statuses
+    else:
+      raise RuntimeError('Unable to run "git status -s" in %s:\n%s' %
+                         (self._stable_repo, result.output))
+
+    self._stable_repo_stashed = False
+
+  def _LoadStableRepoCategories(self):
+    """Load |self._stable_repo|/profiles/categories into set."""
+
+    self._stable_repo_categories = set()
+    cat_file_path = os.path.join(self._stable_repo, self.CATEGORIES_FILE)
+    with open(cat_file_path, 'r') as f:
+      for line in f:
+        line = line.strip()
+        if line:
+          self._stable_repo_categories.add(line)
+
+  def _WriteStableRepoCategories(self):
+    """Write |self._stable_repo_categories| to profiles/categories."""
+
+    categories = sorted(self._stable_repo_categories)
+    cat_file_path = os.path.join(self._stable_repo, self.CATEGORIES_FILE)
+    with open(cat_file_path, 'w') as f:
+      f.writelines('\n'.join(categories))
+
+    self._RunGit(self._stable_repo, ['add', self.CATEGORIES_FILE])
+
+  def _CheckStableRepoOnBranch(self):
+    """Raise exception if |self._stable_repo| is not on a branch now."""
+    result = self._RunGit(self._stable_repo, ['branch'], redirect_stdout=True)
+    if result.returncode == 0:
+      for line in result.output.split('\n'):
+        match = re.search(r'^\*\s+(.+)$', line)
+        if match:
+          # Found current branch, see if it is a real branch.
+          branch = match.group(1)
+          if branch != '(no branch)':
+            return
+          raise RuntimeError('To perform upgrade, %s must be on a branch.' %
+                             self._stable_repo)
+
+    raise RuntimeError('Unable to determine whether %s is on a branch.' %
+                       self._stable_repo)
+
+  def _PkgUpgradeRequested(self, pinfo):
+    """Return True if upgrade of pkg in |pinfo| was requested by user."""
+    if self._upgrade_deep:
+      return True
+
+    if self._upgrade:
+      return bool(pinfo.user_arg)
+
+    return False
+
+  @staticmethod
+  def _FindBoardArch(board):
+    """Return the architecture for a given board name."""
+    # Host is a special case
+    if board == Upgrader.HOST_BOARD:
+      return 'amd64'
+
+    # Leverage Portage 'portageq' tool to do this.
+    cmd = ['portageq-%s' % board, 'envvar', 'ARCH']
+    cmd_result = cros_build_lib.RunCommand(
+        cmd, print_cmd=False, redirect_stdout=True)
+    if cmd_result.returncode == 0:
+      return cmd_result.output.strip()
+    else:
+      return None
+
+  @staticmethod
+  def _GetPreOrderDepGraphPackage(deps_graph, package, pkglist, visited):
+    """Collect packages from |deps_graph| into |pkglist| in pre-order."""
+    if package in visited:
+      return
+    visited.add(package)
+    for parent in deps_graph[package]['provides']:
+      Upgrader._GetPreOrderDepGraphPackage(deps_graph, parent, pkglist, visited)
+    pkglist.append(package)
+
+  @staticmethod
+  def _GetPreOrderDepGraph(deps_graph):
+    """Return packages from |deps_graph| in pre-order."""
+    pkglist = []
+    visited = set()
+    for package in deps_graph:
+      Upgrader._GetPreOrderDepGraphPackage(deps_graph, package, pkglist,
+                                           visited)
+    return pkglist
+
+  @staticmethod
+  def _CmpCpv(cpv1, cpv2):
+    """Returns standard cmp result between |cpv1| and |cpv2|.
+
+    If one cpv is None then the other is greater.
+    """
+    if cpv1 is None and cpv2 is None:
+      return 0
+    if cpv1 is None:
+      return -1
+    if cpv2 is None:
+      return 1
+    return portage.versions.pkgcmp(portage.versions.pkgsplit(cpv1),
+                                   portage.versions.pkgsplit(cpv2))
+
+  @staticmethod
+  def _GetCatPkgFromCpv(cpv):
+    """Returns category/package_name from a full |cpv|.
+
+    If |cpv| is incomplete, may return only the package_name.
+
+    If package_name cannot be determined, return None.
+    """
+    if not cpv:
+      return None
+
+    # Result is None or (cat, pn, version, rev)
+    result = portage.versions.catpkgsplit(cpv)
+    if result:
+      # This appears to be a quirk of portage? Category string == 'null'.
+      if result[0] is None or result[0] == 'null':
+        return result[1]
+      return '%s/%s' % (result[0], result[1])
+
+    return None
+
+  @staticmethod
+  def _GetVerRevFromCpv(cpv):
+    """Returns just the version-revision string from a full |cpv|."""
+    if not cpv:
+      return None
+
+    # Result is None or (cat, pn, version, rev)
+    result = portage.versions.catpkgsplit(cpv)
+    if result:
+      (version, rev) = result[2:4]
+      if rev != 'r0':
+        return '%s-%s' % (version, rev)
+      else:
+        return version
+
+    return None
+
+  @staticmethod
+  def _GetEbuildPathFromCpv(cpv):
+    """Returns the relative path to ebuild for |cpv|."""
+    if not cpv:
+      return None
+
+    # Result is None or (cat, pn, version, rev)
+    result = portage.versions.catpkgsplit(cpv)
+    if result:
+      (cat, pn, _version, _rev) = result
+      ebuild = cpv.replace(cat + '/', '') + '.ebuild'
+      return os.path.join(cat, pn, ebuild)
+
+    return None
+
+  def _RunGit(self, cwd, command, redirect_stdout=False,
+              combine_stdout_stderr=False):
+    """Runs git |command| (a list of command tokens) in |cwd|.
+
+    This leverages the cros_build_lib.RunCommand function.  The
+    |redirect_stdout| and |combine_stdout_stderr| arguments are
+    passed to that function.
+
+    Returns a Result object as documented by cros_build_lib.RunCommand.
+    Most usefully, the result object has a .output attribute containing
+    the output from the command (if |redirect_stdout| was True).
+    """
+    # This disables the vi-like output viewer for commands like 'git show'.
+    extra_env = {'GIT_PAGER': 'cat'}
+    cmdline = ['git'] + command
+    return cros_build_lib.RunCommand(
+        cmdline, cwd=cwd, extra_env=extra_env, print_cmd=self._verbose,
+        redirect_stdout=redirect_stdout,
+        combine_stdout_stderr=combine_stdout_stderr)
+
+  def _SplitEBuildPath(self, ebuild_path):
+    """Split a full ebuild path into (overlay, cat, pn, pv)."""
+    (ebuild_path, _ebuild) = os.path.splitext(ebuild_path)
+    (ebuild_path, pv) = os.path.split(ebuild_path)
+    (ebuild_path, pn) = os.path.split(ebuild_path)
+    (ebuild_path, cat) = os.path.split(ebuild_path)
+    (ebuild_path, overlay) = os.path.split(ebuild_path)
+    return (overlay, cat, pn, pv)
+
+  def _GenPortageEnvvars(self, arch, unstable_ok, portdir=None,
+                         portage_configroot=None):
+    """Returns dictionary of envvars for running portage tools.
+
+    If |arch| is set, then ACCEPT_KEYWORDS will be included and set
+    according to |unstable_ok|.
+
+    PORTDIR is set to |portdir| value, if not None.
+    PORTAGE_CONFIGROOT is set to |portage_configroot| value, if not None.
+    """
+    envvars = {}
+    if arch:
+      if unstable_ok:
+        envvars['ACCEPT_KEYWORDS'] = arch + ' ~' + arch
+      else:
+        envvars['ACCEPT_KEYWORDS'] = arch
+
+    if portdir is not None:
+      envvars['PORTDIR'] = portdir
+      # Since we are clearing PORTDIR, we also have to clear PORTDIR_OVERLAY
+      # as most of those repos refer to the "normal" PORTDIR and will dump a
+      # lot of warnings if it can't be found.
+      envvars['PORTDIR_OVERLAY'] = portdir
+    if portage_configroot is not None:
+      envvars['PORTAGE_CONFIGROOT'] = portage_configroot
+
+    return envvars
+
+  def _FindUpstreamCPV(self, pkg, unstable_ok=False):
+    """Returns latest cpv in |_upstream| that matches |pkg|.
+
+    The |pkg| argument can specify as much or as little of the full CPV
+    syntax as desired, exactly as accepted by the Portage 'equery' command.
+    To find whether an exact version exists upstream specify the full
+    CPV.  To find the latest version specify just the category and package
+    name.
+
+    Results are filtered by architecture keyword using |self._curr_arch|.
+    By default, only ebuilds stable on that arch will be accepted.  To
+    accept unstable ebuilds, set |unstable_ok| to True.
+
+    Returns upstream cpv, if found.
+    """
+    envvars = self._GenPortageEnvvars(self._curr_arch, unstable_ok,
+                                      portdir=self._upstream,
+                                      portage_configroot=self._emptydir)
+
+    # Point equery to the upstream source to get latest version for keywords.
+    equery = ['equery', 'which', pkg]
+    cmd_result = cros_build_lib.RunCommand(
+        equery, extra_env=envvars, print_cmd=self._verbose,
+        error_code_ok=True, redirect_stdout=True, combine_stdout_stderr=True)
+
+    if cmd_result.returncode == 0:
+      ebuild_path = cmd_result.output.strip()
+      (_overlay, cat, _pn, pv) = self._SplitEBuildPath(ebuild_path)
+      return os.path.join(cat, pv)
+    else:
+      return None
+
+  def _GetBoardCmd(self, cmd):
+    """Return the board-specific version of |cmd|, if applicable."""
+    if cmd in self.BOARD_CMDS:
+      # Host "board" is a special case.
+      if self._curr_board != self.HOST_BOARD:
+        return '%s-%s' % (cmd, self._curr_board)
+
+    return cmd
+
  def _AreEmergeable(self, cpvlist):
    """Indicate whether cpvs in |cpvlist| can be emerged on current board.

    This essentially runs emerge with the --pretend option to verify
    that all dependencies for these package versions are satisfied.

    Returns:
      Tuple with three elements:
      [0] True if |cpvlist| can be emerged.
      [1] The emerge command that was run, as a single string.
      [2] Output from the emerge command.
    """
    envvars = self._GenPortageEnvvars(self._curr_arch, unstable_ok=False)
    emerge = self._GetBoardCmd(self.EMERGE_CMD)
    # '-p' (pretend) resolves dependencies without installing anything.
    cmd = [emerge, '-p'] + ['=' + cpv for cpv in cpvlist]
    result = cros_build_lib.RunCommand(
        cmd, error_code_ok=True, extra_env=envvars, print_cmd=False,
        redirect_stdout=True, combine_stdout_stderr=True)

    return (result.returncode == 0, ' '.join(cmd), result.output)
+
+  def _FindCurrentCPV(self, pkg):
+    """Returns current cpv on |_curr_board| that matches |pkg|, or None."""
+    envvars = self._GenPortageEnvvars(self._curr_arch, unstable_ok=False)
+
+    equery = self._GetBoardCmd(self.EQUERY_CMD)
+    cmd = [equery, '-C', 'which', pkg]
+    cmd_result = cros_build_lib.RunCommand(
+        cmd, error_code_ok=True, extra_env=envvars, print_cmd=False,
+        redirect_stdout=True, combine_stdout_stderr=True)
+
+    if cmd_result.returncode == 0:
+      ebuild_path = cmd_result.output.strip()
+      (_overlay, cat, _pn, pv) = self._SplitEBuildPath(ebuild_path)
+      return os.path.join(cat, pv)
+    else:
+      return None
+
+  def _SetUpgradedMaskBits(self, pinfo):
+    """Set pinfo.upgraded_unmasked."""
+    cpv = pinfo.upgraded_cpv
+    envvars = self._GenPortageEnvvars(self._curr_arch, unstable_ok=False)
+
+    equery = self._GetBoardCmd('equery')
+    cmd = [equery, '-qCN', 'list', '-F', '$mask|$cpv:$slot', '-op', cpv]
+    result = cros_build_lib.RunCommand(
+        cmd, error_code_ok=True, extra_env=envvars, print_cmd=False,
+        redirect_stdout=True, combine_stdout_stderr=True)
+
+    output = result.output
+    if result.returncode:
+      raise RuntimeError('equery failed on us:\n %s\noutput:\n %s'
+                         % (' '.join(cmd), output))
+
+    # Expect output like one of these cases (~ == unstable, M == masked):
+    #  ~|sys-fs/fuse-2.7.3:0
+    #   |sys-fs/fuse-2.7.3:0
+    # M |sys-fs/fuse-2.7.3:0
+    # M~|sys-fs/fuse-2.7.3:0
+    for line in output.split('\n'):
+      mask = line.split('|')[0]
+      if len(mask) == 2:
+        pinfo.upgraded_unmasked = 'M' != mask[0]
+        return
+
+    raise RuntimeError('Unable to determine whether %s is stable from equery:\n'
+                       ' %s\noutput:\n %s' % (cpv, ' '.join(cmd), output))
+
+  def _VerifyEbuildOverlay(self, cpv, expected_overlay, was_overwrite):
+    """Raises exception if ebuild for |cpv| is not from |expected_overlay|.
+
+    Essentially, this verifies that the upgraded ebuild in portage-stable
+    is indeed the one being picked up, rather than some other ebuild with
+    the same version in another overlay.  Unless |was_overwrite| (see below).
+
+    If |was_overwrite| then this upgrade was an overwrite of an existing
+    package version (via --force) and it is possible the previous package
+    is still in another overlay (e.g. chromiumos-overlay).  In this case,
+    the user should get rid of the other version first.
+    """
+    # Further explanation: this check should always pass, but might not
+    # if the copy/upgrade from upstream did not work.  This is just a
+    # sanity check.
+    envvars = self._GenPortageEnvvars(self._curr_arch, unstable_ok=False)
+
+    equery = self._GetBoardCmd(self.EQUERY_CMD)
+    cmd = [equery, '-C', 'which', '--include-masked', cpv]
+    result = cros_build_lib.RunCommand(
+        cmd, error_code_ok=True, extra_env=envvars, print_cmd=False,
+        redirect_stdout=True, combine_stdout_stderr=True)
+
+    ebuild_path = result.output.strip()
+    (overlay, _cat, _pn, _pv) = self._SplitEBuildPath(ebuild_path)
+    if overlay != expected_overlay:
+      if was_overwrite:
+        raise RuntimeError('Upgraded ebuild for %s is not visible because'
+                           ' existing ebuild in %s overlay takes precedence\n'
+                           'Please remove that ebuild before continuing.' %
+                           (cpv, overlay))
+      else:
+        raise RuntimeError('Upgraded ebuild for %s is not coming from %s:\n'
+                           ' %s\n'
+                           'Please show this error to the build team.' %
+                           (cpv, expected_overlay, ebuild_path))
+
  def _IdentifyNeededEclass(self, cpv):
    """Return eclass that must be upgraded for this |cpv|.

    Returns:
      The eclass file name (e.g. 'vim.eclass'), or None if no eclass
      issue was detected in the equery output.
    """
    # Try to detect two cases:
    # 1) The upgraded package uses an eclass not in local source, yet.
    # 2) The upgraded package needs one or more eclasses to also be upgraded.

    # Use the output of 'equery which'.
    # If a needed eclass cannot be found, then the output will have lines like:
    # * ERROR: app-admin/eselect-1.2.15 failed (depend phase):
    # *   bash-completion-r1.eclass could not be found by inherit()

    # If a needed eclass must be upgraded, the output might have the eclass
    # in the call stack (... used for long paths):
    # * Call stack:
    # *            ebuild.sh, line 2047:  Called source '.../vim-7.3.189.ebuild'
    # *   vim-7.3.189.ebuild, line    7:  Called inherit 'vim'
    # *            ebuild.sh, line 1410:  Called qa_source '.../vim.eclass'
    # *            ebuild.sh, line   43:  Called source '.../vim.eclass'
    # *           vim.eclass, line   40:  Called die
    # * The specific snippet of code:
    # *       die "Unknown EAPI ${EAPI}"

    envvars = self._GenPortageEnvvars(self._curr_arch, unstable_ok=True)

    equery = self._GetBoardCmd(self.EQUERY_CMD)
    cmd = [equery, '-C', '--no-pipe', 'which', cpv]
    result = cros_build_lib.RunCommand(
        cmd, error_code_ok=True, extra_env=envvars, print_cmd=False,
        redirect_stdout=True, combine_stdout_stderr=True)

    # Only a failing equery run can indicate an eclass problem.
    if result.returncode != 0:
      output = result.output.strip()

      # _missing_eclass_re works line by line.
      for line in output.split('\n'):
        match = self._missing_eclass_re.search(line)
        if match:
          eclass = match.group(1)
          oper.Notice('Determined that %s requires %s' % (cpv, eclass))
          return eclass

      # _outdated_eclass_re works on the entire output at once.
      match = self._outdated_eclass_re.search(output)
      if match:
        eclass = match.group(1)
        oper.Notice('Making educated guess that %s requires update of %s' %
                    (cpv, eclass))
        return eclass

    return None
+
+  def _GiveMaskedError(self, upgraded_cpv, emerge_output):
+    """Print error saying that |upgraded_cpv| is masked off.
+
+    See if hint found in |emerge_output| to improve error emssage.
+    """
+
+    # Expecting emerge_output to have lines like this:
+    #  The following mask changes are necessary to proceed:
+    # #required by ... =somecategory/somepackage (some reason)
+    # # /home/mtennant/trunk/src/third_party/chromiumos-overlay/profiles\
+    # /targets/chromeos/package.mask:
+    # >=upgraded_cp
+    package_mask = None
+
+    upgraded_cp = Upgrader._GetCatPkgFromCpv(upgraded_cpv)
+    regexp = re.compile(r'#\s*required by.+=\S+.*\n'
+                        r'#\s*(\S+/package\.mask):\s*\n'
+                        '[<>=]+%s' % upgraded_cp)
+
+    match = regexp.search(emerge_output)
+    if match:
+      package_mask = match.group(1)
+
+    if package_mask:
+      oper.Error('\nUpgraded package "%s" appears to be masked by a line in\n'
+                 '"%s"\n'
+                 'Full emerge output is above. Address mask issue, '
+                 'then run this again.' %
+                 (upgraded_cpv, package_mask))
+    else:
+      oper.Error('\nUpgraded package "%s" is masked somehow (See full '
+                 'emerge output above). Address that and then run this '
+                 'again.' % upgraded_cpv)
+
+  def _PkgUpgradeStaged(self, upstream_cpv):
+    """Return True if package upgrade is already staged."""
+    ebuild_path = Upgrader._GetEbuildPathFromCpv(upstream_cpv)
+    status = self._stable_repo_status.get(ebuild_path, None)
+    if status and 'A' == status:
+      return True
+
+    return False
+
+  def _AnyChangesStaged(self):
+    """Return True if any local changes are staged in stable repo."""
+    # Don't count files with '??' status - they aren't staged.
+    files = [f for (f, s) in self._stable_repo_status.items() if s != '??']
+    return bool(len(files))
+
+  def _StashChanges(self):
+    """Run 'git stash save' on stable repo."""
+    # Only one level of stashing expected/supported.
+    self._RunGit(self._stable_repo, ['stash', 'save'],
+                 redirect_stdout=True, combine_stdout_stderr=True)
+    self._stable_repo_stashed = True
+
+  def _UnstashAnyChanges(self):
+    """Unstash any changes in stable repo."""
+    # Only one level of stashing expected/supported.
+    if self._stable_repo_stashed:
+      self._RunGit(self._stable_repo, ['stash', 'pop', '--index'],
+                   redirect_stdout=True, combine_stdout_stderr=True)
+      self._stable_repo_stashed = False
+
+  def _DropAnyStashedChanges(self):
+    """Drop any stashed changes in stable repo."""
+    # Only one level of stashing expected/supported.
+    if self._stable_repo_stashed:
+      self._RunGit(self._stable_repo, ['stash', 'drop'],
+                   redirect_stdout=True, combine_stdout_stderr=True)
+      self._stable_repo_stashed = False
+
  def _CopyUpstreamPackage(self, upstream_cpv):
    """Copy the upstream package at version |upstream_cpv| into stable repo.

    Replaces any existing copy of the package in portage-stable (keeping
    the Manifest), copies the requested ebuild plus support files, then
    regenerates the Manifest and pulls in any eclasses the new ebuild
    needs.

    Returns:
      The upstream_cpv if the package was upgraded, None otherwise.

    Raises:
      RuntimeError: if the upstream ebuild cannot be found.
    """
    oper.Notice('Copying %s from upstream.' % upstream_cpv)

    (cat, pkgname, _version, _rev) = portage.versions.catpkgsplit(upstream_cpv)
    ebuild = upstream_cpv.replace(cat + '/', '') + '.ebuild'
    catpkgsubdir = os.path.join(cat, pkgname)
    pkgdir = os.path.join(self._stable_repo, catpkgsubdir)
    upstream_pkgdir = os.path.join(self._upstream, cat, pkgname)

    # Fail early if upstream_cpv ebuild is not found
    upstream_ebuild_path = os.path.join(upstream_pkgdir, ebuild)
    if not os.path.exists(upstream_ebuild_path):
      # Note: this should only be possible during unit tests.
      raise RuntimeError('Cannot find upstream ebuild at "%s"' %
                         upstream_ebuild_path)

    # If pkgdir already exists, remove everything in it except Manifest.
    # Note that git will remove a parent directory when it removes
    # the last item in the directory.
    if os.path.exists(pkgdir):
      items = os.listdir(pkgdir)
      items = [os.path.join(catpkgsubdir, i) for i in items if i != 'Manifest']
      if items:
        args = ['rm', '-rf', '--ignore-unmatch'] + items
        self._RunGit(self._stable_repo, args, redirect_stdout=True)
        # Now delete any files that git doesn't know about.
        for item in items:
          osutils.SafeUnlink(os.path.join(self._stable_repo, item))

    osutils.SafeMakedirs(pkgdir)

    # Grab all non-blacklisted, non-ebuilds from upstream plus the specific
    # ebuild requested.
    items = os.listdir(upstream_pkgdir)
    for item in items:
      blacklisted = [b for b in BLACKLISTED_FILES
                     if fnmatch.fnmatch(os.path.basename(item), b)]
      if not blacklisted:
        if not item.endswith('.ebuild') or item == ebuild:
          src = os.path.join(upstream_pkgdir, item)
          dst = os.path.join(pkgdir, item)
          if os.path.isdir(src):
            shutil.copytree(src, dst, symlinks=True)
          else:
            shutil.copy2(src, dst)

    # Create a new Manifest file for this package.
    self._CreateManifest(upstream_pkgdir, pkgdir, ebuild)

    # Now copy any eclasses that this package requires, repeating until
    # no further missing/outdated eclass is reported.
    eclass = self._IdentifyNeededEclass(upstream_cpv)
    while eclass and self._CopyUpstreamEclass(eclass):
      eclass = self._IdentifyNeededEclass(upstream_cpv)

    return upstream_cpv
+
  def _StabilizeEbuild(self, ebuild_path):
    """Edit keywords to stabilize ebuild at |ebuild_path| for all arches.

    Every KEYWORDS value in the ebuild is replaced with '*'.
    """
    oper.Notice('Editing %r to mark it stable for everyone' % ebuild_path)

    # Regexp to search for KEYWORDS="...".
    keywords_regexp = re.compile(r'^(\s*KEYWORDS=")[^"]*(")', re.MULTILINE)

    # Read in entire ebuild.
    content = osutils.ReadFile(ebuild_path)

    # Replace all KEYWORDS with "*".
    content = re.sub(keywords_regexp, r'\1*\2', content)

    # Write ebuild file back out.
    osutils.WriteFile(ebuild_path, content)
+
+  def _CreateManifest(self, upstream_pkgdir, pkgdir, ebuild):
+    """Create a trusted Manifest from available Manifests.
+
+    Combine the current Manifest in |pkgdir| (if it exists) with
+    the Manifest from |upstream_pkgdir| to create a new trusted
+    Manifest.  Supplement with 'ebuild manifest' command.
+
+    It is assumed that a Manifest exists in |upstream_pkgdir|, but
+    there may not be one in |pkgdir|.  The new |ebuild| in pkgdir
+    should be used for 'ebuild manifest' command.
+
+    The algorithm is this:
+    1) Remove all lines in upstream Manifest that duplicate
+    lines in current Manifest.
+    2) Concatenate the result of 1) onto the current Manifest.
+    3) Run 'ebuild manifest' to add to results.
+    """
+    upstream_manifest = os.path.join(upstream_pkgdir, 'Manifest')
+    current_manifest = os.path.join(pkgdir, 'Manifest')
+
+    if os.path.exists(current_manifest):
+      # Determine which files have DIST entries in current_manifest.
+      dists = set()
+      with open(current_manifest, 'r') as f:
+        for line in f:
+          tokens = line.split()
+          if len(tokens) > 1 and tokens[0] == 'DIST':
+            dists.add(tokens[1])
+
+      # Find DIST lines in upstream manifest not overlapping with current.
+      new_lines = []
+      with open(upstream_manifest, 'r') as f:
+        for line in f:
+          tokens = line.split()
+          if len(tokens) > 1 and tokens[0] == 'DIST' and tokens[1] not in dists:
+            new_lines.append(line)
+
+      # Write all new_lines to current_manifest.
+      if new_lines:
+        with open(current_manifest, 'a') as f:
+          f.writelines(new_lines)
+    else:
+      # Use upstream_manifest as a starting point.
+      shutil.copyfile(upstream_manifest, current_manifest)
+
+    manifest_cmd = ['ebuild', os.path.join(pkgdir, ebuild), 'manifest']
+    manifest_result = cros_build_lib.RunCommand(
+        manifest_cmd, error_code_ok=True, print_cmd=False,
+        redirect_stdout=True, combine_stdout_stderr=True)
+
+    if manifest_result.returncode != 0:
+      raise RuntimeError('Failed "ebuild manifest" for upgraded package.\n'
+                         'Output of %r:\n%s' %
+                         (' '.join(manifest_cmd), manifest_result.output))
+
+  def _CopyUpstreamEclass(self, eclass):
+    """Upgrades eclass in |eclass| to upstream copy.
+
+    Does not do the copy if the eclass already exists locally and
+    is identical to the upstream version.
+
+    Returns:
+      True if the copy was done.
+    """
+    eclass_subpath = os.path.join('eclass', eclass)
+    upstream_path = os.path.join(self._upstream, eclass_subpath)
+    local_path = os.path.join(self._stable_repo, eclass_subpath)
+
+    if os.path.exists(upstream_path):
+      if os.path.exists(local_path) and filecmp.cmp(upstream_path, local_path):
+        return False
+      else:
+        oper.Notice('Copying %s from upstream.' % eclass)
+        osutils.SafeMakedirs(os.path.dirname(local_path))
+        shutil.copy2(upstream_path, local_path)
+        self._RunGit(self._stable_repo, ['add', eclass_subpath])
+        return True
+
+    raise RuntimeError('Cannot find upstream "%s".  Looked at "%s"' %
+                       (eclass, upstream_path))
+
+  def _GetPackageUpgradeState(self, pinfo):
+    """Return state value for package in |pinfo|."""
+    # See whether this specific cpv exists upstream.
+    cpv = pinfo.cpv
+    cpv_exists_upstream = bool(cpv and
+                               self._FindUpstreamCPV(cpv, unstable_ok=True))
+
+    # The value in pinfo.cpv_cmp_upstream represents a comparison of cpv
+    # version and the upstream version, where:
+    # 0 = current, >0 = outdated, <0 = futuristic!
+
+    # Convention is that anything not in portage overlay has been altered.
+    overlay = pinfo.overlay
+    locally_patched = (overlay != NOT_APPLICABLE and
+                       overlay != self.UPSTREAM_OVERLAY_NAME and
+                       overlay != self.STABLE_OVERLAY_NAME)
+    locally_duplicated = locally_patched and cpv_exists_upstream
+
+    # Gather status details for this package
+    if pinfo.cpv_cmp_upstream is None:
+      # No upstream cpv to compare to (although this might include a
+      # a restriction to only stable upstream versions).  This is concerning
+      # if the package is coming from 'portage' or 'portage-stable' overlays.
+      if locally_patched and pinfo.latest_upstream_cpv is None:
+        state = utable.UpgradeTable.STATE_LOCAL_ONLY
+      elif not cpv:
+        state = utable.UpgradeTable.STATE_UPSTREAM_ONLY
+      else:
+        state = utable.UpgradeTable.STATE_UNKNOWN
+    elif pinfo.cpv_cmp_upstream > 0:
+      if locally_duplicated:
+        state = utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_DUPLICATED
+      elif locally_patched:
+        state = utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_PATCHED
+      else:
+        state = utable.UpgradeTable.STATE_NEEDS_UPGRADE
+    elif locally_duplicated:
+      state = utable.UpgradeTable.STATE_DUPLICATED
+    elif locally_patched:
+      state = utable.UpgradeTable.STATE_PATCHED
+    else:
+      state = utable.UpgradeTable.STATE_CURRENT
+
+    return state
+
+  # TODO(mtennant): Generate output from finished table instead.
+  def _PrintPackageLine(self, pinfo):
+    """Print a brief one-line report of package status."""
+    upstream_cpv = pinfo.upstream_cpv
+    if pinfo.upgraded_cpv:
+      action_stat = ' (UPGRADED)'
+    else:
+      action_stat = ''
+
+    up_stat = {
+        utable.UpgradeTable.STATE_UNKNOWN: ' no package found upstream!',
+        utable.UpgradeTable.STATE_LOCAL_ONLY: ' (exists locally only)',
+        utable.UpgradeTable.STATE_NEEDS_UPGRADE: ' -> %s' % upstream_cpv,
+        utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_PATCHED:
+            ' <-> %s' % upstream_cpv,
+        utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_DUPLICATED:
+            ' (locally duplicated) <-> %s' % upstream_cpv,
+        utable.UpgradeTable.STATE_PATCHED: ' <- %s' % upstream_cpv,
+        utable.UpgradeTable.STATE_DUPLICATED: ' (locally duplicated)',
+        utable.UpgradeTable.STATE_CURRENT: ' (current)',
+    }[pinfo.state]
+
+    oper.Info('[%s] %s%s%s' % (pinfo.overlay, pinfo.cpv,
+                               up_stat, action_stat))
+
+  def _AppendPackageRow(self, pinfo):
+    """Add a row to status table for the package in |pinfo|."""
+    cpv = pinfo.cpv
+    upgraded_cpv = pinfo.upgraded_cpv
+
+    upgraded_ver = ''
+    if upgraded_cpv:
+      upgraded_ver = Upgrader._GetVerRevFromCpv(upgraded_cpv)
+
+    # Assemble 'depends on' and 'required by' strings.
+    depsstr = NOT_APPLICABLE
+    usedstr = NOT_APPLICABLE
+    if cpv and self._deps_graph:
+      deps_entry = self._deps_graph[cpv]
+      depslist = sorted(deps_entry['needs'].keys())  # dependencies
+      depsstr = ' '.join(depslist)
+      usedset = deps_entry['provides']  # used by
+      usedlist = sorted([p for p in usedset])
+      usedstr = ' '.join(usedlist)
+
+    stable_up_ver = Upgrader._GetVerRevFromCpv(pinfo.stable_upstream_cpv)
+    if not stable_up_ver:
+      stable_up_ver = NOT_APPLICABLE
+    latest_up_ver = Upgrader._GetVerRevFromCpv(pinfo.latest_upstream_cpv)
+    if not latest_up_ver:
+      latest_up_ver = NOT_APPLICABLE
+
+    row = {
+        self._curr_table.COL_PACKAGE: pinfo.package,
+        self._curr_table.COL_SLOT: pinfo.slot,
+        self._curr_table.COL_OVERLAY: pinfo.overlay,
+        self._curr_table.COL_CURRENT_VER: pinfo.version_rev,
+        self._curr_table.COL_STABLE_UPSTREAM_VER: stable_up_ver,
+        self._curr_table.COL_LATEST_UPSTREAM_VER: latest_up_ver,
+        self._curr_table.COL_STATE: pinfo.state,
+        self._curr_table.COL_DEPENDS_ON: depsstr,
+        self._curr_table.COL_USED_BY: usedstr,
+        self._curr_table.COL_TARGET: ' '.join(self._targets),
+    }
+
+    # Only include if upgrade was involved.  Table may not have this column
+    # if upgrade was not requested.
+    if upgraded_ver:
+      row[self._curr_table.COL_UPGRADED] = upgraded_ver
+
+    self._curr_table.AppendRow(row)
+
  def _UpgradePackage(self, pinfo):
    """Gathers upgrade status for pkg, performs upgrade if requested.

    The upgrade is performed only if the package is outdated and --upgrade
    is specified.

    The |pinfo| must have the following entries:
    package, category, package_name

    Regardless, the following attributes in |pinfo| are filled in:
    stable_upstream_cpv
    latest_upstream_cpv
    upstream_cpv (one of the above, depending on --unstable-ok option)
    upgraded_cpv (set only if an upgrade was actually staged)

    Returns:
      True if an upgrade was staged for this package, i.e. bool of
      |pinfo|.upgraded_cpv.
    """
    cpv = pinfo.cpv
    catpkg = pinfo.package
    pinfo.stable_upstream_cpv = self._FindUpstreamCPV(catpkg)
    pinfo.latest_upstream_cpv = self._FindUpstreamCPV(catpkg,
                                                      unstable_ok=True)

    # The upstream version can be either latest stable or latest overall,
    # or specified explicitly by the user at the command line.  In the latter
    # case, 'upstream_cpv' will already be set.
    if not pinfo.upstream_cpv:
      if not self._unstable_ok:
        pinfo.upstream_cpv = pinfo.stable_upstream_cpv
      else:
        pinfo.upstream_cpv = pinfo.latest_upstream_cpv

    # Perform the actual upgrade, if requested.
    pinfo.cpv_cmp_upstream = None
    pinfo.upgraded_cpv = None
    if pinfo.upstream_cpv:
      # cpv_cmp_upstream values: 0 = current, >0 = outdated, <0 = futuristic!
      pinfo.cpv_cmp_upstream = Upgrader._CmpCpv(pinfo.upstream_cpv, cpv)

      # Determine whether upgrade of this package is requested.
      if self._PkgUpgradeRequested(pinfo):
        if self._PkgUpgradeStaged(pinfo.upstream_cpv):
          # A previous (uncommitted) run already staged this version; reuse it.
          oper.Notice('Determined that %s is already staged.' %
                      pinfo.upstream_cpv)
          pinfo.upgraded_cpv = pinfo.upstream_cpv
        elif pinfo.cpv_cmp_upstream > 0:
          pinfo.upgraded_cpv = self._CopyUpstreamPackage(pinfo.upstream_cpv)
        elif pinfo.cpv_cmp_upstream == 0:
          # Local version already matches upstream; only re-copy on --force.
          if self._force:
            oper.Notice('Forcing upgrade of existing %s.' %
                        pinfo.upstream_cpv)
            upgraded_cpv = self._CopyUpstreamPackage(pinfo.upstream_cpv)
            pinfo.upgraded_cpv = upgraded_cpv
          else:
            oper.Warning('Not upgrading %s; it already exists in source.\n'
                         'To force upgrade of this version specify --force.' %
                         pinfo.upstream_cpv)
    elif self._PkgUpgradeRequested(pinfo):
      raise RuntimeError('Unable to find upstream package for upgrading %s.' %
                         catpkg)

    if pinfo.upgraded_cpv:
      # Deal with keywords now.  We always run this logic as we will sometimes
      # be stabilizing keywords other than just our own (the unsupported
      # arches).
      self._SetUpgradedMaskBits(pinfo)
      ebuild_path = Upgrader._GetEbuildPathFromCpv(pinfo.upgraded_cpv)
      self._StabilizeEbuild(os.path.join(self._stable_repo, ebuild_path))

      # Add all new package files to git.
      self._RunGit(self._stable_repo, ['add', pinfo.package])

      # Update profiles/categories.
      self._UpdateCategories(pinfo)

      # Regenerate the cache.  In theory, this might glob too much, but
      # in practice, this should be fine for now ...
      cache_files = 'metadata/md5-cache/%s-[0-9]*' % pinfo.package
      self._RunGit(self._stable_repo, ['rm', '--ignore-unmatch', '-q', '-f',
                                       cache_files])
      cmd = ['egencache', '--update', '--repo=portage-stable', pinfo.package]
      egen_result = cros_build_lib.RunCommand(cmd, print_cmd=False,
                                              redirect_stdout=True,
                                              combine_stdout_stderr=True)
      if egen_result.returncode != 0:
        raise RuntimeError('Failed to regenerate md5-cache for %r.\n'
                           'Output of %r:\n%s' %
                           (pinfo.package, ' '.join(cmd), egen_result.output))

      self._RunGit(self._stable_repo, ['add', cache_files])

    return bool(pinfo.upgraded_cpv)
+
+  def _UpdateCategories(self, pinfo):
+    """Update profiles/categories to include category in |pinfo|, if needed."""
+
+    if pinfo.category not in self._stable_repo_categories:
+      self._stable_repo_categories.add(pinfo.category)
+      self._WriteStableRepoCategories()
+
+  def _VerifyPackageUpgrade(self, pinfo):
+    """Verify that the upgraded package in |pinfo| passes checks."""
+    self._VerifyEbuildOverlay(pinfo.upgraded_cpv, self.STABLE_OVERLAY_NAME,
+                              pinfo.cpv_cmp_upstream == 0)
+
+  def _PackageReport(self, pinfo):
+    """Report on whatever was done with package in |pinfo|."""
+
+    pinfo.state = self._GetPackageUpgradeState(pinfo)
+
+    if self._verbose:
+      # Print a quick summary of package status.
+      self._PrintPackageLine(pinfo)
+
+    # Add a row to status table for this package
+    self._AppendPackageRow(pinfo)
+
+  def _ExtractUpgradedPkgs(self, upgrade_lines):
+    """Extracts list of packages from standard commit |upgrade_lines|."""
+    # Expecting message lines like this (return just package names):
+    # Upgraded sys-libs/ncurses to version 5.7-r7 on amd64, arm, x86
+    # Upgraded sys-apps/less to version 441 on amd64, arm
+    # Upgraded sys-apps/less to version 442 on x86
+    pkgs = set()
+    regexp = re.compile(r'^%s\s+\S+/(\S+)\s' % UPGRADED)
+    for line in upgrade_lines:
+      match = regexp.search(line)
+      if match:
+        pkgs.add(match.group(1))
+
+    return sorted(pkgs)
+
+  def _CreateCommitMessage(self, upgrade_lines, remaining_lines=None):
+    """Create appropriate git commit message for upgrades in |upgrade_lines|."""
+    message = None
+    upgrade_pkgs = self._ExtractUpgradedPkgs(upgrade_lines)
+    upgrade_count = len(upgrade_pkgs)
+    upgrade_str = '\n'.join(upgrade_lines)
+    if upgrade_count < 6:
+      message = ('%s: upgraded package%s to upstream' %
+                 (', '.join(upgrade_pkgs), '' if upgrade_count == 1 else 's'))
+    else:
+      message = 'Upgraded the following %d packages' % upgrade_count
+    message += '\n\n' + upgrade_str + '\n'
+
+    if remaining_lines:
+      # Keep previous remaining lines verbatim.
+      message += '\n%s\n' % '\n'.join(remaining_lines)
+    else:
+      # The space before <fill-in> (at least for TEST=) fails pre-submit check,
+      # which is the intention here.
+      message += '\nBUG= <fill-in>'
+      message += '\nTEST= <fill-in>'
+
+    return message
+
  def _AmendCommitMessage(self, upgrade_lines):
    """Create git commit message combining |upgrade_lines| with last commit.

    Reads the body of HEAD, folds any previous 'Upgraded ...' lines into
    |upgrade_lines| (mutating the caller's list), and preserves everything
    after the first blank line (e.g. BUG=/TEST= fields) verbatim.
    """
    # First get the body of the last commit message.
    git_cmd = ['show', '-s', '--format=%b']
    result = self._RunGit(self._stable_repo, git_cmd, redirect_stdout=True)
    body = result.output

    remaining_lines = []
    # Extract the upgrade_lines of last commit.  Everything after the
    # empty line is preserved verbatim.
    # Expecting message body like this:
    # Upgraded sys-libs/ncurses to version 5.7-r7 on amd64, arm, x86
    # Upgraded sys-apps/less to version 441 on amd64, arm, x86
    #
    # BUG=chromium-os:20923
    # TEST=trybot run of chromiumos-sdk
    before_break = True
    for line in body.split('\n'):
      if not before_break:
        remaining_lines.append(line)
      elif line:
        if re.search(r'^%s\s+' % UPGRADED, line):
          upgrade_lines.append(line)
        else:
          # If the lines in the message body are not in the expected
          # format simply push them to the end of the new commit
          # message body, but left intact.
          oper.Warning('It looks like the existing commit message '
                       'that you are amending was not generated by\n'
                       'this utility.  Appending previous commit '
                       'message to newly generated message.')
          before_break = False
          remaining_lines.append(line)
      else:
        before_break = False

    return self._CreateCommitMessage(upgrade_lines, remaining_lines)
+
  def _GiveEmergeResults(self, pinfolist):
    """Summarize emerge checks, raise RuntimeError if there is a problem.

    Runs an emergeability check over all upgraded cpvs in |pinfolist|.
    Masked upgraded packages are expected to make emerge fail; a passing
    emerge in that situation is treated as an internal error.

    Raises:
      RuntimeError: if any upgraded package is masked, or if the set of
        upgraded packages cannot be emerged.
    """

    upgraded_pinfos = [pinfo for pinfo in pinfolist if pinfo.upgraded_cpv]
    upgraded_cpvs = [pinfo.upgraded_cpv for pinfo in upgraded_pinfos]
    masked_cpvs = set([pinfo.upgraded_cpv for pinfo in upgraded_pinfos
                       if not pinfo.upgraded_unmasked])

    (ok, cmd, output) = self._AreEmergeable(upgraded_cpvs)

    if masked_cpvs:
      # If any of the upgraded_cpvs are masked, then emerge should have
      # failed.  Give a helpful message.  If it didn't fail then panic.
      if ok:
        raise RuntimeError('Emerge passed for masked package(s)!  Something '
                           'fishy here. Emerge output follows:\n%s\n'
                           'Show this to the build team.' % output)

      else:
        oper.Error('\nEmerge output for "%s" on %s follows:' %
                   (cmd, self._curr_arch))
        print(output)
        for masked_cpv in masked_cpvs:
          self._GiveMaskedError(masked_cpv, output)
        raise RuntimeError('\nOne or more upgraded packages are masked '
                           '(see above).')

    if ok:
      oper.Notice('Confirmed that all upgraded packages can be emerged '
                  'on %s after upgrade.' % self._curr_board)
    else:
      oper.Error('Packages cannot be emerged after upgrade.  The output '
                 'of "%s" follows:' % cmd)
      print(output)
      raise RuntimeError('Failed to complete upgrades on %s (see above). '
                         'Address the emerge errors before continuing.' %
                         self._curr_board)
+
  def _UpgradePackages(self, pinfolist):
    """Given a list of cpv pinfos, adds the upstream cpv to the pinfos.

    Also performs requested upgrades, verifies the results, reports on
    each package, and runs emerge checks when anything was upgraded.

    Raises:
      RuntimeError: if any upgrade, verification, or emerge step fails.
    """
    self._curr_table.Clear()

    try:
      upgrades_this_run = False
      for pinfo in pinfolist:
        if self._UpgradePackage(pinfo):
          self._upgrade_cnt += 1
          upgrades_this_run = True

      # The verification of upgrades needs to happen after upgrades are done.
      # The reason is that it cannot be guaranteed that pinfolist is ordered
      # such that dependencies are satisfied after each individual upgrade,
      # because one or more of the packages may only exist upstream.
      for pinfo in pinfolist:
        if pinfo.upgraded_cpv:
          self._VerifyPackageUpgrade(pinfo)

        self._PackageReport(pinfo)

      if upgrades_this_run:
        self._GiveEmergeResults(pinfolist)

      if self._IsInUpgradeMode():
        # If there were any ebuilds staged before running this script, then
        # make sure they were targeted in pinfolist.  If not, abort.
        self._CheckStagedUpgrades(pinfolist)
    except RuntimeError as ex:
      oper.Error(str(ex))

      # Deliberately leave the changes staged so that the user can attempt
      # to address the issue (perhaps an edit to package.mask is required,
      # or another package must also be upgraded).
      raise RuntimeError('\nTo reset all changes in %s now:\n'
                         ' cd %s; git reset --hard; cd -' %
                         (self._stable_repo, self._stable_repo))
+
+  def _CheckStagedUpgrades(self, pinfolist):
+    """Raise RuntimeError if staged upgrades are not also in |pinfolist|."""
+    # This deals with the situation where a previous upgrade run staged one or
+    # more package upgrades, but did not commit them because it found an error
+    # of some kind.  This is ok, as long as subsequent runs continue to request
+    # an upgrade of that package again (presumably with the problem fixed).
+    # However, if a subsequent run does not mention that package then it should
+    # abort.  The user must reset those staged changes first.
+
+    if self._stable_repo_status:
+      err_msgs = []
+
+      # Go over files with pre-existing git statuses.
+      filelist = self._stable_repo_status.keys()
+      ebuilds = [e for e in filelist if e.endswith('.ebuild')]
+
+      for ebuild in ebuilds:
+        status = self._stable_repo_status[ebuild]
+        (_overlay, cat, pn, _pv) = self._SplitEBuildPath(ebuild)
+        package = '%s/%s' % (cat, pn)
+
+        # As long as this package is involved in an upgrade this is fine.
+        matching_pinfos = [pi for pi in pinfolist if pi.package == package]
+        if not matching_pinfos:
+          err_msgs.append('Staged %s (status=%s) is not an upgrade target.' %
+                          (ebuild, status))
+
+      if err_msgs:
+        raise RuntimeError('%s\n'
+                           'Add to upgrade targets or reset staged changes.' %
+                           '\n'.join(err_msgs))
+
+  def _GenParallelEmergeArgv(self, args):
+    """Creates an argv for parallel_emerge using current options and |args|."""
+    argv = ['--emptytree', '--pretend']
+    if self._curr_board and self._curr_board != self.HOST_BOARD:
+      argv.append('--board=%s' % self._curr_board)
+    if not self._verbose:
+      argv.append('--quiet')
+    if self._rdeps:
+      argv.append('--root-deps=rdeps')
+    argv.extend(args)
+
+    return argv
+
+  def _SetPortTree(self, settings, trees):
+    """Set self._porttree from portage |settings| and |trees|."""
+    root = settings['ROOT']
+    self._porttree = trees[root]['porttree']
+
+  def _GetPortageDBAPI(self):
+    """Retrieve the Portage dbapi object, if available."""
+    try:
+      return self._porttree.dbapi
+    except AttributeError:
+      return None
+
+  def _CreatePInfoFromCPV(self, cpv, cpv_key=None):
+    """Return a basic pinfo object created from |cpv|."""
+    pinfo = PInfo()
+    self._FillPInfoFromCPV(pinfo, cpv, cpv_key)
+    return pinfo
+
+  def _FillPInfoFromCPV(self, pinfo, cpv, cpv_key=None):
+    """Flesh out |pinfo| from |cpv|."""
+    pkg = Upgrader._GetCatPkgFromCpv(cpv)
+    (cat, pn) = pkg.split('/')
+
+    pinfo.cpv = None
+    pinfo.upstream_cpv = None
+
+    pinfo.package = pkg
+    pinfo.package_name = pn
+    pinfo.category = cat
+
+    if cpv_key:
+      setattr(pinfo, cpv_key, cpv)
+
  def _GetCurrentVersions(self, target_pinfolist):
    """Returns a list of pkg pinfos of the current package dependencies.

    The dependencies are taken from giving the 'package' values in each
    pinfo of |target_pinfolist| to (parallel_)emerge.

    The returned list is ordered such that the dependencies of any mentioned
    package occur earlier in the list.
    """
    emerge_args = []
    for pinfo in target_pinfolist:
      local_cpv = pinfo.cpv
      if local_cpv and local_cpv != WORLD_TARGET:
        # Pin emerge to the exact local version with an '=' atom.
        emerge_args.append('=' + local_cpv)
      else:
        emerge_args.append(pinfo.package)
    argv = self._GenParallelEmergeArgv(emerge_args)

    deps = parallel_emerge.DepGraphGenerator()
    deps.Initialize(argv)

    try:
      deps_tree, deps_info = deps.GenDependencyTree()
    except SystemExit:
      # parallel_emerge exits (rather than raising) on failure; surface a
      # reproducible command line before re-raising.
      oper.Error('Run of parallel_emerge exited with error while assembling'
                 ' package dependencies (error message should be above).\n'
                 'Command effectively was:\n%s' %
                 ' '.join(['parallel_emerge'] + argv))
      oper.Error('Address the source of the error, then run again.')
      raise
    self._SetPortTree(deps.emerge.settings, deps.emerge.trees)
    self._deps_graph = deps.GenDependencyGraph(deps_tree, deps_info)

    # Pre-order lists dependents first; reverse so dependencies come first.
    cpvlist = Upgrader._GetPreOrderDepGraph(self._deps_graph)
    cpvlist.reverse()

    pinfolist = []
    for cpv in cpvlist:
      # See if this cpv was in target_pinfolist; if so reuse that pinfo so
      # fields set earlier (e.g. user_arg) are preserved.
      is_target = False
      for pinfo in target_pinfolist:
        if cpv == pinfo.cpv:
          pinfolist.append(pinfo)
          is_target = True
          break
      if not is_target:
        pinfolist.append(self._CreatePInfoFromCPV(cpv, cpv_key='cpv'))

    return pinfolist
+
  def _FinalizeLocalPInfolist(self, orig_pinfolist):
    """Filters and fleshes out |orig_pinfolist|, returns new list.

    Each pinfo object is assumed to have entries for:
    cpv, package, package_name, category

    Skips chromeos-base packages and packages scheduled for uninstall;
    fills in slot, overlay, version_rev, and package_ver for the rest.
    """
    pinfolist = []
    for pinfo in orig_pinfolist:
      # No need to report or try to upgrade chromeos-base packages.
      if pinfo.category == 'chromeos-base':
        continue

      dbapi = self._GetPortageDBAPI()
      ebuild_path = dbapi.findname2(pinfo.cpv)[0]
      if not ebuild_path:
        # This has only happened once.  See crosbug.com/26385.
        # In that case, this meant the package, while in the deps graph,
        # was actually to be uninstalled.  How is that possible?  The
        # package was newly added to package.provided.  So skip it.
        oper.Notice('Skipping %r from deps graph, as it appears to be'
                    ' scheduled for uninstall.' % pinfo.cpv)
        continue

      (overlay, _cat, pn, pv) = self._SplitEBuildPath(ebuild_path)
      # pv is '<name>-<version>[-<rev>]'; strip the name to get version_rev.
      ver_rev = pv.replace(pn + '-', '')
      slot, = dbapi.aux_get(pinfo.cpv, ['SLOT'])

      pinfo.slot = slot
      pinfo.overlay = overlay
      pinfo.version_rev = ver_rev
      pinfo.package_ver = pv

      pinfolist.append(pinfo)

    return pinfolist
+
+  # TODO(mtennant): It is likely this method can be yanked now that all
+  # attributes in PInfo are initialized to something (None).
+  # TODO(mtennant): This should probably not return anything, since it
+  # also modifies the list that is passed in.
+  def _FinalizeUpstreamPInfolist(self, pinfolist):
+    """Adds missing values in upstream |pinfolist|, returns list."""
+
+    for pinfo in pinfolist:
+      pinfo.slot = NOT_APPLICABLE
+      pinfo.overlay = NOT_APPLICABLE
+      pinfo.version_rev = NOT_APPLICABLE
+      pinfo.package_ver = NOT_APPLICABLE
+
+    return pinfolist
+
  def _ResolveAndVerifyArgs(self, args, upgrade_mode):
    """Resolve |args| to full pkgs, and check validity of each.

    Each argument will be resolved to a full category/packagename, if possible,
    by looking in both the local overlays and the upstream overlay.  Any
    argument that cannot be resolved will raise a RuntimeError.

    Arguments that specify a specific version of a package are only
    allowed when |upgrade_mode| is True.

    The 'world' target is handled as a local package.

    Any errors will raise a RuntimeError.

    Return list of package pinfos, one for each argument.  Each will have:
    'user_arg' = Original command line argument package was resolved from
    'package'  = Resolved category/package_name
    'package_name' = package_name
    'category' = category (None for 'world' target)
    Packages found in local overlays will also have:
    'cpv'      = Current cpv ('world' for 'world' target)
    Packages found upstream will also have:
    'upstream_cpv' = Upstream cpv
    """
    pinfolist = []

    for arg in args:
      pinfo = PInfo(user_arg=arg)

      if arg == WORLD_TARGET:
        # The 'world' target is a special case.  Consider it a valid target
        # locally, but not an upstream package.
        pinfo.package = arg
        pinfo.package_name = arg
        pinfo.category = None
        pinfo.cpv = arg
      else:
        catpkg = Upgrader._GetCatPkgFromCpv(arg)
        verrev = Upgrader._GetVerRevFromCpv(arg)

        if verrev and not upgrade_mode:
          raise RuntimeError('Specifying specific versions is only allowed '
                             'in upgrade mode.  Do not know what to do with '
                             '"%s".' % arg)

        # Local cpv search ignores version in argument, if any.  If version is
        # in argument, though, it *must* be found upstream.
        local_arg = catpkg if catpkg else arg

        local_cpv = self._FindCurrentCPV(local_arg)
        upstream_cpv = self._FindUpstreamCPV(arg, self._unstable_ok)

        # Old-style virtual packages will resolve to their target packages,
        # which we do not want here because if the package 'virtual/foo' was
        # specified at the command line we want to try upgrading the actual
        # 'virtual/foo' package, not whatever package equery resolves it to.
        # This only matters when 'virtual/foo' is currently an old-style
        # virtual but a new-style virtual for it exists upstream which we
        # want to upgrade to.  For new-style virtuals, equery will resolve
        # 'virtual/foo' to 'virtual/foo', which is fine.
        if arg.startswith('virtual/'):
          if local_cpv and not local_cpv.startswith('virtual/'):
            local_cpv = None

        if not upstream_cpv and upgrade_mode:
          # See if --unstable-ok is required for this upstream version.
          if not self._unstable_ok and self._FindUpstreamCPV(arg, True):
            raise RuntimeError('Upstream "%s" is unstable on %s.  Re-run with '
                               '--unstable-ok option?' % (arg, self._curr_arch))
          else:
            raise RuntimeError('Unable to find "%s" upstream on %s.' %
                               (arg, self._curr_arch))

        any_cpv = local_cpv if local_cpv else upstream_cpv
        if not any_cpv:
          msg = ('Unable to resolve "%s" as a package either local or upstream.'
                 % arg)
          if arg.find('/') < 0:
            msg = msg + ' Try specifying the full category/package_name.'

          raise RuntimeError(msg)

        # Fill in package/name/category from whichever cpv was found, then
        # record the local and upstream cpvs separately.
        self._FillPInfoFromCPV(pinfo, any_cpv)
        pinfo.cpv = local_cpv
        pinfo.upstream_cpv = upstream_cpv
        if local_cpv and upstream_cpv:
          oper.Notice('Resolved "%s" to "%s" (local) and "%s" (upstream).' %
                      (arg, local_cpv, upstream_cpv))
        elif local_cpv:
          oper.Notice('Resolved "%s" to "%s" (local).' %
                      (arg, local_cpv))
        elif upstream_cpv:
          oper.Notice('Resolved "%s" to "%s" (upstream).' %
                      (arg, upstream_cpv))

      pinfolist.append(pinfo)

    return pinfolist
+
  def PrepareToRun(self):
    """Checkout upstream gentoo if necessary, and any other prep steps."""

    # NOTE(review): a cache lacking .git/shallow is presumably in an older
    # (non-shallow) format and is discarded so it gets recreated below as a
    # depth=1 clone -- confirm against the cache-creation history.
    if not os.path.exists(os.path.join(
        self._upstream, '.git', 'shallow')):
      osutils.RmDir(self._upstream, ignore_missing=True)

    if os.path.exists(self._upstream):
      if self._local_only:
        oper.Notice('Using upstream cache as-is (no network) %s.' %
                    self._upstream)
      else:
        # Recheck the pathway; it's possible in switching off alternates,
        # this was converted down to a depth=1 repo.

        oper.Notice('Updating previously created upstream cache at %s.' %
                    self._upstream)
        self._RunGit(self._upstream, ['remote', 'set-url', 'origin',
                                      self.PORTAGE_GIT_URL])
        self._RunGit(self._upstream, ['remote', 'update'])
        self._RunGit(self._upstream, ['checkout', '-f', self.ORIGIN_GENTOO],
                     redirect_stdout=True, combine_stdout_stderr=True)
    else:
      if self._local_only:
        oper.Die('--local-only specified, but no local cache exists. '
                 'Re-run w/out --local-only to create cache automatically.')

      root = os.path.dirname(self._upstream)
      osutils.SafeMakedirs(root)
      # Create local copy of upstream gentoo.
      oper.Notice('Cloning origin/gentoo at %s as upstream reference.' %
                  self._upstream)
      name = os.path.basename(self._upstream)
      args = ['clone', '--branch', os.path.basename(self.ORIGIN_GENTOO)]
      args += ['--depth', '1', self.PORTAGE_GIT_URL, name]
      self._RunGit(root, args)

      # Create a README file to explain its presence.
      with open(self._upstream + '-README', 'w') as f:
        f.write('Directory at %s is local copy of upstream '
                'Gentoo/Portage packages. Used by cros_portage_upgrade.\n'
                'Feel free to delete if you want the space back.\n' %
                self._upstream)

    # An empty directory is needed to trick equery later.
    self._emptydir = tempfile.mkdtemp()
+
+  def RunCompleted(self):
+    """Undo any checkout of upstream gentoo if requested."""
+    if self._no_upstream_cache:
+      oper.Notice('Removing upstream cache at %s as requested.'
+                  % self._upstream)
+      osutils.RmDir(self._upstream, ignore_missing=True)
+
+      # Remove the README file, too.
+      readmepath = self._upstream + '-README'
+      osutils.SafeUnlink(readmepath)
+    else:
+      oper.Notice('Keeping upstream cache at %s.' % self._upstream)
+
+    if self._emptydir:
+      osutils.RmDir(self._emptydir, ignore_missing=True)
+
+  def CommitIsStaged(self):
+    """Return True if upgrades are staged and ready for a commit."""
+    return bool(self._upgrade_cnt)
+
+  def Commit(self):
+    """Commit whatever has been prepared in the stable repo."""
+    # Trying to create commit message body lines that look like these:
+    # Upgraded foo/bar-1.2.3 to version 1.2.4 on x86
+    # Upgraded foo/baz to version 2 on arm AND version 3 on amd64, x86
+
+    commit_lines = []  # Lines for the body of the commit message
+    pkg_overlays = {}  # Overlays for upgraded packages in non-portage overlays.
+
+    # Assemble hash of COL_UPGRADED column names by arch.
+    upgraded_cols = {}
+    for arch in self._master_archs:
+      tmp_col = utable.UpgradeTable.COL_UPGRADED
+      col = utable.UpgradeTable.GetColumnName(tmp_col, arch)
+      upgraded_cols[arch] = col
+
+    table = self._master_table
+    for row in table:
+      pkg = row[table.COL_PACKAGE]
+      pkg_commit_line = None
+
+      # First determine how many unique upgraded versions there are.
+      upgraded_versarch = {}
+      for arch in self._master_archs:
+        upgraded_ver = row[upgraded_cols[arch]]
+        if upgraded_ver:
+          # This package has been upgraded for this arch.
+          upgraded_versarch.setdefault(upgraded_ver, []).append(arch)
+
+          # Save the overlay this package is originally from, if the overlay
+          # is not a Portage overlay (e.g. chromiumos-overlay).
+          ovrly_col = utable.UpgradeTable.COL_OVERLAY
+          ovrly_col = utable.UpgradeTable.GetColumnName(ovrly_col, arch)
+          ovrly = row[ovrly_col]
+          if (ovrly != NOT_APPLICABLE and
+              ovrly != self.UPSTREAM_OVERLAY_NAME and
+              ovrly != self.STABLE_OVERLAY_NAME):
+            pkg_overlays[pkg] = ovrly
+
+      if upgraded_versarch:
+        pkg_commit_line = '%s %s to ' % (UPGRADED, pkg)
+        pkg_commit_line += ' AND '.join(
+            'version %s on %s' % (upgraded_ver, ', '.join(sorted(archlist)))
+            for upgraded_ver, archlist in upgraded_versarch.iteritems())
+        commit_lines.append(pkg_commit_line)
+
+    if commit_lines:
+      if self._amend:
+        message = self._AmendCommitMessage(commit_lines)
+        self._RunGit(self._stable_repo, ['commit', '--amend', '-m', message])
+      else:
+        message = self._CreateCommitMessage(commit_lines)
+        self._RunGit(self._stable_repo, ['commit', '-m', message])
+
+      oper.Warning('\n'
+                   'Upgrade changes committed (see above),'
+                   ' but message needs edit BY YOU:\n'
+                   ' cd %s; git commit --amend; cd -' %
+                   self._stable_repo)
+      # See if any upgraded packages are in non-portage overlays now, meaning
+      # they probably require a patch and should not go into portage-stable.
+      if pkg_overlays:
+        lines = ['%s [%s]' % (p, pkg_overlays[p]) for p in pkg_overlays]
+        oper.Warning('\n'
+                     'The following packages were coming from a non-portage'
+                     ' overlay, which means they were probably patched.\n'
+                     'You should consider whether the upgraded package'
+                     ' needs the same patches applied now.\n'
+                     'If so, do not commit these changes in portage-stable.'
+                     ' Instead, copy them to the applicable overlay dir.\n'
+                     '%s' %
+                     '\n'.join(lines))
+      oper.Notice('\n'
+                  'To remove any individual file above from commit do:\n'
+                  ' cd %s; git reset HEAD~ <filepath>; rm <filepath>;'
+                  ' git commit --amend -C HEAD; cd -' %
+                  self._stable_repo)
+
+      oper.Notice('\n'
+                  'If you wish to undo all the changes to %s:\n'
+                  ' cd %s; git reset --hard HEAD~; cd -' %
+                  (self.STABLE_OVERLAY_NAME, self._stable_repo))
+
+  def PreRunChecks(self):
+    """Run any board-independent validation checks before Run is called."""
+    # Upfront check(s) if upgrade is requested.
+    if self._upgrade or self._upgrade_deep:
+      # Stable source must be on branch.
+      self._CheckStableRepoOnBranch()
+
+  def CheckBoardList(self, boards):
+    """Validate list of specified |boards| before running any of them."""
+
+    # If this is an upgrade run (i.e. --upgrade was specified), then in
+    # almost all cases we want all our supported architectures to be covered.
+    if self._IsInUpgradeMode():
+      board_archs = set()
+      for board in boards:
+        board_archs.add(Upgrader._FindBoardArch(board))
+
+      if not STANDARD_BOARD_ARCHS.issubset(board_archs):
+        # Only proceed if user acknowledges.
+        oper.Warning('You have selected boards for archs %r, which does not'
+                     ' cover all standard archs %r' %
+                     (sorted(board_archs), sorted(STANDARD_BOARD_ARCHS)))
+        oper.Warning('If you continue with this upgrade you may break'
+                     ' builds for architectures not covered by your\n'
+                     'boards.  Continue only if you have a reason to limit'
+                     ' this upgrade to these specific architectures.\n')
+        if not cros_build_lib.BooleanPrompt(
+            prompt='Do you want to continue anyway?', default=False):
+          raise RuntimeError('Missing one or more of the standard archs')
+
  def RunBoard(self, board):
    """Runs the upgrader based on the supplied options and arguments.

    Currently just lists all package dependencies in pre-order along with
    potential upgrades.
    """
    # Preserve status report for entire stable repo (output of 'git status -s').
    self._SaveStatusOnStableRepo()
    # Read contents of profiles/categories for later checks
    self._LoadStableRepoCategories()

    self._porttree = None
    self._deps_graph = None

    self._curr_board = board
    self._curr_arch = Upgrader._FindBoardArch(board)
    upgrade_mode = self._IsInUpgradeMode()
    self._curr_table = utable.UpgradeTable(self._curr_arch,
                                           upgrade=upgrade_mode,
                                           name=board)

    # Stash pre-existing staged changes so this run starts from a clean tree;
    # they are restored before upgrading and dropped in the finally below.
    if self._AnyChangesStaged():
      self._StashChanges()

    try:
      target_pinfolist = self._ResolveAndVerifyArgs(self._args, upgrade_mode)
      upstream_only_pinfolist = [pi for pi in target_pinfolist if not pi.cpv]
      if not upgrade_mode and upstream_only_pinfolist:
        # This means that not all arguments were found in local source, which is
        # only allowed in upgrade mode.
        msg = ('The following packages were not found in current overlays'
               ' (but they do exist upstream):\n%s' %
               '\n'.join([pinfo.user_arg for pinfo in upstream_only_pinfolist]))
        raise RuntimeError(msg)

      full_pinfolist = None

      if self._upgrade:
        # Shallow upgrade mode only cares about targets as they were
        # found upstream.
        full_pinfolist = self._FinalizeUpstreamPInfolist(target_pinfolist)
      else:
        # Assembling dependencies only matters in status report mode or
        # if --upgrade-deep was requested.
        local_target_pinfolist = [pi for pi in target_pinfolist if pi.cpv]
        if local_target_pinfolist:
          oper.Notice('Assembling package dependencies.')
          full_pinfolist = self._GetCurrentVersions(local_target_pinfolist)
          full_pinfolist = self._FinalizeLocalPInfolist(full_pinfolist)
        else:
          full_pinfolist = []

        # Append any command line targets that were not found in current
        # overlays. The idea is that they will still be found upstream
        # for upgrading.
        if upgrade_mode:
          tmp_list = self._FinalizeUpstreamPInfolist(upstream_only_pinfolist)
          full_pinfolist = full_pinfolist + tmp_list

      self._UnstashAnyChanges()
      self._UpgradePackages(full_pinfolist)

    finally:
      self._DropAnyStashedChanges()

    # Merge tables together after each run.
    self._master_cnt += 1
    self._master_archs.add(self._curr_arch)
    if self._master_table:
      tables = [self._master_table, self._curr_table]
      self._master_table = mps.MergeTables(tables)
    else:
      self._master_table = self._curr_table
      # Clear the arch since the master table now spans multiple runs/archs.
      # pylint: disable=protected-access
      self._master_table._arch = None
+
+  def WriteTableFiles(self, csv=None):
+    """Write |self._master_table| to |csv| file, if requested."""
+
+    # Sort the table by package name, then slot
+    def PkgSlotSort(row):
+      return (row[self._master_table.COL_PACKAGE],
+              row[self._master_table.COL_SLOT])
+    self._master_table.Sort(PkgSlotSort)
+
+    if csv:
+      filehandle = open(csv, 'w')
+      oper.Notice('Writing package status as csv to %s.' % csv)
+      self._master_table.WriteCSV(filehandle)
+      filehandle.close()
+    elif not self._IsInUpgradeMode():
+      oper.Notice('Package status report file not requested (--to-csv).')
+
+  def SayGoodbye(self):
+    """Print any final messages to user."""
+    if not self._IsInUpgradeMode():
+      # Without this message users are confused why running a script
+      # with 'upgrade' in the name does not actually do an upgrade.
+      oper.Warning('Completed status report run.  To run in "upgrade"'
+                   ' mode include the --upgrade option.')
+
+
+def _BoardIsSetUp(board):
+  """Return true if |board| has been setup."""
+  return os.path.isdir(cros_build_lib.GetSysroot(board=board))
+
+
+def _CreateParser():
+  """Create the optparser.parser object for command-line args."""
+  epilog = (
+      '\n'
+      'There are essentially two "modes": status report mode and '
+      'upgrade mode.\nStatus report mode is the default; upgrade '
+      'mode is enabled by either --upgrade or --upgrade-deep.\n'
+      '\n'
+      'In either mode, packages can be specified in any manner '
+      'commonly accepted by Portage tools.  For example:\n'
+      ' category/package_name\n'
+      ' package_name\n'
+      ' category/package_name-version (upgrade mode only)\n'
+      '\n'
+      'Status report mode will report on the status of the specified '
+      'packages relative to upstream,\nwithout making any changes. '
+      'In this mode, the specified packages are often high-level\n'
+      'targets such as "virtual/target-os". '
+      'The --to-csv option is often used in this mode.\n'
+      'The --unstable-ok option in this mode will make '
+      'the upstream comparison (e.g. "needs update") be\n'
+      'relative to the latest upstream version, stable or not.\n'
+      '\n'
+      'Upgrade mode will attempt to upgrade the specified '
+      'packages to one of the following versions:\n'
+      '1) The version specified in argument (e.g. foo/bar-1.2.3)\n'
+      '2) The latest stable version upstream (the default)\n'
+      '3) The latest overall version upstream (with --unstable-ok)\n'
+      '\n'
+      'Unlike with --upgrade, if --upgrade-deep is specified, '
+      'then the package dependencies will also be upgraded.\n'
+      'In upgrade mode, it is ok if the specified packages only '
+      'exist upstream.\n'
+      'The --force option can be used to do a package upgrade '
+      'even if the local version matches the upstream version.\n'
+      '\n'
+      'Status report mode examples:\n'
+      '> cros_portage_upgrade --board=arm-generic:x86-generic '
+      '--to-csv=cros-aebl.csv virtual/target-os\n'
+      '> cros_portage_upgrade --unstable-ok --board=x86-mario '
+      '--to-csv=cros_test-mario virtual/target-os virtual/target-os-dev '
+      'virtual/target-os-test\n'
+      'Upgrade mode examples:\n'
+      '> cros_portage_upgrade --board=arm-generic:x86-generic '
+      '--upgrade sys-devel/gdb virtual/yacc\n'
+      '> cros_portage_upgrade --unstable-ok --board=x86-mario '
+      '--upgrade-deep gdata\n'
+      '> cros_portage_upgrade --board=x86-generic --upgrade '
+      'media-libs/libpng-1.2.45\n'
+      '\n'
+  )
+
+  parser = commandline.ArgumentParser(epilog=epilog)
+  parser.add_argument('packages', nargs='*', default=None,
+                      help='Packages to process.')
+  parser.add_argument('--amend', action='store_true', default=False,
+                      help='Amend existing commit when doing upgrade.')
+  parser.add_argument('--board', default=None,
+                      help='Target board(s), colon-separated')
+  parser.add_argument('--force', action='store_true', default=False,
+                      help='Force upgrade even if version already in source')
+  parser.add_argument('--host', action='store_true', default=False,
+                      help='Host target pseudo-board')
+  parser.add_argument('--no-upstream-cache', action='store_true', default=False,
+                      help='Do not preserve cached upstream for future runs')
+  parser.add_argument('--rdeps', action='store_true', default=False,
+                      help='Use runtime dependencies only')
+  parser.add_argument('--srcroot', type='path',
+                      default='%s/trunk/src' % os.environ['HOME'],
+                      help='Path to root src directory [default: %(default)s]')
+  parser.add_argument('--to-csv', dest='csv_file', type='path',
+                      default=None, help='File to store csv-formatted results')
+  parser.add_argument('--upgrade', action='store_true', default=False,
+                      help='Upgrade target package(s) only')
+  parser.add_argument('--upgrade-deep', action='store_true', default=False,
+                      help='Upgrade target package(s) and all dependencies')
+  parser.add_argument('--upstream', type='path',
+                      default=Upgrader.UPSTREAM_TMP_REPO,
+                      help='Latest upstream repo location '
+                      '[default: %(default)s]')
+  parser.add_argument('--unstable-ok', action='store_true', default=False,
+                      help='Use latest upstream ebuild, stable or not')
+  parser.add_argument('--verbose', action='store_true', default=False,
+                      help='Enable verbose output (for debugging)')
+  parser.add_argument('-l', '--local-only', action='store_true', default=False,
+                      help='Do not attempt to update local portage cache')
+  return parser
+
+
+def main(argv):
+  """Main function."""
+  parser = _CreateParser()
+  options = parser.parse_args(argv)
+  # TODO: Can't freeze until options.host modification below is sorted.
+  #options.Freeze()
+
+  oper.verbose = options.verbose
+
+  #
+  # Do some argument checking.
+  #
+
+  if not options.board and not options.host:
+    parser.print_usage()
+    oper.Die('Board (or host) is required.')
+
+  if not options.packages:
+    parser.print_usage()
+    oper.Die('No packages provided.')
+
+  # The --upgrade and --upgrade-deep options are mutually exclusive.
+  if options.upgrade_deep and options.upgrade:
+    parser.print_usage()
+    oper.Die('The --upgrade and --upgrade-deep options '
+             'are mutually exclusive.')
+
+  # The --force option only makes sense with --upgrade or --upgrade-deep.
+  if options.force and not (options.upgrade or options.upgrade_deep):
+    parser.print_usage()
+    oper.Die('The --force option requires --upgrade or --upgrade-deep.')
+
+  # If --to-csv given verify file can be opened for write.
+  if options.csv_file:
+    try:
+      osutils.WriteFile(options.csv_file, '')
+    except IOError as ex:
+      parser.print_usage()
+      oper.Die('Unable to open %s for writing: %s' % (options.csv_file,
+                                                      str(ex)))
+
+  upgrader = Upgrader(options)
+  upgrader.PreRunChecks()
+
+  # Automatically handle board 'host' as 'amd64-host'.
+  boards = []
+  if options.board:
+    boards = options.board.split(':')
+
+    # Specifying --board=host is equivalent to --host.
+    if 'host' in boards:
+      options.host = True
+
+    boards = [b for b in boards if b != 'host']
+
+  # Make sure host pseudo-board is run first.
+  if options.host and Upgrader.HOST_BOARD not in boards:
+    boards.insert(0, Upgrader.HOST_BOARD)
+  elif Upgrader.HOST_BOARD in boards:
+    boards = [b for b in boards if b != Upgrader.HOST_BOARD]
+    boards.insert(0, Upgrader.HOST_BOARD)
+
+  # Check that all boards have been setup first.
+  for board in boards:
+    if board != Upgrader.HOST_BOARD and not _BoardIsSetUp(board):
+      parser.print_usage()
+      oper.Die('You must setup the %s board first.' % board)
+
+  # If --board and --upgrade are given then in almost all cases
+  # the user should cover all architectures.
+  if options.board:
+    non_host_boards = [b for b in boards if b != Upgrader.HOST_BOARD]
+    upgrader.CheckBoardList(non_host_boards)
+
+  passed = True
+  try:
+    upgrader.PrepareToRun()
+
+    for board in boards:
+      oper.Notice('Running with board %s.' % board)
+      upgrader.RunBoard(board)
+  except RuntimeError as ex:
+    passed = False
+    oper.Error(str(ex))
+
+  finally:
+    upgrader.RunCompleted()
+
+  if not passed:
+    oper.Die('Failed with above errors.')
+
+  if upgrader.CommitIsStaged():
+    upgrader.Commit()
+
+  # TODO(mtennant): Move stdout output to here, rather than as-we-go.  That
+  # way it won't come out for each board.  Base it on contents of final table.
+  # Make verbose-dependent?
+
+  upgrader.WriteTableFiles(csv=options.csv_file)
+
+  upgrader.SayGoodbye()
diff --git a/scripts/cros_portage_upgrade_unittest b/scripts/cros_portage_upgrade_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/cros_portage_upgrade_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/cros_portage_upgrade_unittest.py b/scripts/cros_portage_upgrade_unittest.py
new file mode 100644
index 0000000..b7cdea5
--- /dev/null
+++ b/scripts/cros_portage_upgrade_unittest.py
@@ -0,0 +1,3629 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for cros_portage_upgrade.py."""
+
+from __future__ import print_function
+
+import exceptions
+import filecmp
+import mox
+import os
+import re
+import shutil
+import tempfile
+import unittest
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import upgrade_table as utable
+from chromite.scripts import cros_portage_upgrade as cpu
+from chromite.scripts import parallel_emerge
+
+from portage.package.ebuild import config as portcfg  # pylint: disable=F0401
+
+
+# This no longer gets installed by portage.  Stub it as None to avoid
+# `cros lint` errors.
+#from portage.tests.resolver import ResolverPlayground as respgnd
+respgnd = None
+
+# Enable color invariably. Since we rely on color for error/warn message
+# recognition, leaving this to be decided based on stdout being a tty
+# will make the tests fail/succeed based on how they are run.
+# pylint: disable=W0102,W0212,E1120,E1101
+cpu.oper._color._enabled = True
+
+DEFAULT_PORTDIR = '/usr/portage'
+
+# Configuration for generating a temporary valid ebuild hierarchy.
+# ResolverPlayground sets up a default profile with ARCH=x86, so
+# other architectures are irrelevant for now.
+DEFAULT_ARCH = 'x86'
+EBUILDS = {
+    'dev-libs/A-1': {'RDEPEND': 'dev-libs/B'},
+    'dev-libs/A-2': {'RDEPEND': 'dev-libs/B'},
+    'dev-libs/B-1': {'RDEPEND': 'dev-libs/C'},
+    'dev-libs/B-2': {'RDEPEND': 'dev-libs/C'},
+    'dev-libs/C-1': {},
+    'dev-libs/C-2': {},
+    'dev-libs/D-1': {'RDEPEND': '!dev-libs/E'},
+    'dev-libs/D-2': {},
+    'dev-libs/D-3': {},
+    'dev-libs/E-2': {'RDEPEND': '!dev-libs/D'},
+    'dev-libs/E-3': {},
+
+    'dev-libs/F-1': {'SLOT': '1'},
+    'dev-libs/F-2': {'SLOT': '2'},
+    'dev-libs/F-2-r1': {
+        'SLOT': '2',
+        'KEYWORDS': '~amd64 ~x86 ~arm',
+    },
+
+    'dev-apps/X-1': {
+        'EAPI': '3',
+        'SLOT': '0',
+        'KEYWORDS': 'amd64 arm x86',
+        'RDEPEND': '=dev-libs/C-1',
+    },
+    'dev-apps/Y-2': {
+        'EAPI': '3',
+        'SLOT': '0',
+        'KEYWORDS': 'amd64 arm x86',
+        'RDEPEND': '=dev-libs/C-2',
+    },
+
+    'chromeos-base/flimflam-0.0.1-r228': {
+        'EAPI': '2',
+        'SLOT': '0',
+        'KEYWORDS': 'amd64 x86 arm',
+        'RDEPEND': '>=dev-libs/D-2',
+    },
+    'chromeos-base/flimflam-0.0.2-r123': {
+        'EAPI': '2',
+        'SLOT': '0',
+        'KEYWORDS': '~amd64 ~x86 ~arm',
+        'RDEPEND': '>=dev-libs/D-3',
+    },
+    'chromeos-base/libchrome-57098-r4': {
+        'EAPI': '2',
+        'SLOT': '0',
+        'KEYWORDS': 'amd64 x86 arm',
+        'RDEPEND': '>=dev-libs/E-2',
+    },
+    'chromeos-base/libcros-1': {
+        'EAPI': '2',
+        'SLOT': '0',
+        'KEYWORDS': 'amd64 x86 arm',
+        'RDEPEND': 'dev-libs/B dev-libs/C chromeos-base/flimflam',
+        'DEPEND': ('dev-libs/B dev-libs/C chromeos-base/flimflam '
+                   'chromeos-base/libchrome'),
+    },
+
+    'virtual/libusb-0': {
+        'EAPI': '2',
+        'SLOT': '0',
+        'RDEPEND': (
+            '|| ( >=dev-libs/libusb-0.1.12-r1:0 dev-libs/libusb-compat '
+            '>=sys-freebsd/freebsd-lib-8.0[usb] )'
+        ),
+    },
+    'virtual/libusb-1': {
+        'EAPI':'2', 'SLOT': '1',
+        'RDEPEND': '>=dev-libs/libusb-1.0.4:1',
+    },
+    'dev-libs/libusb-0.1.13': {},
+    'dev-libs/libusb-1.0.5': {'SLOT':'1'},
+    'dev-libs/libusb-compat-1': {},
+    'sys-freebsd/freebsd-lib-8': {'IUSE': '+usb'},
+
+    'sys-fs/udev-164': {'EAPI': '1', 'RDEPEND': 'virtual/libusb:0'},
+
+    'virtual/jre-1.5.0': {
+        'SLOT': '1.5',
+        'RDEPEND': '|| ( =dev-java/sun-jre-bin-1.5.0* =virtual/jdk-1.5.0* )',
+    },
+    'virtual/jre-1.5.0-r1': {
+        'SLOT': '1.5',
+        'RDEPEND': '|| ( =dev-java/sun-jre-bin-1.5.0* =virtual/jdk-1.5.0* )',
+    },
+    'virtual/jre-1.6.0': {
+        'SLOT': '1.6',
+        'RDEPEND': '|| ( =dev-java/sun-jre-bin-1.6.0* =virtual/jdk-1.6.0* )',
+    },
+    'virtual/jre-1.6.0-r1': {
+        'SLOT': '1.6',
+        'RDEPEND': '|| ( =dev-java/sun-jre-bin-1.6.0* =virtual/jdk-1.6.0* )',
+    },
+    'virtual/jdk-1.5.0': {
+        'SLOT': '1.5',
+        'RDEPEND': '|| ( =dev-java/sun-jdk-1.5.0* dev-java/gcj-jdk )',
+    },
+    'virtual/jdk-1.5.0-r1': {
+        'SLOT': '1.5',
+        'RDEPEND': '|| ( =dev-java/sun-jdk-1.5.0* dev-java/gcj-jdk )',
+    },
+    'virtual/jdk-1.6.0': {
+        'SLOT': '1.6',
+        'RDEPEND': '|| ( =dev-java/icedtea-6* =dev-java/sun-jdk-1.6.0* )',
+    },
+    'virtual/jdk-1.6.0-r1': {
+        'SLOT': '1.6',
+        'RDEPEND': '|| ( =dev-java/icedtea-6* =dev-java/sun-jdk-1.6.0* )',
+    },
+    'dev-java/gcj-jdk-4.5': {},
+    'dev-java/gcj-jdk-4.5-r1': {},
+    'dev-java/icedtea-6.1': {},
+    'dev-java/icedtea-6.1-r1': {},
+    'dev-java/sun-jdk-1.5': {'SLOT': '1.5'},
+    'dev-java/sun-jdk-1.6': {'SLOT': '1.6'},
+    'dev-java/sun-jre-bin-1.5': {'SLOT': '1.5'},
+    'dev-java/sun-jre-bin-1.6': {'SLOT': '1.6'},
+
+    'dev-java/ant-core-1.8': {'DEPEND': '>=virtual/jdk-1.4'},
+    'dev-db/hsqldb-1.8': {'RDEPEND': '>=virtual/jre-1.6'},
+}
+
+WORLD = [
+    'dev-libs/A',
+    'dev-libs/D',
+    'virtual/jre',
+]
+
+INSTALLED = {
+    'dev-libs/A-1': {},
+    'dev-libs/B-1': {},
+    'dev-libs/C-1': {},
+    'dev-libs/D-1': {},
+
+    'virtual/jre-1.5.0': {
+        'SLOT': '1.5',
+        'RDEPEND': '|| ( =virtual/jdk-1.5.0* =dev-java/sun-jre-bin-1.5.0* )',
+    },
+    'virtual/jre-1.6.0': {
+        'SLOT': '1.6',
+        'RDEPEND': '|| ( =virtual/jdk-1.6.0* =dev-java/sun-jre-bin-1.6.0* )',
+    },
+    'virtual/jdk-1.5.0': {
+        'SLOT': '1.5',
+        'RDEPEND': '|| ( =dev-java/sun-jdk-1.5.0* dev-java/gcj-jdk )',
+    },
+    'virtual/jdk-1.6.0': {
+        'SLOT': '1.6',
+        'RDEPEND': '|| ( =dev-java/icedtea-6* =dev-java/sun-jdk-1.6.0* )',
+    },
+    'dev-java/gcj-jdk-4.5': {},
+    'dev-java/icedtea-6.1': {},
+
+    'virtual/libusb-0': {
+        'EAPI': '2',
+        'SLOT': '0',
+        'RDEPEND': (
+            '|| ( >=dev-libs/libusb-0.1.12-r1:0 dev-libs/libusb-compat '
+            '>=sys-freebsd/freebsd-lib-8.0[usb] )'
+        )
+    },
+}
+
+# For verifying dependency graph results
+GOLDEN_DEP_GRAPHS = {
+    'dev-libs/A-2': {
+        'needs': {'dev-libs/B-2': 'runtime'},
+        'action': 'merge',
+    },
+    'dev-libs/B-2': {'needs': {'dev-libs/C-2': 'runtime'}},
+    'dev-libs/C-2': {'needs': {}},
+    'dev-libs/D-3': {'needs': {}},
+    # TODO(mtennant): Bug in parallel_emerge deps graph makes blocker show up
+    # for E-3, rather than in just E-2 where it belongs. See crosbug.com/22190.
+    # To repeat bug, swap the commented status of next two lines.
+    #'dev-libs/E-3': {'needs': {}},
+    'dev-libs/E-3': {'needs': {'dev-libs/D-3': 'blocker'}},
+    'chromeos-base/libcros-1': {
+        'needs': {
+            'dev-libs/B-2': 'runtime/buildtime',
+            'dev-libs/C-2': 'runtime/buildtime',
+            'chromeos-base/libchrome-57098-r4': 'buildtime',
+            'chromeos-base/flimflam-0.0.1-r228': 'runtime/buildtime'
+        }
+    },
+    'chromeos-base/flimflam-0.0.1-r228': {
+        'needs': {'dev-libs/D-3': 'runtime'},
+    },
+    'chromeos-base/libchrome-57098-r4': {
+        'needs': {'dev-libs/E-3': 'runtime'},
+    },
+}
+
+# For verifying dependency set results
+GOLDEN_DEP_SETS = {
+    'dev-libs/A': set(['dev-libs/A-2', 'dev-libs/B-2', 'dev-libs/C-2']),
+    'dev-libs/B': set(['dev-libs/B-2', 'dev-libs/C-2']),
+    'dev-libs/C': set(['dev-libs/C-2']),
+    'dev-libs/D': set(['dev-libs/D-3']),
+    'virtual/libusb': set(['virtual/libusb-1', 'dev-libs/libusb-1.0.5']),
+    'chromeos-base/libcros': set(['chromeos-base/libcros-1',
+                                  'dev-libs/B-2',
+                                  'chromeos-base/libchrome-57098-r4',
+                                  'dev-libs/E-3',
+                                  'chromeos-base/flimflam-0.0.1-r228',
+                                  'dev-libs/D-3',
+                                  'dev-libs/C-2',])
+}
+
+
+def _GetGoldenDepsSet(pkg):
+  """Retrieve the golden dependency set for |pkg| from GOLDEN_DEP_SETS."""
+  return GOLDEN_DEP_SETS.get(pkg, None)
+
+
+def _VerifyDepsGraph(deps_graph, pkgs):
+  for pkg in pkgs:
+    if not _VerifyDepsGraphOnePkg(deps_graph, pkg):
+      return False
+
+  return True
+
+
+def _VerifyDepsGraphOnePkg(deps_graph, pkg):
+  """Verification function for Mox to validate deps graph for |pkg|."""
+
+  if deps_graph is None:
+    print('Error: no dependency graph passed into _GetPreOrderDepGraph')
+    return False
+
+  if type(deps_graph) != dict:
+    print('Error: dependency graph is expected to be a dict.  Instead:\n%r' %
+          deps_graph)
+    return False
+
+  validated = True
+
+  golden_deps_set = _GetGoldenDepsSet(pkg)
+  if golden_deps_set == None:
+    print('Error: golden dependency list not configured for %s package' % pkg)
+    validated = False
+
+  # Verify dependencies, by comparing them to GOLDEN_DEP_GRAPHS
+  for p in deps_graph:
+    golden_pkg_info = None
+    try:
+      golden_pkg_info = GOLDEN_DEP_GRAPHS[p]
+    except KeyError:
+      print('Error: golden dependency graph not configured for %s package' % p)
+      validated = False
+      continue
+
+    pkg_info = deps_graph[p]
+    for key in golden_pkg_info:
+      golden_value = golden_pkg_info[key]
+      value = pkg_info[key]
+      if not value == golden_value:
+        print('Error: while verifying "%s" value for %s package,'
+              ' expected:\n%r\nBut instead found:\n%r'
+              % (key, p, golden_value, value))
+        validated = False
+
+  if not validated:
+    print('Error: dependency graph for %s is not as expected.  Instead:\n%r' %
+          (pkg, deps_graph))
+
+  return validated
+
+
+def _GenDepsGraphVerifier(pkgs):
+  """Generate a graph verification function for the given package."""
+  return lambda deps_graph: _VerifyDepsGraph(deps_graph, pkgs)
+
+
+class ManifestLine(object):
+  """Class to represent a Manifest line."""
+
+  __slots__ = (
+      'type',    # DIST, EBUILD, etc.
+      'file',
+      'size',
+      'RMD160',
+      'SHA1',
+      'SHA256',
+      )
+
+  __attrlist__ = __slots__
+
+  def __init__(self, line=None, **kwargs):
+    """Parse |line| from manifest file."""
+    if line:
+      tokens = line.split()
+      self.type = tokens[0]
+      self.file = tokens[1]
+      self.size = tokens[2]
+      self.RMD160 = tokens[4]
+      self.SHA1 = tokens[6]
+      self.SHA256 = tokens[8]
+
+      assert tokens[3] == 'RMD160'
+      assert tokens[5] == 'SHA1'
+      assert tokens[7] == 'SHA256'
+
+    # Entries in kwargs are overwrites.
+    for attr in self.__attrlist__:
+      if attr in kwargs or not hasattr(self, attr):
+        setattr(self, attr, kwargs.get(attr))
+
+  def __str__(self):
+    return ('%s %s %s RMD160 %s SHA1 %s SHA256 %s' %
+            (self.type, self.file, self.size,
+             self.RMD160, self.SHA1, self.SHA256))
+
+  def __eq__(self, other):
+    """Equality support."""
+
+    if type(self) != type(other):
+      return False
+
+    no_attr = object()
+    for attr in self.__attrlist__:
+      if getattr(self, attr, no_attr) != getattr(other, attr, no_attr):
+        return False
+
+    return True
+
+  def __ne__(self, other):
+    """Inequality for completeness."""
+    return not self == other
+
+
+class RunCommandResult(object):
+  """Class to simulate result of cros_build_lib.RunCommand."""
+  __slots__ = ['returncode', 'output']
+
+  def __init__(self, returncode, output):
+    self.returncode = returncode
+    self.output = output
+
+
+class PInfoTest(cros_test_lib.OutputTestCase):
+  """Tests for the PInfo class."""
+
+  def testInit(self):
+    pinfo = cpu.PInfo(category='SomeCat', user_arg='SomeArg')
+
+    self.assertEquals('SomeCat', pinfo.category)
+    self.assertEquals('SomeArg', pinfo.user_arg)
+
+    self.assertEquals(None, pinfo.cpv)
+    self.assertEquals(None, pinfo.overlay)
+
+    self.assertRaises(AttributeError, getattr, pinfo, 'foobar')
+
+  def testEqAndNe(self):
+    pinfo1 = cpu.PInfo(category='SomeCat', user_arg='SomeArg')
+
+    self.assertEquals(pinfo1, pinfo1)
+    self.assertTrue(pinfo1 == pinfo1)
+    self.assertFalse(pinfo1 != pinfo1)
+
+    pinfo2 = cpu.PInfo(category='SomeCat', user_arg='SomeArg')
+
+    self.assertEquals(pinfo1, pinfo2)
+    self.assertTrue(pinfo1 == pinfo2)
+    self.assertFalse(pinfo1 != pinfo2)
+
+    pinfo3 = cpu.PInfo(category='SomeCat', user_arg='SomeOtherArg')
+
+    self.assertNotEquals(pinfo1, pinfo3)
+    self.assertFalse(pinfo1 == pinfo3)
+    self.assertTrue(pinfo1 != pinfo3)
+
+    pinfo4 = cpu.PInfo(category='SomeCat', slot='SomeSlot')
+
+    self.assertNotEquals(pinfo1, pinfo4)
+    self.assertFalse(pinfo1 == pinfo4)
+    self.assertTrue(pinfo1 != pinfo4)
+
+
+class CpuTestBase(cros_test_lib.MoxTempDirTestOutputCase):
+  """Base class for all test classes in this file."""
+
+  __slots__ = [
+      'playground',
+      'playground_envvars',
+  ]
+
+  def setUp(self):
+    self.playground = None
+    self.playground_envvars = None
+
+  def tearDown(self):
+    self._TearDownPlayground()
+
+  def _SetUpPlayground(self, ebuilds=EBUILDS, installed=INSTALLED, world=WORLD,
+                       active=True):
+    """Prepare the temporary ebuild playground (ResolverPlayground).
+
+    This leverages test code in existing Portage modules to create an ebuild
+    hierarchy.  This can be a little slow.
+
+    Args:
+      ebuilds: A list of hashes representing ebuild files in a portdir.
+      installed: A list of hashes representing ebuilds files already installed.
+      world: A list of lines to simulate in the world file.
+      active: True means that os.environ variables should be set
+        to point to the created playground, such that Portage tools
+        (such as emerge) can be run now using the playground as the active
+        PORTDIR.  Also saves the playground as self.playground. If |active|
+        is False, then no os.environ variables are set and playground is
+        not saved (only returned).
+
+    Returns:
+      Tuple (playground, envvars).
+    """
+
+    # TODO(mtennant): Support multiple overlays?  This essentially
+    # creates just a default overlay.
+    # Also note that ResolverPlayground assumes ARCH=x86 for the
+    # default profile it creates.
+    playground = respgnd.ResolverPlayground(ebuilds=ebuilds,
+                                            installed=installed,
+                                            world=world)
+
+    # Set all envvars needed by parallel_emerge, since parallel_emerge
+    # normally does that when --board is given.
+    eroot = self._GetPlaygroundRoot(playground)
+    portdir = self._GetPlaygroundPortdir(playground)
+    envvars = {
+        'PORTAGE_CONFIGROOT': eroot,
+        'ROOT': eroot,
+        'PORTDIR': portdir,
+        # See _GenPortageEnvvars for more info on this setting.
+        'PORTDIR_OVERLAY': portdir,
+    }
+
+    if active:
+      for envvar in envvars:
+        os.environ[envvar] = envvars[envvar]
+
+      self.playground = playground
+      self.playground_envvars = envvars
+
+    return (playground, envvars)
+
+  def _GetPlaygroundRoot(self, playground=None):
+    """Get the temp dir playground is using as ROOT."""
+    if playground is None:
+      playground = self.playground
+
+    eroot = playground.eroot
+    if eroot[-1:] == '/':
+      eroot = eroot[:-1]
+    return eroot
+
+  def _GetPlaygroundPortdir(self, playground=None):
+    """Get the temp portdir within the playground."""
+    if playground is None:
+      playground = self.playground
+
+    eroot = self._GetPlaygroundRoot(playground)
+    portdir = '%s%s' % (eroot, DEFAULT_PORTDIR)
+    return portdir
+
+  def _TearDownPlayground(self):
+    """Delete the temporary ebuild playground files."""
+    if self.playground:
+      self.playground.cleanup()
+
+      self.playground = None
+      self.playground_envvars = None
+
+  def _MockUpgrader(self, cmdargs=None, **kwargs):
+    """Set up a mocked Upgrader object with the given args."""
+    upgrader_slot_defaults = {
+        '_curr_arch': DEFAULT_ARCH,
+        '_curr_board': 'some_board',
+        '_unstable_ok': False,
+        '_verbose': False,
+    }
+
+    upgrader = self.mox.CreateMock(cpu.Upgrader)
+
+    # Initialize each attribute with default value.
+    for slot in cpu.Upgrader.__slots__:
+      value = upgrader_slot_defaults.get(slot, None)
+      upgrader.__setattr__(slot, value)
+
+    # Initialize with command line if given.
+    if cmdargs is not None:
+      parser = cpu._CreateParser()
+      options = parser.parse_args(cmdargs)
+      cpu.Upgrader.__init__(upgrader, options)
+
+    # Override Upgrader attributes if requested.
+    for slot in cpu.Upgrader.__slots__:
+      value = None
+      if slot in kwargs:
+        upgrader.__setattr__(slot, kwargs[slot])
+
+    return upgrader
+
+
+@unittest.skip('relies on portage module not currently available')
+class CopyUpstreamTest(CpuTestBase):
+  """Test Upgrader._CopyUpstreamPackage, _CopyUpstreamEclass, _CreateManifest"""
+
+  # This is a hack until crosbug.com/21965 is completed and upstreamed
+  # to Portage.  Insert eclass simulation into tree.
+  def _AddEclassToPlayground(self, eclass, lines=None,
+                             ebuilds=None, missing=False):
+    """Hack to insert an eclass into the playground source.
+
+    Args:
+      eclass: Name of eclass to create (without .eclass suffix).  Will be
+        created as an empty file unless |lines| is specified.
+      lines: Lines of text to put into created eclass, if given.
+      ebuilds: List of ebuilds to put inherit line into.  Should be path
+        to ebuild from playground portdir.
+      missing: If True, do not actually create the eclass file.  Only makes
+        sense if |ebuilds| is non-empty, presumably to test inherit failure.
+
+    Returns:
+      Full path to the eclass file, whether it was created or not.
+    """
+    portdir = self._GetPlaygroundPortdir()
+    eclass_path = os.path.join(portdir, 'eclass', '%s.eclass' % eclass)
+
+    # Create the eclass file
+    osutils.WriteFile(eclass_path, '\n'.join(lines if lines else []))
+
+    # Insert the inherit line into the ebuild file, if requested.
+    if ebuilds:
+      for ebuild in ebuilds:
+        ebuild_path = os.path.join(portdir, ebuild)
+
+        text = osutils.ReadFile(ebuild_path)
+
+        def repl(match):
+          return match.group(1) + '\ninherit ' + eclass
+        text = re.sub(r'(EAPI.*)', repl, text)
+
+        osutils.WriteFile(ebuild_path, text)
+
+        # Remove the Manifest file
+        os.remove(os.path.join(os.path.dirname(ebuild_path), 'Manifest'))
+
+        # Recreate the Manifests using the ebuild utility.
+        cmd = ['ebuild', ebuild_path, 'manifest']
+        cros_build_lib.RunCommand(cmd, print_cmd=False, redirect_stdout=True,
+                                  combine_stdout_stderr=True)
+
+    # If requested, remove the eclass.
+    if missing:
+      os.remove(eclass_path)
+
+    return eclass_path
+
+  #
+  # _IdentifyNeededEclass
+  #
+
+  def _TestIdentifyNeededEclass(self, cpv, ebuild, eclass, create_eclass):
+    """Test Upgrader._IdentifyNeededEclass
+
+    Sets up a playground where |ebuild| inherits |eclass|.  When
+    |create_eclass| is False the eclass file itself is removed after the
+    ebuild is edited, simulating an eclass that is needed but missing.
+    Returns whatever _IdentifyNeededEclass returns for |cpv|.
+    """
+
+    self._SetUpPlayground()
+    portdir = self._GetPlaygroundPortdir()
+    mocked_upgrader = self._MockUpgrader(cmdargs=[],
+                                         _stable_repo=portdir)
+    self._AddEclassToPlayground(eclass,
+                                ebuilds=[ebuild],
+                                missing=not create_eclass)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    # Compute the real envvars first so the mocked call can return them.
+    envvars = cpu.Upgrader._GenPortageEnvvars(mocked_upgrader,
+                                              mocked_upgrader._curr_arch,
+                                              unstable_ok=True)
+    mocked_upgrader._GenPortageEnvvars(mocked_upgrader._curr_arch,
+                                       unstable_ok=True,
+                                      ).AndReturn(envvars)
+    mocked_upgrader._GetBoardCmd('equery').AndReturn('equery')
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      result = cpu.Upgrader._IdentifyNeededEclass(mocked_upgrader, cpv)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testIdentifyNeededEclassMissing(self):
+    """Eclass file is absent locally: its filename should be returned."""
+    result = self._TestIdentifyNeededEclass('dev-libs/A-2',
+                                            'dev-libs/A/A-2.ebuild',
+                                            'inheritme',
+                                            False)
+    self.assertEquals('inheritme.eclass', result)
+
+  def testIdentifyNeededEclassOK(self):
+    """Eclass file exists locally: nothing is needed, result is None."""
+    result = self._TestIdentifyNeededEclass('dev-libs/A-2',
+                                            'dev-libs/A/A-2.ebuild',
+                                            'inheritme',
+                                            True)
+    self.assertTrue(result is None)
+
+  #
+  # _CopyUpstreamEclass
+  #
+
+  def _TestCopyUpstreamEclass(self, eclass, do_copy,
+                              local_copy_identical=None, error=None):
+    """Test Upgrader._CopyUpstreamEclass
+
+    |do_copy| is whether a copy from upstream is expected to happen.
+    |local_copy_identical| controls the pre-existing local eclass:
+    None means no local copy exists, True means an identical copy
+    exists, False means a different (garbage) copy exists.
+    |error| is an exception class expected to be raised, if any.
+    """
+
+    self._SetUpPlayground()
+    upstream_portdir = self._GetPlaygroundPortdir()
+    portage_stable = self.tempdir
+    mocked_upgrader = self._MockUpgrader(_curr_board=None,
+                                         _upstream=upstream_portdir,
+                                         _stable_repo=portage_stable)
+
+    eclass_subpath = os.path.join('eclass', eclass + '.eclass')
+    eclass_path = os.path.join(portage_stable, eclass_subpath)
+    upstream_eclass_path = None
+    if do_copy or local_copy_identical:
+      lines = ['#Dummy eclass', '#Hi']
+      upstream_eclass_path = self._AddEclassToPlayground(eclass,
+                                                         lines=lines)
+    if local_copy_identical:
+      # Make it look like identical eclass already exists in portage-stable.
+      os.makedirs(os.path.dirname(eclass_path))
+      shutil.copy2(upstream_eclass_path, eclass_path)
+    elif local_copy_identical is not None:
+      # Make local copy some other gibberish.
+      os.makedirs(os.path.dirname(eclass_path))
+      osutils.WriteFile(eclass_path, 'garblety gook')
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    # A copy should be followed by a 'git add' of the new eclass.
+    if do_copy:
+      mocked_upgrader._RunGit(mocked_upgrader._stable_repo,
+                              ['add', eclass_subpath])
+    self.mox.ReplayAll()
+
+    # Verify
+    result = None
+    with self.OutputCapturer():
+      if error:
+        self.assertRaises(error, cpu.Upgrader._CopyUpstreamEclass,
+                          mocked_upgrader, eclass + '.eclass')
+      else:
+        result = cpu.Upgrader._CopyUpstreamEclass(mocked_upgrader,
+                                                  eclass + '.eclass')
+    self.mox.VerifyAll()
+
+    if do_copy:
+      self.assertTrue(result)
+      # Verify that eclass has been copied into portage-stable.
+      self.assertTrue(os.path.exists(eclass_path))
+      # Verify that eclass contents are correct.
+      self.assertTrue(filecmp.cmp(upstream_eclass_path, eclass_path))
+
+    else:
+      self.assertFalse(result)
+
+  def testCopyUpstreamEclassCopyBecauseMissing(self):
+    """Copy expected: no local eclass exists in portage-stable."""
+    self._TestCopyUpstreamEclass('inheritme',
+                                 do_copy=True)
+
+  def testCopyUpstreamEclassCopyBecauseDifferent(self):
+    """Copy expected: local eclass exists but differs from upstream."""
+    self._TestCopyUpstreamEclass('inheritme',
+                                 do_copy=True,
+                                 local_copy_identical=False)
+
+  def testCopyUpstreamEclassNoCopyBecauseIdentical(self):
+    """No copy expected: local eclass is already identical to upstream."""
+    self._TestCopyUpstreamEclass('inheritme',
+                                 do_copy=False,
+                                 local_copy_identical=True)
+
+  def testCopyUpstreamEclassNoCopyBecauseUpstreamMissing(self):
+    """Should raise: the eclass does not exist upstream at all."""
+    self._TestCopyUpstreamEclass('inheritme',
+                                 do_copy=False,
+                                 error=RuntimeError)
+
+  #
+  # _CopyUpstreamPackage
+  #
+
+  def _TestCopyUpstreamPackage(self, catpkg, verrev, success,
+                               existing_files, extra_upstream_files,
+                               error=None):
+    """Test Upgrader._CopyUpstreamPackage
+
+    |catpkg| and |verrev| name the upstream cpv to copy.  |success| is
+    whether the copy is expected to work.  |existing_files| are files
+    pre-created in the portage-stable package dir (expected to be
+    removed via git), |extra_upstream_files| are extra files placed in
+    the upstream package dir (expected to be copied along).  |error| is
+    an exception class expected to be raised, if any.
+    """
+
+    upstream_cpv = '%s-%s' % (catpkg, verrev)
+    ebuild = '%s-%s.ebuild' % (catpkg.split('/')[-1], verrev)
+
+    self._SetUpPlayground()
+    upstream_portdir = self._GetPlaygroundPortdir()
+
+    # Simulate extra files in upstream package dir.
+    if extra_upstream_files:
+      pkg_dir = os.path.join(upstream_portdir, catpkg)
+      if os.path.exists(pkg_dir):
+        for extra_file in extra_upstream_files:
+          open(os.path.join(pkg_dir, extra_file), 'w')
+
+    # Prepare dummy portage-stable dir, with extra previously
+    # existing files simulated if requested.
+    portage_stable = self.tempdir
+    if existing_files:
+      pkg_dir = os.path.join(portage_stable, catpkg)
+      os.makedirs(pkg_dir)
+      for existing_file in existing_files:
+        open(os.path.join(pkg_dir, existing_file), 'w')
+
+
+    mocked_upgrader = self._MockUpgrader(_curr_board=None,
+                                         _upstream=upstream_portdir,
+                                         _stable_repo=portage_stable,
+                                        )
+
+    # Replay script
+    if success:
+      def git_rm(*args, **_kwargs):
+        # Identify files that pseudo-git is to remove, then remove them.
+        # As with real "git rm", if the dir is then empty remove that.
+        # Files to remove are the git command args after 'rm -rf'
+        # (i.e. args[1][2:]).
+        dirpath = args[0]
+        for f in args[1][2:]:
+          os.remove(os.path.join(dirpath, f))
+        try:
+          os.rmdir(os.path.dirname(dirpath))
+        except OSError:
+          # Directory not empty (or gone); nothing more to do.
+          pass
+
+      pkgdir = os.path.join(mocked_upgrader._stable_repo, catpkg)
+
+      if existing_files:
+        rm_list = [os.path.join(catpkg, f) for f in existing_files]
+
+        # Accept files to remove in any order.
+        def rm_cmd_verifier(cmd):
+          cmd_args = cmd[2:] # Peel off 'rm -rf'.
+          return sorted(cmd_args) == sorted(rm_list)
+
+        mocked_upgrader._RunGit(mocked_upgrader._stable_repo,
+                                mox.Func(rm_cmd_verifier),
+                                redirect_stdout=True
+                               ).WithSideEffects(git_rm)
+
+      mocked_upgrader._CreateManifest(os.path.join(upstream_portdir, catpkg),
+                                      pkgdir, ebuild)
+      mocked_upgrader._IdentifyNeededEclass(upstream_cpv).AndReturn(None)
+    self.mox.ReplayAll()
+
+    # Verify
+    result = None
+    with self.OutputCapturer():
+      if error:
+        self.assertRaises(error, cpu.Upgrader._CopyUpstreamPackage,
+                          mocked_upgrader, upstream_cpv)
+      else:
+        result = cpu.Upgrader._CopyUpstreamPackage(mocked_upgrader,
+                                                   upstream_cpv)
+    self.mox.VerifyAll()
+
+    if success:
+      self.assertEquals(result, upstream_cpv)
+
+      # Verify that ebuild has been copied into portage-stable.
+      ebuild_path = os.path.join(portage_stable, catpkg, ebuild)
+      self.assertTrue(os.path.exists(ebuild_path),
+                      msg='Missing expected ebuild after copy from upstream')
+
+      # Verify that any extra files upstream are also copied.
+      for extra_file in extra_upstream_files:
+        file_path = os.path.join(portage_stable, catpkg, extra_file)
+        msg = ('Missing expected extra file %s after copy from upstream' %
+               extra_file)
+        self.assertTrue(os.path.exists(file_path), msg=msg)
+    else:
+      self.assertTrue(result is None)
+
+  def testCopyUpstreamPackageEmptyStable(self):
+    """Copy dev-libs/D-2 into an empty portage-stable dir."""
+    existing_files = []
+    extra_upstream_files = []
+    self._TestCopyUpstreamPackage('dev-libs/D', '2', True,
+                                  existing_files,
+                                  extra_upstream_files)
+
+  def testCopyUpstreamPackageClutteredStable(self):
+    """Pre-existing files in portage-stable should be removed via git."""
+    existing_files = ['foo', 'bar', 'foobar.ebuild', 'D-1.ebuild']
+    extra_upstream_files = []
+    self._TestCopyUpstreamPackage('dev-libs/D', '2', True,
+                                  existing_files,
+                                  extra_upstream_files)
+
+  def testCopyUpstreamPackageVersionNotAvailable(self):
+    """Should fail, dev-libs/D version 5 does not exist 'upstream'"""
+    existing_files = []
+    extra_upstream_files = []
+    # success=False with error=RuntimeError: the copy must raise.
+    self._TestCopyUpstreamPackage('dev-libs/D', '5', False,
+                                  existing_files,
+                                  extra_upstream_files,
+                                  error=RuntimeError)
+
+  def testCopyUpstreamPackagePackageNotAvailable(self):
+    """Should fail, a-b-c/D does not exist 'upstream' in any version"""
+    existing_files = []
+    extra_upstream_files = []
+    # success=False with error=RuntimeError: the copy must raise.
+    self._TestCopyUpstreamPackage('a-b-c/D', '5', False,
+                                  existing_files,
+                                  extra_upstream_files,
+                                  error=RuntimeError)
+
+  def testCopyUpstreamPackageExtraUpstreamFiles(self):
+    """Extra files next to the upstream ebuild should be copied too."""
+    existing_files = ['foo', 'bar']
+    extra_upstream_files = ['keepme', 'andme']
+    self._TestCopyUpstreamPackage('dev-libs/F', '2-r1', True,
+                                  existing_files,
+                                  extra_upstream_files)
+
+
+  def _SetupManifestTest(self, ebuild,
+                         upstream_mlines, current_mlines):
+    """Create upstream/current temp dirs with optional Manifest files.
+
+    |upstream_mlines| / |current_mlines| are lists of ManifestLine (or
+    None) written to a Manifest file in each freshly created temp dir.
+    Also stubs RunCommand to expect the 'ebuild <path> manifest' call.
+    Returns (upstream_dir, current_dir).
+    """
+    upstream_dir = tempfile.mkdtemp(dir=self.tempdir)
+    current_dir = tempfile.mkdtemp(dir=self.tempdir)
+
+    upstream_manifest = os.path.join(upstream_dir, 'Manifest')
+    current_manifest = os.path.join(current_dir, 'Manifest')
+
+    if upstream_mlines:
+      osutils.WriteFile(upstream_manifest,
+                        '\n'.join(str(x) for x in upstream_mlines) + '\n')
+
+    if current_mlines:
+      osutils.WriteFile(current_manifest,
+                        '\n'.join(str(x) for x in current_mlines) + '\n')
+
+    ebuild_path = os.path.join(current_dir, ebuild)
+
+    # Add test-specific mocks/stubs
+    self.mox.StubOutWithMock(cros_build_lib, 'RunCommand')
+
+    # Prepare test replay script.
+    run_result = RunCommandResult(returncode=0, output='')
+    cros_build_lib.RunCommand(['ebuild', ebuild_path, 'manifest'],
+                              error_code_ok=True, print_cmd=False,
+                              redirect_stdout=True, combine_stdout_stderr=True
+                             ).AndReturn(run_result)
+    self.mox.ReplayAll()
+
+    return (upstream_dir, current_dir)
+
+  def _AssertManifestContents(self, manifest_path, expected_manifest_lines):
+    """Assert that the Manifest at |manifest_path| parses to exactly
+    |expected_manifest_lines| (a list of ManifestLine objects)."""
+    manifest_lines = []
+    with open(manifest_path, 'r') as f:
+      for line in f:
+        manifest_lines.append(ManifestLine(line))
+
+    msg = ('Manifest contents not as expected.  Expected:\n%s\n'
+           '\nBut got:\n%s\n' %
+           ('\n'.join([str(ml) for ml in expected_manifest_lines]),
+            '\n'.join([str(ml) for ml in manifest_lines])))
+    # Check both == and != so both ManifestLine comparison paths run.
+    self.assertTrue(manifest_lines == expected_manifest_lines, msg=msg)
+    self.assertFalse(manifest_lines != expected_manifest_lines, msg=msg)
+
+  def testCreateManifestNew(self):
+    """Test case with upstream but no current Manifest."""
+
+    mocked_upgrader = self._MockUpgrader()
+
+    ebuild = 'some-pkg.ebuild'
+    upst_mlines = [ManifestLine(type='DIST',
+                                file='fileA',
+                                size='100',
+                                RMD160='abc',
+                                SHA1='123',
+                                SHA256='abc123'),
+                   ManifestLine(type='EBUILD',
+                                file=ebuild,
+                                size='254',
+                                RMD160='def',
+                                SHA1='456',
+                                SHA256='def456'),]
+    upstream_dir, current_dir = self._SetupManifestTest(ebuild,
+                                                        upst_mlines, None)
+
+    upstream_manifest = os.path.join(upstream_dir, 'Manifest')
+    current_manifest = os.path.join(current_dir, 'Manifest')
+
+    # Run test verification.
+    self.assertFalse(os.path.exists(current_manifest))
+    cpu.Upgrader._CreateManifest(mocked_upgrader,
+                                 upstream_dir, current_dir, ebuild)
+    self.mox.VerifyAll()
+    # With no current Manifest, the result should be an exact copy.
+    self.assertTrue(filecmp.cmp(upstream_manifest, current_manifest))
+
+  def testCreateManifestMerge(self):
+    """Test case with both an upstream and a current Manifest."""
+
+    mocked_upgrader = self._MockUpgrader()
+
+    ebuild = 'some-pkg.ebuild'
+    curr_mlines = [ManifestLine(type='DIST',
+                                file='fileA',
+                                size='101',
+                                RMD160='abc',
+                                SHA1='123',
+                                SHA256='abc123'),
+                   ManifestLine(type='DIST',
+                                file='fileC',
+                                size='321',
+                                RMD160='cde',
+                                SHA1='345',
+                                SHA256='cde345'),
+                   ManifestLine(type='EBUILD',
+                                file=ebuild,
+                                size='254',
+                                RMD160='def',
+                                SHA1='789',
+                                SHA256='def789'),]
+    upst_mlines = [ManifestLine(type='DIST',
+                                file='fileA',
+                                size='100',
+                                RMD160='abc',
+                                SHA1='123',
+                                SHA256='abc123'),
+                   # This file is different from current manifest.
+                   # It should be picked up by _CreateManifest.
+                   ManifestLine(type='DIST',
+                                file='fileB',
+                                size='345',
+                                RMD160='bcd',
+                                SHA1='234',
+                                SHA256='bcd234'),
+                   ManifestLine(type='EBUILD',
+                                file=ebuild,
+                                size='254',
+                                RMD160='def',
+                                SHA1='789',
+                                SHA256='def789'),]
+
+    upstream_dir, current_dir = self._SetupManifestTest(ebuild,
+                                                        upst_mlines,
+                                                        curr_mlines)
+
+    current_manifest = os.path.join(current_dir, 'Manifest')
+
+    # Run test verification.
+    self.assertTrue(os.path.exists(current_manifest))
+    cpu.Upgrader._CreateManifest(mocked_upgrader,
+                                 upstream_dir, current_dir, ebuild)
+    self.mox.VerifyAll()
+
+    # upst_mlines[1:2] is the new fileB entry, expected to be appended
+    # to the pre-existing current manifest lines.
+    expected_mlines = curr_mlines + upst_mlines[1:2]
+    self._AssertManifestContents(current_manifest, expected_mlines)
+
+
+class GetPackageUpgradeStateTest(CpuTestBase):
+  """Test Upgrader._GetPackageUpgradeState"""
+
+  def _TestGetPackageUpgradeState(self, pinfo,
+                                  exists_upstream):
+    """Run _GetPackageUpgradeState on |pinfo| with the upstream
+    existence lookup mocked to return |exists_upstream|."""
+    cmdargs = []
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    mocked_upgrader._FindUpstreamCPV(pinfo.cpv, unstable_ok=True,
+                                    ).AndReturn(exists_upstream)
+    self.mox.ReplayAll()
+
+    # Verify
+    result = cpu.Upgrader._GetPackageUpgradeState(mocked_upgrader, pinfo)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testGetPackageUpgradeStateLocalOnly(self):
+    """No upstream info at all, non-portage overlay: LOCAL_ONLY."""
+    pinfo = cpu.PInfo(cpv='foo/bar-2',
+                      overlay='chromiumos-overlay',
+                      cpv_cmp_upstream=None,
+                      latest_upstream_cpv=None)
+    result = self._TestGetPackageUpgradeState(pinfo, exists_upstream=False)
+    self.assertEquals(result, utable.UpgradeTable.STATE_LOCAL_ONLY)
+
+  def testGetPackageUpgradeStateUnknown(self):
+    """No upstream info, 'portage' overlay: UNKNOWN."""
+    pinfo = cpu.PInfo(cpv='foo/bar-2',
+                      overlay='portage',
+                      cpv_cmp_upstream=None,
+                      latest_upstream_cpv=None)
+    result = self._TestGetPackageUpgradeState(pinfo, exists_upstream=False)
+    self.assertEquals(result, utable.UpgradeTable.STATE_UNKNOWN)
+
+  def testGetPackageUpgradeStateUpgradeAndDuplicated(self):
+    """Outdated overlay copy that also exists upstream."""
+    pinfo = cpu.PInfo(cpv='foo/bar-2',
+                      overlay='chromiumos-overlay',
+                      cpv_cmp_upstream=1, # outdated
+                      latest_upstream_cpv='not important')
+    result = self._TestGetPackageUpgradeState(pinfo, exists_upstream=True)
+    self.assertEquals(result,
+                      utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_DUPLICATED)
+
+  def testGetPackageUpgradeStateUpgradeAndPatched(self):
+    """Outdated overlay copy that does not exist upstream."""
+    pinfo = cpu.PInfo(cpv='foo/bar-2',
+                      overlay='chromiumos-overlay',
+                      cpv_cmp_upstream=1, # outdated
+                      latest_upstream_cpv='not important')
+    result = self._TestGetPackageUpgradeState(pinfo, exists_upstream=False)
+    self.assertEquals(result,
+                      utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_PATCHED)
+
+  def testGetPackageUpgradeStateUpgrade(self):
+    """Outdated portage-stable copy: plain NEEDS_UPGRADE."""
+    pinfo = cpu.PInfo(cpv='foo/bar-2',
+                      overlay='portage-stable',
+                      cpv_cmp_upstream=1, # outdated
+                      latest_upstream_cpv='not important')
+    result = self._TestGetPackageUpgradeState(pinfo, exists_upstream=False)
+    self.assertEquals(result, utable.UpgradeTable.STATE_NEEDS_UPGRADE)
+
+  def testGetPackageUpgradeStateDuplicated(self):
+    """Current overlay copy that also exists upstream: DUPLICATED."""
+    pinfo = cpu.PInfo(cpv='foo/bar-2',
+                      overlay='chromiumos-overlay',
+                      cpv_cmp_upstream=0, # current
+                      latest_upstream_cpv='not important')
+    result = self._TestGetPackageUpgradeState(pinfo, exists_upstream=True)
+    self.assertEquals(result, utable.UpgradeTable.STATE_DUPLICATED)
+
+  def testGetPackageUpgradeStatePatched(self):
+    """Current overlay copy not found upstream: PATCHED."""
+    pinfo = cpu.PInfo(cpv='foo/bar-2',
+                      overlay='chromiumos-overlay',
+                      cpv_cmp_upstream=0, # current
+                      latest_upstream_cpv='not important')
+    result = self._TestGetPackageUpgradeState(pinfo, exists_upstream=False)
+    self.assertEquals(result, utable.UpgradeTable.STATE_PATCHED)
+
+  def testGetPackageUpgradeStateCurrent(self):
+    """Current portage-stable copy: CURRENT."""
+    pinfo = cpu.PInfo(cpv='foo/bar-2',
+                      overlay='portage-stable',
+                      cpv_cmp_upstream=0, # current
+                      latest_upstream_cpv='not important')
+    result = self._TestGetPackageUpgradeState(pinfo, exists_upstream=False)
+    self.assertEquals(result, utable.UpgradeTable.STATE_CURRENT)
+
+
+@unittest.skip('relies on portage module not currently available')
+class EmergeableTest(CpuTestBase):
+  """Test Upgrader._AreEmergeable."""
+
+  def _TestAreEmergeable(self, cpvlist, expect,
+                         debug=False, world=WORLD):
+    """Test the Upgrader._AreEmergeable method.
+
+    |cpvlist| is passed to _AreEmergeable.
+    |expect| is boolean, expected return value of _AreEmergeable
+    |debug| requests that emerge output in _AreEmergeable be shown.
+    |world| is list of lines to override default world contents.
+    """
+
+    cmdargs = ['--upgrade'] + cpvlist
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs)
+    self._SetUpPlayground(world=world)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    # Compute the real envvars first so the mocked call can return them.
+    envvars = cpu.Upgrader._GenPortageEnvvars(mocked_upgrader,
+                                              mocked_upgrader._curr_arch,
+                                              unstable_ok=False)
+    mocked_upgrader._GenPortageEnvvars(mocked_upgrader._curr_arch,
+                                       unstable_ok=False).AndReturn(envvars)
+    mocked_upgrader._GetBoardCmd('emerge').AndReturn('emerge')
+    self.mox.ReplayAll()
+
+    # Verify
+    result = cpu.Upgrader._AreEmergeable(mocked_upgrader, cpvlist)
+    self.mox.VerifyAll()
+
+    # _AreEmergeable returns (success, command, emerge output).
+    (code, _cmd, output) = result
+    if debug or code != expect:
+      print('\nTest ended with success==%r (expected==%r)' % (code, expect))
+      print('Emerge output:\n%s' % output)
+
+    self.assertEquals(code, expect)
+
+  def testAreEmergeableOnePkg(self):
+    """Should pass, one cpv target."""
+    cpvlist = ['dev-libs/A-1']
+    return self._TestAreEmergeable(cpvlist, True)
+
+  def testAreEmergeableTwoPkgs(self):
+    """Should pass, two cpv targets."""
+    cpvlist = ['dev-libs/A-1', 'dev-libs/B-1']
+    return self._TestAreEmergeable(cpvlist, True)
+
+  def testAreEmergeableOnePkgTwoVersions(self):
+    """Should fail, targets two versions of same package."""
+    cpvlist = ['dev-libs/A-1', 'dev-libs/A-2']
+    return self._TestAreEmergeable(cpvlist, False)
+
+  def testAreEmergeableStableFlimFlam(self):
+    """Should pass, target stable version of pkg."""
+    cpvlist = ['chromeos-base/flimflam-0.0.1-r228']
+    return self._TestAreEmergeable(cpvlist, True)
+
+  def testAreEmergeableUnstableFlimFlam(self):
+    """Should fail, target unstable version of pkg."""
+    cpvlist = ['chromeos-base/flimflam-0.0.2-r123']
+    return self._TestAreEmergeable(cpvlist, False)
+
+  def testAreEmergeableBlockedPackages(self):
+    """Should fail, targets have blocking deps on each other."""
+    cpvlist = ['dev-libs/D-1', 'dev-libs/E-2']
+    return self._TestAreEmergeable(cpvlist, False)
+
+  def testAreEmergeableBlockedByInstalledPkg(self):
+    """Should fail because of installed D-1 pkg."""
+    cpvlist = ['dev-libs/E-2']
+    return self._TestAreEmergeable(cpvlist, False)
+
+  def testAreEmergeableNotBlockedByInstalledPkgNotInWorld(self):
+    """Should pass because installed D-1 pkg not in world."""
+    cpvlist = ['dev-libs/E-2']
+    return self._TestAreEmergeable(cpvlist, True, world=[])
+
+  def testAreEmergeableSamePkgDiffSlots(self):
+    """Should pass, same package but different slots."""
+    cpvlist = ['dev-libs/F-1', 'dev-libs/F-2']
+    return self._TestAreEmergeable(cpvlist, True)
+
+  def testAreEmergeableTwoPackagesIncompatibleDeps(self):
+    """Should fail, targets depend on two versions of same pkg."""
+    cpvlist = ['dev-apps/X-1', 'dev-apps/Y-2']
+    return self._TestAreEmergeable(cpvlist, False)
+
+
+class CPVUtilTest(CpuTestBase):
+  """Test various CPV utilities in Upgrader"""
+
+  def _TestCmpCpv(self, cpv1, cpv2):
+    """Test Upgrader._CmpCpv"""
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    result = cpu.Upgrader._CmpCpv(cpv1, cpv2)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testCmpCpv(self):
+    # cpvs to compare
+    # 'foo/bar-1' vs 'foo/bar-1-r0': revision 0 is implicit, so equal.
+    equal = [('foo/bar-1', 'foo/bar-1'),
+             ('a-b-c/x-y-z-1.2.3-r1', 'a-b-c/x-y-z-1.2.3-r1'),
+             ('foo/bar-1', 'foo/bar-1-r0'),
+             (None, None)]
+    for (cpv1, cpv2) in equal:
+      self.assertEqual(0, self._TestCmpCpv(cpv1, cpv2))
+
+    # Each pair: first sorts strictly before second (and vice versa).
+    lessthan = [(None, 'foo/bar-1'),
+                ('foo/bar-1', 'foo/bar-2'),
+                ('foo/bar-1', 'foo/bar-1-r1'),
+                ('foo/bar-1-r1', 'foo/bar-1-r2'),
+                ('foo/bar-1.2.3', 'foo/bar-1.2.4'),
+                ('foo/bar-5a', 'foo/bar-5b')]
+    for (cpv1, cpv2) in lessthan:
+      self.assertTrue(self._TestCmpCpv(cpv1, cpv2) < 0)
+      self.assertTrue(self._TestCmpCpv(cpv2, cpv1) > 0)
+
+    # Different packages cannot be compared; result is None.
+    not_comparable = [('foo/bar-1', 'bar/foo-1')]
+    for (cpv1, cpv2) in not_comparable:
+      self.assertEquals(None, self._TestCmpCpv(cpv1, cpv2))
+
+  def _TestGetCatPkgFromCpv(self, cpv):
+    """Test Upgrader._GetCatPkgFromCpv"""
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    result = cpu.Upgrader._GetCatPkgFromCpv(cpv)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testGetCatPkgFromCpv(self):
+    # (input, expected category/package) tuples
+    data = [('foo/bar-1', 'foo/bar'),
+            ('a-b-c/x-y-z-1', 'a-b-c/x-y-z'),
+            ('a-b-c/x-y-z-1.2.3-r3', 'a-b-c/x-y-z'),
+            ('bar-1', 'bar'),
+            ('bar', None)]
+
+    for (cpv, catpn) in data:
+      result = self._TestGetCatPkgFromCpv(cpv)
+      self.assertEquals(catpn, result)
+
+  def _TestGetVerRevFromCpv(self, cpv):
+    """Test Upgrader._GetVerRevFromCpv"""
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    result = cpu.Upgrader._GetVerRevFromCpv(cpv)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testGetVerRevFromCpv(self):
+    # (input, expected version-revision) tuples.
+    # Note '-r0' is dropped ('foo/bar-3.222-r0' -> '3.222').
+    data = [('foo/bar-1', '1'),
+            ('a-b-c/x-y-z-1', '1'),
+            ('a-b-c/x-y-z-1.2.3-r3', '1.2.3-r3'),
+            ('foo/bar-3.222-r0', '3.222'),
+            ('bar-1', '1'),
+            ('bar', None)]
+
+    for (cpv, verrev) in data:
+      result = self._TestGetVerRevFromCpv(cpv)
+      self.assertEquals(verrev, result)
+
+  def _TestGetEbuildPathFromCpv(self, cpv):
+    """Test Upgrader._GetEbuildPathFromCpv"""
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    result = cpu.Upgrader._GetEbuildPathFromCpv(cpv)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testGetEbuildPathFromCpv(self):
+    # (input, expected relative ebuild path) tuples
+    data = [('foo/bar-1', 'foo/bar/bar-1.ebuild'),
+            ('a-b-c/x-y-z-1', 'a-b-c/x-y-z/x-y-z-1.ebuild'),
+            ('a-b-c/x-y-z-1.2.3-r3', 'a-b-c/x-y-z/x-y-z-1.2.3-r3.ebuild'),
+            ('foo/bar-3.222-r0', 'foo/bar/bar-3.222-r0.ebuild'),]
+
+    for (cpv, verrev) in data:
+      result = self._TestGetEbuildPathFromCpv(cpv)
+      self.assertEquals(verrev, result)
+
+
+class PortageStableTest(CpuTestBase):
+  """Test Upgrader methods _SaveStatusOnStableRepo, _CheckStableRepoOnBranch"""
+
+  # Canned {path: status-code} dicts in the shape produced from
+  # 'git status -s' output.  '??' marks an untracked file; the other
+  # codes are one-letter git short-status codes.
+  STATUS_MIX = {'path1/file1': 'M',
+                'path1/path2/file2': 'A',
+                'a/b/.x/y~': 'D',
+                'foo/bar': 'C',
+                '/bar/foo': 'U',
+                'unknown/file': '??',}
+  # Only untracked ('??') entries.
+  STATUS_UNKNOWN = {'foo/bar': '??',
+                    'a/b/c': '??',}
+  STATUS_EMPTY = {}
+
+  #
+  # _CheckStableRepoOnBranch
+  #
+
+  def _TestCheckStableRepoOnBranch(self, run_result, expect_err):
+    """Test Upgrader._CheckStableRepoOnBranch
+
+    |run_result| is returned by the mocked 'git branch' call.
+    |expect_err| is whether a RuntimeError is expected.
+    """
+    cmdargs = []
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    mocked_upgrader._RunGit(mocked_upgrader._stable_repo,
+                            ['branch'], redirect_stdout=True,
+                           ).AndReturn(run_result)
+    self.mox.ReplayAll()
+
+    # Verify
+    # Assert in both branches: error raised iff |expect_err|.
+    try:
+      cpu.Upgrader._CheckStableRepoOnBranch(mocked_upgrader)
+      self.assertFalse(expect_err, 'Expected RuntimeError, but none raised.')
+    except RuntimeError as ex:
+      self.assertTrue(expect_err, 'Unexpected RuntimeError: %s' % str(ex))
+    self.mox.VerifyAll()
+
+  def testCheckStableRepoOnBranchNoBranch(self):
+    """Should fail due to 'git branch' saying 'no branch'"""
+    # '* (no branch)' is what git prints for a detached HEAD.
+    output = '* (no branch)\n  somebranch\n  otherbranch\n'
+    run_result = RunCommandResult(returncode=0, output=output)
+    self._TestCheckStableRepoOnBranch(run_result, True)
+
+  def testCheckStableRepoOnBranchOK1(self):
+    """Should pass as 'git branch' indicates a branch"""
+    # Current branch ('*') listed first.
+    output = '* somebranch\n  otherbranch\n'
+    run_result = RunCommandResult(returncode=0, output=output)
+    self._TestCheckStableRepoOnBranch(run_result, False)
+
+  def testCheckStableRepoOnBranchOK2(self):
+    """Should pass as 'git branch' indicates a branch"""
+    # Current branch ('*') listed second.
+    output = '  somebranch\n* otherbranch\n'
+    run_result = RunCommandResult(returncode=0, output=output)
+    self._TestCheckStableRepoOnBranch(run_result, False)
+
+  def testCheckStableRepoOnBranchFail(self):
+    """Should fail as 'git branch' failed"""
+    # Non-zero returncode alone should trigger the error.
+    output = 'does not matter'
+    run_result = RunCommandResult(returncode=1, output=output)
+    self._TestCheckStableRepoOnBranch(run_result, True)
+
+  #
+  # _SaveStatusOnStableRepo
+  #
+
+  def _TestSaveStatusOnStableRepo(self, run_result):
+    """Test Upgrader._SaveStatusOnStableRepo
+
+    |run_result| is returned by the mocked 'git status -s' call.
+    Returns the saved _stable_repo_status dict.
+    """
+    cmdargs = []
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    mocked_upgrader._RunGit(mocked_upgrader._stable_repo,
+                            ['status', '-s'], redirect_stdout=True,
+                           ).AndReturn(run_result)
+    self.mox.ReplayAll()
+
+    # Verify
+    cpu.Upgrader._SaveStatusOnStableRepo(mocked_upgrader)
+    self.mox.VerifyAll()
+
+    # Saving status must not have stashed anything.
+    self.assertFalse(mocked_upgrader._stable_repo_stashed)
+    return mocked_upgrader._stable_repo_status
+
+  def testSaveStatusOnStableRepoFailed(self):
+    """Test case where 'git status -s' fails, should raise RuntimeError"""
+    # Non-zero returncode from git should surface as RuntimeError.
+    run_result = RunCommandResult(returncode=1,
+                                  output=None)
+
+    self.assertRaises(RuntimeError,
+                      self._TestSaveStatusOnStableRepo,
+                      run_result)
+
+  def testSaveStatusOnStableRepoAllKinds(self):
+    """Test where 'git status -s' returns all status kinds"""
+    # Reconstruct 'XY path' short-status lines from the STATUS_MIX dict;
+    # parsing them back should reproduce the dict exactly.
+    status_lines = ['%2s %s' % (v, k) for (k, v) in self.STATUS_MIX.items()]
+    status_output = '\n'.join(status_lines)
+    run_result = RunCommandResult(returncode=0,
+                                  output=status_output)
+    status = self._TestSaveStatusOnStableRepo(run_result)
+    self.assertEqual(status, self.STATUS_MIX)
+
+  def testSaveStatusOnStableRepoRename(self):
+    """Test where 'git status -s' shows a file rename"""
+    old = 'path/foo-1'
+    new = 'path/foo-2'
+    status_lines = [' R %s --> %s' % (old, new)]
+    status_output = '\n'.join(status_lines)
+    run_result = RunCommandResult(returncode=0,
+                                  output=status_output)
+    status = self._TestSaveStatusOnStableRepo(run_result)
+    # A rename is recorded as delete of old path plus add of new path.
+    self.assertEqual(status, {old: 'D', new: 'A'})
+
+  def testSaveStatusOnStableRepoEmpty(self):
+    """Test empty response from 'git status -s'"""
+    # No output lines should yield an empty status dict.
+    run_result = RunCommandResult(returncode=0,
+                                  output='')
+    status = self._TestSaveStatusOnStableRepo(run_result)
+    self.assertEqual(status, {})
+
+  #
+  # _AnyChangesStaged
+  #
+
+  def _TestAnyChangesStaged(self, status_dict):
+    """Test Upgrader._AnyChangesStaged
+
+    Seeds the upgrader's _stable_repo_status with |status_dict| and
+    returns the boolean result.
+    """
+    mocked_upgrader = self._MockUpgrader(_stable_repo_status=status_dict)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    result = cpu.Upgrader._AnyChangesStaged(mocked_upgrader)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testAnyChangesStagedMix(self):
+    """Should return True"""
+    # STATUS_MIX has non-'??' entries, so changes are staged.
+    self.assertTrue(self._TestAnyChangesStaged(self.STATUS_MIX),
+                    'Failed to notice files with changed status.')
+
+  def testAnyChangesStagedUnknown(self):
+    """Should return False, only files with '??' status"""
+    # Untracked files do not count as staged changes.
+    self.assertFalse(self._TestAnyChangesStaged(self.STATUS_UNKNOWN),
+                     'Should not consider files with "??" status.')
+
+  def testAnyChangesStagedEmpty(self):
+    """Should return False, no file statuses"""
+    self.assertFalse(self._TestAnyChangesStaged(self.STATUS_EMPTY),
+                     'No files should mean no changes staged.')
+
+  #
+  # _StashChanges
+  #
+
+  def testStashChanges(self):
+    """Test Upgrader._StashChanges
+
+    Stashing should run 'git stash save' and flip the
+    _stable_repo_stashed flag from False to True.
+    """
+    mocked_upgrader = self._MockUpgrader(cmdargs=[],
+                                         _stable_repo_stashed=False)
+    self.assertFalse(mocked_upgrader._stable_repo_stashed)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    mocked_upgrader._RunGit(mocked_upgrader._stable_repo,
+                            ['stash', 'save'],
+                            redirect_stdout=True,
+                            combine_stdout_stderr=True)
+    self.mox.ReplayAll()
+
+    # Verify
+    cpu.Upgrader._StashChanges(mocked_upgrader)
+    self.mox.VerifyAll()
+
+    self.assertTrue(mocked_upgrader._stable_repo_stashed)
+
+  #
+  # _UnstashAnyChanges
+  #
+
+  def _TestUnstashAnyChanges(self, stashed):
+    """Test Upgrader._UnstashAnyChanges
+
+    Args:
+      stashed: Initial value of the _stable_repo_stashed flag.  A
+        'git stash pop --index' is expected only when True; either way
+        the flag must be False afterward.
+    """
+    mocked_upgrader = self._MockUpgrader(cmdargs=[],
+                                         _stable_repo_stashed=stashed)
+    self.assertEqual(stashed, mocked_upgrader._stable_repo_stashed)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    if stashed:
+      mocked_upgrader._RunGit(mocked_upgrader._stable_repo,
+                              ['stash', 'pop', '--index'],
+                              redirect_stdout=True,
+                              combine_stdout_stderr=True)
+    self.mox.ReplayAll()
+
+    # Verify
+    cpu.Upgrader._UnstashAnyChanges(mocked_upgrader)
+    self.mox.VerifyAll()
+
+    self.assertFalse(mocked_upgrader._stable_repo_stashed)
+
+  def testUnstashAnyChanges(self):
+    self._TestUnstashAnyChanges(True)
+    self._TestUnstashAnyChanges(False)
+
+  #
+  # _DropAnyStashedChanges
+  #
+
+  def _TestDropAnyStashedChanges(self, stashed):
+    """Test Upgrader._DropAnyStashedChanges
+
+    Args:
+      stashed: Initial value of the _stable_repo_stashed flag.  A
+        'git stash drop' is expected only when True; either way the
+        flag must be False afterward.
+    """
+    mocked_upgrader = self._MockUpgrader(cmdargs=[],
+                                         _stable_repo_stashed=stashed)
+    self.assertEqual(stashed, mocked_upgrader._stable_repo_stashed)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    if stashed:
+      mocked_upgrader._RunGit(mocked_upgrader._stable_repo,
+                              ['stash', 'drop'],
+                              redirect_stdout=True,
+                              combine_stdout_stderr=True)
+    self.mox.ReplayAll()
+
+    # Verify
+    cpu.Upgrader._DropAnyStashedChanges(mocked_upgrader)
+    self.mox.VerifyAll()
+
+    self.assertFalse(mocked_upgrader._stable_repo_stashed)
+
+  def testDropAnyStashedChanges(self):
+    self._TestDropAnyStashedChanges(True)
+    self._TestDropAnyStashedChanges(False)
+
+
+class UtilityTest(CpuTestBase):
+  """Test several Upgrader methods.
+
+  Test these Upgrader methods: _SplitEBuildPath, _GenPortageEnvvars
+  """
+
+  #
+  # _IsInUpgradeMode
+  #
+
+  def _TestIsInUpgradeMode(self, cmdargs):
+    """Test Upgrader._IsInUpgradeMode.  Pretty simple."""
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    result = cpu.Upgrader._IsInUpgradeMode(mocked_upgrader)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testIsInUpgradeModeNoOpts(self):
+    """Should not be in upgrade mode with no options."""
+    result = self._TestIsInUpgradeMode([])
+    self.assertFalse(result)
+
+  def testIsInUpgradeModeUpgrade(self):
+    """Should be in upgrade mode with --upgrade."""
+    result = self._TestIsInUpgradeMode(['--upgrade'])
+    self.assertTrue(result)
+
+  def testIsInUpgradeModeUpgradeDeep(self):
+    """Should be in upgrade mode with --upgrade-deep."""
+    result = self._TestIsInUpgradeMode(['--upgrade-deep'])
+    self.assertTrue(result)
+
+  #
+  # _GetBoardCmd
+  #
+
+  def _TestGetBoardCmd(self, cmd, board):
+    """Test Upgrader._GetBoardCmd."""
+    mocked_upgrader = self._MockUpgrader(_curr_board=board)
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    result = cpu.Upgrader._GetBoardCmd(mocked_upgrader, cmd)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testGetBoardCmdKnownCmds(self):
+    board = 'x86-alex'
+    for cmd in ['emerge', 'equery', 'portageq']:
+      result = self._TestGetBoardCmd(cmd, cpu.Upgrader.HOST_BOARD)
+      self.assertEquals(result, cmd)
+      result = self._TestGetBoardCmd(cmd, board)
+      self.assertEquals(result, '%s-%s' % (cmd, board))
+
+  def testGetBoardCmdUnknownCmd(self):
+    board = 'x86-alex'
+    cmd = 'foo'
+    result = self._TestGetBoardCmd(cmd, cpu.Upgrader.HOST_BOARD)
+    self.assertEquals(result, cmd)
+    result = self._TestGetBoardCmd(cmd, board)
+    self.assertEquals(result, cmd)
+
+  #
+  # _GenPortageEnvvars testing
+  #
+
+  def _TestGenPortageEnvvars(self, arch, unstable_ok,
+                             portdir=None, portage_configroot=None):
+    """Testing the behavior of the Upgrader._GenPortageEnvvars method."""
+    mocked_upgrader = self._MockUpgrader()
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    result = cpu.Upgrader._GenPortageEnvvars(mocked_upgrader,
+                                             arch, unstable_ok,
+                                             portdir, portage_configroot)
+    self.mox.VerifyAll()
+
+    keyw = arch
+    if unstable_ok:
+      keyw = arch + ' ~' + arch
+
+    self.assertEquals(result['ACCEPT_KEYWORDS'], keyw)
+    if portdir is None:
+      self.assertFalse('PORTDIR' in result)
+    else:
+      self.assertEquals(result['PORTDIR'], portdir)
+    if portage_configroot is None:
+      self.assertFalse('PORTAGE_CONFIGROOT' in result)
+    else:
+      self.assertEquals(result['PORTAGE_CONFIGROOT'], portage_configroot)
+
+  def testGenPortageEnvvars1(self):
+    self._TestGenPortageEnvvars('arm', False)
+
+  def testGenPortageEnvvars2(self):
+    self._TestGenPortageEnvvars('x86', True)
+
+  def testGenPortageEnvvars3(self):
+    self._TestGenPortageEnvvars('x86', True,
+                                portdir='/foo/bar',
+                                portage_configroot='/bar/foo')
+
+  #
+  # _SplitEBuildPath testing
+  #
+
+  def _TestSplitEBuildPath(self, ebuild_path, golden_result):
+    """Test the behavior of the Upgrader._SplitEBuildPath method."""
+    mocked_upgrader = self._MockUpgrader()
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    result = cpu.Upgrader._SplitEBuildPath(mocked_upgrader,
+                                           ebuild_path)
+    self.assertEquals(result, golden_result)
+    self.mox.VerifyAll()
+
+  def testSplitEBuildPath1(self):
+    self._TestSplitEBuildPath('/foo/bar/portage/dev-libs/A/A-2.ebuild',
+                              ('portage', 'dev-libs', 'A', 'A-2'))
+
+  def testSplitEBuildPath2(self):
+    self._TestSplitEBuildPath('/foo/ooo/ccc/ppp/ppp-1.2.3-r123.ebuild',
+                              ('ooo', 'ccc', 'ppp', 'ppp-1.2.3-r123'))
+
+
+@unittest.skip('relies on portage module not currently available')
+class TreeInspectTest(CpuTestBase):
+  """Test Upgrader methods: _FindCurrentCPV, _FindUpstreamCPV"""
+
+  def _GenerateTestInput(self, category, pkg_name, ver_rev,
+                         path_prefix=DEFAULT_PORTDIR):
+    """Return tuple (ebuild_path, cpv, cp).
+
+    Args:
+      category: Portage category, e.g. 'dev-libs'.
+      pkg_name: Package name, e.g. 'A'.
+      ver_rev: Version (and optional revision) string, or None to get
+        (None, None, cp) for a package with no expected ebuild.
+      path_prefix: Directory prepended to the generated ebuild path.
+    """
+    ebuild_path = None
+    cpv = None
+    if ver_rev:
+      ebuild_path = '%s/%s/%s/%s-%s.ebuild' % (path_prefix,
+                                               category, pkg_name,
+                                               pkg_name, ver_rev)
+      cpv = '%s/%s-%s' % (category, pkg_name, ver_rev)
+    cp = '%s/%s' % (category, pkg_name)
+    return (ebuild_path, cpv, cp)
+
+  #
+  # _FindUpstreamCPV testing
+  #
+
+  def _TestFindUpstreamCPV(self, pkg_arg, ebuild_expect, unstable_ok=False):
+    """Test Upgrader._FindUpstreamCPV
+
+    This points _FindUpstreamCPV at the ResolverPlayground as if it is
+    the upstream tree.
+    """
+
+    self._SetUpPlayground()
+    eroot = self._GetPlaygroundRoot()
+    mocked_upgrader = self._MockUpgrader(_curr_board=None,
+                                         _upstream=eroot)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script.  Compute the real envvars first so the mocked
+    # _GenPortageEnvvars call can return them.
+    envvars = cpu.Upgrader._GenPortageEnvvars(mocked_upgrader,
+                                              mocked_upgrader._curr_arch,
+                                              unstable_ok,
+                                              portdir=eroot,
+                                              portage_configroot=eroot)
+    portage_configroot = mocked_upgrader._emptydir
+    mocked_upgrader._GenPortageEnvvars(mocked_upgrader._curr_arch,
+                                       unstable_ok,
+                                       portdir=mocked_upgrader._upstream,
+                                       portage_configroot=portage_configroot,
+                                      ).AndReturn(envvars)
+
+    # _SplitEBuildPath is only reached when an ebuild is expected.
+    if ebuild_expect:
+      ebuild_path = eroot + ebuild_expect
+      split_path = cpu.Upgrader._SplitEBuildPath(mocked_upgrader, ebuild_path)
+      mocked_upgrader._SplitEBuildPath(ebuild_path).AndReturn(split_path)
+    self.mox.ReplayAll()
+
+    # Verify
+    result = cpu.Upgrader._FindUpstreamCPV(mocked_upgrader, pkg_arg,
+                                           unstable_ok)
+    self.mox.VerifyAll()
+    self.assertTrue(bool(ebuild_expect) == bool(result))
+
+    return result
+
+  def testFindUpstreamA2(self):
+    (ebuild, cpv, cp) = self._GenerateTestInput(category='dev-libs',
+                                                pkg_name='A',
+                                                ver_rev='2')
+    result = self._TestFindUpstreamCPV(cp, ebuild)
+    self.assertEquals(result, cpv)
+
+  def testFindUpstreamAAA(self):
+    # ver_rev=None: dev-apps/AAA should not be found upstream.
+    (ebuild, cpv, cp) = self._GenerateTestInput(category='dev-apps',
+                                                pkg_name='AAA',
+                                                ver_rev=None)
+    result = self._TestFindUpstreamCPV(cp, ebuild)
+    self.assertEquals(result, cpv)
+
+  def testFindUpstreamF(self):
+    (ebuild, cpv, cp) = self._GenerateTestInput(category='dev-libs',
+                                                pkg_name='F',
+                                                ver_rev='2')
+    result = self._TestFindUpstreamCPV(cp, ebuild)
+    self.assertEquals(result, cpv)
+
+  def testFindUpstreamFlimflam(self):
+    """Should find 0.0.1-r228 because more recent flimflam unstable."""
+    (ebuild, cpv, cp) = self._GenerateTestInput(category='chromeos-base',
+                                                pkg_name='flimflam',
+                                                ver_rev='0.0.1-r228')
+    result = self._TestFindUpstreamCPV(cp, ebuild)
+    self.assertEquals(result, cpv)
+
+  def testFindUpstreamFlimflamUnstable(self):
+    """Should find 0.0.2-r123 because of unstable_ok."""
+    (ebuild, cpv, cp) = self._GenerateTestInput(category='chromeos-base',
+                                                pkg_name='flimflam',
+                                                ver_rev='0.0.2-r123')
+    result = self._TestFindUpstreamCPV(cp, ebuild, unstable_ok=True)
+    self.assertEquals(result, cpv)
+
+  #
+  # _FindCurrentCPV testing
+  #
+
+  def _TestFindCurrentCPV(self, pkg_arg, ebuild_expect):
+    """Test Upgrader._FindCurrentCPV
+
+    This test points Upgrader._FindCurrentCPV to the ResolverPlayground
+    tree as if it is the local source.
+    """
+
+    mocked_upgrader = self._MockUpgrader(_curr_board=None)
+    self._SetUpPlayground()
+    eroot = self._GetPlaygroundRoot()
+
+    # Add test-specific mocks/stubs
+
+    # Replay script.  Compute the real envvars first so the mocked
+    # _GenPortageEnvvars call can return them.
+    envvars = cpu.Upgrader._GenPortageEnvvars(mocked_upgrader,
+                                              mocked_upgrader._curr_arch,
+                                              unstable_ok=False)
+    mocked_upgrader._GenPortageEnvvars(mocked_upgrader._curr_arch,
+                                       unstable_ok=False).AndReturn(envvars)
+    mocked_upgrader._GetBoardCmd('equery').AndReturn('equery')
+
+    # _SplitEBuildPath is only reached when an ebuild is expected.
+    if ebuild_expect:
+      ebuild_path = eroot + ebuild_expect
+      split_path = cpu.Upgrader._SplitEBuildPath(mocked_upgrader, ebuild_path)
+      mocked_upgrader._SplitEBuildPath(ebuild_path).AndReturn(split_path)
+
+    self.mox.ReplayAll()
+
+    # Verify
+    result = cpu.Upgrader._FindCurrentCPV(mocked_upgrader, pkg_arg)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testFindCurrentA(self):
+    """Should find dev-libs/A-2."""
+    (ebuild, cpv, cp) = self._GenerateTestInput(category='dev-libs',
+                                                pkg_name='A',
+                                                ver_rev='2')
+    result = self._TestFindCurrentCPV(cp, ebuild)
+    self.assertEquals(result, cpv)
+
+  def testFindCurrentAAA(self):
+    """Should find None, because dev-libs/AAA does not exist in tree."""
+    (ebuild, cpv, cp) = self._GenerateTestInput(category='dev-libs',
+                                                pkg_name='AAA',
+                                                ver_rev=None)
+    result = self._TestFindCurrentCPV(cp, ebuild)
+    self.assertEquals(result, cpv)
+
+  def testFindCurrentF(self):
+    """Should find dev-libs/F-2."""
+    (ebuild, cpv, cp) = self._GenerateTestInput(category='dev-libs',
+                                                pkg_name='F',
+                                                ver_rev='2')
+    result = self._TestFindCurrentCPV(cp, ebuild)
+    self.assertEquals(result, cpv)
+
+  def testFindCurrentFlimflam(self):
+    """Should find 0.0.1-r228 because more recent flimflam unstable."""
+    (ebuild, cpv, cp) = self._GenerateTestInput(category='chromeos-base',
+                                                pkg_name='flimflam',
+                                                ver_rev='0.0.1-r228')
+    result = self._TestFindCurrentCPV(cp, ebuild)
+    self.assertEquals(result, cpv)
+
+class RunBoardTest(CpuTestBase):
+  """Test Upgrader.RunBoard,PrepareToRun,RunCompleted."""
+
+  def testRunCompletedSpecified(self):
+    cmdargs = ['--upstream=/some/dir']
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _emptydir='empty-dir',
+                                         _curr_board=None)
+
+    # Add test-specific mocks/stubs
+    self.mox.StubOutWithMock(osutils, 'RmDir')
+
+    # Replay script
+    osutils.RmDir('empty-dir', ignore_missing=True)
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      cpu.Upgrader.RunCompleted(mocked_upgrader)
+    self.mox.VerifyAll()
+
+  def testRunCompletedRemoveCache(self):
+    cmdargs = ['--no-upstream-cache']
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _emptydir='empty-dir',
+                                         _curr_board=None)
+
+    # Add test-specific mocks/stubs
+    self.mox.StubOutWithMock(osutils, 'RmDir')
+    self.mox.StubOutWithMock(osutils, 'SafeUnlink')
+
+    # Replay script
+    osutils.RmDir(mocked_upgrader._upstream, ignore_missing=True)
+    osutils.SafeUnlink('%s-README' % mocked_upgrader._upstream)
+    osutils.RmDir('empty-dir', ignore_missing=True)
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      cpu.Upgrader.RunCompleted(mocked_upgrader)
+    self.mox.VerifyAll()
+
+  def testRunCompletedKeepCache(self):
+    cmdargs = []
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _emptydir='empty-dir',
+                                         _curr_board=None)
+
+    # Add test-specific mocks/stubs
+    self.mox.StubOutWithMock(osutils, 'RmDir')
+
+    # Replay script
+    osutils.RmDir('empty-dir', ignore_missing=True)
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      cpu.Upgrader.RunCompleted(mocked_upgrader)
+    self.mox.VerifyAll()
+
+  def testPrepareToRunUpstreamRepoExists(self):
+    cmdargs = []
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _curr_board=None)
+
+    # Add test-specific mocks/stubs
+    self.mox.StubOutWithMock(os.path, 'exists')
+    self.mox.StubOutWithMock(osutils, 'RmDir')
+
+    # Replay script
+    os.path.exists('/tmp/portage/.git/shallow').AndReturn(False)
+    osutils.RmDir('/tmp/portage', ignore_missing=True)
+    os.path.exists('/tmp/portage').AndReturn(True)
+    mocked_upgrader._RunGit(
+        '/tmp/portage', ['remote', 'set-url', 'origin',
+                         cpu.Upgrader.PORTAGE_GIT_URL])
+    mocked_upgrader._RunGit(
+        '/tmp/portage', ['remote', 'update'])
+    mocked_upgrader._RunGit(
+        '/tmp/portage', ['checkout', '-f', 'origin/gentoo'],
+        combine_stdout_stderr=True, redirect_stdout=True)
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      cpu.Upgrader.PrepareToRun(mocked_upgrader)
+    self.mox.VerifyAll()
+
+  def testPrepareToRunUpstreamRepoNew(self):
+    cmdargs = []
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _upstream=self.tempdir,
+                                         _curr_board=None)
+
+    # Add test-specific mocks/stubs
+    self.mox.StubOutWithMock(os.path, 'dirname')
+    self.mox.StubOutWithMock(os.path, 'basename')
+    self.mox.StubOutWithMock(tempfile, 'mkdtemp')
+
+    # Replay script
+    tempfile.mkdtemp()
+    root = os.path.dirname(mocked_upgrader._upstream).AndReturn('root')
+    name = os.path.basename(mocked_upgrader._upstream).AndReturn('name')
+    os.path.basename('origin/gentoo').AndReturn('gentoo')
+    mocked_upgrader._RunGit(root,
+                            ['clone', '--branch', 'gentoo', '--depth', '1',
+                             cpu.Upgrader.PORTAGE_GIT_URL, name])
+    self.mox.ReplayAll()
+
+    # Verify
+    try:
+      with self.OutputCapturer():
+        cpu.Upgrader.PrepareToRun(mocked_upgrader)
+      self.mox.VerifyAll()
+    finally:
+      self.mox.UnsetStubs()
+
+    readme_path = self.tempdir + '-README'
+    self.assertTrue(os.path.exists(readme_path))
+    os.remove(readme_path)
+
+  def _TestRunBoard(self, pinfolist, upgrade=False, staged_changes=False):
+    """Test Upgrader.RunBoard."""
+
+    targetlist = [pinfo.user_arg for pinfo in pinfolist]
+    upstream_only_pinfolist = [pinfo for pinfo in pinfolist if not pinfo.cpv]
+
+    cmdargs = targetlist
+    if upgrade:
+      cmdargs = ['--upgrade'] + cmdargs
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _curr_board=None)
+    board = 'runboard_testboard'
+
+    # Add test-specific mocks/stubs
+    self.mox.StubOutWithMock(cpu.Upgrader, '_FindBoardArch')
+
+    # Replay script
+    mocked_upgrader._SaveStatusOnStableRepo()
+    mocked_upgrader._LoadStableRepoCategories()
+    cpu.Upgrader._FindBoardArch(board).AndReturn('x86')
+    upgrade_mode = cpu.Upgrader._IsInUpgradeMode(mocked_upgrader)
+    mocked_upgrader._IsInUpgradeMode().AndReturn(upgrade_mode)
+    mocked_upgrader._AnyChangesStaged().AndReturn(staged_changes)
+    if staged_changes:
+      mocked_upgrader._StashChanges()
+
+    mocked_upgrader._ResolveAndVerifyArgs(targetlist,
+                                          upgrade_mode).AndReturn(pinfolist)
+    if upgrade:
+      mocked_upgrader._FinalizeUpstreamPInfolist(pinfolist).AndReturn([])
+    else:
+      mocked_upgrader._GetCurrentVersions(pinfolist).AndReturn(pinfolist)
+      mocked_upgrader._FinalizeLocalPInfolist(pinfolist).AndReturn([])
+
+      if upgrade_mode:
+        mocked_upgrader._FinalizeUpstreamPInfolist(
+            upstream_only_pinfolist).AndReturn([])
+
+    mocked_upgrader._UnstashAnyChanges()
+    mocked_upgrader._UpgradePackages([])
+
+    mocked_upgrader._DropAnyStashedChanges()
+
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      cpu.Upgrader.RunBoard(mocked_upgrader, board)
+    self.mox.VerifyAll()
+
+  def testRunBoard1(self):
+    target_pinfolist = [cpu.PInfo(user_arg='dev-libs/A',
+                                  cpv='dev-libs/A-1',
+                                  upstream_cpv='dev-libs/A-2')]
+    return self._TestRunBoard(target_pinfolist)
+
+  def testRunBoard2(self):
+    target_pinfolist = [cpu.PInfo(user_arg='dev-libs/A',
+                                  cpv='dev-libs/A-1',
+                                  upstream_cpv='dev-libs/A-2')]
+    return self._TestRunBoard(target_pinfolist, upgrade=True)
+
+  def testRunBoard3(self):
+    target_pinfolist = [cpu.PInfo(user_arg='dev-libs/A',
+                                  cpv='dev-libs/A-1',
+                                  upstream_cpv='dev-libs/A-2')]
+    return self._TestRunBoard(target_pinfolist, upgrade=True,
+                              staged_changes=True)
+
+  def testRunBoardUpstreamOnlyStatusMode(self):
+    """Status mode with package that is only upstream should error."""
+
+    pinfolist = [cpu.PInfo(user_arg='dev-libs/M',
+                           cpv=None,
+                           upstream_cpv='dev-libs/M-2'),]
+
+    targetlist = [pinfo.user_arg for pinfo in pinfolist]
+
+    mocked_upgrader = self._MockUpgrader(cmdargs=['dev-libs/M'],
+                                         _curr_board=None)
+    board = 'runboard_testboard'
+
+    # Add test-specific mocks/stubs
+    self.mox.StubOutWithMock(cpu.Upgrader, '_FindBoardArch')
+
+    # Replay script
+    mocked_upgrader._SaveStatusOnStableRepo()
+    mocked_upgrader._LoadStableRepoCategories()
+    cpu.Upgrader._FindBoardArch(board).AndReturn('x86')
+    upgrade_mode = cpu.Upgrader._IsInUpgradeMode(mocked_upgrader)
+    mocked_upgrader._IsInUpgradeMode().AndReturn(upgrade_mode)
+    mocked_upgrader._AnyChangesStaged().AndReturn(False)
+
+    mocked_upgrader._ResolveAndVerifyArgs(targetlist,
+                                          upgrade_mode).AndReturn(pinfolist)
+    mocked_upgrader._DropAnyStashedChanges()
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      self.assertRaises(RuntimeError,
+                        cpu.Upgrader.RunBoard,
+                        mocked_upgrader, board)
+    self.mox.VerifyAll()
+
+
+class GiveEmergeResultsTest(CpuTestBase):
+  """Test Upgrader._GiveEmergeResults"""
+
+  def _TestGiveEmergeResultsOK(self, pinfolist, ok, error=None):
+    """Run _GiveEmergeResults with mocked _AreEmergeable returning |ok|.
+
+    Args:
+      pinfolist: PInfo objects passed to _GiveEmergeResults.
+      ok: Emergeable flag returned by the mocked _AreEmergeable.
+      error: Expected exception class, or None if no raise is expected.
+
+    Returns:
+      Result of _GiveEmergeResults, or None when |error| was expected.
+    """
+    cmdargs = []
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    mocked_upgrader._AreEmergeable(mox.IgnoreArg(),
+                                  ).AndReturn((ok, None, None))
+    self.mox.ReplayAll()
+
+    # Verify
+    result = None
+    with self.OutputCapturer():
+      if error:
+        self.assertRaises(error, cpu.Upgrader._GiveEmergeResults,
+                          mocked_upgrader, pinfolist)
+      else:
+        result = cpu.Upgrader._GiveEmergeResults(mocked_upgrader, pinfolist)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testGiveEmergeResultsUnmaskedOK(self):
+    pinfolist = [cpu.PInfo(upgraded_cpv='abc/def-4', upgraded_unmasked=True),
+                 cpu.PInfo(upgraded_cpv='bcd/efg-8', upgraded_unmasked=True)]
+    self._TestGiveEmergeResultsOK(pinfolist, True)
+
+  def testGiveEmergeResultsUnmaskedNotOK(self):
+    pinfolist = [cpu.PInfo(upgraded_cpv='abc/def-4', upgraded_unmasked=True),
+                 cpu.PInfo(upgraded_cpv='bcd/efg-8', upgraded_unmasked=True)]
+    self._TestGiveEmergeResultsOK(pinfolist, False, error=RuntimeError)
+
+  def _TestGiveEmergeResultsMasked(self, pinfolist, ok, masked_cpvs,
+                                   error=None):
+    """Like _TestGiveEmergeResultsOK but with masked upgraded packages.
+
+    Args:
+      pinfolist: PInfo objects passed to _GiveEmergeResults.
+      ok: Emergeable flag returned by the mocked _AreEmergeable.
+      masked_cpvs: cpvs expected to get a _GiveMaskedError when not |ok|.
+      error: Expected exception class, or None if no raise is expected.
+    """
+    cmdargs = []
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    emergeable_tuple = (ok, 'some-cmd', 'some-output')
+    mocked_upgrader._AreEmergeable(mox.IgnoreArg(),
+                                  ).AndReturn(emergeable_tuple)
+    if not ok:
+      for cpv in masked_cpvs:
+        mocked_upgrader._GiveMaskedError(cpv, 'some-output').InAnyOrder()
+    self.mox.ReplayAll()
+
+    # Verify
+    result = None
+    with self.OutputCapturer():
+      if error:
+        self.assertRaises(error, cpu.Upgrader._GiveEmergeResults,
+                          mocked_upgrader, pinfolist)
+      else:
+        result = cpu.Upgrader._GiveEmergeResults(mocked_upgrader, pinfolist)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testGiveEmergeResultsMaskedOK(self):
+    # Even when emergeable, masked packages cause a RuntimeError.
+    pinfolist = [cpu.PInfo(upgraded_cpv='abc/def-4', upgraded_unmasked=False),
+                 cpu.PInfo(upgraded_cpv='bcd/efg-8', upgraded_unmasked=False)]
+    masked_cpvs = ['abc/def-4', 'bcd/efg-8']
+    self._TestGiveEmergeResultsMasked(pinfolist, True, masked_cpvs,
+                                      error=RuntimeError)
+
+  def testGiveEmergeResultsMaskedNotOK(self):
+    pinfolist = [cpu.PInfo(upgraded_cpv='abc/def-4', upgraded_unmasked=False),
+                 cpu.PInfo(upgraded_cpv='bcd/efg-8', upgraded_unmasked=False)]
+    masked_cpvs = ['abc/def-4', 'bcd/efg-8']
+    self._TestGiveEmergeResultsMasked(pinfolist, False, masked_cpvs,
+                                      error=RuntimeError)
+
+class CheckStagedUpgradesTest(CpuTestBase):
+  """Test Upgrader._CheckStagedUpgrades"""
+
+  def testCheckStagedUpgradesTwoStaged(self):
+    """Two staged ebuilds, both covered by pinfolist: should not raise."""
+    cmdargs = []
+
+    ebuild1 = 'a/b/foo/bar/bar-1.ebuild'
+    ebuild2 = 'x/y/bar/foo/foo-3.ebuild'
+    # Non-ebuild entries (e.g. 'garbage') in the repo status are ignored.
+    repo_status = {ebuild1: 'A',
+                   'a/b/foo/garbage': 'A',
+                   ebuild2: 'A',}
+
+    pinfolist = [cpu.PInfo(package='foo/bar'),
+                 cpu.PInfo(package='bar/foo')]
+
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _stable_repo_status=repo_status)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    for ebuild in [ebuild1, ebuild2]:
+      split = cpu.Upgrader._SplitEBuildPath(mocked_upgrader, ebuild)
+      mocked_upgrader._SplitEBuildPath(ebuild).InAnyOrder().AndReturn(split)
+
+    self.mox.ReplayAll()
+
+    # Verify
+    cpu.Upgrader._CheckStagedUpgrades(mocked_upgrader, pinfolist)
+    self.mox.VerifyAll()
+
+  def testCheckStagedUpgradesTwoStagedOneUnexpected(self):
+    """A staged ebuild not represented in pinfolist: should raise."""
+    cmdargs = []
+
+    ebuild1 = 'a/b/foo/bar/bar-1.ebuild'
+    ebuild2 = 'x/y/bar/foo/foo-3.ebuild'
+    repo_status = {ebuild1: 'A',
+                   'a/b/foo/garbage': 'A',
+                   ebuild2: 'A',}
+
+    # Without foo/bar in the pinfolist it should complain about that
+    # package having staged changes.
+    pinfolist = [cpu.PInfo(package='bar/foo')]
+
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _stable_repo_status=repo_status)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    for ebuild in [ebuild1, ebuild2]:
+      split = cpu.Upgrader._SplitEBuildPath(mocked_upgrader, ebuild)
+      mocked_upgrader._SplitEBuildPath(ebuild).InAnyOrder().AndReturn(split)
+
+    self.mox.ReplayAll()
+
+    # Verify
+    self.assertRaises(RuntimeError, cpu.Upgrader._CheckStagedUpgrades,
+                      mocked_upgrader, pinfolist)
+    self.mox.VerifyAll()
+
+  def testCheckStagedUpgradesNoneStaged(self):
+    """No staged changes at all: should not raise."""
+    cmdargs = []
+
+    pinfolist = [cpu.PInfo(package='foo/bar-1'),
+                 cpu.PInfo(package='bar/foo-3')]
+
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _stable_repo_status=None)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script (no mocked calls are expected).
+    self.mox.ReplayAll()
+
+    # Verify
+    cpu.Upgrader._CheckStagedUpgrades(mocked_upgrader, pinfolist)
+    self.mox.VerifyAll()
+
+class UpgradePackagesTest(CpuTestBase):
+  """Test Upgrader._UpgradePackages"""
+
+  def _TestUpgradePackages(self, pinfolist, upgrade):
+    """Test Upgrader._UpgradePackages
+
+    Args:
+      pinfolist: PInfo objects to process; a pinfo with a set
+        upgraded_cpv represents a package that gets upgraded this run.
+      upgrade: If True, run with --upgrade on the command line.
+    """
+    cmdargs = []
+    if upgrade:
+      cmdargs.append('--upgrade')
+    table = utable.UpgradeTable('some-arch')
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _curr_table=table)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    upgrades_this_run = False
+    for pinfo in pinfolist:
+      pkg_result = bool(pinfo.upgraded_cpv)
+      mocked_upgrader._UpgradePackage(pinfo).InAnyOrder(
+          'up').AndReturn(pkg_result)
+      if pkg_result:
+        upgrades_this_run = True
+
+    # Verification/report pass happens for every pinfo, upgraded or not.
+    for pinfo in pinfolist:
+      if pinfo.upgraded_cpv:
+        mocked_upgrader._VerifyPackageUpgrade(pinfo).InAnyOrder('ver')
+      mocked_upgrader._PackageReport(pinfo).InAnyOrder('ver')
+
+    if upgrades_this_run:
+      mocked_upgrader._GiveEmergeResults(pinfolist)
+
+    upgrade_mode = cpu.Upgrader._IsInUpgradeMode(mocked_upgrader)
+    mocked_upgrader._IsInUpgradeMode().AndReturn(upgrade_mode)
+    if upgrade_mode:
+      mocked_upgrader._CheckStagedUpgrades(pinfolist)
+    self.mox.ReplayAll()
+
+    # Verify
+    cpu.Upgrader._UpgradePackages(mocked_upgrader, pinfolist)
+    self.mox.VerifyAll()
+
+  def testUpgradePackagesUpgradeModeWithUpgrades(self):
+    pinfolist = [cpu.PInfo(upgraded_cpv='abc/def-4'),
+                 cpu.PInfo(upgraded_cpv='bcd/efg-8'),
+                 cpu.PInfo(upgraded_cpv=None),
+                 cpu.PInfo(upgraded_cpv=None)]
+    self._TestUpgradePackages(pinfolist, True)
+
+  def testUpgradePackagesUpgradeModeNoUpgrades(self):
+    pinfolist = [cpu.PInfo(upgraded_cpv=None),
+                 cpu.PInfo(upgraded_cpv=None)]
+    self._TestUpgradePackages(pinfolist, True)
+
+  def testUpgradePackagesStatusModeNoUpgrades(self):
+    pinfolist = [cpu.PInfo(upgraded_cpv=None),
+                 cpu.PInfo(upgraded_cpv=None)]
+    self._TestUpgradePackages(pinfolist, False)
+
+class CategoriesRoundtripTest(cros_test_lib.MoxTempDirTestOutputCase):
+  """Tests for full "round trip" runs."""
+
+  def _TestCategoriesRoundtrip(self, categories):
+    stable_repo = self.tempdir
+    cat_file = cpu.Upgrader.CATEGORIES_FILE
+    profiles_dir = os.path.join(stable_repo, os.path.dirname(cat_file))
+
+    self.mox.StubOutWithMock(cpu.Upgrader, '_RunGit')
+
+    # Prepare replay script.
+    cpu.Upgrader._RunGit(stable_repo, ['add', cat_file])
+    self.mox.ReplayAll()
+
+    options = cros_test_lib.EasyAttr(srcroot='foobar', upstream=None,
+                                     packages='')
+    upgrader = cpu.Upgrader(options=options)
+    upgrader._stable_repo = stable_repo
+    os.makedirs(profiles_dir)
+
+    # Verification phase.  Write then load categories.
+    upgrader._stable_repo_categories = set(categories)
+    upgrader._WriteStableRepoCategories()
+    upgrader._stable_repo_categories = None
+    upgrader._LoadStableRepoCategories()
+    self.mox.VerifyAll()
+    self.assertEquals(sorted(categories),
+                      sorted(upgrader._stable_repo_categories))
+
+  def test1(self):
+    categories = ['alpha-omega', 'omega-beta', 'beta-chi']
+    self._TestCategoriesRoundtrip(categories)
+
+  def test2(self):
+    categories = []
+    self._TestCategoriesRoundtrip(categories)
+
+  def test3(self):
+    categories = ['virtual', 'happy-days', 'virtually-there']
+    self._TestCategoriesRoundtrip(categories)
+
+
+class UpgradePackageTest(CpuTestBase):
+  """Test Upgrader._UpgradePackage"""
+
+  def _TestUpgradePackage(self, pinfo, upstream_cpv, upstream_cmp,
+                          stable_up, latest_up,
+                          upgrade_requested, upgrade_staged,
+                          unstable_ok, force):
+    """Run Upgrader._UpgradePackage against a scripted mox replay.
+
+    Args:
+      pinfo: cpu.PInfo describing the package under test.
+      upstream_cpv: cpv that the upgrade should resolve to, or None to
+        simulate no upstream version being found.
+      upstream_cmp: comparison of local vs upstream version; >0 means the
+        local package is outdated, 0 means it is current.
+      stable_up: cpv returned by the mocked stable upstream lookup.
+      latest_up: cpv returned by the mocked unstable-ok upstream lookup.
+      upgrade_requested: whether an upgrade of this package was requested.
+      upgrade_staged: whether the upgrade is already staged in the repo.
+      unstable_ok: if True, pass --unstable-ok on the command line.
+      force: if True, pass --force on the command line.
+
+    Returns:
+      Whatever Upgrader._UpgradePackage returns (truthy when an upgrade
+      happened or was already staged).
+    """
+    cmdargs = []
+    if unstable_ok:
+      cmdargs.append('--unstable-ok')
+    if force:
+      cmdargs.append('--force')
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs)
+
+    # Add test-specific mocks/stubs
+    self.mox.StubOutWithMock(cros_build_lib, 'RunCommand')
+
+    # Replay script
+    # Both upstream lookups (stable, then unstable-ok) always happen.
+    mocked_upgrader._FindUpstreamCPV(pinfo.package).AndReturn(stable_up)
+    mocked_upgrader._FindUpstreamCPV(pinfo.package,
+                                     unstable_ok=True).AndReturn(latest_up)
+    if upstream_cpv:
+      mocked_upgrader._PkgUpgradeRequested(pinfo).AndReturn(upgrade_requested)
+      if upgrade_requested:
+        mocked_upgrader._PkgUpgradeStaged(
+            upstream_cpv).AndReturn(upgrade_staged)
+        # Copy the upstream package only when nothing is staged yet and the
+        # package is outdated (or current but --force was given).
+        if (not upgrade_staged and
+            (upstream_cmp > 0 or (upstream_cmp == 0 and force))):
+          mocked_upgrader._CopyUpstreamPackage(
+              upstream_cpv).AndReturn(upstream_cpv)
+          # Copying the upstream package effectively stages the upgrade.
+          upgrade_staged = True
+
+
+        # Staged upgrades get mask bits set, the ebuild stabilized,
+        # git-added, categories updated, and the md5 cache regenerated
+        # via egencache.
+        if upgrade_staged:
+          mocked_upgrader._SetUpgradedMaskBits(pinfo)
+          ebuild_path = cpu.Upgrader._GetEbuildPathFromCpv(upstream_cpv)
+          ebuild_path = os.path.join(mocked_upgrader._stable_repo,
+                                     ebuild_path)
+          mocked_upgrader._StabilizeEbuild(ebuild_path)
+          mocked_upgrader._RunGit(mocked_upgrader._stable_repo,
+                                  ['add', pinfo.package])
+          mocked_upgrader._UpdateCategories(pinfo)
+          cache_files = 'metadata/md5-cache/%s-[0-9]*' % pinfo.package
+          mocked_upgrader._RunGit(mocked_upgrader._stable_repo,
+                                  ['rm', '--ignore-unmatch', '-q', '-f',
+                                   cache_files])
+          cmd = ['egencache', '--update', '--repo=portage-stable',
+                 pinfo.package]
+          run_result = RunCommandResult(returncode=0, output=None)
+          cros_build_lib.RunCommand(cmd, print_cmd=False,
+                                    redirect_stdout=True,
+                                    combine_stdout_stderr=True).AndReturn(
+                                        run_result)
+          mocked_upgrader._RunGit(mocked_upgrader._stable_repo,
+                                  ['add', cache_files])
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      result = cpu.Upgrader._UpgradePackage(mocked_upgrader, pinfo)
+    self.mox.VerifyAll()
+
+    # Cross-check that pinfo was populated consistently with the scenario.
+    if upstream_cpv:
+      self.assertEquals(upstream_cpv, pinfo.upstream_cpv)
+
+      if upgrade_requested and (upstream_cpv != pinfo.cpv or force):
+        self.assertEquals(upstream_cpv, pinfo.upgraded_cpv)
+      else:
+        self.assertTrue(pinfo.upgraded_cpv is None)
+    else:
+      self.assertTrue(pinfo.upstream_cpv is None)
+      self.assertTrue(pinfo.upgraded_cpv is None)
+    self.assertEquals(stable_up, pinfo.stable_upstream_cpv)
+    self.assertEquals(latest_up, pinfo.latest_upstream_cpv)
+
+    return result
+
+  # Dimensions to vary:
+  # 1) Upgrade for this package requested or not
+  # 2) Upgrade can be stable or not
+  # 3) Specific version to upgrade to specified
+  # 4) Upgrade already staged or not
+  # 5) Upgrade needed or not (current)
+
+  def testUpgradePackageOutdatedRequestedStable(self):
+    """Outdated package, upgrade requested, stable upstream chosen."""
+    pinfo = cpu.PInfo(cpv='foo/bar-2',
+                      package='foo/bar',
+                      upstream_cpv=None)
+    result = self._TestUpgradePackage(pinfo,
+                                      upstream_cpv='foo/bar-3',
+                                      upstream_cmp=1, # outdated
+                                      stable_up='foo/bar-3',
+                                      latest_up='foo/bar-5',
+                                      upgrade_requested=True,
+                                      upgrade_staged=False,
+                                      unstable_ok=False,
+                                      force=False)
+    self.assertTrue(result)
+
+  def testUpgradePackageOutdatedRequestedUnstable(self):
+    """Outdated package, upgrade requested, --unstable-ok picks latest."""
+    pinfo = cpu.PInfo(cpv='foo/bar-2',
+                      package='foo/bar',
+                      upstream_cpv=None)
+    result = self._TestUpgradePackage(pinfo,
+                                      upstream_cpv='foo/bar-5',
+                                      upstream_cmp=1, # outdated
+                                      stable_up='foo/bar-3',
+                                      latest_up='foo/bar-5',
+                                      upgrade_requested=True,
+                                      upgrade_staged=False,
+                                      unstable_ok=True,
+                                      force=False)
+    self.assertTrue(result)
+
+  def testUpgradePackageOutdatedRequestedStableSpecified(self):
+    """Outdated package with a specific target version requested."""
+    pinfo = cpu.PInfo(cpv='foo/bar-2',
+                      package='foo/bar',
+                      upstream_cpv='foo/bar-4')
+    result = self._TestUpgradePackage(pinfo,
+                                      upstream_cpv='foo/bar-4',
+                                      upstream_cmp=1, # outdated
+                                      stable_up='foo/bar-3',
+                                      latest_up='foo/bar-5',
+                                      upgrade_requested=True,
+                                      upgrade_staged=False,
+                                      unstable_ok=False, # not important
+                                      force=False)
+    self.assertTrue(result)
+
+  def testUpgradePackageCurrentRequestedStable(self):
+    """Package already current: upgrade requested but nothing to do."""
+    pinfo = cpu.PInfo(cpv='foo/bar-3',
+                      package='foo/bar',
+                      upstream_cpv=None)
+    result = self._TestUpgradePackage(pinfo,
+                                      upstream_cpv='foo/bar-3',
+                                      upstream_cmp=0, # current
+                                      stable_up='foo/bar-3',
+                                      latest_up='foo/bar-5',
+                                      upgrade_requested=True,
+                                      upgrade_staged=False,
+                                      unstable_ok=False,
+                                      force=False)
+    self.assertFalse(result)
+
+  def testUpgradePackageCurrentRequestedStableForce(self):
+    """Package current but --force still re-copies/stages the upgrade."""
+    pinfo = cpu.PInfo(cpv='foo/bar-3',
+                      package='foo/bar',
+                      upstream_cpv='foo/bar-3')
+    result = self._TestUpgradePackage(pinfo,
+                                      upstream_cpv='foo/bar-3',
+                                      upstream_cmp=0, # current
+                                      stable_up='foo/bar-3',
+                                      latest_up='foo/bar-5',
+                                      upgrade_requested=True,
+                                      upgrade_staged=False,
+                                      unstable_ok=False,
+                                      force=True)
+    self.assertTrue(result)
+
+  def testUpgradePackageOutdatedStable(self):
+    """Outdated package, but no upgrade requested: nothing happens."""
+    pinfo = cpu.PInfo(cpv='foo/bar-2',
+                      package='foo/bar',
+                      upstream_cpv=None)
+    result = self._TestUpgradePackage(pinfo,
+                                      upstream_cpv='foo/bar-3',
+                                      upstream_cmp=1, # outdated
+                                      stable_up='foo/bar-3',
+                                      latest_up='foo/bar-5',
+                                      upgrade_requested=False,
+                                      upgrade_staged=False,
+                                      unstable_ok=False,
+                                      force=False)
+    self.assertFalse(result)
+
+  def testUpgradePackageOutdatedRequestedStableStaged(self):
+    """Outdated package whose upgrade is already staged in the repo."""
+    pinfo = cpu.PInfo(cpv='foo/bar-2',
+                      package='foo/bar',
+                      upstream_cpv=None)
+    result = self._TestUpgradePackage(pinfo,
+                                      upstream_cpv='foo/bar-3',
+                                      upstream_cmp=1, # outdated
+                                      stable_up='foo/bar-3',
+                                      latest_up='foo/bar-5',
+                                      upgrade_requested=True,
+                                      upgrade_staged=True,
+                                      unstable_ok=False,
+                                      force=False)
+    self.assertTrue(result)
+
+  def testUpgradePackageOutdatedRequestedUnstableStaged(self):
+    """Outdated package, unstable upgrade already staged in the repo."""
+    pinfo = cpu.PInfo(cpv='foo/bar-2',
+                      package='foo/bar',
+                      upstream_cpv='foo/bar-5')
+    result = self._TestUpgradePackage(pinfo,
+                                      upstream_cpv='foo/bar-5',
+                                      upstream_cmp=1, # outdated
+                                      stable_up='foo/bar-3',
+                                      latest_up='foo/bar-5',
+                                      upgrade_requested=True,
+                                      upgrade_staged=True,
+                                      unstable_ok=True,
+                                      force=False)
+    self.assertTrue(result)
+
+
+class VerifyPackageTest(CpuTestBase):
+  """Tests for _VerifyPackageUpgrade()."""
+
+  def _TestVerifyPackageUpgrade(self, pinfo):
+    """Verify _VerifyPackageUpgrade delegates to _VerifyEbuildOverlay.
+
+    An upgrade where the local and upstream cpv compare equal
+    (cpv_cmp_upstream == 0) is treated as an overwrite.
+    """
+    cmdargs = []
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs)
+    was_overwrite = pinfo.cpv_cmp_upstream == 0
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    mocked_upgrader._VerifyEbuildOverlay(pinfo.upgraded_cpv,
+                                         'portage-stable',
+                                         was_overwrite)
+    self.mox.ReplayAll()
+
+    # Verify
+    cpu.Upgrader._VerifyPackageUpgrade(mocked_upgrader, pinfo)
+    self.mox.VerifyAll()
+
+  def testVerifyPackageUpgrade(self):
+    """Exercise both the overwrite (0) and non-overwrite (1) cases."""
+    pinfo = cpu.PInfo(upgraded_cpv='foo/bar-3')
+
+    for cpv_cmp_upstream in (0, 1):
+      pinfo.cpv_cmp_upstream = cpv_cmp_upstream
+      self._TestVerifyPackageUpgrade(pinfo)
+
+  def _TestVerifyEbuildOverlay(self, cpv, overlay, ebuild_path, was_overwrite):
+    """Test Upgrader._VerifyEbuildOverlay
+
+    Mocks an 'equery which' run whose output is |ebuild_path|; the method
+    under test is expected to raise RuntimeError when that path does not
+    live under |overlay|.
+    """
+    cmdargs = []
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _curr_arch=DEFAULT_ARCH)
+
+    # Add test-specific mocks/stubs
+    self.mox.StubOutWithMock(cros_build_lib, 'RunCommand')
+
+    # Replay script
+    # Use the real _GenPortageEnvvars result so the mocked call can return
+    # the same envvars the production code would compute.
+    envvars = cpu.Upgrader._GenPortageEnvvars(mocked_upgrader,
+                                              mocked_upgrader._curr_arch,
+                                              unstable_ok=False)
+    mocked_upgrader._GenPortageEnvvars(mocked_upgrader._curr_arch,
+                                       unstable_ok=False).AndReturn(envvars)
+    mocked_upgrader._GetBoardCmd('equery').AndReturn('equery')
+    run_result = RunCommandResult(returncode=0,
+                                  output=ebuild_path)
+    cros_build_lib.RunCommand(['equery', '-C', 'which', '--include-masked',
+                               cpv], error_code_ok=True,
+                              extra_env=envvars, print_cmd=False,
+                              redirect_stdout=True, combine_stdout_stderr=True,
+                             ).AndReturn(run_result)
+    split_ebuild = cpu.Upgrader._SplitEBuildPath(mocked_upgrader, ebuild_path)
+    mocked_upgrader._SplitEBuildPath(ebuild_path).AndReturn(split_ebuild)
+    self.mox.ReplayAll()
+
+    # Verify
+    cpu.Upgrader._VerifyEbuildOverlay(mocked_upgrader, cpv,
+                                      overlay, was_overwrite)
+    self.mox.VerifyAll()
+
+  def testVerifyEbuildOverlayGood(self):
+    """Ebuild path inside the expected overlay: no error."""
+    cpv = 'foo/bar-2'
+    overlay = 'some-overlay'
+    good_path = '/some/path/%s/foo/bar/bar-2.ebuild' % overlay
+
+    self._TestVerifyEbuildOverlay(cpv, overlay, good_path, False)
+
+  def testVerifyEbuildOverlayEvilNonOverwrite(self):
+    """Ebuild path outside the overlay raises, even without overwrite."""
+    cpv = 'foo/bar-2'
+    overlay = 'some-overlay'
+    evil_path = '/some/path/spam/foo/bar/bar-2.ebuild'
+
+    self.assertRaises(RuntimeError,
+                      self._TestVerifyEbuildOverlay,
+                      cpv, overlay, evil_path, False)
+
+  def testVerifyEbuildOverlayEvilOverwrite(self):
+    """Ebuild path outside the overlay raises for overwrite upgrades too."""
+    cpv = 'foo/bar-2'
+    overlay = 'some-overlay'
+    evil_path = '/some/path/spam/foo/bar/bar-2.ebuild'
+
+    self.assertRaises(RuntimeError,
+                      self._TestVerifyEbuildOverlay,
+                      cpv, overlay, evil_path, True)
+
+  def _TestSetUpgradedMaskBits(self, pinfo, output):
+    """Run _SetUpgradedMaskBits with a mocked equery producing |output|.
+
+    |output| follows the equery -F '$mask|$cpv:$slot' format, i.e.
+    '<mask-chars>|<cpv>:<slot>' where 'M' marks a masked package and '~'
+    marks a keyword-unstable one.
+    """
+    cpv = pinfo.upgraded_cpv
+    cmdargs = []
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _curr_arch=DEFAULT_ARCH)
+
+    # Add test-specific mocks/stubs
+    self.mox.StubOutWithMock(cros_build_lib, 'RunCommand')
+
+    # Replay script
+    mocked_upgrader._GenPortageEnvvars(mocked_upgrader._curr_arch,
+                                       unstable_ok=False).AndReturn('envvars')
+    mocked_upgrader._GetBoardCmd('equery').AndReturn('equery')
+    run_result = RunCommandResult(returncode=0,
+                                  output=output)
+    cros_build_lib.RunCommand(['equery', '-qCN', 'list', '-F',
+                               '$mask|$cpv:$slot', '-op', cpv],
+                              error_code_ok=True,
+                              extra_env='envvars', print_cmd=False,
+                              redirect_stdout=True, combine_stdout_stderr=True,
+                             ).AndReturn(run_result)
+    self.mox.ReplayAll()
+
+    # Verify
+    cpu.Upgrader._SetUpgradedMaskBits(mocked_upgrader, pinfo)
+    self.mox.VerifyAll()
+
+  def testGetMaskBitsUnmaskedStable(self):
+    """No mask characters: package is unmasked."""
+    output = '  |foo/bar-2.7.0:0'
+    pinfo = cpu.PInfo(upgraded_cpv='foo/bar-2.7.0')
+    self._TestSetUpgradedMaskBits(pinfo, output)
+    self.assertTrue(pinfo.upgraded_unmasked)
+
+  def testGetMaskBitsUnmaskedUnstable(self):
+    """'~' (keyword-unstable) alone still counts as unmasked."""
+    output = ' ~|foo/bar-2.7.3:0'
+    pinfo = cpu.PInfo(upgraded_cpv='foo/bar-2.7.3')
+    self._TestSetUpgradedMaskBits(pinfo, output)
+    self.assertTrue(pinfo.upgraded_unmasked)
+
+  def testGetMaskBitsMaskedStable(self):
+    """'M' mask character: package is masked."""
+    output = 'M |foo/bar-2.7.4:0'
+    pinfo = cpu.PInfo(upgraded_cpv='foo/bar-2.7.4')
+    self._TestSetUpgradedMaskBits(pinfo, output)
+    self.assertFalse(pinfo.upgraded_unmasked)
+
+  def testGetMaskBitsMaskedUnstable(self):
+    """'M~' (masked and unstable): package is masked."""
+    output = 'M~|foo/bar-2.7.4-r1:0'
+    pinfo = cpu.PInfo(upgraded_cpv='foo/bar-2.7.4-r1')
+    self._TestSetUpgradedMaskBits(pinfo, output)
+    self.assertFalse(pinfo.upgraded_unmasked)
+
+
+class CommitTest(CpuTestBase):
+  """Test various commit-related Upgrader methods"""
+
+  #
+  # _ExtractUpgradedPkgs
+  #
+
+  def _TestExtractUpgradedPkgs(self, upgrade_lines):
+    """Test Upgrader._ExtractUpgradedPkgs
+
+    Returns the (sorted, de-duplicated) package names extracted from
+    'Upgraded <cat>/<pkg> to version ...' lines.
+    """
+    mocked_upgrader = self._MockUpgrader()
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      result = cpu.Upgrader._ExtractUpgradedPkgs(mocked_upgrader, upgrade_lines)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testExtractUpgradedPkgs(self):
+    """Duplicate packages collapse; result is sorted by package name."""
+    upgrade_lines = [
+        'Upgraded abc/efg to version 1.2.3 on amd64, arm, x86',
+        'Upgraded xyz/uvw to version 1.2.3 on amd64',
+        'Upgraded xyz/uvw to version 3.2.1 on arm, x86',
+        'Upgraded mno/pqr to version 12345 on x86',
+    ]
+    result = self._TestExtractUpgradedPkgs(upgrade_lines)
+    self.assertEquals(result, ['efg', 'pqr', 'uvw'])
+
+  #
+  # _AmendCommitMessage
+  #
+
+  def _TestAmendCommitMessage(self, new_upgrade_lines,
+                              old_upgrade_lines, remaining_lines,
+                              git_show):
+    """Test Upgrader._AmendCommitMessage
+
+    Args:
+      new_upgrade_lines: upgrade lines to prepend for the new commit.
+      old_upgrade_lines: upgrade lines expected to be recognized (and kept)
+        from the previous commit message.
+      remaining_lines: other commit-body lines expected to be passed
+        through to _CreateCommitMessage.
+      git_show: canned output of the mocked 'git show' run.
+    """
+    mocked_upgrader = self._MockUpgrader()
+
+    # _CreateCommitMessage should receive new lines followed by the
+    # recognized old ones; verify via a mox.Func matcher.
+    gold_lines = new_upgrade_lines + old_upgrade_lines
+    def all_lines_verifier(lines):
+      return gold_lines == lines
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    git_result = RunCommandResult(returncode=0,
+                                  output=git_show)
+    mocked_upgrader._RunGit(mocked_upgrader._stable_repo,
+                            mox.IgnoreArg(), redirect_stdout=True,
+                           ).AndReturn(git_result)
+    mocked_upgrader._CreateCommitMessage(mox.Func(all_lines_verifier),
+                                         remaining_lines)
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      cpu.Upgrader._AmendCommitMessage(mocked_upgrader,
+                                       new_upgrade_lines)
+    self.mox.VerifyAll()
+
+  def testOldAndNew(self):
+    """New upgrade lines merge ahead of recognized old ones."""
+    new_upgrade_lines = [
+        'Upgraded abc/efg to version 1.2.3 on amd64, arm, x86',
+        'Upgraded mno/pqr to version 4.5-r1 on x86',
+    ]
+    old_upgrade_lines = [
+        'Upgraded xyz/uvw to version 3.2.1 on arm, x86',
+        'Upgraded mno/pqr to version 12345 on x86',
+    ]
+    remaining_lines = [
+        'Extraneous extra comments in commit body.',
+        '',
+        'BUG=chromium-os:12345',
+        'TEST=test everything',
+        'again and again',
+    ]
+    git_show_output = ('\n'.join(old_upgrade_lines) + '\n'
+                       + '\n'
+                       + '\n'.join(remaining_lines))
+    self._TestAmendCommitMessage(new_upgrade_lines, old_upgrade_lines,
+                                 remaining_lines, git_show_output)
+
+  def testOldOnly(self):
+    """Amending with no new lines keeps only the recognized old ones."""
+    old_upgrade_lines = [
+        'Upgraded xyz/uvw to version 3.2.1 on arm, x86',
+        'Upgraded mno/pqr to version 12345 on x86',
+    ]
+    git_show_output = ('\n'.join(old_upgrade_lines))
+    self._TestAmendCommitMessage([], old_upgrade_lines, [], git_show_output)
+
+  def testNewOnly(self):
+    """Empty previous commit message: only the new lines survive."""
+    new_upgrade_lines = [
+        'Upgraded abc/efg to version 1.2.3 on amd64, arm, x86',
+        'Upgraded mno/pqr to version 4.5-r1 on x86',
+    ]
+    git_show_output = ''
+    self._TestAmendCommitMessage(new_upgrade_lines, [], [], git_show_output)
+
+  def testOldEditedAndNew(self):
+    """Hand-edited old lines are not recognized as upgrade lines."""
+    new_upgrade_lines = [
+        'Upgraded abc/efg to version 1.2.3 on amd64, arm, x86',
+        'Upgraded mno/pqr to version 4.5-r1 on x86',
+    ]
+    old_upgrade_lines = [
+        'So I upgraded xyz/uvw to version 3.2.1 on arm, x86',
+        'Then I Upgraded mno/pqr to version 12345 on x86',
+    ]
+    remaining_lines = [
+        'Extraneous extra comments in commit body.',
+        '',
+        'BUG=chromium-os:12345',
+        'TEST=test everything',
+        'again and again',
+    ]
+    git_show_output = ('\n'.join(old_upgrade_lines) + '\n'
+                       + '\n'
+                       + '\n'.join(remaining_lines))
+
+    # In this test, it should not recognize the existing old_upgrade_lines
+    # as a previous commit message from this script.  So it should give a
+    # warning and push those lines to the end (grouped with remaining_lines).
+    remaining_lines = old_upgrade_lines + [''] + remaining_lines
+    self._TestAmendCommitMessage(new_upgrade_lines, [],
+                                 remaining_lines, git_show_output)
+
+  #
+  # _CreateCommitMessage
+  #
+
+  def _TestCreateCommitMessage(self, upgrade_lines):
+    """Test Upgrader._CreateCommitMessage
+
+    Returns the generated commit message after checking it contains one of
+    the expected summary phrases.
+    """
+    mocked_upgrader = self._MockUpgrader()
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    # Route _ExtractUpgradedPkgs through the real implementation so the
+    # summary line reflects the actual package names.
+    upgrade_pkgs = cpu.Upgrader._ExtractUpgradedPkgs(mocked_upgrader,
+                                                     upgrade_lines)
+    mocked_upgrader._ExtractUpgradedPkgs(upgrade_lines).AndReturn(upgrade_pkgs)
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      result = cpu.Upgrader._CreateCommitMessage(mocked_upgrader, upgrade_lines)
+    self.mox.VerifyAll()
+
+    self.assertTrue(': upgraded package' in result or
+                    'Upgraded the following' in result)
+    return result
+
+  def testCreateCommitMessageOnePkg(self):
+    """Single package: summary names the package, singular 'package'."""
+    upgrade_lines = ['Upgraded abc/efg to version 1.2.3 on amd64, arm, x86']
+    result = self._TestCreateCommitMessage(upgrade_lines)
+
+    # Commit message should have:
+    # -- Summary that mentions 'efg' and ends in "package" (singular)
+    # -- Body corresponding to upgrade_lines
+    # -- BUG= line (with space after '=' to invalidate it)
+    # -- TEST= line (with space after '=' to invalidate it)
+    body = r'\n'.join([re.sub(r'\s+', r'\s', line) for line in upgrade_lines])
+    regexp = re.compile(r'''^efg:\supgraded\spackage\sto\supstream\n # Summary
+                            ^\s*\n                            # Blank line
+                            %s\n                              # Body
+                            ^\s*\n                            # Blank line
+                            ^BUG=\s.+\n                       # BUG line
+                            ^TEST=\s                          # TEST line
+                            ''' % body,
+                        re.VERBOSE | re.MULTILINE)
+    self.assertTrue(regexp.search(result))
+
+  def testCreateCommitMessageThreePkgs(self):
+    """Few packages: summary lists all names, plural 'packages'."""
+    upgrade_lines = [
+        'Upgraded abc/efg to version 1.2.3 on amd64, arm, x86',
+        'Upgraded xyz/uvw to version 1.2.3 on amd64',
+        'Upgraded xyz/uvw to version 3.2.1 on arm, x86',
+        'Upgraded mno/pqr to version 12345 on x86',
+    ]
+    result = self._TestCreateCommitMessage(upgrade_lines)
+
+    # Commit message should have:
+    # -- Summary that mentions 'efg, pqr, uvw' and ends in "packages" (plural)
+    # -- Body corresponding to upgrade_lines
+    # -- BUG= line (with space after '=' to invalidate it)
+    # -- TEST= line (with space after '=' to invalidate it)
+    body = r'\n'.join([re.sub(r'\s+', r'\s', line) for line in upgrade_lines])
+    regexp = re.compile(r'''^efg,\spqr,\suvw:\supgraded\spackages.*\n # Summary
+                            ^\s*\n                            # Blank line
+                            %s\n                              # Body
+                            ^\s*\n                            # Blank line
+                            ^BUG=\s.+\n                       # BUG line
+                            ^TEST=\s                          # TEST line
+                            ''' % body,
+                        re.VERBOSE | re.MULTILINE)
+    self.assertTrue(regexp.search(result))
+
+  def testCreateCommitMessageTenPkgs(self):
+    """Many packages: summary gives a count instead of listing names."""
+    upgrade_lines = [
+        'Upgraded abc/efg to version 1.2.3 on amd64, arm, x86',
+        'Upgraded bcd/fgh to version 1.2.3 on amd64',
+        'Upgraded cde/ghi to version 3.2.1 on arm, x86',
+        'Upgraded def/hij to version 12345 on x86',
+        'Upgraded efg/ijk to version 1.2.3 on amd64',
+        'Upgraded fgh/jkl to version 3.2.1 on arm, x86',
+        'Upgraded ghi/klm to version 12345 on x86',
+        'Upgraded hij/lmn to version 1.2.3 on amd64',
+        'Upgraded ijk/mno to version 3.2.1 on arm, x86',
+        'Upgraded jkl/nop to version 12345 on x86',
+    ]
+    result = self._TestCreateCommitMessage(upgrade_lines)
+
+    # Commit message should have:
+    # -- Summary that mentions '10' and ends in "packages" (plural)
+    # -- Body corresponding to upgrade_lines
+    # -- BUG= line (with space after '=' to invalidate it)
+    # -- TEST= line (with space after '=' to invalidate it)
+    body = r'\n'.join([re.sub(r'\s+', r'\s', line) for line in upgrade_lines])
+    regexp = re.compile(r'''^Upgraded\s.*10.*\spackages\n     # Summary
+                            ^\s*\n                            # Blank line
+                            %s\n                              # Body
+                            ^\s*\n                            # Blank line
+                            ^BUG=\s.+\n                       # BUG line
+                            ^TEST=\s                          # TEST line
+                            ''' % body,
+                        re.VERBOSE | re.MULTILINE)
+    self.assertTrue(regexp.search(result))
+
+
+@unittest.skip('relies on portage module not currently available')
+class GetCurrentVersionsTest(CpuTestBase):
+  """Test Upgrader._GetCurrentVersions"""
+
+  def _TestGetCurrentVersionsLocalCpv(self, target_pinfolist):
+    """Run _GetCurrentVersions for pinfos that carry a local cpv.
+
+    Targets are passed to parallel_emerge as '=<cpv>' atoms; verifies the
+    resulting dependency graph covers the requested packages.
+    """
+    cmdargs = []
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _curr_board=None)
+    self._SetUpPlayground()
+
+    # Add test-specific mocks/stubs
+    self.mox.StubOutWithMock(cpu.Upgrader, '_GetPreOrderDepGraph')
+
+    # Replay script
+    packages = [pinfo.package for pinfo in target_pinfolist]
+    targets = ['=' + pinfo.cpv for pinfo in target_pinfolist]
+    pm_argv = cpu.Upgrader._GenParallelEmergeArgv(mocked_upgrader, targets)
+    pm_argv.append('--root-deps')
+    verifier = _GenDepsGraphVerifier(packages)
+    mocked_upgrader._GenParallelEmergeArgv(targets).AndReturn(pm_argv)
+    mocked_upgrader._SetPortTree(mox.IsA(portcfg.config), mox.IsA(dict))
+    cpu.Upgrader._GetPreOrderDepGraph(mox.Func(verifier)).AndReturn([])
+    self.mox.ReplayAll()
+
+    # Verify
+    result = cpu.Upgrader._GetCurrentVersions(mocked_upgrader, target_pinfolist)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testGetCurrentVersionsTwoPkgs(self):
+    """Two cpv-targeted packages resolve through the dep graph."""
+    target_pinfolist = [cpu.PInfo(package='dev-libs/A', cpv='dev-libs/A-2'),
+                        cpu.PInfo(package='dev-libs/D', cpv='dev-libs/D-3')]
+    self._TestGetCurrentVersionsLocalCpv(target_pinfolist)
+
+  def testGetCurrentVersionsOnePkgB(self):
+    """Single cpv-targeted package."""
+    target_pinfolist = [cpu.PInfo(package='dev-libs/B', cpv='dev-libs/B-2')]
+    self._TestGetCurrentVersionsLocalCpv(target_pinfolist)
+
+  def testGetCurrentVersionsOnePkgLibcros(self):
+    """Single cpv-targeted package from a different category."""
+    target_pinfolist = [cpu.PInfo(package='chromeos-base/libcros',
+                                  cpv='chromeos-base/libcros-1')]
+    self._TestGetCurrentVersionsLocalCpv(target_pinfolist)
+
+  def _TestGetCurrentVersionsPackageOnly(self, target_pinfolist):
+    """Run _GetCurrentVersions targeting bare package names (no cpv)."""
+    cmdargs = []
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _curr_board=None)
+    self._SetUpPlayground()
+
+    # Add test-specific mocks/stubs
+    self.mox.StubOutWithMock(cpu.Upgrader, '_GetPreOrderDepGraph')
+
+    # Replay script
+    packages = [pinfo.package for pinfo in target_pinfolist]
+    pm_argv = cpu.Upgrader._GenParallelEmergeArgv(mocked_upgrader, packages)
+    pm_argv.append('--root-deps')
+    mocked_upgrader._GenParallelEmergeArgv(packages).AndReturn(pm_argv)
+    mocked_upgrader._SetPortTree(mox.IsA(portcfg.config), mox.IsA(dict))
+    cpu.Upgrader._GetPreOrderDepGraph(mox.IgnoreArg()).AndReturn([])
+    self.mox.ReplayAll()
+
+    # Verify
+    result = cpu.Upgrader._GetCurrentVersions(mocked_upgrader, target_pinfolist)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testGetCurrentVersionsWorld(self):
+    """The special 'world' target is passed through as a bare name."""
+    target_pinfolist = [cpu.PInfo(package='world', cpv='world')]
+    self._TestGetCurrentVersionsPackageOnly(target_pinfolist)
+
+  def testGetCurrentVersionsLocalOnlyB(self):
+    """A package with no local cpv is targeted by name only."""
+    target_pinfolist = [cpu.PInfo(package='dev-libs/B', cpv=None)]
+    self._TestGetCurrentVersionsPackageOnly(target_pinfolist)
+
+
+class ResolveAndVerifyArgsTest(CpuTestBase):
+  """Test Upgrader._ResolveAndVerifyArgs"""
+
+  def _TestResolveAndVerifyArgsWorld(self, upgrade_mode):
+    args = ['world']
+    cmdargs = []
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _curr_board=None)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      result = cpu.Upgrader._ResolveAndVerifyArgs(mocked_upgrader, args,
+                                                  upgrade_mode=upgrade_mode)
+    self.mox.VerifyAll()
+
+    self.assertEquals(result, [cpu.PInfo(user_arg='world',
+                                         package='world',
+                                         package_name='world',
+                                         category=None,
+                                         cpv='world')])
+
+  def testResolveAndVerifyArgsWorldUpgradeMode(self):
+    self._TestResolveAndVerifyArgsWorld(True)
+
+  def testResolveAndVerifyArgsWorldStatusMode(self):
+    self._TestResolveAndVerifyArgsWorld(False)
+
+  def _TestResolveAndVerifyArgsNonWorld(self, pinfolist, cmdargs=[],
+                                        error=None, error_checker=None):
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _curr_board=None)
+    upgrade_mode = cpu.Upgrader._IsInUpgradeMode(mocked_upgrader)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    args = []
+    for pinfo in pinfolist:
+      arg = pinfo.user_arg
+      local_cpv = pinfo.cpv
+      upstream_cpv = pinfo.upstream_cpv
+      args.append(arg)
+
+      catpkg = cpu.Upgrader._GetCatPkgFromCpv(arg)
+      local_arg = catpkg if catpkg else arg
+
+      mocked_upgrader._FindCurrentCPV(local_arg).AndReturn(local_cpv)
+      mocked_upgrader._FindUpstreamCPV(
+          arg, mocked_upgrader._unstable_ok).AndReturn(upstream_cpv)
+
+      if not upstream_cpv and upgrade_mode:
+        # Real method raises an exception here.
+        if not mocked_upgrader._unstable_ok:
+          mocked_upgrader._FindUpstreamCPV(arg, True).AndReturn(arg)
+        break
+
+      any_cpv = local_cpv if local_cpv else upstream_cpv
+      if any_cpv:
+        mocked_upgrader._FillPInfoFromCPV(mox.IsA(cpu.PInfo), any_cpv)
+
+    self.mox.ReplayAll()
+
+    # Verify
+    result = None
+    with self.OutputCapturer():
+      if error:
+        exc = self.AssertRaisesAndReturn(error,
+                                         cpu.Upgrader._ResolveAndVerifyArgs,
+                                         mocked_upgrader, args, upgrade_mode)
+        if error_checker:
+          check = error_checker(exc)
+          self.assertTrue(check[0], msg=check[1])
+      else:
+        result = cpu.Upgrader._ResolveAndVerifyArgs(mocked_upgrader, args,
+                                                    upgrade_mode)
+    self.mox.VerifyAll()
+
+    return result
+
+  def testResolveAndVerifyArgsNonWorldUpgrade(self):
+    pinfolist = [cpu.PInfo(user_arg='dev-libs/B',
+                           cpv='dev-libs/B-1',
+                           upstream_cpv='dev-libs/B-2')]
+    cmdargs = ['--upgrade', '--unstable-ok']
+    result = self._TestResolveAndVerifyArgsNonWorld(pinfolist, cmdargs)
+    self.assertEquals(result, pinfolist)
+
+  def testResolveAndVerifyArgsNonWorldUpgradeSpecificVer(self):
+    pinfolist = [cpu.PInfo(user_arg='dev-libs/B-2',
+                           cpv='dev-libs/B-1',
+                           upstream_cpv='dev-libs/B-2')]
+    cmdargs = ['--upgrade', '--unstable-ok']
+    result = self._TestResolveAndVerifyArgsNonWorld(pinfolist, cmdargs)
+    self.assertEquals(result, pinfolist)
+
+  def testResolveAndVerifyArgsNonWorldUpgradeSpecificVerNotFoundStable(self):
+    pinfolist = [cpu.PInfo(user_arg='dev-libs/B-2',
+                           cpv='dev-libs/B-1')]
+    cmdargs = ['--upgrade']
+
+    def _error_checker(exception):
+      # RuntimeError text should mention 'is unstable'.
+      text = str(exception)
+      phrase = 'is unstable'
+      msg = 'No mention of "%s" in error message: %s' % (phrase, text)
+      return (0 <= text.find(phrase), msg)
+
+    self._TestResolveAndVerifyArgsNonWorld(pinfolist, cmdargs,
+                                           error=RuntimeError,
+                                           error_checker=_error_checker)
+
+  def testResolveAndVerifyArgsNonWorldUpgradeSpecificVerNotFoundUnstable(self):
+    pinfolist = [cpu.PInfo(user_arg='dev-libs/B-2', cpv='dev-libs/B-1')]
+    cmdargs = ['--upgrade', '--unstable-ok']
+
+    def _error_checker(exception):
+      # RuntimeError text should start with 'Unable to find'.
+      text = str(exception)
+      phrase = 'Unable to find'
+      msg = 'Error message expected to start with "%s": %s' % (phrase, text)
+      return (text.startswith(phrase), msg)
+
+    self._TestResolveAndVerifyArgsNonWorld(pinfolist, cmdargs,
+                                           error=RuntimeError,
+                                           error_checker=_error_checker)
+
+  def testResolveAndVerifyArgsNonWorldLocalOny(self):
+    pinfolist = [cpu.PInfo(user_arg='dev-libs/B', cpv='dev-libs/B-1')]
+    cmdargs = ['--upgrade', '--unstable-ok']
+
+    def _error_checker(exception):
+      # RuntimeError text should start with 'Unable to find'.
+      text = str(exception)
+      phrase = 'Unable to find'
+      msg = 'Error message expected to start with "%s": %s' % (phrase, text)
+      return (text.startswith(phrase), msg)
+
+    self._TestResolveAndVerifyArgsNonWorld(pinfolist, cmdargs,
+                                           error=RuntimeError,
+                                           error_checker=_error_checker)
+
+  def testResolveAndVerifyArgsNonWorldUpstreamOnly(self):
+    pinfolist = [cpu.PInfo(user_arg='dev-libs/B',
+                           upstream_cpv='dev-libs/B-2')]
+    cmdargs = ['--upgrade', '--unstable-ok']
+    result = self._TestResolveAndVerifyArgsNonWorld(pinfolist, cmdargs)
+    self.assertEquals(result, pinfolist)
+
+  def testResolveAndVerifyArgsNonWorldNeither(self):
+    pinfolist = [cpu.PInfo(user_arg='dev-libs/B')]
+    cmdargs = ['--upgrade', '--unstable-ok']
+    self._TestResolveAndVerifyArgsNonWorld(pinfolist, cmdargs,
+                                           error=RuntimeError)
+
+  def testResolveAndVerifyArgsNonWorldStatusSpecificVer(self):
+    """Exception because specific cpv arg not allowed without --upgrade."""
+    cmdargs = ['--unstable-ok']
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _curr_board=None)
+    upgrade_mode = cpu.Upgrader._IsInUpgradeMode(mocked_upgrader)
+
+    # Add test-specific mocks/stubs
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    self.assertRaises(RuntimeError,
+                      cpu.Upgrader._ResolveAndVerifyArgs,
+                      mocked_upgrader,
+                      ['dev-libs/B-2'], upgrade_mode)
+    self.mox.VerifyAll()
+
+
+class StabilizeEbuildTest(CpuTestBase):
+  """Tests for _StabilizeEbuild()."""
+
+  PREFIX_LINES = [
+      'Garbletygook nonsense unimportant',
+      'Some other nonsense with KEYWORDS mention',
+  ]
+  POSTFIX_LINES = [
+      'Some mention of KEYWORDS in a line',
+      'And other nonsense',
+  ]
+
+  def _TestStabilizeEbuild(self, ebuild_path, arch):
+
+    mocked_upgrader = self._MockUpgrader(cmdargs=[],
+                                         _curr_arch=arch)
+
+    # These are the steps of the replay script.
+    self.mox.ReplayAll()
+
+    # This is the verification phase.
+    with self.OutputCapturer():
+      cpu.Upgrader._StabilizeEbuild(mocked_upgrader, ebuild_path)
+    self.mox.VerifyAll()
+
+  def _AssertEqualsExcludingComments(self, lines1, lines2):
+    lines1 = [ln for ln in lines1 if not ln.startswith('#')]
+    lines2 = [ln for ln in lines2 if not ln.startswith('#')]
+
+    self.assertEquals(lines1, lines2)
+
+  def _TestStabilizeEbuildWrapper(self, ebuild_path, arch,
+                                  keyword_line, gold_keyword_line):
+    if not isinstance(keyword_line, list):
+      keyword_line = [keyword_line]
+    if not isinstance(gold_keyword_line, list):
+      gold_keyword_line = [gold_keyword_line]
+
+    input_content = self.PREFIX_LINES + keyword_line + self.POSTFIX_LINES
+    gold_content = self.PREFIX_LINES + gold_keyword_line + self.POSTFIX_LINES
+
+    # Write contents to ebuild_path before test.
+    osutils.WriteFile(ebuild_path, '\n'.join(input_content))
+
+    self._TestStabilizeEbuild(ebuild_path, arch)
+
+    # Read content back after test.
+    content_lines = osutils.ReadFile(ebuild_path).splitlines()
+
+    self._AssertEqualsExcludingComments(gold_content, content_lines)
+
+  @osutils.TempFileDecorator
+  def testNothingToDo(self):
+    arch = 'arm'
+    keyword_line = 'KEYWORDS="amd64 arm mips x86"'
+    gold_keyword_line = 'KEYWORDS="*"'
+    self._TestStabilizeEbuildWrapper(self.tempfile, arch,
+                                     keyword_line, gold_keyword_line)
+
+  @osutils.TempFileDecorator
+  def testNothingToDoFbsd(self):
+    arch = 'x86'
+    keyword_line = 'KEYWORDS="amd64 arm ~mips x86 ~x86-fbsd"'
+    gold_keyword_line = 'KEYWORDS="*"'
+    self._TestStabilizeEbuildWrapper(self.tempfile, arch,
+                                     keyword_line, gold_keyword_line)
+
+  @osutils.TempFileDecorator
+  def testSimpleMiddleOfLine(self):
+    arch = 'arm'
+    keyword_line = 'KEYWORDS="amd64 ~arm ~mips x86"'
+    gold_keyword_line = 'KEYWORDS="*"'
+    self._TestStabilizeEbuildWrapper(self.tempfile, arch,
+                                     keyword_line, gold_keyword_line)
+
+  @osutils.TempFileDecorator
+  def testSimpleMiddleOfLineSpacePrefix(self):
+    arch = 'arm'
+    keyword_line = '    KEYWORDS="amd64 ~arm ~mips x86"'
+    gold_keyword_line = '    KEYWORDS="*"'
+    self._TestStabilizeEbuildWrapper(self.tempfile, arch,
+                                     keyword_line, gold_keyword_line)
+
+  @osutils.TempFileDecorator
+  def testSimpleStartOfLine(self):
+    arch = 'arm'
+    keyword_line = 'KEYWORDS="~arm amd64 ~mips x86"'
+    gold_keyword_line = 'KEYWORDS="*"'
+    self._TestStabilizeEbuildWrapper(self.tempfile, arch,
+                                     keyword_line, gold_keyword_line)
+
+  @osutils.TempFileDecorator
+  def testSimpleEndOfLine(self):
+    arch = 'arm'
+    keyword_line = 'KEYWORDS="amd64 ~mips x86 ~arm"'
+    gold_keyword_line = 'KEYWORDS="*"'
+    self._TestStabilizeEbuildWrapper(self.tempfile, arch,
+                                     keyword_line, gold_keyword_line)
+
+  @osutils.TempFileDecorator
+  def testPreFbsd(self):
+    arch = 'x86'
+    keyword_line = 'KEYWORDS="amd64 ~arm ~mips ~x86 ~x86-fbsd"'
+    gold_keyword_line = 'KEYWORDS="*"'
+    self._TestStabilizeEbuildWrapper(self.tempfile, arch,
+                                     keyword_line, gold_keyword_line)
+
+  @osutils.TempFileDecorator
+  def testPostFbsd(self):
+    arch = 'x86'
+    keyword_line = 'KEYWORDS="amd64 ~arm ~mips ~x86-fbsd ~x86"'
+    gold_keyword_line = 'KEYWORDS="*"'
+    self._TestStabilizeEbuildWrapper(self.tempfile, arch,
+                                     keyword_line, gold_keyword_line)
+
+  @osutils.TempFileDecorator
+  def testMultilineKeywordsMiddle(self):
+    arch = 'arm'
+    keyword_lines = [
+        'KEYWORDS="amd64',
+        '  ~arm',
+        '  ~mips',
+        '  x86"',
+    ]
+    gold_keyword_lines = ['KEYWORDS="*"',]
+    self._TestStabilizeEbuildWrapper(self.tempfile, arch,
+                                     keyword_lines, gold_keyword_lines)
+
+  @osutils.TempFileDecorator
+  def testMultilineKeywordsStart(self):
+    arch = 'amd64'
+    keyword_lines = [
+        'KEYWORDS="~amd64',
+        '  arm',
+        '  ~mips',
+        '  x86"',
+    ]
+    gold_keyword_lines = ['KEYWORDS="*"',]
+    self._TestStabilizeEbuildWrapper(self.tempfile, arch,
+                                     keyword_lines, gold_keyword_lines)
+
+  @osutils.TempFileDecorator
+  def testMultilineKeywordsEnd(self):
+    arch = 'x86'
+    keyword_lines = [
+        'KEYWORDS="amd64',
+        '  arm',
+        '  ~mips',
+        '  ~x86"',
+    ]
+    gold_keyword_lines = ['KEYWORDS="*"',]
+    self._TestStabilizeEbuildWrapper(self.tempfile, arch,
+                                     keyword_lines, gold_keyword_lines)
+
+  @osutils.TempFileDecorator
+  def testMultipleKeywordLinesOneChange(self):
+    arch = 'arm'
+    keyword_lines = [
+        'KEYWORDS="amd64 arm mips x86"',
+        'KEYWORDS="~amd64 ~arm ~mips ~x86"',
+    ]
+    gold_keyword_lines = ['KEYWORDS="*"',] * 2
+    self._TestStabilizeEbuildWrapper(self.tempfile, arch,
+                                     keyword_lines, gold_keyword_lines)
+
+  @osutils.TempFileDecorator
+  def testMultipleKeywordLinesMultipleChanges(self):
+    arch = 'arm'
+    keyword_lines = [
+        'KEYWORDS="amd64 ~arm mips x86"',
+        'KEYWORDS="~amd64 ~arm ~mips ~x86"',
+    ]
+    gold_keyword_lines = ['KEYWORDS="*"',] * 2
+    self._TestStabilizeEbuildWrapper(self.tempfile, arch,
+                                     keyword_lines, gold_keyword_lines)
+
+  @osutils.TempFileDecorator
+  def testMultipleKeywordLinesMultipleChangesSpacePrefix(self):
+    arch = 'arm'
+    keyword_lines = [
+        '     KEYWORDS="amd64 ~arm mips x86"',
+        '     KEYWORDS="~amd64 ~arm ~mips ~x86"',
+    ]
+    gold_keyword_lines = ['     KEYWORDS="*"',] * 2
+    self._TestStabilizeEbuildWrapper(self.tempfile, arch,
+                                     keyword_lines, gold_keyword_lines)
+
+
+@unittest.skip('relies on portage module not currently available')
+class GetPreOrderDepGraphTest(CpuTestBase):
+  """Test the Upgrader class from cros_portage_upgrade."""
+
+  #
+  # _GetPreOrderDepGraph testing (defunct - to be replaced)
+  #
+
+  def _TestGetPreOrderDepGraph(self, pkg):
+    """Test the behavior of the Upgrader._GetPreOrderDepGraph method."""
+
+    cmdargs = []
+    mocked_upgrader = self._MockUpgrader(cmdargs=cmdargs,
+                                         _curr_board=None)
+    self._SetUpPlayground()
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    pm_argv = cpu.Upgrader._GenParallelEmergeArgv(mocked_upgrader, [pkg])
+    pm_argv.append('--root-deps')
+    deps = parallel_emerge.DepGraphGenerator()
+    deps.Initialize(pm_argv)
+    deps_tree, deps_info = deps.GenDependencyTree()
+    deps_graph = deps.GenDependencyGraph(deps_tree, deps_info)
+
+    deps_list = cpu.Upgrader._GetPreOrderDepGraph(deps_graph)
+    golden_deps_set = _GetGoldenDepsSet(pkg)
+    self.assertEquals(set(deps_list), golden_deps_set)
+    self.mox.VerifyAll()
+
+  def testGetPreOrderDepGraphDevLibsA(self):
+    return self._TestGetPreOrderDepGraph('dev-libs/A')
+
+  def testGetPreOrderDepGraphDevLibsC(self):
+    return self._TestGetPreOrderDepGraph('dev-libs/C')
+
+  def testGetPreOrderDepGraphVirtualLibusb(self):
+    return self._TestGetPreOrderDepGraph('virtual/libusb')
+
+  def testGetPreOrderDepGraphCrosbaseLibcros(self):
+    return self._TestGetPreOrderDepGraph('chromeos-base/libcros')
+
+
+class MainTest(CpuTestBase):
+  """Test argument handling at the main method level."""
+
+  def _AssertCPUMain(self, args, expect_zero):
+    """Run cpu.main() and assert exit value is expected.
+
+    If |expect_zero| is True, assert exit value = 0.  If False,
+    assert exit value != 0.
+    """
+    try:
+      cpu.main(args)
+    except exceptions.SystemExit as e:
+      if expect_zero:
+        self.assertEquals(e.args[0], 0,
+                          msg='expected call to main() to exit cleanly, '
+                          'but it exited with code %d' % e.args[0])
+      else:
+        self.assertNotEquals(e.args[0], 0,
+                             msg='expected call to main() to exit with '
+                             'failure code, but exited with code 0 instead.')
+
+  def testHelp(self):
+    """Test that --help is functioning"""
+
+    with self.OutputCapturer() as output:
+      # Running with --help should exit with code==0
+      try:
+        cpu.main(['--help'])
+      except exceptions.SystemExit as e:
+        self.assertEquals(e.args[0], 0)
+
+    # Verify that a message beginning with "usage: " was printed
+    stdout = output.GetStdout()
+    self.assertTrue(stdout.startswith('usage: '))
+
+  def testMissingBoard(self):
+    """Test that running without --board exits with an error."""
+    with self.OutputCapturer():
+      # Running without --board should exit with code!=0
+      try:
+        cpu.main([])
+      except exceptions.SystemExit as e:
+        self.assertNotEquals(e.args[0], 0)
+
+    # Verify that an error message was printed.
+    self.AssertOutputEndsInError()
+
+  def testBoardWithoutPackage(self):
+    """Test that running without a package argument exits with an error."""
+    with self.OutputCapturer():
+      # Running without a package should exit with code!=0
+      self._AssertCPUMain(['--board=any-board'], expect_zero=False)
+
+    # Verify that an error message was printed.
+    self.AssertOutputEndsInError()
+
+  def testHostWithoutPackage(self):
+    """Test that running without a package argument exits with an error."""
+    with self.OutputCapturer():
+      # Running without a package should exit with code!=0
+      self._AssertCPUMain(['--host'], expect_zero=False)
+
+    # Verify that an error message was printed.
+    self.AssertOutputEndsInError()
+
+  def testUpgradeAndUpgradeDeep(self):
+    """Running with --upgrade and --upgrade-deep exits with an error."""
+    with self.OutputCapturer():
+      # Expect exit with code!=0
+      self._AssertCPUMain(['--host', '--upgrade', '--upgrade-deep',
+                           'any-package'], expect_zero=False)
+
+    # Verify that an error message was printed.
+    self.AssertOutputEndsInError()
+
+  def testForceWithoutUpgrade(self):
+    """Running with --force requires --upgrade or --upgrade-deep."""
+    with self.OutputCapturer():
+      # Expect exit with code!=0
+      self._AssertCPUMain(['--host', '--force', 'any-package'],
+                          expect_zero=False)
+
+    # Verify that an error message was printed.
+    self.AssertOutputEndsInError()
+
+  def testFlowStatusReportOneBoard(self):
+    """Test main flow for basic one-board status report."""
+
+    self.mox.StubOutWithMock(cpu.Upgrader, 'PreRunChecks')
+    self.mox.StubOutWithMock(cpu, '_BoardIsSetUp')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'PrepareToRun')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'RunBoard')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'RunCompleted')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'WriteTableFiles')
+
+    cpu.Upgrader.PreRunChecks()
+    cpu._BoardIsSetUp('any-board').AndReturn(True)
+    cpu.Upgrader.PrepareToRun()
+    cpu.Upgrader.RunBoard('any-board')
+    cpu.Upgrader.RunCompleted()
+    cpu.Upgrader.WriteTableFiles(csv='/dev/null')
+    self.mox.ReplayAll()
+
+    with self.OutputCapturer():
+      self._AssertCPUMain(['--board=any-board', '--to-csv=/dev/null',
+                           'any-package'], expect_zero=True)
+    self.mox.VerifyAll()
+
+  def testFlowStatusReportOneBoardNotSetUp(self):
+    """Test main flow for basic one-board status report."""
+    self.mox.StubOutWithMock(cpu.Upgrader, 'PreRunChecks')
+    self.mox.StubOutWithMock(cpu, '_BoardIsSetUp')
+
+    cpu.Upgrader.PreRunChecks()
+    cpu._BoardIsSetUp('any-board').AndReturn(False)
+    self.mox.ReplayAll()
+
+    # Running with a package not set up should exit with code!=0
+    with self.OutputCapturer():
+      self._AssertCPUMain(['--board=any-board', '--to-csv=/dev/null',
+                           'any-package'], expect_zero=False)
+    self.mox.VerifyAll()
+
+    # Verify that an error message was printed.
+    self.AssertOutputEndsInError()
+
+  def testFlowStatusReportTwoBoards(self):
+    """Test main flow for two-board status report."""
+    self.mox.StubOutWithMock(cpu.Upgrader, 'PreRunChecks')
+    self.mox.StubOutWithMock(cpu, '_BoardIsSetUp')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'PrepareToRun')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'RunBoard')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'RunCompleted')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'WriteTableFiles')
+
+    cpu.Upgrader.PreRunChecks()
+    cpu._BoardIsSetUp('board1').AndReturn(True)
+    cpu._BoardIsSetUp('board2').AndReturn(True)
+    cpu.Upgrader.PrepareToRun()
+    cpu.Upgrader.RunBoard('board1')
+    cpu.Upgrader.RunBoard('board2')
+    cpu.Upgrader.RunCompleted()
+    cpu.Upgrader.WriteTableFiles(csv=None)
+    self.mox.ReplayAll()
+
+    with self.OutputCapturer():
+      self._AssertCPUMain(['--board=board1:board2', 'any-package'],
+                          expect_zero=True)
+    self.mox.VerifyAll()
+
+  def testFlowUpgradeOneBoard(self):
+    """Test main flow for basic one-board upgrade."""
+    self.mox.StubOutWithMock(cpu.Upgrader, 'PreRunChecks')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'CheckBoardList')
+    self.mox.StubOutWithMock(cpu, '_BoardIsSetUp')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'PrepareToRun')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'RunBoard')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'RunCompleted')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'WriteTableFiles')
+
+    cpu.Upgrader.PreRunChecks()
+    cpu.Upgrader.CheckBoardList(['any-board'])
+    cpu._BoardIsSetUp('any-board').AndReturn(True)
+    cpu.Upgrader.PrepareToRun()
+    cpu.Upgrader.RunBoard('any-board')
+    cpu.Upgrader.RunCompleted()
+    cpu.Upgrader.WriteTableFiles(csv=None)
+    self.mox.ReplayAll()
+
+    with self.OutputCapturer():
+      self._AssertCPUMain(['--upgrade', '--board=any-board', 'any-package'],
+                          expect_zero=True)
+    self.mox.VerifyAll()
+
+  def testFlowUpgradeTwoBoards(self):
+    """Test main flow for two-board upgrade."""
+    self.mox.StubOutWithMock(cpu.Upgrader, 'PreRunChecks')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'CheckBoardList')
+    self.mox.StubOutWithMock(cpu, '_BoardIsSetUp')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'PrepareToRun')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'RunBoard')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'RunCompleted')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'WriteTableFiles')
+
+    cpu.Upgrader.PreRunChecks()
+    cpu.Upgrader.CheckBoardList(['board1', 'board2'])
+    cpu._BoardIsSetUp('board1').AndReturn(True)
+    cpu._BoardIsSetUp('board2').AndReturn(True)
+    cpu.Upgrader.PrepareToRun()
+    cpu.Upgrader.RunBoard('board1')
+    cpu.Upgrader.RunBoard('board2')
+    cpu.Upgrader.RunCompleted()
+    cpu.Upgrader.WriteTableFiles(csv='/dev/null')
+    self.mox.ReplayAll()
+
+    with self.OutputCapturer():
+      self._AssertCPUMain(['--upgrade', '--board=board1:board2',
+                           '--to-csv=/dev/null', 'any-package'],
+                          expect_zero=True)
+    self.mox.VerifyAll()
+
+  def testFlowUpgradeTwoBoardsAndHost(self):
+    """Test main flow for two-board and host upgrade."""
+    self.mox.StubOutWithMock(cpu.Upgrader, 'PreRunChecks')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'CheckBoardList')
+    self.mox.StubOutWithMock(cpu, '_BoardIsSetUp')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'PrepareToRun')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'RunBoard')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'RunCompleted')
+    self.mox.StubOutWithMock(cpu.Upgrader, 'WriteTableFiles')
+
+    cpu.Upgrader.PreRunChecks()
+    cpu.Upgrader.CheckBoardList(['board1', 'board2'])
+    cpu._BoardIsSetUp('board1').AndReturn(True)
+    cpu._BoardIsSetUp('board2').AndReturn(True)
+    cpu.Upgrader.PrepareToRun()
+    cpu.Upgrader.RunBoard(cpu.Upgrader.HOST_BOARD)
+    cpu.Upgrader.RunBoard('board1')
+    cpu.Upgrader.RunBoard('board2')
+    cpu.Upgrader.RunCompleted()
+    cpu.Upgrader.WriteTableFiles(csv='/dev/null')
+    self.mox.ReplayAll()
+
+    with self.OutputCapturer():
+      self._AssertCPUMain(['--upgrade', '--host', '--board=board1:host:board2',
+                           '--to-csv=/dev/null', 'any-package'],
+                          expect_zero=True)
+    self.mox.VerifyAll()
diff --git a/scripts/cros_run_unit_tests.py b/scripts/cros_run_unit_tests.py
new file mode 100644
index 0000000..b179121
--- /dev/null
+++ b/scripts/cros_run_unit_tests.py
@@ -0,0 +1,103 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tool to run ebuild unittests."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import commandline
+from chromite.lib import chroot_util
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import workon_helper
+from chromite.lib import portage_util
+
+
+def ParseArgs(argv):
+  """Parse arguments.
+
+  Args:
+    argv: array of arguments passed to the script.
+  """
+  parser = commandline.ArgumentParser(description=__doc__)
+
+  target = parser.add_mutually_exclusive_group(required=True)
+  target.add_argument('--sysroot', type='path', help='Path to the sysroot.')
+  target.add_argument('--board', help='Board name.')
+
+  parser.add_argument('--pretend', default=False, action='store_true',
+                      help='Show the list of packages to be tested and return.')
+  parser.add_argument('--noinstalled_only', dest='installed', default=True,
+                      action='store_false',
+                      help='Test all testable packages, even if they are not '
+                      'currently installed.')
+  parser.add_argument('--package_file', type='path',
+                      help='Path to a file containing the list of packages '
+                      'that should be tested.')
+  parser.add_argument('--blacklist_packages', dest='package_blacklist',
+                      help='Space-separated list of blacklisted packages.')
+  parser.add_argument('--packages',
+                      help='Space-separated list of packages to test.')
+  parser.add_argument('--nowithdebug', action='store_true',
+                      help="Don't build the tests with USE=cros-debug")
+
+  options = parser.parse_args(argv)
+  options.Freeze()
+  return options
+
+
+def main(argv):
+  opts = ParseArgs(argv)
+
+  cros_build_lib.AssertInsideChroot()
+
+  sysroot = opts.sysroot or cros_build_lib.GetSysroot(opts.board)
+  package_blacklist = portage_util.UNITTEST_PACKAGE_BLACKLIST
+  if opts.package_blacklist:
+    package_blacklist |= set(opts.package_blacklist.split())
+
+  packages = set()
+  # The list of packages to test can be passed as a file containing a
+  # space-separated list of package names.
+  # This is used by the builder to test only the packages that were upreved.
+  if opts.package_file and os.path.exists(opts.package_file):
+    packages = set(osutils.ReadFile(opts.package_file).split())
+
+  if opts.packages:
+    packages |= set(opts.packages.split())
+
+  # If no packages were specified, use all testable packages.
+  if not (opts.packages or opts.package_file):
+    workon = workon_helper.WorkonHelper(sysroot)
+    packages = (workon.InstalledWorkonAtoms() if opts.installed
+                else workon.ListAtoms(use_all=True))
+
+  for cp in packages & package_blacklist:
+    logging.info('Skipping blacklisted package %s.', cp)
+
+  packages = packages - package_blacklist
+  pkg_with_test = portage_util.PackagesWithTest(sysroot, packages)
+
+  if packages - pkg_with_test:
+    logging.warning('The following packages do not have tests:')
+    logging.warning('\n'.join(sorted(packages - pkg_with_test)))
+
+  if opts.pretend:
+    print('\n'.join(sorted(pkg_with_test)))
+    return
+
+  env = None
+  if opts.nowithdebug:
+    use_flags = os.environ.get('USE', '')
+    use_flags += ' -cros-debug'
+    env = {'USE': use_flags}
+
+  try:
+    chroot_util.RunUnittests(sysroot, pkg_with_test, extra_env=env)
+  except cros_build_lib.RunCommandError:
+    logging.error('Unittests failed.')
+    raise
diff --git a/scripts/cros_sdk.py b/scripts/cros_sdk.py
new file mode 100644
index 0000000..b47475d
--- /dev/null
+++ b/scripts/cros_sdk.py
@@ -0,0 +1,721 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Manage SDK chroots.
+
+This script is used for manipulating local chroot environments; creating,
+deleting, downloading, etc.  If given --enter (or no args), it defaults
+to an interactive bash shell within the chroot.
+
+If given args those are passed to the chroot environment, and executed.
+"""
+
+from __future__ import print_function
+
+import argparse
+import glob
+import os
+import pwd
+import sys
+import urlparse
+
+from chromite.cbuildbot import constants
+from chromite.lib import cgroups
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import locking
+from chromite.lib import namespaces
+from chromite.lib import osutils
+from chromite.lib import process_util
+from chromite.lib import retry_util
+from chromite.lib import toolchain
+
+cros_build_lib.STRICT_SUDO = True
+
+
+COMPRESSION_PREFERENCE = ('xz', 'bz2')
+
+# TODO(zbehan): Remove the dependency on these, reimplement them in python
+MAKE_CHROOT = [os.path.join(constants.SOURCE_ROOT,
+                            'src/scripts/sdk_lib/make_chroot.sh')]
+ENTER_CHROOT = [os.path.join(constants.SOURCE_ROOT,
+                             'src/scripts/sdk_lib/enter_chroot.sh')]
+
+# Proxy simulator configuration.
+PROXY_HOST_IP = '192.168.240.1'
+PROXY_PORT = 8080
+PROXY_GUEST_IP = '192.168.240.2'
+PROXY_NETMASK = 30
+PROXY_VETH_PREFIX = 'veth'
+PROXY_CONNECT_PORTS = (80, 443, 9418)
+PROXY_APACHE_FALLBACK_USERS = ('www-data', 'apache', 'nobody')
+PROXY_APACHE_MPMS = ('event', 'worker', 'prefork')
+PROXY_APACHE_FALLBACK_PATH = ':'.join(
+    '/usr/lib/apache2/mpm-%s' % mpm for mpm in PROXY_APACHE_MPMS
+)
+PROXY_APACHE_MODULE_GLOBS = ('/usr/lib*/apache2/modules', '/usr/lib*/apache2')
+
+# We need these tools to run. Very common tools (tar,..) are omitted.
+NEEDED_TOOLS = ('curl', 'xz')
+
+# Tools needed for --proxy-sim only.
+PROXY_NEEDED_TOOLS = ('ip',)
+
+
+def GetArchStageTarballs(version):
+  """Returns the URL for a given arch/version"""
+  extension = {'bz2':'tbz2', 'xz':'tar.xz'}
+  return [toolchain.GetSdkURL(suburl='cros-sdk-%s.%s'
+                              % (version, extension[compressor]))
+          for compressor in COMPRESSION_PREFERENCE]
+
+
+def GetStage3Urls(version):
+  return [toolchain.GetSdkURL(suburl='stage3-amd64-%s.tar.%s' % (version, ext))
+          for ext in COMPRESSION_PREFERENCE]
+
+
+def GetToolchainsOverlayUrls(version, toolchains):
+  """Returns the URL(s) for a toolchains SDK overlay.
+
+  Args:
+    version: The SDK version used, e.g. 2015.05.27.145939. We use the year and
+        month components to point to a subdirectory on the SDK bucket where
+        overlays are stored (.../2015/05/ in this case).
+    toolchains: Iterable of toolchain target strings (e.g. 'i686-pc-linux-gnu').
+
+  Returns:
+    List of alternative download URLs for an SDK overlay tarball that contains
+    the given toolchains.
+  """
+  toolchains_desc = '-'.join(sorted(toolchains))
+  suburl_template = os.path.join(
+      *(version.split('.')[:2] +
+        ['cros-sdk-overlay-toolchains-%s-%s.tar.%%s' %
+         (toolchains_desc, version)]))
+  return [toolchain.GetSdkURL(suburl=suburl_template % ext)
+          for ext in COMPRESSION_PREFERENCE]
+
+
+def FetchRemoteTarballs(storage_dir, urls, desc, allow_none=False):
+  """Fetches a tarball given by url, and place it in |storage_dir|.
+
+  Args:
+    storage_dir: Path where to save the tarball.
+    urls: List of URLs to try to download. Download will stop on first success.
+    desc: A string describing what tarball we're downloading (for logging).
+    allow_none: Don't fail if none of the URLs worked.
+
+  Returns:
+    Full path to the downloaded file, or None if |allow_none| and no URL worked.
+
+  Raises:
+    ValueError: If |allow_none| is False and none of the URLs worked.
+  """
+
+  # Note we track content length ourselves since certain versions of curl
+  # fail if asked to resume a complete file.
+  # pylint: disable=C0301,W0631
+  # https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3482927&group_id=976
+  logging.notice('Downloading %s tarball...', desc)
+  for url in urls:
+    # http://www.logilab.org/ticket/8766
+    # pylint: disable=E1101
+    parsed = urlparse.urlparse(url)
+    tarball_name = os.path.basename(parsed.path)
+    if parsed.scheme in ('', 'file'):
+      if os.path.exists(parsed.path):
+        return parsed.path
+      continue
+    content_length = 0
+    logging.debug('Attempting download from %s', url)
+    result = retry_util.RunCurl(
+        ['-I', url], fail=False, capture_output=False, redirect_stdout=True,
+        redirect_stderr=True, print_cmd=False, debug_level=logging.NOTICE)
+    successful = False
+    for header in result.output.splitlines():
+      # We must walk the output to find the string '200 OK' for use cases where
+      # a proxy is involved and may have pushed down the actual header.
+      if header.find('200 OK') != -1:
+        successful = True
+      elif header.lower().startswith('content-length:'):
+        content_length = int(header.split(':', 1)[-1].strip())
+        if successful:
+          break
+    if successful:
+      break
+  else:
+    if allow_none:
+      return None
+    raise ValueError('No valid URLs found!')
+
+  tarball_dest = os.path.join(storage_dir, tarball_name)
+  current_size = 0
+  if os.path.exists(tarball_dest):
+    current_size = os.path.getsize(tarball_dest)
+    if current_size > content_length:
+      osutils.SafeUnlink(tarball_dest)
+      current_size = 0
+
+  if current_size < content_length:
+    retry_util.RunCurl(
+        ['-L', '-y', '30', '-C', '-', '--output', tarball_dest, url],
+        print_cmd=False, capture_output=False, debug_level=logging.NOTICE)
+
+  # Cleanup old tarballs now since we've successfully fetched; only cleanup
+  # the tarballs for our prefix, or unknown ones. This gets a bit tricky
+  # because we might have partial overlap between known prefixes.
+  my_prefix = tarball_name.rsplit('-', 1)[0] + '-'
+  all_prefixes = ('stage3-amd64-', 'cros-sdk-', 'cros-sdk-overlay-')
+  ignored_prefixes = [prefix for prefix in all_prefixes if prefix != my_prefix]
+  for filename in os.listdir(storage_dir):
+    if (filename == tarball_name or
+        any([(filename.startswith(p) and
+              not (len(my_prefix) > len(p) and filename.startswith(my_prefix)))
+             for p in ignored_prefixes])):
+      continue
+    logging.info('Cleaning up old tarball: %s', filename)
+    osutils.SafeUnlink(os.path.join(storage_dir, filename))
+
+  return tarball_dest
+
+
+def CreateChroot(chroot_path, sdk_tarball, toolchains_overlay_tarball,
+                 cache_dir, nousepkg=False):
+  """Creates a new chroot from a given SDK"""
+
+  cmd = MAKE_CHROOT + ['--stage3_path', sdk_tarball,
+                       '--chroot', chroot_path,
+                       '--cache_dir', cache_dir]
+
+  if toolchains_overlay_tarball:
+    cmd.extend(['--toolchains_overlay_path', toolchains_overlay_tarball])
+
+  if nousepkg:
+    cmd.append('--nousepkg')
+
+  logging.notice('Creating chroot. This may take a few minutes...')
+  try:
+    cros_build_lib.RunCommand(cmd, print_cmd=False)
+  except cros_build_lib.RunCommandError:
+    raise SystemExit('Running %r failed!' % cmd)
+
+
+def DeleteChroot(chroot_path):
+  """Deletes an existing chroot"""
+  cmd = MAKE_CHROOT + ['--chroot', chroot_path,
+                       '--delete']
+  try:
+    logging.notice('Deleting chroot.')
+    cros_build_lib.RunCommand(cmd, print_cmd=False)
+  except cros_build_lib.RunCommandError:
+    raise SystemExit('Running %r failed!' % cmd)
+
+
+def EnterChroot(chroot_path, cache_dir, chrome_root, chrome_root_mount,
+                workspace, additional_args):
+  """Enters an existing SDK chroot.
+
+  Args:
+    chroot_path: Path to the chroot directory.
+    cache_dir: Cache directory to pass through to the enter script.
+    chrome_root: Optional Chrome source root to mount into the chroot.
+    chrome_root_mount: Optional mount point for Chrome inside the chroot.
+    workspace: Optional workspace directory to mount into the chroot.
+    additional_args: Command (with args) to run inside the chroot; if empty,
+      an interactive session is started instead.
+
+  Raises:
+    SystemExit: If a non-interactive command exits non-zero.
+  """
+  st = os.statvfs(os.path.join(chroot_path, 'usr', 'bin', 'sudo'))
+  # The os.ST_NOSUID constant wasn't added until python-3.2.
+  # 0x2 is the ST_NOSUID flag bit; setuid binaries like sudo cannot work
+  # on a filesystem mounted nosuid, so fail early with a clear message.
+  if st.f_flag & 0x2:
+    cros_build_lib.Die('chroot cannot be in a nosuid mount')
+
+  cmd = ENTER_CHROOT + ['--chroot', chroot_path, '--cache_dir', cache_dir]
+  if chrome_root:
+    cmd.extend(['--chrome_root', chrome_root])
+  if chrome_root_mount:
+    cmd.extend(['--chrome_root_mount', chrome_root_mount])
+  if workspace:
+    cmd.extend(['--workspace_root', workspace])
+
+  if len(additional_args) > 0:
+    cmd.append('--')
+    cmd.extend(additional_args)
+
+  ret = cros_build_lib.RunCommand(cmd, print_cmd=False, error_code_ok=True,
+                                  mute_output=False)
+  # If we were in interactive mode, ignore the exit code; it'll be whatever
+  # they last ran w/in the chroot and won't matter to us one way or another.
+  # Note this does allow chroot entrance to fail and be ignored during
+  # interactive; this is however a rare case and the user will immediately
+  # see it (nor will they be checking the exit code manually).
+  if ret.returncode != 0 and additional_args:
+    raise SystemExit(ret.returncode)
+
+
+def _SudoCommand():
+  """Get the 'sudo' command, along with all needed environment variables.
+
+  Returns:
+    A list like ['sudo', 'VAR=value', ...] suitable for prefixing to an
+    argv that should run as root with the whitelisted environment.
+  """
+
+  # Pass in the ENVIRONMENT_WHITELIST and ENV_PASSTHRU variables so that
+  # scripts in the chroot know what variables to pass through.
+  cmd = ['sudo']
+  for key in constants.CHROOT_ENVIRONMENT_WHITELIST + constants.ENV_PASSTHRU:
+    value = os.environ.get(key)
+    if value is not None:
+      # sudo accepts leading VAR=value arguments to seed the environment.
+      cmd += ['%s=%s' % (key, value)]
+
+  # Pass in the path to the depot_tools so that users can access them from
+  # within the chroot.
+  cmd += ['DEPOT_TOOLS=%s' % constants.DEPOT_TOOLS_DIR]
+
+  return cmd
+
+
+def _ReportMissing(missing):
+  """Report missing utilities, then exit.
+
+  Args:
+    missing: List of missing utilities, as returned by
+             osutils.FindMissingBinaries.  If non-empty, will not return.
+
+  Raises:
+    SystemExit: If |missing| is non-empty.
+  """
+
+  if missing:
+    raise SystemExit(
+        'The tool(s) %s were not found.\n'
+        'Please install the appropriate package in your host.\n'
+        'Example(ubuntu):\n'
+        '  sudo apt-get install <packagename>'
+        % ', '.join(missing))
+
+
+def _ProxySimSetup(options):
+  """Set up proxy simulator, and return only in the child environment.
+
+  Forks: the child is placed in a fresh network namespace (reachable only
+  through the veth pair) and returns to the caller; the parent runs an
+  Apache forward proxy on the host side and waits for the child to exit.
+
+  TODO: Ideally, this should support multiple concurrent invocations of
+  cros_sdk --proxy-sim; currently, such invocations will conflict with each
+  other due to the veth device names and IP addresses.  Either this code would
+  need to generate fresh, unused names for all of these before forking, or it
+  would need to support multiple concurrent cros_sdk invocations sharing one
+  proxy and allowing it to exit when unused (without counting on any local
+  service-management infrastructure on the host).
+  """
+
+  may_need_mpm = False
+  apache_bin = osutils.Which('apache2')
+  if apache_bin is None:
+    apache_bin = osutils.Which('apache2', PROXY_APACHE_FALLBACK_PATH)
+    if apache_bin is None:
+      _ReportMissing(('apache2',))
+  else:
+    may_need_mpm = True
+
+  # Module names and .so names included for ease of grepping.
+  apache_modules = [('proxy_module', 'mod_proxy.so'),
+                    ('proxy_connect_module', 'mod_proxy_connect.so'),
+                    ('proxy_http_module', 'mod_proxy_http.so'),
+                    ('proxy_ftp_module', 'mod_proxy_ftp.so')]
+
+  # Find the apache module directory, and make sure it has the modules we need.
+  module_dirs = {}
+  for g in PROXY_APACHE_MODULE_GLOBS:
+    for mod, so in apache_modules:
+      for f in glob.glob(os.path.join(g, so)):
+        module_dirs.setdefault(os.path.dirname(f), []).append(so)
+  for apache_module_path, modules_found in module_dirs.iteritems():
+    if len(modules_found) == len(apache_modules):
+      break
+  else:
+    # Appease cros lint, which doesn't understand that this else block will not
+    # fall through to the subsequent code which relies on apache_module_path.
+    apache_module_path = None
+    raise SystemExit(
+        'Could not find apache module path containing all required modules: %s'
+        % ', '.join(so for mod, so in apache_modules))
+
+  def check_add_module(name):
+    # Register optional module |name| if its .so exists in the module dir.
+    so = 'mod_%s.so' % name
+    if os.access(os.path.join(apache_module_path, so), os.F_OK):
+      mod = '%s_module' % name
+      apache_modules.append((mod, so))
+      return True
+    return False
+
+  check_add_module('authz_core')
+  if may_need_mpm:
+    for mpm in PROXY_APACHE_MPMS:
+      if check_add_module('mpm_%s' % mpm):
+        break
+
+  veth_host = '%s-host' % PROXY_VETH_PREFIX
+  veth_guest = '%s-guest' % PROXY_VETH_PREFIX
+
+  # Set up pipes from parent to child and vice versa.
+  # The child writes a byte to the parent after calling unshare, so that the
+  # parent can then assign the guest end of the veth interface to the child's
+  # new network namespace.  The parent then writes a byte to the child after
+  # assigning the guest interface, so that the child can then configure that
+  # interface.  In both cases, if we get back an EOF when reading from the
+  # pipe, we assume the other end exited with an error message, so just exit.
+  parent_readfd, child_writefd = os.pipe()
+  child_readfd, parent_writefd = os.pipe()
+  SUCCESS_FLAG = '+'
+
+  pid = os.fork()
+  if not pid:
+    os.close(parent_readfd)
+    os.close(parent_writefd)
+
+    namespaces.Unshare(namespaces.CLONE_NEWNET)
+    os.write(child_writefd, SUCCESS_FLAG)
+    os.close(child_writefd)
+    if os.read(child_readfd, 1) != SUCCESS_FLAG:
+      # Parent failed; it will already have outputted an error message.
+      sys.exit(1)
+    os.close(child_readfd)
+
+    # Set up child side of the network.
+    commands = (
+        ('ip', 'link', 'set', 'up', 'lo'),
+        ('ip', 'address', 'add',
+         '%s/%u' % (PROXY_GUEST_IP, PROXY_NETMASK),
+         'dev', veth_guest),
+        ('ip', 'link', 'set', veth_guest, 'up'),
+    )
+    try:
+      for cmd in commands:
+        cros_build_lib.RunCommand(cmd, print_cmd=False)
+    except cros_build_lib.RunCommandError:
+      raise SystemExit('Running %r failed!' % (cmd,))
+
+    # Point the child's proxy env vars at the parent's Apache instance.
+    proxy_url = 'http://%s:%u' % (PROXY_HOST_IP, PROXY_PORT)
+    for proto in ('http', 'https', 'ftp'):
+      os.environ[proto + '_proxy'] = proxy_url
+    for v in ('all_proxy', 'RSYNC_PROXY', 'no_proxy'):
+      os.environ.pop(v, None)
+    return
+
+  os.close(child_readfd)
+  os.close(child_writefd)
+
+  if os.read(parent_readfd, 1) != SUCCESS_FLAG:
+    # Child failed; it will already have outputted an error message.
+    sys.exit(1)
+  os.close(parent_readfd)
+
+  # Set up parent side of the network.
+  uid = int(os.environ.get('SUDO_UID', '0'))
+  gid = int(os.environ.get('SUDO_GID', '0'))
+  if uid == 0 or gid == 0:
+    for username in PROXY_APACHE_FALLBACK_USERS:
+      try:
+        pwnam = pwd.getpwnam(username)
+        uid, gid = pwnam.pw_uid, pwnam.pw_gid
+        break
+      except KeyError:
+        continue
+    if uid == 0 or gid == 0:
+      raise SystemExit('Could not find a non-root user to run Apache as')
+
+  chroot_parent, chroot_base = os.path.split(options.chroot)
+  pid_file = os.path.join(chroot_parent, '.%s-apache-proxy.pid' % chroot_base)
+  log_file = os.path.join(chroot_parent, '.%s-apache-proxy.log' % chroot_base)
+
+  apache_directives = [
+      'User #%u' % uid,
+      'Group #%u' % gid,
+      'PidFile %s' % pid_file,
+      'ErrorLog %s' % log_file,
+      'Listen %s:%u' % (PROXY_HOST_IP, PROXY_PORT),
+      'ServerName %s' % PROXY_HOST_IP,
+      'ProxyRequests On',
+      'AllowCONNECT %s' % ' '.join(map(str, PROXY_CONNECT_PORTS)),
+  ] + [
+      'LoadModule %s %s' % (mod, os.path.join(apache_module_path, so))
+      for (mod, so) in apache_modules
+  ]
+  commands = (
+      ('ip', 'link', 'add', 'name', veth_host,
+       'type', 'veth', 'peer', 'name', veth_guest),
+      ('ip', 'address', 'add',
+       '%s/%u' % (PROXY_HOST_IP, PROXY_NETMASK),
+       'dev', veth_host),
+      ('ip', 'link', 'set', veth_host, 'up'),
+      ([apache_bin, '-f', '/dev/null'] +
+       [arg for d in apache_directives for arg in ('-C', d)]),
+      ('ip', 'link', 'set', veth_guest, 'netns', str(pid)),
+  )
+  cmd = None # Make cros lint happy.
+  try:
+    for cmd in commands:
+      cros_build_lib.RunCommand(cmd, print_cmd=False)
+  except cros_build_lib.RunCommandError:
+    # Clean up existing interfaces, if any.
+    cmd_cleanup = ('ip', 'link', 'del', veth_host)
+    try:
+      cros_build_lib.RunCommand(cmd_cleanup, print_cmd=False)
+    except cros_build_lib.RunCommandError:
+      logging.error('running %r failed', cmd_cleanup)
+    raise SystemExit('Running %r failed!' % (cmd,))
+  os.write(parent_writefd, SUCCESS_FLAG)
+  os.close(parent_writefd)
+
+  process_util.ExitAsStatus(os.waitpid(pid, 0)[1])
+
+
+def _ReExecuteIfNeeded(argv):
+  """Re-execute cros_sdk as root.
+
+  Also unshare the mount namespace so as to ensure that processes outside
+  the chroot can't mess with our mounts.
+
+  Args:
+    argv: The full argument vector to re-execute with (including argv[0]).
+  """
+  if os.geteuid() != 0:
+    # Not root yet: replace this process with 'sudo -- <argv>'.
+    cmd = _SudoCommand() + ['--'] + argv
+    os.execvp(cmd[0], cmd)
+  else:
+    # We must set up the cgroups mounts before we enter our own namespace.
+    # This way it is a shared resource in the root mount namespace.
+    cgroups.Cgroup.InitSystem()
+    namespaces.SimpleUnshare()
+
+
+def _CreateParser(sdk_latest_version, bootstrap_latest_version):
+  """Generate and return the parser with all the options."""
+  usage = ('usage: %(prog)s [options] '
+           '[VAR1=val1 ... VAR2=val2] [--] [command [args]]')
+  parser = commandline.ArgumentParser(usage=usage, description=__doc__,
+                                      caching=True)
+
+  # Global options.
+  default_chroot = os.path.join(constants.SOURCE_ROOT,
+                                constants.DEFAULT_CHROOT_DIR)
+  parser.add_argument(
+      '--chroot', dest='chroot', default=default_chroot, type='path',
+      help=('SDK chroot dir name [%s]' % constants.DEFAULT_CHROOT_DIR))
+
+  parser.add_argument('--chrome_root', type='path',
+                      help='Mount this chrome root into the SDK chroot')
+  parser.add_argument('--chrome_root_mount', type='path',
+                      help='Mount chrome into this path inside SDK chroot')
+  parser.add_argument('--nousepkg', action='store_true', default=False,
+                      help='Do not use binary packages when creating a chroot.')
+  parser.add_argument('-u', '--url', dest='sdk_url',
+                      help='Use sdk tarball located at this url. Use file:// '
+                           'for local files.')
+  parser.add_argument('--sdk-version',
+                      help=('Use this sdk version.  For prebuilt, current is %r'
+                            ', for bootstrapping it is %r.'
+                            % (sdk_latest_version, bootstrap_latest_version)))
+  parser.add_argument('--workspace',
+                      help='Workspace directory to mount into the chroot.')
+  parser.add_argument('commands', nargs=argparse.REMAINDER)
+
+  # SDK overlay tarball options (mutually exclusive).
+  group = parser.add_mutually_exclusive_group()
+  group.add_argument('--toolchains',
+                     help=('Comma-separated list of toolchains we expect to be '
+                           'using on the chroot. Used for downloading a '
+                           'corresponding SDK toolchains group (if one is '
+                           'found), which may speed up chroot initialization '
+                           'when building for the first time. Otherwise this '
+                           'has no effect and will not restrict the chroot in '
+                           'any way. Ignored if using --bootstrap.'))
+  group.add_argument('--board',
+                     help=('The board we intend to be building in the chroot. '
+                           'Used for deriving the list of required toolchains '
+                           '(see --toolchains).'))
+
+  # Commands.
+  group = parser.add_argument_group('Commands')
+  group.add_argument(
+      '--enter', action='store_true', default=False,
+      help='Enter the SDK chroot.  Implies --create.')
+  group.add_argument(
+      '--create', action='store_true', default=False,
+      help='Create the chroot only if it does not already exist.  '
+      'Implies --download.')
+  group.add_argument(
+      '--bootstrap', action='store_true', default=False,
+      help='Build everything from scratch, including the sdk.  '
+      'Use this only if you need to validate a change '
+      'that affects SDK creation itself (toolchain and '
+      'build are typically the only folk who need this).  '
+      'Note this will quite heavily slow down the build.  '
+      'This option implies --create --nousepkg.')
+  group.add_argument(
+      '-r', '--replace', action='store_true', default=False,
+      help='Replace an existing SDK chroot.  Basically an alias '
+      'for --delete --create.')
+  group.add_argument(
+      '--delete', action='store_true', default=False,
+      help='Delete the current SDK chroot if it exists.')
+  group.add_argument(
+      '--download', action='store_true', default=False,
+      help='Download the sdk.')
+  commands = group
+
+  # Namespace options.
+  group = parser.add_argument_group('Namespaces')
+  group.add_argument('--proxy-sim', action='store_true', default=False,
+                     help='Simulate a restrictive network requiring an outbound'
+                          ' proxy.')
+  group.add_argument('--no-ns-pid', dest='ns_pid',
+                     default=True, action='store_false',
+                     help='Do not create a new PID namespace.')
+
+  # Internal options.
+  group = parser.add_argument_group(
+      'Internal Chromium OS Build Team Options',
+      'Caution: these are for meant for the Chromium OS build team only')
+  group.add_argument('--buildbot-log-version', default=False,
+                     action='store_true',
+                     help='Log SDK version for buildbot consumption')
+
+  return parser, commands
+
+
+def main(argv):
+  """Main entry point: parse args, then download/create/delete/enter a chroot.
+
+  Args:
+    argv: Command line arguments (not including the program name).
+  """
+  conf = cros_build_lib.LoadKeyValueFile(
+      os.path.join(constants.SOURCE_ROOT, constants.SDK_VERSION_FILE),
+      ignore_missing=True)
+  sdk_latest_version = conf.get('SDK_LATEST_VERSION', '<unknown>')
+  bootstrap_latest_version = conf.get('BOOTSTRAP_LATEST_VERSION', '<unknown>')
+  parser, commands = _CreateParser(sdk_latest_version, bootstrap_latest_version)
+  options = parser.parse_args(argv)
+  chroot_command = options.commands
+
+  # Some sanity checks first, before we ask for sudo credentials.
+  cros_build_lib.AssertOutsideChroot()
+
+  host = os.uname()[4]
+  if host != 'x86_64':
+    parser.error(
+        "cros_sdk is currently only supported on x86_64; you're running"
+        " %s.  Please find a x86_64 machine." % (host,))
+
+  _ReportMissing(osutils.FindMissingBinaries(NEEDED_TOOLS))
+  if options.proxy_sim:
+    _ReportMissing(osutils.FindMissingBinaries(PROXY_NEEDED_TOOLS))
+
+  # From here on we run as root (and in fresh namespaces).
+  _ReExecuteIfNeeded([sys.argv[0]] + argv)
+  if options.ns_pid:
+    first_pid = namespaces.CreatePidNs()
+  else:
+    first_pid = None
+
+  # Expand out the aliases...
+  if options.replace:
+    options.delete = options.create = True
+
+  if options.bootstrap:
+    options.create = True
+
+  # If a command is not given, default to enter.
+  # pylint: disable=protected-access
+  # This _group_actions access sucks, but upstream decided to not include an
+  # alternative to optparse's option_list, and this is what they recommend.
+  options.enter |= not any(getattr(options, x.dest)
+                           for x in commands._group_actions)
+  # pylint: enable=protected-access
+  options.enter |= bool(chroot_command)
+
+  if options.enter and options.delete and not options.create:
+    parser.error("Trying to enter the chroot when --delete "
+                 "was specified makes no sense.")
+
+  # Finally, discern if we need to create the chroot.
+  chroot_exists = os.path.exists(options.chroot)
+  if options.create or options.enter:
+    # Only create if it's being wiped, or if it doesn't exist.
+    if not options.delete and chroot_exists:
+      options.create = False
+    else:
+      options.download = True
+
+  # Finally, flip create if necessary.
+  if options.enter:
+    options.create |= not chroot_exists
+
+  if not options.sdk_version:
+    sdk_version = (bootstrap_latest_version if options.bootstrap
+                   else sdk_latest_version)
+  else:
+    sdk_version = options.sdk_version
+  if options.buildbot_log_version:
+    logging.PrintBuildbotStepText(sdk_version)
+
+  # Based on selections, determine the tarball to fetch.
+  if options.sdk_url:
+    urls = [options.sdk_url]
+  elif options.bootstrap:
+    urls = GetStage3Urls(sdk_version)
+  else:
+    urls = GetArchStageTarballs(sdk_version)
+
+  # Get URLs for the toolchains overlay, if one is to be used.
+  toolchains_overlay_urls = None
+  if not options.bootstrap:
+    toolchains = None
+    if options.toolchains:
+      toolchains = options.toolchains.split(',')
+    elif options.board:
+      toolchains = toolchain.GetToolchainsForBoard(options.board).keys()
+
+    if toolchains:
+      toolchains_overlay_urls = GetToolchainsOverlayUrls(sdk_version,
+                                                         toolchains)
+
+  # The lock lives next to the chroot dir (as a dotfile) so it survives
+  # chroot deletion/recreation.
+  lock_path = os.path.dirname(options.chroot)
+  lock_path = os.path.join(
+      lock_path, '.%s_lock' % os.path.basename(options.chroot).lstrip('.'))
+  with cgroups.SimpleContainChildren('cros_sdk', pid=first_pid):
+    with locking.FileLock(lock_path, 'chroot lock') as lock:
+      toolchains_overlay_tarball = None
+
+      if options.proxy_sim:
+        _ProxySimSetup(options)
+
+      if options.delete and os.path.exists(options.chroot):
+        lock.write_lock()
+        DeleteChroot(options.chroot)
+
+      sdk_cache = os.path.join(options.cache_dir, 'sdks')
+      distfiles_cache = os.path.join(options.cache_dir, 'distfiles')
+      osutils.SafeMakedirsNonRoot(options.cache_dir)
+
+      # Migrate legacy cache dirs from the source root into cache_dir.
+      for target in (sdk_cache, distfiles_cache):
+        src = os.path.join(constants.SOURCE_ROOT, os.path.basename(target))
+        if not os.path.exists(src):
+          osutils.SafeMakedirs(target)
+          continue
+        lock.write_lock(
+            "Upgrade to %r needed but chroot is locked; please exit "
+            "all instances so this upgrade can finish." % src)
+        if not os.path.exists(src):
+          # Note that while waiting for the write lock, src may've vanished;
+          # it's a rare race during the upgrade process that's a byproduct
+          # of us avoiding taking a write lock to do the src check.  If we
+          # took a write lock for that check, it would effectively limit
+          # all cros_sdk for a chroot to a single instance.
+          osutils.SafeMakedirs(target)
+        elif not os.path.exists(target):
+          # Upgrade occurred, but a reversion, or something whacky
+          # occurred writing to the old location.  Wipe and continue.
+          os.rename(src, target)
+        else:
+          # Upgrade occurred once already, but either a reversion or
+          # some before/after separate cros_sdk usage is at play.
+          # Wipe and continue.
+          osutils.RmDir(src)
+
+      if options.download:
+        lock.write_lock()
+        sdk_tarball = FetchRemoteTarballs(
+            sdk_cache, urls, 'stage3' if options.bootstrap else 'SDK')
+        if toolchains_overlay_urls:
+          toolchains_overlay_tarball = FetchRemoteTarballs(
+              sdk_cache, toolchains_overlay_urls, 'SDK toolchains overlay',
+              allow_none=True)
+
+      if options.create:
+        lock.write_lock()
+        CreateChroot(options.chroot, sdk_tarball, toolchains_overlay_tarball,
+                     options.cache_dir,
+                     nousepkg=(options.bootstrap or options.nousepkg))
+
+      if options.enter:
+        lock.read_lock()
+        EnterChroot(options.chroot, options.cache_dir, options.chrome_root,
+                    options.chrome_root_mount, options.workspace,
+                    chroot_command)
diff --git a/scripts/cros_set_lsb_release.py b/scripts/cros_set_lsb_release.py
new file mode 100644
index 0000000..240abdb
--- /dev/null
+++ b/scripts/cros_set_lsb_release.py
@@ -0,0 +1,173 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility for setting the /etc/lsb-release file of an image."""
+
+from __future__ import print_function
+
+import getpass
+import os
+
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import image_lib
+
+
+# LSB keys:
+# Set google-specific version numbers:
+# CHROMEOS_RELEASE_BOARD is the target board identifier.
+# CHROMEOS_RELEASE_BRANCH_NUMBER is the Chrome OS branch number
+# CHROMEOS_RELEASE_BUILD_NUMBER is the Chrome OS build number
+# CHROMEOS_RELEASE_BUILD_TYPE is the type of build (official, from developers,
+# etc..)
+# CHROMEOS_RELEASE_CHROME_MILESTONE is the Chrome milestone (also named Chrome
+#   branch).
+# CHROMEOS_RELEASE_DESCRIPTION is the version displayed by Chrome; see
+#   chrome/browser/chromeos/chromeos_version_loader.cc.
+# CHROMEOS_RELEASE_NAME is a human readable name for the build.
+# CHROMEOS_RELEASE_PATCH_NUMBER is the patch number for the current branch.
+# CHROMEOS_RELEASE_TRACK and CHROMEOS_RELEASE_VERSION are used by the software
+#   update service.
+# TODO(skrul):  Remove GOOGLE_RELEASE once Chromium is updated to look at
+#   CHROMEOS_RELEASE_VERSION for UserAgent data.
+LSB_KEY_NAME = 'CHROMEOS_RELEASE_NAME'
+LSB_KEY_AUSERVER = 'CHROMEOS_AUSERVER'
+LSB_KEY_DEVSERVER = 'CHROMEOS_DEVSERVER'
+LSB_KEY_TRACK = 'CHROMEOS_RELEASE_TRACK'
+LSB_KEY_BUILD_TYPE = 'CHROMEOS_RELEASE_BUILD_TYPE'
+LSB_KEY_DESCRIPTION = 'CHROMEOS_RELEASE_DESCRIPTION'
+LSB_KEY_BOARD = 'CHROMEOS_RELEASE_BOARD'
+LSB_KEY_BRANCH_NUMBER = 'CHROMEOS_RELEASE_BRANCH_NUMBER'
+LSB_KEY_BUILD_NUMBER = 'CHROMEOS_RELEASE_BUILD_NUMBER'
+LSB_KEY_CHROME_MILESTONE = 'CHROMEOS_RELEASE_CHROME_MILESTONE'
+LSB_KEY_PATCH_NUMBER = 'CHROMEOS_RELEASE_PATCH_NUMBER'
+LSB_KEY_VERSION = 'CHROMEOS_RELEASE_VERSION'
+LSB_KEY_GOOGLE_RELEASE = 'GOOGLE_RELEASE'
+LSB_KEY_APPID_RELEASE = 'CHROMEOS_RELEASE_APPID'
+LSB_KEY_APPID_BOARD = 'CHROMEOS_BOARD_APPID'
+LSB_KEY_APPID_CANARY = 'CHROMEOS_CANARY_APPID'
+
+CANARY_APP_ID = "{90F229CE-83E2-4FAF-8479-E368A34938B1}"
+
+def _ParseArguments(argv):
+  """Parse and validate the command line arguments.
+
+  Args:
+    argv: Raw command line arguments.
+
+  Returns:
+    A frozen options namespace; dies if the sysroot does not exist or the
+    version string is empty.
+  """
+  parser = commandline.ArgumentParser(description=__doc__)
+
+  parser.add_argument('--app_id', default=None,
+                      help='The APP_ID to install.')
+  parser.add_argument('--board', help='The board name.', required=True)
+  parser.add_argument('--sysroot', required=True, type='path',
+                      help='The sysroot to install the lsb-release file into.')
+  parser.add_argument('--version_string', required=True,
+                      help='The image\'s version string.')
+  parser.add_argument('--auserver', default=None,
+                      help='The auserver url to use.')
+  parser.add_argument('--devserver', default=None,
+                      help='The devserver url to use.')
+  parser.add_argument('--official', action='store_true',
+                      help='Whether or not to populate with fields for an '
+                      'official image.')
+  parser.add_argument('--buildbot_build', default='N/A',
+                      help='The build number, for use with the continuous '
+                      'builder.')
+  parser.add_argument('--track', default='developer-build',
+                      help='The type of release track.')
+  parser.add_argument('--branch_number', default='0',
+                      help='The branch number.')
+  parser.add_argument('--build_number', default='0',
+                      help='The build number.')
+  parser.add_argument('--chrome_milestone', default='0',
+                      help='The Chrome milestone.')
+  parser.add_argument('--patch_number', default='0',
+                      help='The patch number for the given branch.')
+
+  opts = parser.parse_args(argv)
+
+  # If the auserver or devserver isn't specified or is set to blank, set it
+  # to the host's hostname.
+  hostname = cros_build_lib.GetHostName(fully_qualified=True)
+
+  if not opts.auserver:
+    opts.auserver = 'http://%s:8080/update' % hostname
+
+  if not opts.devserver:
+    opts.devserver = 'http://%s:8080' % hostname
+
+  opts.Freeze()
+
+  if not os.path.isdir(opts.sysroot):
+    cros_build_lib.Die('The target sysroot does not exist: %s' % opts.sysroot)
+
+  if not opts.version_string:
+    cros_build_lib.Die('version_string must not be empty.  Was '
+                       'chromeos_version.sh sourced correctly in the calling '
+                       'script?')
+
+  return opts
+
+
+def main(argv):
+  """Compose the lsb-release fields and write them into the sysroot.
+
+  Args:
+    argv: Command line arguments (not including the program name).
+  """
+  opts = _ParseArguments(argv)
+
+  # Fields common to every build type.
+  fields = {
+      LSB_KEY_NAME: 'Chromium OS',
+      LSB_KEY_AUSERVER: opts.auserver,
+      LSB_KEY_DEVSERVER: opts.devserver,
+  }
+
+  if opts.app_id is not None:
+    fields.update({
+        LSB_KEY_APPID_RELEASE: opts.app_id,
+        LSB_KEY_APPID_BOARD: opts.app_id,
+        LSB_KEY_APPID_CANARY: CANARY_APP_ID,
+    })
+
+  if opts.official:
+    # Official builds (i.e. buildbot).
+    track = 'dev-channel'
+    build_type = 'Official Build'
+    fields.update({
+        LSB_KEY_TRACK: track,
+        LSB_KEY_NAME: 'Chrome OS',
+        LSB_KEY_BUILD_TYPE: build_type,
+        LSB_KEY_DESCRIPTION: ('%s (%s) %s %s test' %
+                              (opts.version_string,
+                               build_type,
+                               track,
+                               opts.board)),
+        LSB_KEY_AUSERVER: 'https://tools.google.com/service/update2',
+        LSB_KEY_DEVSERVER: '',
+    })
+  elif getpass.getuser() == 'chrome-bot':
+    # Continuous builder.
+    build_type = 'Continuous Builder - Builder: %s' % opts.buildbot_build
+    fields.update({
+        LSB_KEY_TRACK: 'buildbot-build',
+        LSB_KEY_BUILD_TYPE: build_type,
+        LSB_KEY_DESCRIPTION: '%s (%s) %s' % (opts.version_string,
+                                             build_type,
+                                             opts.board),
+    })
+  else:
+    # Developer manual builds.
+    build_type = 'Developer Build - %s' % getpass.getuser()
+    fields.update({
+        LSB_KEY_TRACK: opts.track,
+        LSB_KEY_BUILD_TYPE: build_type,
+        LSB_KEY_DESCRIPTION: '%s (%s) %s %s' % (opts.version_string,
+                                                build_type,
+                                                opts.track,
+                                                opts.board),
+    })
+
+  # Version fields are identical regardless of build type.
+  fields.update({
+      LSB_KEY_BOARD: opts.board,
+      LSB_KEY_BRANCH_NUMBER: opts.branch_number,
+      LSB_KEY_BUILD_NUMBER: opts.build_number,
+      LSB_KEY_CHROME_MILESTONE: opts.chrome_milestone,
+      LSB_KEY_PATCH_NUMBER: opts.patch_number,
+      LSB_KEY_VERSION: opts.version_string,
+      LSB_KEY_GOOGLE_RELEASE: opts.version_string,
+  })
+
+  image_lib.WriteLsbRelease(opts.sysroot, fields)
diff --git a/scripts/cros_setup_toolchains.py b/scripts/cros_setup_toolchains.py
new file mode 100644
index 0000000..6cda170
--- /dev/null
+++ b/scripts/cros_setup_toolchains.py
@@ -0,0 +1,1135 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script manages the installed toolchains in the chroot."""
+
+from __future__ import print_function
+
+import copy
+import glob
+import json
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import toolchain
+from chromite.lib import workspace_lib
+
+# Needs to be after chromite imports.
+import lddtree
+
+if cros_build_lib.IsInsideChroot():
+  # Only import portage after we've checked that we're inside the chroot.
+  # Outside may not have portage, in which case the above may not happen.
+  # We'll check in main() if the operation needs portage.
+  # pylint: disable=F0401
+  import portage
+
+
+EMERGE_CMD = os.path.join(constants.CHROMITE_BIN_DIR, 'parallel_emerge')
+PACKAGE_STABLE = '[stable]'
+PACKAGE_NONE = '[none]'
+SRC_ROOT = os.path.realpath(constants.SOURCE_ROOT)
+
+CHROMIUMOS_OVERLAY = '/usr/local/portage/chromiumos'
+ECLASS_OVERLAY = '/usr/local/portage/eclass-overlay'
+STABLE_OVERLAY = '/usr/local/portage/stable'
+CROSSDEV_OVERLAY = '/usr/local/portage/crossdev'
+
+
+# TODO: The versions are stored here very much like in setup_board.
+# The goal for future is to differentiate these using a config file.
+# This is done essentially by messing with GetDesiredPackageVersions()
+DEFAULT_VERSION = PACKAGE_STABLE
+DEFAULT_TARGET_VERSION_MAP = {
+}
+TARGET_VERSION_MAP = {
+    'host' : {
+        'gdb' : PACKAGE_NONE,
+        'ex_go' : PACKAGE_NONE,
+    },
+}
+
+# Enable the Go compiler for these targets.
+TARGET_GO_ENABLED = (
+    'x86_64-cros-linux-gnu',
+    'i686-pc-linux-gnu',
+    'armv7a-cros-linux-gnueabi',
+)
+CROSSDEV_GO_ARGS = ['--ex-pkg', 'dev-lang/go']
+
+# Overrides for {gcc,binutils}-config, pick a package with particular suffix.
+CONFIG_TARGET_SUFFIXES = {
+    'binutils' : {
+        'armv6j-cros-linux-gnueabi': '-gold',
+        'armv7a-cros-linux-gnueabi': '-gold',
+        'i686-pc-linux-gnu' : '-gold',
+        'x86_64-cros-linux-gnu' : '-gold',
+    },
+}
+# Global per-run cache that will be filled ondemand in by GetPackageMap()
+# function as needed.
+target_version_map = {
+}
+
+
+class Crossdev(object):
+  """Class for interacting with crossdev and caching its output."""
+
+  # On-disk JSON cache of crossdev output; lives inside the crossdev
+  # overlay so it is removed along with the overlay.
+  _CACHE_FILE = os.path.join(CROSSDEV_OVERLAY, '.configured.json')
+  # In-memory cache, loaded from / saved to _CACHE_FILE.
+  _CACHE = {}
+
+  @classmethod
+  def Load(cls, reconfig):
+    """Load crossdev cache from disk.
+
+    Args:
+      reconfig: If True, ignore any existing on-disk cache and start fresh.
+    """
+    crossdev_version = GetStablePackageVersion('sys-devel/crossdev', True)
+    # The cache is only reused when it was written by the same crossdev
+    # version; otherwise it is discarded.
+    cls._CACHE = {'crossdev_version': crossdev_version}
+    if os.path.exists(cls._CACHE_FILE) and not reconfig:
+      with open(cls._CACHE_FILE) as f:
+        data = json.load(f)
+        if crossdev_version == data.get('crossdev_version'):
+          cls._CACHE = data
+
+  @classmethod
+  def Save(cls):
+    """Store crossdev cache on disk."""
+    # Save the cache from the successful run.
+    with open(cls._CACHE_FILE, 'w') as f:
+      json.dump(cls._CACHE, f)
+
+  @classmethod
+  def GetConfig(cls, target):
+    """Returns a map of crossdev provided variables about a tuple.
+
+    Args:
+      target: Target name, or 'host' for the native host tuple.
+
+    Returns:
+      A dict mapping crossdev config variable names to their values,
+      cached across calls.
+    """
+    CACHE_ATTR = '_target_tuple_map'
+
+    val = cls._CACHE.setdefault(CACHE_ATTR, {})
+    if not target in val:
+      # Find out the crossdev tuple.
+      target_tuple = target
+      if target == 'host':
+        target_tuple = toolchain.GetHostTuple()
+      # Build the crossdev command.
+      cmd = ['crossdev', '--show-target-cfg', '--ex-gdb']
+      if target in TARGET_GO_ENABLED:
+        cmd.extend(CROSSDEV_GO_ARGS)
+      cmd.extend(['-t', target_tuple])
+      # Catch output of crossdev.
+      out = cros_build_lib.RunCommand(cmd, print_cmd=False,
+                                      redirect_stdout=True).output.splitlines()
+      # List of tuples split at the first '=', converted into dict.
+      val[target] = dict([x.split('=', 1) for x in out])
+    return val[target]
+
+  @classmethod
+  def UpdateTargets(cls, targets, usepkg, config_only=False):
+    """Calls crossdev to initialize a cross target.
+
+    Args:
+      targets: The list of targets to initialize using crossdev.
+      usepkg: Copies the commandline opts.
+      config_only: Only (re)initialize the target configs; targets already
+        marked configured in the cache are skipped.
+    """
+    configured_targets = cls._CACHE.setdefault('configured_targets', [])
+
+    cmdbase = ['crossdev', '--show-fail-log']
+    cmdbase.extend(['--env', 'FEATURES=splitdebug'])
+    # Pick stable by default, and override as necessary.
+    cmdbase.extend(['-P', '--oneshot'])
+    if usepkg:
+      cmdbase.extend(['-P', '--getbinpkg',
+                      '-P', '--usepkgonly',
+                      '--without-headers'])
+
+    overlays = ' '.join((CHROMIUMOS_OVERLAY, ECLASS_OVERLAY, STABLE_OVERLAY))
+    cmdbase.extend(['--overlays', overlays])
+    cmdbase.extend(['--ov-output', CROSSDEV_OVERLAY])
+
+    for target in targets:
+      if config_only and target in configured_targets:
+        continue
+
+      cmd = cmdbase + ['-t', target]
+
+      for pkg in GetTargetPackages(target):
+        if pkg == 'gdb':
+          # Gdb does not have selectable versions.
+          cmd.append('--ex-gdb')
+        elif pkg == 'ex_go':
+          # Go does not have selectable versions.
+          cmd.extend(CROSSDEV_GO_ARGS)
+        else:
+          # The first of the desired versions is the "primary" one.
+          version = GetDesiredPackageVersions(target, pkg)[0]
+          cmd.extend(['--%s' % pkg, version])
+
+      cmd.extend(targets[target]['crossdev'].split())
+      if config_only:
+        # In this case we want to just quietly reinit
+        cmd.append('--init-target')
+        cros_build_lib.RunCommand(cmd, print_cmd=False, redirect_stdout=True)
+      else:
+        cros_build_lib.RunCommand(cmd)
+
+      configured_targets.append(target)
+
+
def GetPackageMap(target):
  """Compile the package -> desired-version map for |target|.

  Results are memoized in |target_version_map|, which is filled in lazily;
  the underlying *_VERSION_MAP constants are static data, layered purely
  for ease of configurability.

  Args:
    target: The target for which to return a version map

  Returns:
    A map between packages and desired versions in internal format
    (using the PACKAGE_* constants)
  """
  cached = target_version_map.get(target)
  if cached is not None:
    return cached

  # Start from a copy of the global defaults, then layer overrides on top.
  result = copy.copy(DEFAULT_TARGET_VERSION_MAP)
  overrides = TARGET_VERSION_MAP.get(target, {})
  for pkg in GetTargetPackages(target):
    if pkg in overrides:
      # Prefer any target-specific override.
      result[pkg] = overrides[pkg]
    else:
      # Finally, if not already set, fall back to a sane default.
      result.setdefault(pkg, DEFAULT_VERSION)

  target_version_map[target] = result
  return result
+
+
def GetTargetPackages(target):
  """Return the list of toolchain packages used for |target|."""
  conf = Crossdev.GetConfig(target)
  # A package is enabled only when its ${pkg}_pn variable is non-empty.
  candidates = conf['crosspkgs'].strip("'").split()
  return [pkg for pkg in candidates if conf[pkg + '_pn']]
+
+
+# Portage helper functions:
def GetPortagePackage(target, package):
  """Return the full portage atom (category/pn) for |package| on |target|."""
  conf = Crossdev.GetConfig(target)
  # The host uses per-package categories; cross targets share one category.
  category = (conf[package + '_category'] if target == 'host'
              else conf['category'])
  pn = conf[package + '_pn']
  # Both halves must be present to form a valid atom.
  assert category
  assert pn
  return '%s/%s' % (category, pn)
+
+
def IsPackageDisabled(target, package):
  """Return True if |package| is not used for |target|."""
  desired = GetDesiredPackageVersions(target, package)
  return desired == [PACKAGE_NONE]
+
+
def PortageTrees(root):
  """Return the portage trees for a given root."""
  if root == '/':
    return portage.db['/']
  # Portage requires the root path to always end in a slash.
  normalized = root.rstrip('/') + '/'
  trees = portage.create_trees(target_root=normalized, config_root=normalized)
  return trees[normalized]
+
+
def GetInstalledPackageVersions(atom, root='/'):
  """List the currently installed versions of |atom|.

  Args:
    atom: The atom to operate on (e.g. sys-devel/gcc)
    root: The root to check for installed packages.

  Returns:
    The list of versions of the package currently installed.
  """
  # pylint: disable=E1101
  matches = PortageTrees(root)['vartree'].dbapi.match(atom, use_cache=0)
  return [portage.versions.cpv_getversion(cpv) for cpv in matches]
+
+
def GetStablePackageVersion(atom, installed, root='/'):
  """Extract the current stable version for a given package.

  Args:
    atom: The target/package to operate on eg. i686-pc-linux-gnu,gcc
    installed: Whether we want installed packages or ebuilds
    root: The root to use when querying packages.

  Returns:
    A string containing the latest version, or None if none was found.
  """
  tree = 'vartree' if installed else 'porttree'
  # pylint: disable=E1101
  matches = PortageTrees(root)[tree].dbapi.match(atom, use_cache=0)
  best_cpv = portage.best(matches)
  if not best_cpv:
    return None
  return portage.versions.cpv_getversion(best_cpv)
+
+
def VersionListToNumeric(target, package, versions, installed, root='/'):
  """Resolve version keywords into concrete version numbers.

  Replaces PACKAGE_STABLE entries with the actual version number and
  drops PACKAGE_NONE entries.

  Args:
    target: The target to operate on (e.g. i686-pc-linux-gnu)
    package: The target/package to operate on (e.g. gcc)
    versions: List of versions to resolve
    installed: Query installed packages
    root: The install root to use; ignored if |installed| is False.

  Returns:
    List of purely numeric versions equivalent to argument
  """
  atom = GetPortagePackage(target, package)
  if not installed:
    # The install root only matters when querying installed packages.
    root = '/'
  resolved = []
  for ver in versions:
    if ver == PACKAGE_STABLE:
      resolved.append(GetStablePackageVersion(atom, installed, root=root))
    elif ver != PACKAGE_NONE:
      resolved.append(ver)
  return resolved
+
+
def GetDesiredPackageVersions(target, package):
  """Produce the list of desired versions for a target, package pair.

  The first version in the list is implicitly treated as primary, ie.
  the version that will be initialized by crossdev and selected.

  If the version is PACKAGE_STABLE, it really means the current version which
  is emerged by using the package atom with no particular version key.
  Since crossdev unmasks all packages by default, this will actually
  mean 'unstable' in most cases.

  Args:
    target: The target to operate on (e.g. i686-pc-linux-gnu)
    package: The target/package to operate on (e.g. gcc)

  Returns:
    A list composed of either a version string, PACKAGE_STABLE
  """
  packagemap = GetPackageMap(target)
  if package in packagemap:
    return [packagemap[package]]
  return []
+
+
def TargetIsInitialized(target):
  """Verify whether |target| has been fully initialized.

  This determines whether we have to call crossdev while emerging
  toolchain packages or can do it using emerge. Emerge is naturally
  preferred, because all packages can be updated in a single pass.

  Args:
    target: The target to operate on (e.g. i686-pc-linux-gnu)

  Returns:
    True if |target| is completely initialized, else False
  """
  try:
    # Every enabled package must have both an installed version and an
    # available ebuild version before we call the target initialized.
    for package in GetTargetPackages(target):
      atom = GetPortagePackage(target, package)
      # Do we even want this package && is it initialized?
      if IsPackageDisabled(target, package):
        continue
      if not (GetStablePackageVersion(atom, True) and
              GetStablePackageVersion(atom, False)):
        return False
    return True
  except cros_build_lib.RunCommandError:
    # Fails - The target has likely never been initialized before.
    return False
+
+
def RemovePackageMask(target):
  """Remove the package.mask file for |target| if one exists.

  The pre-existing package.mask files can mess with the keywords.

  Args:
    target: The target to operate on (e.g. i686-pc-linux-gnu)
  """
  osutils.SafeUnlink(
      os.path.join('/etc/portage/package.mask', 'cross-' + target))
+
+
+# Main functions performing the actual update steps.
def RebuildLibtool(root='/'):
  """Rebuild libtool if it references paths from a replaced gcc.

  Libtool hardcodes full paths to internal gcc files, so whenever we upgrade
  gcc, libtool will break.  We can't use binary packages either as those will
  most likely be compiled against the previous version of gcc.

  Args:
    root: The install root where we want libtool rebuilt.
  """
  needs_update = False
  with open(os.path.join(root, 'usr/bin/libtool')) as f:
    for line in f:
      # We only care about the line of the form:
      #   sys_lib_search_path_spec="..."
      # It holds a list of paths, one of which points into gcc's install.
      if not line.startswith('sys_lib_search_path_spec='):
        continue
      line = line.rstrip()
      for path in line.split('=', 1)[1].strip('"').split():
        if not os.path.exists(os.path.join(root, path.lstrip(os.path.sep))):
          print('Rebuilding libtool after gcc upgrade')
          print(' %s' % line)
          print(' missing path: %s' % path)
          needs_update = True
          break
      if needs_update:
        break

  if needs_update:
    cmd = [EMERGE_CMD, '--oneshot']
    if root != '/':
      cmd.extend(['--sysroot=%s' % root, '--root=%s' % root])
    cmd.append('sys-devel/libtool')
    cros_build_lib.RunCommand(cmd)
+
+
def UpdateTargets(targets, usepkg, root='/'):
  """Figure out which toolchain packages need updating and emerge them.

  Args:
    targets: The list of targets to update
    usepkg: Copies the commandline option
    root: The install root in which we want packages updated.

  Returns:
    True if any packages were updated, False otherwise.
  """
  # Remove keyword files created by old versions of cros_setup_toolchains.
  osutils.SafeUnlink('/etc/portage/package.keywords/cross-host')

  # For each target, we do two things. Figure out the list of updates,
  # and figure out the appropriate keywords/masks. Crossdev will initialize
  # these, but they need to be regenerated on every update.
  print('Determining required toolchain updates...')
  mergemap = {}
  for target in targets:
    # Record the highest needed version for each target, for masking purposes.
    RemovePackageMask(target)
    for package in GetTargetPackages(target):
      if IsPackageDisabled(target, package):
        continue
      # Portage name for the package.
      pkg = GetPortagePackage(target, package)
      current = GetInstalledPackageVersions(pkg, root=root)
      desired = GetDesiredPackageVersions(target, package)
      desired_num = VersionListToNumeric(target, package, desired, False)
      mergemap[pkg] = set(desired_num).difference(current)

  # Flatten the map into the emerge argument list, skipping "none" markers.
  packages = [pkg for pkg, vers in mergemap.items()
              for ver in vers if ver != PACKAGE_NONE]

  if not packages:
    print('Nothing to update!')
    return False

  print('Updating packages:')
  print(packages)

  cmd = [EMERGE_CMD, '--oneshot', '--update']
  if usepkg:
    cmd.extend(['--getbinpkg', '--usepkgonly'])
  if root != '/':
    cmd.extend(['--sysroot=%s' % root, '--root=%s' % root])

  cmd.extend(packages)
  cros_build_lib.RunCommand(cmd)
  return True
+
+
def CleanTargets(targets, root='/'):
  """Unmerge installed package versions that are assumed unnecessary.

  Args:
    targets: The list of targets to clean up.
    root: The install root in which we want packages cleaned up.
  """
  unmergemap = {}
  for target in targets:
    for package in GetTargetPackages(target):
      if IsPackageDisabled(target, package):
        continue
      pkg = GetPortagePackage(target, package)
      current = GetInstalledPackageVersions(pkg, root=root)
      desired = GetDesiredPackageVersions(target, package)
      # NOTE: This refers to installed packages (vartree) rather than the
      # Portage version (porttree and/or bintree) when determining the current
      # version. While this isn't the most accurate thing to do, it is probably
      # a good simple compromise, which should have the desired result of
      # uninstalling everything but the latest installed version. In
      # particular, using the bintree (--usebinpkg) requires a non-trivial
      # binhost sync and is probably more complex than useful.
      desired_num = VersionListToNumeric(target, package, desired, True)
      if not set(desired_num).issubset(current):
        print('Error detecting stable version for %s, skipping clean!' % pkg)
        return
      unmergemap[pkg] = set(current).difference(desired_num)

  # Cleaning doesn't care about consistency and rebuilding package.* files.
  packages = []
  for pkg, vers in unmergemap.iteritems():
    # Never unmerge live (9999) ebuilds.
    packages.extend('=%s-%s' % (pkg, ver) for ver in vers if ver != '9999')

  if not packages:
    print('Nothing to clean!')
    return

  print('Cleaning packages:')
  print(packages)
  cmd = [EMERGE_CMD, '--unmerge']
  if root != '/':
    cmd.extend(['--sysroot=%s' % root, '--root=%s' % root])
  cmd.extend(packages)
  cros_build_lib.RunCommand(cmd)
+
+
def SelectActiveToolchains(targets, suffixes, root='/'):
  """Run gcc-config and binutils-config to select the desired versions.

  Args:
    targets: The targets to select
    suffixes: Optional target-specific hacks
    root: The root where we want to select toolchain versions.
  """
  for package in ['gcc', 'binutils']:
    for target in targets:
      # Pick the first version in the numbered list as the selected one.
      versions = VersionListToNumeric(
          target, package, GetDesiredPackageVersions(target, package), True,
          root=root)
      # *-config does not play revisions, strip them, keep just PV.
      pv = portage.versions.pkgsplit('%s-%s' % (package, versions[0]))[1]

      if target == 'host':
        # *-config is the only tool treating host identically (by tuple).
        target = toolchain.GetHostTuple()

      # And finally, attach target to it.
      desired = '%s-%s' % (target, pv)

      # Target specific hacks.
      if package in suffixes and target in suffixes[package]:
        desired += suffixes[package][target]

      extra_env = {'CHOST': target}
      if root != '/':
        extra_env['ROOT'] = root
      result = cros_build_lib.RunCommand(
          ['%s-config' % package, '-c', target],
          print_cmd=False, redirect_stdout=True, extra_env=extra_env)
      current = result.output.splitlines()[0]

      # Do not reconfig when the current is live or nothing needs to be done.
      extra_env = {'ROOT': root} if root != '/' else None
      if current not in (desired, '9999'):
        cros_build_lib.RunCommand(
            ['%s-config' % package, desired],
            print_cmd=False, extra_env=extra_env)
+
+
def ExpandTargets(targets_wanted):
  """Expand any possible toolchain aliases into full targets

  This will expand 'all' and 'sdk' into the respective toolchain tuples.

  Args:
    targets_wanted: The targets specified by the user.

  Returns:
    Dictionary of concrete targets and their toolchain tuples.

  Raises:
    ValueError: If a requested target name does not exist.
  """
  targets_wanted = set(targets_wanted)
  if targets_wanted in (set(['boards']), set(['bricks'])):
    # Only pull targets from the included boards/bricks.
    return {}

  all_targets = toolchain.GetAllTargets()
  if targets_wanted == set(['all']):
    return all_targets
  if targets_wanted == set(['sdk']):
    # Filter out all the non-sdk toolchains as we don't want to mess
    # with those in all of our builds.
    return toolchain.FilterToolchains(all_targets, 'sdk', True)

  # Verify user input.
  nonexistent = targets_wanted.difference(all_targets)
  if nonexistent:
    # Use % formatting (not a (msg, args) tuple) so the exception message
    # actually includes the offending target names.
    raise ValueError('Invalid targets: %s' % ','.join(nonexistent))
  return {t: all_targets[t] for t in targets_wanted}
+
+
def UpdateToolchains(usepkg, deleteold, hostonly, reconfig,
                     targets_wanted, boards_wanted, bricks_wanted, root='/'):
  """Performs all steps to create a synchronized toolchain environment.

  High-level flow: expand the wanted targets, initialize uninitialized
  cross targets via crossdev, emerge package updates for everything
  (including the host), select active toolchain versions, optionally
  unmerge stale versions, and finally rebuild libtool if gcc moved.

  Args:
    usepkg: Use prebuilt packages
    deleteold: Unmerge deprecated packages
    hostonly: Only setup the host toolchain
    reconfig: Reload crossdev config and reselect toolchains
    targets_wanted: All the targets to update
    boards_wanted: Load targets from these boards
    bricks_wanted: Load targets from these bricks
    root: The root in which to install the toolchains.
  """
  targets, crossdev_targets, reconfig_targets = {}, {}, {}
  if not hostonly:
    # For hostonly, we can skip most of the below logic, much of which won't
    # work on bare systems where this is useful.
    targets = ExpandTargets(targets_wanted)

    # Now re-add any targets that might be from this board/brick. This is to
    # allow unofficial boards to declare their own toolchains.
    for board in boards_wanted:
      targets.update(toolchain.GetToolchainsForBoard(board))
    for brick in bricks_wanted:
      targets.update(toolchain.GetToolchainsForBrick(brick))

    # First check and initialize all cross targets that need to be.
    # Split targets into those needing a full crossdev init and those that
    # only need their config refreshed.
    for target in targets:
      if TargetIsInitialized(target):
        reconfig_targets[target] = targets[target]
      else:
        crossdev_targets[target] = targets[target]
    if crossdev_targets:
      print('The following targets need to be re-initialized:')
      print(crossdev_targets)
      Crossdev.UpdateTargets(crossdev_targets, usepkg)
    # Those that were not initialized may need a config update.
    Crossdev.UpdateTargets(reconfig_targets, usepkg, config_only=True)

  # We want host updated.
  # (Added after the cross-target init since the host needs no crossdev.)
  targets['host'] = {}

  # Now update all packages.  Only reselect toolchain versions when
  # something changed (updates happened, new targets were initialized, or
  # the caller explicitly asked to reconfigure).
  if UpdateTargets(targets, usepkg, root=root) or crossdev_targets or reconfig:
    SelectActiveToolchains(targets, CONFIG_TARGET_SUFFIXES, root=root)

  if deleteold:
    CleanTargets(targets, root=root)

  # Now that we've cleared out old versions, see if we need to rebuild
  # anything.  Can't do this earlier as it might not be broken.
  RebuildLibtool(root=root)
+
+
def ShowConfig(name):
  """Print the toolchain tuples used by |name|

  Args:
    name: The board name or brick locator to query.
  """
  if workspace_lib.IsLocator(name):
    toolchains = toolchain.GetToolchainsForBrick(name)
  else:
    toolchains = toolchain.GetToolchainsForBoard(name)
  # Make sure we display the default toolchain first.
  defaults = toolchain.FilterToolchains(toolchains, 'default', True).keys()
  others = toolchain.FilterToolchains(toolchains, 'default', False).keys()
  print(','.join(defaults + others))
+
+
def GeneratePathWrapper(root, wrappath, path):
  """Generate a shell script to execute another shell script

  Since we can't symlink a wrapped ELF (see GenerateLdsoWrapper) because the
  argv[0] won't be pointing to the correct path, generate a shell script that
  just executes another program with its full path.

  Args:
    root: The root tree to generate scripts inside of
    wrappath: The full path (inside |root|) to create the wrapper
    path: The target program which this wrapper will execute
  """
  relroot = os.path.relpath('/', os.path.dirname(wrappath))
  # NB: the %% below yields a literal % in the emitted script ("${base%/*}",
  # i.e. dirname of the resolved wrapper path).
  wrapper = """#!/bin/sh
base=$(realpath "$0")
basedir=${base%%/*}
exec "${basedir}/%(relroot)s%(path)s" "$@"
""" % {'relroot': relroot, 'path': path}
  root_wrapper = root + wrappath
  if os.path.islink(root_wrapper):
    # Replace any existing symlink with a regular file.
    os.unlink(root_wrapper)
  else:
    osutils.SafeMakedirs(os.path.dirname(root_wrapper))
  osutils.WriteFile(root_wrapper, wrapper)
  os.chmod(root_wrapper, 0o755)
+
+
def FileIsCrosSdkElf(elf):
  """Determine if |elf| is an ELF that we execute in the cros_sdk

  We don't need this to be perfect, just quick.  It makes sure the ELF
  is a 64bit LSB x86_64 ELF.  That is the native type of cros_sdk.

  Args:
    elf: The file to check

  Returns:
    True if we think |elf| is a native ELF
  """
  with open(elf) as f:
    data = f.read(20)
  # Bail early on anything without the ELF magic number.
  if data[0:4] != '\x7fELF':
    return False
  # Check EI_CLASS (64bit), EI_DATA (LSB), and e_machine (x86_64).
  return (data[4] == '\x02' and
          data[5] == '\x01' and
          data[18] == '\x3e')
+
+
def IsPathPackagable(ptype, path):
  """Should the specified file be included in a toolchain package?

  We only need to handle files as we'll create dirs as we need them.

  Further, trim files that won't be useful:
   - non-english translations (.mo) since it'd require env vars
   - debug files since these are for the host compiler itself
   - info/man pages as they're big, and docs are online, and the
     native docs should work fine for the most part (`man gcc`)

  Args:
    ptype: A string describing the path type (i.e. 'file' or 'dir' or 'sym')
    path: The full path to inspect

  Returns:
    True if we want to include this path in the package
  """
  if ptype == 'dir':
    return False
  if path.startswith('/usr/lib/debug/'):
    return False
  if os.path.splitext(path)[1] == '.mo':
    return False
  if '/man/' in path or '/info/' in path:
    return False
  return True
+
+
def ReadlinkRoot(path, root):
  """Like os.readlink(), but relative to a |root|

  Follows chains of symlinks until a non-link is reached, resolving each
  link target relative to its containing directory under |root|.

  Args:
    path: The symlink to read
    root: The path to use for resolving absolute symlinks

  Returns:
    A fully resolved symlink path
  """
  resolved = path
  while os.path.islink(root + resolved):
    target = os.readlink(root + resolved)
    resolved = os.path.join(os.path.dirname(resolved), target)
  return resolved
+
+
def _GetFilesForTarget(target, root='/'):
  """Locate all the files to package for |target|

  This does not cover ELF dependencies.

  Args:
    target: The toolchain target name
    root: The root path to pull all packages from

  Returns:
    A tuple of a set of all packable paths, and a set of all paths which
    are also native ELFs
  """
  paths = set()
  elfs = set()

  # Find all the files owned by the packages for this target.
  for pkg in GetTargetPackages(target):
    # Ignore packages that are part of the target sysroot.
    if pkg in ('kernel', 'libc'):
      continue

    # Skip Go compiler from redistributable packages.
    # The "go" executable has GOROOT=/usr/lib/go/${CTARGET} hardcoded
    # into it. Due to this, the toolchain cannot be unpacked anywhere
    # else and be readily useful. To enable packaging Go, we need to:
    # -) Tweak the wrappers/environment to override GOROOT
    #    automatically based on the unpack location.
    # -) Make sure the ELF dependency checking and wrapping logic
    #    below skips the Go toolchain executables and libraries.
    # -) Make sure the packaging process maintains the relative
    #    timestamps of precompiled standard library packages.
    #    (see dev-lang/go ebuild for details).
    if pkg == 'ex_go':
      continue

    atom = GetPortagePackage(target, pkg)
    cat, pn = atom.split('/')
    ver = GetInstalledPackageVersions(atom, root=root)[0]
    logging.info('packaging %s-%s', atom, ver)

    # Walk the package's CONTENTS via the vardb link.
    # pylint: disable=E1101
    dblink = portage.dblink(cat, pn + '-' + ver, myroot=root,
                            settings=portage.settings)
    contents = dblink.getcontents()
    for obj, entry in contents.iteritems():
      ptype = entry[0]
      if not IsPathPackagable(ptype, obj):
        continue

      if ptype == 'obj':
        # For native ELFs, we need to pull in their dependencies too.
        if FileIsCrosSdkElf(obj):
          elfs.add(obj)
      paths.add(obj)

  return paths, elfs
+
+
def _BuildInitialPackageRoot(output_dir, paths, elfs, ldpaths,
                             path_rewrite_func=lambda x: x, root='/'):
  """Link in all packable files and their runtime dependencies

  This also wraps up executable ELFs with helper scripts.

  Files are hardlinked (not copied) from |root| into |output_dir|; absolute
  symlinks are rewritten to relative ones so the tree is relocatable.

  Args:
    output_dir: The output directory to store files
    paths: All the files to include
    elfs: All the files which are ELFs (a subset of |paths|)
    ldpaths: A dict of static ldpath information
    path_rewrite_func: User callback to rewrite paths in output_dir
    root: The root path to pull all packages/files from
  """
  # Link in all the files.
  sym_paths = []
  for path in paths:
    new_path = path_rewrite_func(path)
    dst = output_dir + new_path
    osutils.SafeMakedirs(os.path.dirname(dst))

    # Is this a symlink which we have to rewrite or wrap?
    # Delay wrap check until after we have created all paths.
    src = root + path
    if os.path.islink(src):
      tgt = os.readlink(src)
      # Only symlinks whose target contains a path separator need special
      # handling; same-directory links can simply be hardlinked below.
      if os.path.sep in tgt:
        # Remember the fully-resolved target so we can decide later whether
        # this symlink points at an ELF that needs a wrapper script.
        sym_paths.append((new_path, lddtree.normpath(ReadlinkRoot(src, root))))

        # Rewrite absolute links to relative and then generate the symlink
        # ourselves.  All other symlinks can be hardlinked below.
        if tgt[0] == '/':
          tgt = os.path.relpath(tgt, os.path.dirname(new_path))
          os.symlink(tgt, dst)
          continue

    # Hardlink the file (or relative symlink) into the output tree.
    os.link(src, dst)

  # Now see if any of the symlinks need to be wrapped.
  for sym, tgt in sym_paths:
    if tgt in elfs:
      GeneratePathWrapper(output_dir, sym, tgt)

  # Locate all the dependencies for all the ELFs.  Stick them all in the
  # top level "lib" dir to make the wrapper simpler.  This exact path does
  # not matter since we execute ldso directly, and we tell the ldso the
  # exact path to search for its libraries.
  libdir = os.path.join(output_dir, 'lib')
  osutils.SafeMakedirs(libdir)
  donelibs = set()
  for elf in elfs:
    e = lddtree.ParseELF(elf, root=root, ldpaths=ldpaths)
    interp = e['interp']
    if interp:
      # Generate a wrapper if it is executable.
      # The wrapper invokes the bundled ldso with the ELF's search paths.
      interp = os.path.join('/lib', os.path.basename(interp))
      lddtree.GenerateLdsoWrapper(output_dir, path_rewrite_func(elf), interp,
                                  libpaths=e['rpath'] + e['runpath'])

    for lib, lib_data in e['libs'].iteritems():
      # Each library only needs to be linked in once, keyed by SONAME.
      if lib in donelibs:
        continue

      src = path = lib_data['path']
      if path is None:
        logging.warning('%s: could not locate %s', elf, lib)
        continue
      donelibs.add(lib)

      # Needed libs are the SONAME, but that is usually a symlink, not a
      # real file.  So link in the target rather than the symlink itself.
      # We have to walk all the possible symlinks (SONAME could point to a
      # symlink which points to a symlink), and we have to handle absolute
      # ourselves (since we have a "root" argument).
      dst = os.path.join(libdir, os.path.basename(path))
      src = ReadlinkRoot(src, root)

      os.link(root + src, dst)
+
+
def _EnvdGetVar(envd, var):
  """Given a Gentoo env.d file, extract a var from it

  Args:
    envd: The env.d file to load (may be a glob path)
    var: The var to extract

  Returns:
    The value of |var|
  """
  matches = glob.glob(envd)
  # The glob must resolve to exactly one env.d file.
  assert len(matches) == 1, '%s: should have exactly 1 env.d file' % envd
  return cros_build_lib.LoadKeyValueFile(matches[0])[var]
+
+
def _ProcessBinutilsConfig(target, output_dir):
  """Do what binutils-config would have done

  Wraps the target's binutils programs and creates the lib symlink that
  binutils-config would normally manage, preferring the gold-enabled
  binutils build when one is present in the unpacked tree.
  """
  binpath = os.path.join('/bin', target + '-')

  # Locate the bin dir holding the gold linker.
  binutils_bin_path = os.path.join(output_dir, 'usr', toolchain.GetHostTuple(),
                                   target, 'binutils-bin')
  globpath = os.path.join(binutils_bin_path, '*-gold')
  srcpath = glob.glob(globpath)
  if not srcpath:
    # Maybe this target doesn't support gold.
    # Sanity-check that the single non-gold dir looks like a real binutils
    # install (ld + ld.bfd present, no stray ld.gold) before proceeding.
    globpath = os.path.join(binutils_bin_path, '*')
    srcpath = glob.glob(globpath)
    assert len(srcpath) == 1, ('%s: matched more than one path (but not *-gold)'
                               % globpath)
    srcpath = srcpath[0]
    ld_path = os.path.join(srcpath, 'ld')
    assert os.path.exists(ld_path), '%s: linker is missing!' % ld_path
    ld_path = os.path.join(srcpath, 'ld.bfd')
    assert os.path.exists(ld_path), '%s: linker is missing!' % ld_path
    ld_path = os.path.join(srcpath, 'ld.gold')
    assert not os.path.exists(ld_path), ('%s: exists, but gold dir does not!'
                                         % ld_path)

    # Nope, no gold support to be found.
    gold_supported = False
    logging.warning('%s: binutils lacks support for the gold linker', target)
  else:
    assert len(srcpath) == 1, '%s: did not match exactly 1 path' % globpath
    gold_supported = True
    srcpath = srcpath[0]

  # Strip |output_dir| so srcpath is the in-tree (package-relative) path.
  srcpath = srcpath[len(output_dir):]
  gccpath = os.path.join('/usr', 'libexec', 'gcc')
  for prog in os.listdir(output_dir + srcpath):
    # Skip binaries already wrapped.
    if not prog.endswith('.real'):
      # Expose each tool both as <target>-<prog> and under gcc's libexec dir.
      GeneratePathWrapper(output_dir, binpath + prog,
                          os.path.join(srcpath, prog))
      GeneratePathWrapper(output_dir, os.path.join(gccpath, prog),
                          os.path.join(srcpath, prog))

  # Create the lib dir symlink binutils-config would have made, pointing at
  # the LIBPATH recorded in the (gold-suffixed, if supported) env.d file.
  libpath = os.path.join('/usr', toolchain.GetHostTuple(), target, 'lib')
  envd = os.path.join(output_dir, 'etc', 'env.d', 'binutils', '*')
  if gold_supported:
    envd += '-gold'
  srcpath = _EnvdGetVar(envd, 'LIBPATH')
  os.symlink(os.path.relpath(srcpath, os.path.dirname(libpath)),
             output_dir + libpath)
+
+
def _ProcessGccConfig(target, output_dir):
  """Do what gcc-config would have done

  Wraps the target's gcc programs so they work from the unpacked tree.

  Returns:
    The GCC_PATH value from the gcc env.d file (package-relative).
  """
  binpath = '/bin'
  envd = os.path.join(output_dir, 'etc', 'env.d', 'gcc', '*')
  srcpath = _EnvdGetVar(envd, 'GCC_PATH')
  for prog in os.listdir(output_dir + srcpath):
    # Skip binaries already wrapped, and anything not belonging to |target|.
    if prog.endswith('.real') or prog.endswith('.elf'):
      continue
    if not prog.startswith(target):
      continue
    GeneratePathWrapper(output_dir, os.path.join(binpath, prog),
                        os.path.join(srcpath, prog))
  return srcpath
+
+
def _ProcessSysrootWrappers(_target, output_dir, srcpath):
  """Remove chroot-specific things from our sysroot wrappers"""
  # Disable ccache since we know it won't work outside of chroot.
  pattern = os.path.join(output_dir + srcpath, 'sysroot_wrapper*')
  for sysroot_wrapper in glob.glob(pattern):
    lines = osutils.ReadFile(sysroot_wrapper).splitlines()
    for idx, line in enumerate(lines):
      if '@CCACHE_DEFAULT@' in line:
        lines[idx] = 'use_ccache = False'
        break
    # Can't update the wrapper in place since it's a hardlink to a file in /.
    os.unlink(sysroot_wrapper)
    osutils.WriteFile(sysroot_wrapper, '\n'.join(lines))
    os.chmod(sysroot_wrapper, 0o755)
+
+
def _ProcessDistroCleanups(target, output_dir):
  """Clean up the tree and remove all distro-specific requirements

  Args:
    target: The toolchain target name
    output_dir: The output directory to clean up
  """
  # Replicate what the *-config tools would normally have set up.
  _ProcessBinutilsConfig(target, output_dir)
  gcc_path = _ProcessGccConfig(target, output_dir)
  _ProcessSysrootWrappers(target, output_dir, gcc_path)

  # The etc/ data (env.d files etc.) only makes sense in a full install.
  osutils.RmDir(os.path.join(output_dir, 'etc'))
+
+
def CreatePackagableRoot(target, output_dir, ldpaths, root='/'):
  """Setup a tree from the packages for the specified target

  This populates a path with all the files from toolchain packages so that
  a tarball can easily be generated from the result.

  Args:
    target: The target to create a packagable root from
    output_dir: The output directory to place all the files
    ldpaths: A dict of static ldpath information
    root: The root path to pull all packages/files from
  """
  # Find all the files owned by the packages for this target.
  paths, elfs = _GetFilesForTarget(target, root=root)

  # Link in all the package's files, any ELF dependencies, and wrap any
  # executable ELFs with helper scripts.
  def MoveUsrBinToBin(path):
    """Move /usr/bin to /bin so people can just use that toplevel dir"""
    if path.startswith('/usr/bin/'):
      return path[4:]
    return path

  _BuildInitialPackageRoot(output_dir, paths, elfs, ldpaths,
                           path_rewrite_func=MoveUsrBinToBin, root=root)

  # The packages, when part of the normal distro, have helper scripts
  # that setup paths and such.  Since we are making this standalone, we
  # need to preprocess all that ourselves.
  _ProcessDistroCleanups(target, output_dir)
+
+
def CreatePackages(targets_wanted, output_dir, root='/'):
  """Create redistributable cross-compiler packages for the specified targets

  This creates toolchain packages that should be usable in conjunction with
  a downloaded sysroot (created elsewhere).

  Tarballs (one per target) will be created in $PWD.

  Args:
    targets_wanted: The targets to package up.
    output_dir: The directory to put the packages in.
    root: The root path to pull all packages/files from.
  """
  logging.info('Writing tarballs to %s', output_dir)
  osutils.SafeMakedirs(output_dir)
  ldpaths = lddtree.LoadLdpaths(root)
  targets = ExpandTargets(targets_wanted)

  with osutils.TempDir() as tempdir:
    # We have to split the root generation from the compression stages.  This is
    # because we hardlink in all the files (to avoid overhead of reading/writing
    # the copies multiple times).  But tar gets angry if a file's hardlink count
    # changes from when it starts reading a file to when it finishes.
    with parallel.BackgroundTaskRunner(CreatePackagableRoot) as queue:
      for target in targets:
        queue.put([target, os.path.join(tempdir, target), ldpaths, root])

    # Build the tarballs.
    with parallel.BackgroundTaskRunner(cros_build_lib.CreateTarball) as queue:
      for target in targets:
        queue.put([os.path.join(output_dir, target + '.tar.xz'),
                   os.path.join(tempdir, target)])
+
+
def main(argv):
  """Entry point: update, inspect, or package Chrome OS toolchains.

  Three mutually-distinct modes, selected by flags:
    --show-board-cfg: print toolchain tuples for a board/brick (read-only).
    --create-packages: build redistributable toolchain tarballs.
    default: install/update toolchains in the chroot (requires root).

  Args:
    argv: Command line arguments.

  Returns:
    0 on success.
  """
  parser = commandline.ArgumentParser(description=__doc__)
  parser.add_argument('-u', '--nousepkg',
                      action='store_false', dest='usepkg', default=True,
                      help='Use prebuilt packages if possible')
  parser.add_argument('-d', '--deleteold',
                      action='store_true', dest='deleteold', default=False,
                      help='Unmerge deprecated packages')
  parser.add_argument('-t', '--targets',
                      dest='targets', default='sdk',
                      help="Comma separated list of tuples. Special keywords "
                           "'host', 'sdk', 'boards', 'bricks' and 'all' are "
                           "allowed. Defaults to 'sdk'.")
  parser.add_argument('--include-boards', default='', metavar='BOARDS',
                      help='Comma separated list of boards whose toolchains we '
                           'will always include. Default: none')
  parser.add_argument('--include-bricks', default='', metavar='BRICKS',
                      help='Comma separated list of bricks whose toolchains we '
                           'will always include. Default: none')
  parser.add_argument('--hostonly',
                      dest='hostonly', default=False, action='store_true',
                      help='Only setup the host toolchain. '
                           'Useful for bootstrapping chroot')
  parser.add_argument('--show-board-cfg', '--show-cfg',
                      dest='cfg_name', default=None,
                      help='Board or brick to list toolchains tuples for')
  parser.add_argument('--create-packages',
                      action='store_true', default=False,
                      help='Build redistributable packages')
  parser.add_argument('--output-dir', default=os.getcwd(), type='path',
                      help='Output directory')
  parser.add_argument('--reconfig', default=False, action='store_true',
                      help='Reload crossdev config and reselect toolchains')
  parser.add_argument('--sysroot', type='path',
                      help='The sysroot in which to install the toolchains')

  options = parser.parse_args(argv)
  options.Freeze()

  # Figure out what we're supposed to do and reject conflicting options.
  if options.cfg_name and options.create_packages:
    parser.error('conflicting options: create-packages & show-board-cfg')

  # Empty-string splits would yield {''}, so guard each split.
  targets_wanted = set(options.targets.split(','))
  boards_wanted = (set(options.include_boards.split(','))
                   if options.include_boards else set())
  bricks_wanted = (set(options.include_bricks.split(','))
                   if options.include_bricks else set())

  if options.cfg_name:
    # Read-only query; no chroot/root requirement.
    ShowConfig(options.cfg_name)
  elif options.create_packages:
    cros_build_lib.AssertInsideChroot()
    Crossdev.Load(False)
    CreatePackages(targets_wanted, options.output_dir)
  else:
    cros_build_lib.AssertInsideChroot()
    # This has to be always run as root.
    if os.geteuid() != 0:
      cros_build_lib.Die('this script must be run as root')

    Crossdev.Load(options.reconfig)
    root = options.sysroot or '/'
    UpdateToolchains(options.usepkg, options.deleteold, options.hostonly,
                     options.reconfig, targets_wanted, boards_wanted,
                     bricks_wanted, root=root)
    # Persist the (possibly updated) crossdev cache.
    Crossdev.Save()

  return 0
diff --git a/scripts/cros_show_waterfall_layout.py b/scripts/cros_show_waterfall_layout.py
new file mode 100644
index 0000000..0b064a6
--- /dev/null
+++ b/scripts/cros_show_waterfall_layout.py
@@ -0,0 +1,80 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Show the builder layout for CrOS waterfalls."""
+
+from __future__ import print_function
+
+import json
+import sys
+
+from chromite.cbuildbot import config_lib
+from chromite.lib import commandline
+
+
def _FormatText(data, out):
  """Formatter function for text output.

  Args:
    data: Mapping of waterfall name -> {config name -> board layout dict};
        each layout dict has a 'name' key and an optional 'children' list.
    out: File-like object the report is written to.
  """
  output = lambda *a: print(*a, file=out)

  # sorted(dict) iterates keys on both Python 2 and 3; the original
  # .iterkeys() call was Python 2 only.
  for waterfall in sorted(data):
    layout = data[waterfall]
    if not layout:
      # Skip waterfalls with no builders.
      continue

    output('== %s ==' % (waterfall,))
    for board in sorted(layout):
      board_layout = layout[board]
      children = board_layout.get('children', ())
      # Builders with children get bracketed; reuse `children` instead of
      # doing a second dict lookup like the original did.
      if not children:
        output('%(name)s' % board_layout)
      else:
        output('[%(name)s]' % board_layout)
      for child in sorted(children):
        output('  %s' % (child,))
    output()
+
+
def _FormatJson(data, out):
  """Formatter function for JSON output."""
  # Serialize with deterministic (sorted) key order.
  out.write(json.dumps(data, sort_keys=True))
+
+
# Maps the --format command-line choice to its formatter callable.
_FORMATTERS = {
    'text': _FormatText,
    'json': _FormatJson,
}
+
+
def _ParseArguments(argv):
  """Parse command line arguments.

  Args:
    argv: Raw command line argument strings.

  Returns:
    Frozen options namespace; opts.format is resolved from the choice name
    to the formatter callable itself.
  """
  parser = commandline.ArgumentParser(description=__doc__)

  # sorted(dict) works on both Python 2 and 3; .iterkeys() is Python 2 only.
  parser.add_argument('--format', default='text',
                      choices=sorted(_FORMATTERS),
                      help='Choose output format.')
  opts = parser.parse_args(argv)
  opts.format = _FORMATTERS[opts.format]
  opts.Freeze()
  return opts
+
+
def main(argv):
  """Dump the waterfall -> builder layout for the current site config."""
  opts = _ParseArguments(argv)

  site_config = config_lib.LoadConfigFromFile()

  # Build {waterfall: {config name: {'name': ..., 'children': [...]}}}.
  layout = {}
  # .items() works on both Python 2 and 3; .iteritems() is Python 2 only.
  # Assumes site_config is dict-like (it supported iteritems()).
  for config_name, config in site_config.items():
    active_waterfall = config['active_waterfall']
    if not active_waterfall:
      # Configs not assigned to any waterfall aren't displayed anywhere.
      continue

    waterfall_layout = layout.setdefault(active_waterfall, {})
    board_layout = waterfall_layout[config_name] = {
        'name': config_name,
    }

    children = config['child_configs']
    if children:
      board_layout['children'] = [c['name'] for c in children]
  opts.format(layout, sys.stdout)
diff --git a/scripts/cros_show_waterfall_layout_unittest b/scripts/cros_show_waterfall_layout_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/cros_show_waterfall_layout_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/cros_show_waterfall_layout_unittest.py b/scripts/cros_show_waterfall_layout_unittest.py
new file mode 100644
index 0000000..49a7f48
--- /dev/null
+++ b/scripts/cros_show_waterfall_layout_unittest.py
@@ -0,0 +1,37 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for cros_show_waterfall_layout."""
+
+from __future__ import print_function
+
+import json
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_test_lib
+from chromite.scripts import cros_show_waterfall_layout
+
+# pylint: disable=protected-access
+
+
class JsonDumpTest(cros_test_lib.OutputTestCase):
  """Test the json dumping functionality of cbuildbot_view_config."""

  def setUp(self):
    # The original used basename.rstrip('_unittest.py'), but rstrip()
    # removes a trailing *set of characters*, not a suffix, so it also
    # chewed off the final 't' of 'layout' and produced a bogus bin path.
    # Strip the extension (handles .py and .pyc) and the '_unittest'
    # suffix explicitly instead.
    base = os.path.splitext(os.path.basename(__file__))[0]
    suffix = '_unittest'
    if base.endswith(suffix):
      base = base[:-len(suffix)]
    self.bin_path = os.path.join(constants.CHROMITE_BIN_DIR, base)

  def testJSONDumpLoadable(self):
    """Make sure config export functionality works."""
    with self.OutputCapturer() as output:
      cros_show_waterfall_layout.main(['--format', 'json'])
      layout = json.loads(output.GetStdout())
    self.assertFalse(not layout)

  def testTextDump(self):
    """Make sure text dumping is capable of being produced."""
    with self.OutputCapturer() as output:
      cros_show_waterfall_layout.main(['--format', 'text'])
    self.assertFalse(not output.GetStdout())
diff --git a/scripts/cros_sysroot_utils.py b/scripts/cros_sysroot_utils.py
new file mode 100644
index 0000000..a4d3fbf
--- /dev/null
+++ b/scripts/cros_sysroot_utils.py
@@ -0,0 +1,98 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Collection of tools to create sysroots."""
+
+
+from __future__ import print_function
+
+import os
+import sys
+
+from chromite.lib import brick_lib
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import sysroot_lib
+
+
def ParseArgs(argv):
  """Parse arguments.

  Args:
    argv: array of arguments passed to the script.

  Returns:
    Frozen options namespace; options.command names the chosen subcommand
    and options.out_file is None unless a subcommand sets it.
  """
  parser = commandline.ArgumentParser(description=__doc__)
  # Not every subcommand defines --out-file; default it globally so main()
  # can always read options.out_file.
  parser.set_defaults(out_file=None)
  subparser = parser.add_subparsers()

  # create-wrappers: install toolchain/portage wrapper scripts in a sysroot.
  wrapper = subparser.add_parser('create-wrappers')
  wrapper.add_argument('--sysroot', help='Path to the sysroot.', required=True)
  wrapper.add_argument('--friendlyname', help='Name to append to the commands.')
  wrapper.set_defaults(command='create-wrappers')

  # generate-config: emit the board/brick portage configuration.
  config = subparser.add_parser('generate-config')
  target = config.add_mutually_exclusive_group(required=True)
  target.add_argument('--board', help='Board to generate the config for.')
  target.add_argument('--brick', help='Brick to generate the config for.')
  config.add_argument('--out-file', dest='out_file',
                      help='File to write into. If not specified, the '
                      'configuration will be printed to stdout.')
  config.add_argument('--sysroot', help='Path to the sysroot.', required=True)
  config.set_defaults(command='generate-config')

  # generate-make-conf: emit the sysroot's make.conf contents.
  makeconf = subparser.add_parser('generate-make-conf')
  makeconf.add_argument('--sysroot', help='Sysroot to use.')
  makeconf.add_argument('--out-file', dest='out_file',
                        help='File to write the configuration into. If not '
                        'specified, the configuration will be printed to '
                        'stdout.')
  makeconf.add_argument('--accepted-licenses',
                        help='List of accepted licenses.')
  makeconf.set_defaults(command='generate-make-conf')

  # generate-binhosts: emit the binary-package host configuration.
  binhost = subparser.add_parser('generate-binhosts')
  binhost.add_argument('--sysroot', help='Sysroot to use.')
  binhost.add_argument('--out-file', dest='out_file',
                       help='File to write the configuration into. If not '
                       'specified, the configuration will be printed to '
                       'stdout.')
  binhost.add_argument('--chrome-only', dest='chrome_only', action='store_true',
                       help='Generate only the chrome binhost.')
  binhost.add_argument('--local-only', dest='local_only', action='store_true',
                       help='Use compatible local boards only.')
  binhost.set_defaults(command='generate-binhosts')

  options = parser.parse_args(argv)
  options.Freeze()
  return options
+
+
def main(argv):
  """Dispatch the requested sysroot subcommand.

  Must run inside the chroot; re-execs itself under sudo when not root.

  Args:
    argv: Command line arguments.
  """
  opts = ParseArgs(argv)
  if not cros_build_lib.IsInsideChroot():
    raise commandline.ChrootRequiredError()

  if os.geteuid() != 0:
    # Re-run this exact invocation with root privileges.
    cros_build_lib.SudoRunCommand(sys.argv, print_cmd=False)
    return

  output = sys.stdout
  if opts.out_file:
    # The original leaked this file handle; make sure it gets closed.
    output = open(opts.out_file, 'w')

  try:
    sysroot = sysroot_lib.Sysroot(opts.sysroot)
    if opts.command == 'create-wrappers':
      sysroot.CreateAllWrappers(opts.friendlyname)
    elif opts.command == 'generate-config':
      if opts.brick:
        config = sysroot.GenerateBrickConfig(
            brick_lib.Brick(opts.brick).BrickStack())
      else:
        config = sysroot.GenerateBoardConfig(opts.board)

      output.write('\n' + config)
    elif opts.command == 'generate-make-conf':
      output.write('\n' + sysroot.GenerateMakeConf(opts.accepted_licenses))
    elif opts.command == 'generate-binhosts':
      output.write('\n' + sysroot.GenerateBinhostConf(opts.chrome_only,
                                                      opts.local_only))
  finally:
    if output is not sys.stdout:
      output.close()
diff --git a/scripts/cros_unittest b/scripts/cros_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/cros_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/cros_unittest.py b/scripts/cros_unittest.py
new file mode 100644
index 0000000..7510fca
--- /dev/null
+++ b/scripts/cros_unittest.py
@@ -0,0 +1,43 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for cros."""
+
+from __future__ import print_function
+
+from chromite.lib import commandline
+from chromite.lib import cros_test_lib
+from chromite.lib import stats
+from chromite.lib import stats_unittest
+from chromite.scripts import cros
+
+
class RunScriptTest(cros_test_lib.WorkspaceTestCase):
  """Test the main functionality."""

  def setUp(self):
    self.stats_module_mock = stats_unittest.StatsModuleMock()
    self.StartPatcher(self.stats_module_mock)
    # Stub the subcommand and give it a concrete return value so main()'s
    # result is deterministic (assumes main() propagates _RunSubCommand's
    # return value -- TODO confirm against cros.main).
    self.PatchObject(cros, '_RunSubCommand', autospec=True, return_value=0)

  def testStatsUpload(self, upload_count=1, return_value=0):
    """Test stats uploading."""
    # The original rebound `return_value` and then asserted
    # assertEquals(return_value, return_value) -- a tautology that could
    # never fail.  Keep the actual result separate so the comparison
    # against the expected `return_value` parameter is meaningful.
    actual = cros.main(['chrome-sdk', '--board', 'lumpy'])
    # pylint: disable=protected-access
    self.assertEquals(stats.StatsUploader._Upload.call_count, upload_count)
    # pylint: enable=protected-access
    self.assertEquals(actual, return_value)

  def testStatsUploadError(self):
    """We don't upload stats if the stats creation failed."""
    self.stats_module_mock.stats_mock.init_exception = True
    with cros_test_lib.LoggingCapturer():
      self.testStatsUpload(upload_count=0)

  def testDefaultLogLevel(self):
    """Test that the default log level is set to notice."""
    arg_parser = self.PatchObject(commandline, 'ArgumentParser',
                                  return_value=commandline.ArgumentParser())
    cros.GetOptions({})
    arg_parser.assert_called_with(caching=True, default_log_level='notice')
diff --git a/scripts/cros_workon.py b/scripts/cros_workon.py
new file mode 100644
index 0000000..087c0a8
--- /dev/null
+++ b/scripts/cros_workon.py
@@ -0,0 +1,119 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script moves ebuilds between 'stable' and 'live' states.
+
+By default 'stable' ebuilds point at and build from source at the
+last known good commit. Moving an ebuild to 'live' (via cros_workon start)
+is intended to support development. The current source tip is fetched,
+source modified and built using the unstable 'live' (9999) ebuild.
+"""
+
+from __future__ import print_function
+
+from chromite.lib import brick_lib
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import terminal
+from chromite.lib import workon_helper
+
+
def main(argv):
  """Entry point for cros_workon: dispatch a workon subcommand.

  Args:
    argv: Command line arguments.

  Returns:
    0 on success.
  """
  # Options shared by every subcommand.  They are declared on a shared
  # parent parser so each subcommand inherits them.
  shared = commandline.SharedParser()
  shared.add_argument('--board', default=cros_build_lib.GetDefaultBoard(),
                      help='The board to set package keywords for.')
  shared.add_argument('--brick', help='The brick to set package keywords for.')
  shared.add_argument('--host', default=False, action='store_true',
                      help='Uses the host instead of board')
  shared.add_argument('--remote', default='',
                      help='For non-workon projects, the git remote to use.')
  shared.add_argument('--revision', default='',
                      help='Use to override the manifest defined default '
                           'revision used for a project')
  shared.add_argument('--command', default='git status', dest='iterate_command',
                      help='The command to be run by forall.')
  shared.add_argument('--workon_only', default=False, action='store_true',
                      help='Apply to packages that have a workon ebuild only')
  shared.add_argument('--all', default=False, action='store_true',
                      help='Apply to all possible packages for the '
                           'given command (overrides workon_only)')

  parser = commandline.ArgumentParser(description=__doc__, parents=[shared,])

  # Add the shared 'packages' argument after creating the main parser so that
  # it is only bound/shared with the subcommands and doesn't confuse argparse.
  shared.add_argument('packages', nargs='*',
                      help='The packages to run command against.')

  commands = [
      ('start', 'Moves an ebuild to live (intended to support development)'),
      ('stop', 'Moves an ebuild to stable (use last known good)'),
      ('info', 'Print package name, repo name, and source directory.'),
      ('list', 'List of live ebuilds (workon ebuilds if --all)'),
      ('list-all', 'List all of the live ebuilds for all setup boards'),
      ('iterate', 'For each ebuild, cd to the source dir and run a command'),
  ]
  command_parsers = parser.add_subparsers(dest='command', title='commands')
  for command, description in commands:
    command_parsers.add_parser(command, parents=(shared,), help=description,
                               description=description)

  options = parser.parse_args(argv)
  options.Freeze()

  # list-all doesn't need a target (board/brick/host), so handle it first.
  if options.command == 'list-all':
    board_to_packages = workon_helper.ListAllWorkedOnAtoms()
    color = terminal.Color()
    for board in sorted(board_to_packages):
      print(color.Start(color.GREEN) + board + ':' + color.Stop())
      for package in board_to_packages[board]:
        print('    ' + package)
      print('')
    return 0

  # TODO(wiley): Assert that we're not running as root.
  cros_build_lib.AssertInsideChroot()

  # Resolve the sysroot and display name from whichever target was given.
  if options.host:
    friendly_name = 'host'
    sysroot = '/'
  elif options.board:
    friendly_name = options.board
    sysroot = cros_build_lib.GetSysroot(board=options.board)
  elif options.brick:
    brick = brick_lib.Brick(options.brick)
    friendly_name = brick.FriendlyName()
    # TODO(wiley) This is a hack.  It doesn't really make sense to calculate
    #             the sysroot from a brick alone, since bricks are installed
    #             into sysroots.  Revisit this when blueprints are working.
    sysroot = cros_build_lib.GetSysroot(friendly_name)
  else:
    cros_build_lib.Die('You must specify either --host, --board or --brick')

  helper = workon_helper.WorkonHelper(sysroot, friendly_name)
  try:
    if options.command == 'start':
      helper.StartWorkingOnPackages(options.packages, use_all=options.all,
                                    use_workon_only=options.workon_only)
    elif options.command == 'stop':
      helper.StopWorkingOnPackages(options.packages, use_all=options.all,
                                   use_workon_only=options.workon_only)
    elif options.command == 'info':
      triples = helper.GetPackageInfo(options.packages, use_all=options.all,
                                      use_workon_only=options.workon_only)
      for package, repos, paths in triples:
        print(package, ','.join(repos), ','.join(paths))
    elif options.command == 'list':
      packages = helper.ListAtoms(
          use_all=options.all, use_workon_only=options.workon_only)
      if packages:
        print('\n'.join(packages))
    elif options.command == 'iterate':
      helper.RunCommandInPackages(options.packages, options.iterate_command,
                                  use_all=options.all,
                                  use_workon_only=options.workon_only)
  except workon_helper.WorkonError as e:
    # str(e) instead of e.message: the message attribute was deprecated in
    # Python 2.6 (PEP 352) and removed in Python 3.
    cros_build_lib.Die(str(e))

  return 0
diff --git a/scripts/crosfw.py b/scripts/crosfw.py
new file mode 100644
index 0000000..ebf1d0a
--- /dev/null
+++ b/scripts/crosfw.py
@@ -0,0 +1,673 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""crosfw - Chrome OS Firmware build/flash script.
+
+Builds a firmware image for any board and writes it to the board. The image
+can be pure upstream or include Chrome OS components (-V). Some device
+tree parameters can be provided, including silent console (-C) and secure
+boot (-S). Use -i for a faster incremental build. The image is written to
+the board by default using USB/em100 (or sdcard with -x). Use -b to specify
+the board to build. Options can be added to ~/.crosfwrc - see the script for
+details.
+
+It can also flash SPI by writing a 'magic flasher' U-Boot with a payload
+to the board.
+
+The script is normally run from within the U-Boot directory which is
+.../src/third_party/u-boot/files
+
+Example 1: Build upstream image for coreboot and write to a 'link':
+
+ crosfw -b link
+
+Example 2: Build verified boot image (V) for daisy/snow and boot in secure
+ mode (S) so that breaking in on boot is not possible.
+
+ crosfw -b daisy -VS
+ crosfw -b daisy -VSC         (no console output)
+
+Example 3: Build a magic flasher (F) with full verified boot for peach_pit,
+ but with console enabled, write to SD card (x)
+
+ crosfw -b peach_pit -VSFx
+
This script does not use an ebuild. It does a similar thing to the
+chromeos-u-boot ebuild, and runs cros_bundle_firmware to produce various
+types of image, a little like the chromeos-bootimage ebuild.
+
+The purpose of this script is to make it easier and faster to perform
+common firmware build tasks without changing boards, manually updating
+device tree files or lots of USE flags and complexity in the ebuilds.
+
+This script has been tested with snow, link and peach_pit. It builds for
+peach_pit by default. Note that it will also build any upstream ARM
+board - e.g. "-b snapper9260" will build an image for that board.
+
+Mostly you can use the script inside and outside the chroot. The main
+limitation is that dut-control doesn't really work outside the chroot,
+so writing the image to the board over USB is not possible, nor can the
+board be automatically reset on x86 platforms.
+
+For an incremental build (faster), run with -i
+
+To get faster clean builds, install ccache, and create ~/.crosfwrc with
+this line:
+
+ USE_CCACHE = True
+
+(make sure ~/.ccache is not on NFS, or set CCACHE_DIR)
+
+Other options are the default board to build, and verbosity (0-4), e.g.:
+
+ DEFAULT_BOARD = 'daisy'
+ VERBOSE = 1
+
+It is possible to use multiple servo boards, each on its own port. Add
+these lines to your ~/.crosfwrc to set the servo port to use for each
+board:
+
+ SERVO_PORT['link'] = 8888
+ SERVO_PORT['daisy'] = 9999
+ SERVO_PORT['peach_pit'] = 7777
+
+All builds appear in the <outdir>/<board> subdirectory and images are written
+to <outdir>/<uboard>/out, where <uboard> is the U-Boot name for the board (in
+the U-Boot boards.cfg file)
+
+The value for <outdir> defaults to /tmp/crosfw but can be configured in your
~/.crosfwrc file, e.g.:
+
+ OUT_DIR = '/tmp/u-boot'
+
+For the -a option here are some useful options:
+
+--add-blob cros-splash /dev/null
+--gbb-flags -force-dev-switch-on
+--add-node-enable /spi@131b0000/cros-ecp@0 1
+--verify --full-erase
+--bootcmd "cros_test sha"
+--gbb-flags -force-dev-switch-on
+--bmpblk ~/trunk/src/third_party/u-boot/bmp.bin
+
+For example: -a "--gbb-flags -force-dev-switch-on"
+
+Note the standard bmpblk is at:
+  /home/$USER/trunk/src/third_party/chromiumos-overlay/sys-boot/
      chromeos-bootimage/files/bmpblk.bin
+"""
+
+from __future__ import print_function
+
+import glob
+import multiprocessing
+import os
+import re
+import sys
+
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import parallel
+
+
# Module-level state filled in by SetupBuild() from the command line and
# the U-Boot boards.cfg file.
arch = None
board = None
compiler = None
default_board = None
family = None
in_chroot = True

logging.basicConfig(format='%(message)s')
# Common RunCommand kwargs: quiet, tolerate non-zero exits, and honor the
# current logger level.
kwargs = {'print_cmd': False, 'error_code_ok': True,
          'debug_level': logging.getLogger().getEffectiveLevel()}

outdir = ''

# If you have multiple boards connected on different servo ports, put lines
# like 'SERVO_PORT{"peach_pit"} = 7777' in your ~/.crosfwrc
SERVO_PORT = {}

smdk = None
src_root = os.path.join(constants.SOURCE_ROOT, 'src')
# Recomputed here (overriding the True above) based on the real environment.
in_chroot = cros_build_lib.IsInsideChroot()

uboard = ''

default_board = 'peach_pit'
use_ccache = False
vendor = None
verbose = False

# Special cases for the U-Boot board config, the SOCs and default device tree
# since the naming is not always consistent.
# x86 has a lot of boards, but to U-Boot they are all the same
UBOARDS = {
    'daisy': 'smdk5250',
    'peach': 'smdk5420',
}
for b in ['alex', 'butterfly', 'emeraldlake2', 'link', 'lumpy', 'parrot',
          'stout', 'stumpy']:
  UBOARDS[b] = 'coreboot-x86'
  UBOARDS['chromeos_%s' % b] = 'chromeos_coreboot'

SOCS = {
    'coreboot-x86': '',
    'chromeos_coreboot': '',
    'daisy': 'exynos5250-',
    'peach': 'exynos5420-',
}

DEFAULT_DTS = {
    'daisy': 'snow',
    'daisy_spring': 'spring',
    'peach_pit': 'peach-pit',
}

OUT_DIR = '/tmp/crosfw'

rc_file = os.path.expanduser('~/.crosfwrc')
if os.path.exists(rc_file):
  # NOTE(review): execfile is Python 2 only; the rc file runs in this
  # module's namespace so it can override the defaults above.
  execfile(rc_file)
+
+
def Log(msg):
  """Emit an informational message, but only in verbose mode.

  Args:
    msg: Message to print
  """
  if not verbose:
    return
  logging.info(msg)
+
+
def Dumper(flag, infile, outfile):
  """Run objdump on an input file.

  Args:
    flag: Flag to pass objdump (e.g. '-d').
    infile: Input file to process.
    outfile: Output file to write to.
  """
  cmd = [CompilerTool('objdump'), flag, infile]
  result = cros_build_lib.RunCommand(cmd, log_stdout_to_file=outfile, **kwargs)
  # A non-zero objdump exit aborts the whole script.
  if result.returncode:
    sys.exit()
+
+
def CompilerTool(tool):
  """Returns the cross-compiler tool filename.

  Args:
    tool: Tool name to return, e.g. 'size'.

  Returns:
    Filename of requested tool (cross-compile prefix + tool name).
  """
  return '{0}{1}'.format(compiler, tool)
+
+
def ParseCmdline(argv):
  """Parse all command line options.

  Args:
    argv: Arguments to parse.

  Returns:
    The parsed options object
  """
  parser = commandline.ArgumentParser(description=__doc__)
  # Build selection and content options.
  parser.add_argument('-a', '--cbfargs', action='append',
                      help='Pass extra arguments to cros_bundle_firmware')
  parser.add_argument('-b', '--board', type=str, default=default_board,
                      help='Select board to build (daisy/peach_pit/link)')
  parser.add_argument('-B', '--build', action='store_false', default=True,
                      help="Don't build U-Boot, just configure device tree")
  parser.add_argument('-C', '--console', action='store_false', default=True,
                      help='Permit console output')
  parser.add_argument('-d', '--dt', default='seaboard',
                      help='Select name of device tree file to use')
  parser.add_argument('-D', '--nodefaults', dest='use_defaults',
                      action='store_false', default=True,
                      help="Don't select default filenames for those not given")
  # Flashing options.
  parser.add_argument('-F', '--flash', action='store_true', default=False,
                      help='Create magic flasher for SPI flash')
  parser.add_argument('-M', '--mmc', action='store_true', default=False,
                      help='Create magic flasher for eMMC')
  parser.add_argument('-i', '--incremental', action='store_true', default=False,
                      help="Don't reconfigure and clean")
  parser.add_argument('-k', '--kernel', action='store_true', default=False,
                      help='Send kernel to board also')
  parser.add_argument('-O', '--objdump', action='store_true', default=False,
                      help='Write disassembly output')
  parser.add_argument('-r', '--run', action='store_false', default=True,
                      help='Run the boot command')
  # Image type options (read-only / read-write / small / verified).
  parser.add_argument('--ro', action='store_true', default=False,
                      help='Create Chrome OS read-only image')
  parser.add_argument('--rw', action='store_true', default=False,
                      help='Create Chrome OS read-write image')
  parser.add_argument('-s', '--separate', action='store_false', default=True,
                      help='Link device tree into U-Boot, instead of separate')
  parser.add_argument('-S', '--secure', action='store_true', default=False,
                      help='Use vboot_twostop secure boot')
  parser.add_argument('--small', action='store_true', default=False,
                      help='Create Chrome OS small image')
  parser.add_argument('-t', '--trace', action='store_true', default=False,
                      help='Enable trace support')
  parser.add_argument('-v', '--verbose', type=int, default=0,
                      help='Make cros_bundle_firmware verbose')
  parser.add_argument('-V', '--verified', action='store_true', default=False,
                      help='Include Chrome OS verified boot components')
  parser.add_argument('-w', '--write', action='store_false', default=True,
                      help="Don't write image to board using usb/em100")
  parser.add_argument('-x', '--sdcard', action='store_true', default=False,
                      help='Write to SD card instead of USB/em100')
  parser.add_argument('-z', '--size', action='store_true', default=False,
                      help='Display U-Boot image size')
  parser.add_argument('target', nargs='?',
                      help='The target to work on')
  return parser.parse_args(argv)
+
+
def SetupBuild(options):
  """Set up parameters needed for the build.

  This checks the current environment and options and sets up various things
  needed for the build, including 'base' which holds the base flags for
  passing to the U-Boot Makefile.

  Side effects: assigns the module-level globals listed below, may chdir into
  the U-Boot source tree (inside the chroot), and may set $CCACHE.

  Args:
    options: Command line options

  Returns:
    Base flags to use for U-Boot, as a list.
  """
  # pylint: disable=W0603
  global arch, board, compiler, family, outdir, smdk, uboard, vendor, verbose

  if not verbose:
    verbose = options.verbose != 0

  logging.getLogger().setLevel(options.verbose)

  Log('Building for %s' % options.board)

  # Separate out board_variant string: "peach_pit" becomes "peach", "pit".
  # But don't mess up upstream boards which use _ in their name.
  parts = options.board.split('_')
  if parts[0] in ['daisy', 'peach']:
    board = parts[0]
  else:
    board = options.board

  # To allow this to be run from 'cros_sdk'
  if in_chroot:
    os.chdir(os.path.join(src_root, 'third_party', 'u-boot', 'files'))

  base_board = board

  # Verified boot builds use the 'chromeos_' prefixed U-Boot board config.
  if options.verified:
    base_board = 'chromeos_%s' % base_board

  uboard = UBOARDS.get(base_board, base_board)
  Log('U-Boot board is %s' % uboard)

  # Pull out some information from the U-Boot boards config file.
  # NOTE(review): this is a substring match on the whole line, so a board
  # name that is a prefix of another could match the wrong row - verify.
  family = None
  with open('boards.cfg') as f:
    for line in f:
      if uboard in line:
        if line[0] == '#':
          continue
        fields = line.split()
        if not fields:
          continue
        arch = fields[1]
        # Pad so that optional trailing columns read as None.
        fields += [None, None, None]
        smdk = fields[3]
        vendor = fields[4]
        family = fields[5]
        break
  # NOTE(review): relies on the module-level 'arch' being falsy when no row
  # matched; a stale value from a previous call would mask the error.
  if not arch:
    cros_build_lib.Die("Selected board '%s' not found in boards.cfg." % board)

  vboot = os.path.join('build', board, 'usr')
  if arch == 'x86':
    family = 'em100'
    if in_chroot:
      compiler = 'i686-pc-linux-gnu-'
    else:
      compiler = '/opt/i686/bin/i686-unknown-elf-'
  elif arch == 'arm':
    if in_chroot:
      # Use the Chrome OS toolchain
      compiler = 'armv7a-cros-linux-gnueabi-'
    else:
      compiler = glob.glob('/opt/linaro/gcc-linaro-arm-linux-*/bin/*gcc')
      if not compiler:
        cros_build_lib.Die("""Please install an ARM toolchain for your machine.
'Install a Linaro toolchain from:'
'https://launchpad.net/linaro-toolchain-binaries'
'or see cros/commands/cros_chrome_sdk.py.""")
      compiler = compiler[0]
    # Keep only the cross-compile prefix (strip the trailing 'gcc').
    compiler = re.sub('gcc$', '', compiler)
  elif arch == 'sandbox':
    compiler = ''
  else:
    cros_build_lib.Die("Selected arch '%s' not supported." % arch)

  # Without an explicit build request, fall back to an incremental build.
  if not options.build:
    options.incremental = True

  cpus = multiprocessing.cpu_count()

  outdir = os.path.join(OUT_DIR, uboard)
  base = [
      'make',
      '-j%d' % cpus,
      'O=%s' % outdir,
      'ARCH=%s' % arch,
      'CROSS_COMPILE=%s' % compiler,
      '--no-print-directory',
      'HOSTSTRIP=true',
      'DEV_TREE_SRC=%s-%s' % (family, options.dt),
      'QEMU_ARCH=']

  if options.verbose < 2:
    base.append('-s')

  if options.ro and options.rw:
    cros_build_lib.Die('Cannot specify both --ro and --rw options')
  if options.ro:
    base.append('CROS_RO=1')
    options.small = True

  if options.rw:
    base.append('CROS_RW=1')
    options.small = True

  if options.small:
    base.append('CROS_SMALL=1')
  else:
    base.append('CROS_FULL=1')

  if options.verified:
    base += [
        'VBOOT=%s' % vboot,
        'MAKEFLAGS_VBOOT=DEBUG=1',
        'QUIET=1',
        'CFLAGS_EXTRA_VBOOT=-DUNROLL_LOOPS',
        'VBOOT_SOURCE=%s/platform/vboot_reference' % src_root]
    base.append('VBOOT_DEBUG=1')

  # Handle the Chrome OS USE_STDINT workaround. Vboot needs <stdint.h> due
  # to a recent change, the need for which I didn't fully understand. But
  # U-Boot doesn't normally use this. We have added an option to U-Boot to
  # enable use of <stdint.h> and without it vboot will fail to build. So we
  # need to enable it where we can. We can't just enable it always since
  # that would prevent this script from building other non-Chrome OS boards
  # with a different (older) toolchain, or Chrome OS boards without vboot.
  # So use USE_STDINT if the toolchain supports it, and not if not. This
  # file was originally part of glibc but has recently migrated to the
  # compiler so it is reasonable to use it with a stand-alone program like
  # U-Boot. At this point the comment has got long enough that we may as
  # well include some poetry which seems to be sorely lacking the code base,
  # so this is from Ogden Nash:
  #    To keep your marriage brimming
  #    With love in the loving cup,
  #    Whenever you're wrong, admit it;
  #    Whenever you're right, shut up.
  cmd = [CompilerTool('gcc'), '-ffreestanding', '-x', 'c', '-c', '-']
  result = cros_build_lib.RunCommand(cmd,
                                     input='#include <stdint.h>',
                                     capture_output=True,
                                     **kwargs)
  if result.returncode == 0:
    base.append('USE_STDINT=1')

  if options.trace:
    base.append('FTRACE=1')
  if options.separate:
    base.append('DEV_TREE_SEPARATE=1')

  if options.incremental:
    # Get the correct board for cros_write_firmware
    config_mk = '%s/include/config.mk' % outdir
    if not os.path.exists(config_mk):
      logging.warning('No build found for %s - dropping -i' % board)
      options.incremental = False

  # An in-tree config.mk indicates a stale in-tree build that would conflict
  # with the out-of-tree (O=) build done here.
  config_mk = 'include/config.mk'
  if os.path.exists(config_mk):
    logging.warning("Warning: '%s' exists, try 'make distclean'" % config_mk)

  # For when U-Boot supports ccache
  # See http://patchwork.ozlabs.org/patch/245079/
  if use_ccache:
    os.environ['CCACHE'] = 'ccache'

  return base
+
+
def RunBuild(options, base, target, queue):
  """Run the U-Boot build.

  Configures U-Boot (unless doing an incremental build), runs make for the
  requested target, optionally prints the image size, and schedules
  disassembly jobs on the queue. Exits the process on build failure.

  Args:
    options: Command line options.
    base: Base U-Boot flags (the 'make' invocation plus common arguments).
    target: Target to build.
    queue: A parallel queue to add disassembly jobs to.
  """
  Log('U-Boot build flags: %s' % ' '.join(base))

  # Reconfigure U-Boot.
  if not options.incremental:
    # Ignore any error from this, some older U-Boots fail on this.
    cros_build_lib.RunCommand(base + ['distclean'], **kwargs)
    result = cros_build_lib.RunCommand(base + ['%s_config' % uboard], **kwargs)
    if result.returncode:
      # Pass the failure code through: sys.exit() with no argument exits
      # with status 0, which would falsely report success to the caller.
      sys.exit(result.returncode)

  # Do the actual build.
  if options.build:
    result = cros_build_lib.RunCommand(base + [target], **kwargs)
    if result.returncode:
      sys.exit(result.returncode)

  files = ['%s/u-boot' % outdir]
  spl = glob.glob('%s/spl/u-boot-spl' % outdir)
  if spl:
    files += spl
  if options.size:
    result = cros_build_lib.RunCommand([CompilerTool('size')] + files,
                                       **kwargs)
    if result.returncode:
      sys.exit(result.returncode)

  # Create disassembly files .dis and .Dis (full dump). Use a distinct local
  # name here; the original rebound 'base', shadowing the make-flags
  # parameter.
  for fname in files:
    stem = os.path.splitext(fname)[0]
    if options.objdump:
      queue.put(('-d', fname, stem + '.dis'))
      queue.put(('-D', fname, stem + '.Dis'))
    else:
      # Remove old files which otherwise might be confusing
      osutils.SafeUnlink(stem + '.dis')
      osutils.SafeUnlink(stem + '.Dis')

  Log('Output directory %s' % outdir)
+
+
def WriteFirmware(options):
  """Write firmware to the board.

  This uses cros_bundle_firmware to create a firmware image and write it to
  the board. Dies if cros_bundle_firmware fails; otherwise logs where the
  image went (written to the board, or left on disk when there is no write
  destination).

  Args:
    options: Command line options
  """
  flash = []
  kernel = []
  run = []
  secure = []
  servo = []
  silent = []
  verbose_arg = []
  ro_uboot = []

  bl2 = ['--bl2', '%s/spl/%s-spl.bin' % (outdir, smdk)]

  if options.use_defaults:
    bl1 = []
    bmpblk = []
    ecro = []
    ecrw = []
    defaults = []
  else:
    bl1 = ['--bl1', '##/build/%s/firmware/u-boot.bl1.bin' % options.board]
    bmpblk = ['--bmpblk', '##/build/%s/firmware/bmpblk.bin' % options.board]
    ecro = ['--ecro', '##/build/%s/firmware/ec.RO.bin' % options.board]
    ecrw = ['--ec', '##/build/%s/firmware/ec.RW.bin' % options.board]
    defaults = ['-D']

  if arch == 'x86':
    seabios = ['--seabios',
               '##/build/%s/firmware/seabios.cbfs' % options.board]
  else:
    seabios = []

  # Pick the write destination: SD card, em100 flash emulator, nothing for
  # sandbox, or USB download by default.
  if options.sdcard:
    dest = 'sd:.'
  elif arch == 'x86':
    dest = 'em100'
  elif arch == 'sandbox':
    dest = ''
  else:
    dest = 'usb'

  port = SERVO_PORT.get(options.board, '')
  if port:
    servo = ['--servo', '%d' % port]

  if options.flash:
    flash = ['-F', 'spi']

    # The small builds don't have the command line interpreter so cannot
    # run the magic flasher script. So use the standard U-Boot in this
    # case.
    if options.small:
      logging.warning('Using standard U-Boot as flasher')
      flash += ['-U', '##/build/%s/firmware/u-boot.bin' % options.board]

  if options.mmc:
    flash = ['-F', 'sdmmc']

  if options.verbose:
    verbose_arg = ['-v', '%s' % options.verbose]

  if options.secure:
    secure += ['--bootsecure', '--bootcmd', 'vboot_twostop']

  if not options.verified:
    # Make a small image, without GBB, etc.
    secure.append('-s')

  if options.kernel:
    kernel = ['--kernel', '##/build/%s/boot/vmlinux.uimg' % options.board]

  if not options.console:
    silent = ['--add-config-int', 'silent-console', '1']

  if not options.run:
    run = ['--bootcmd', 'none']

  # Outside the chroot we cannot drive servo, so downgrade the destination.
  if arch != 'sandbox' and not in_chroot and servo:
    if dest == 'usb':
      logging.warning('Image cannot be written to board')
      dest = ''
      servo = []
    elif dest == 'em100':
      logging.warning('Please reset the board manually to boot firmware')
      servo = []

    if not servo:
      logging.warning('(sadly dut-control does not work outside chroot)')

  if dest:
    dest = ['-w', dest]
  else:
    dest = []

  soc = SOCS.get(board)
  if not soc:
    soc = SOCS.get(uboard, '')
  dt_name = DEFAULT_DTS.get(options.board, options.board)
  dts_file = 'board/%s/dts/%s%s.dts' % (vendor, soc, dt_name)
  Log('Device tree: %s' % dts_file)

  if arch == 'sandbox':
    uboot_fname = '%s/u-boot' % outdir
  else:
    uboot_fname = '%s/u-boot.bin' % outdir

  if options.ro:
    # RO U-Boot is passed through as blob 'ro-boot'. We use the standard
    # ebuild one as RW.
    # TODO(sjg@chromium.org): Option to build U-Boot a second time to get
    # a fresh RW U-Boot.
    logging.warning('Using standard U-Boot for RW')
    ro_uboot = ['--add-blob', 'ro-boot', uboot_fname]
    uboot_fname = '##/build/%s/firmware/u-boot.bin' % options.board

  cbf = ['%s/platform/dev/host/cros_bundle_firmware' % src_root,
         '-b', options.board,
         '-d', dts_file,
         '-I', 'arch/%s/dts' % arch, '-I', 'cros/dts',
         '-u', uboot_fname,
         '-O', '%s/out' % outdir,
         '-M', family]

  for other in [bl1, bl2, bmpblk, defaults, dest, ecro, ecrw, flash, kernel,
                run, seabios, secure, servo, silent, verbose_arg, ro_uboot]:
    if other:
      cbf += other
  if options.cbfargs:
    for item in options.cbfargs:
      cbf += item.split(' ')
  os.environ['PYTHONPATH'] = ('%s/platform/dev/host/lib:%s/..' %
                              (src_root, src_root))
  Log(' '.join(cbf))
  result = cros_build_lib.RunCommand(cbf, **kwargs)
  if result.returncode:
    cros_build_lib.Die('cros_bundle_firmware failed')

  # Die() above means we only reach here on success, so just report what
  # happened. (The original re-tested result.returncode here, which made the
  # 'written to board' message unreachable.)
  if dest:
    logging.info('Image written to board with %s' % ' '.join(dest + servo))
  else:
    logging.info('Image is available at %s/out/image.bin' % outdir)
+
+
def main(argv):
  """Main function for script to build/write firmware.

  Parses the command line, sets up the build, runs it inside a background
  task runner (which dumps disassembly in parallel), and optionally writes
  the result to the board.

  Args:
    argv: Program arguments.
  """
  options = ParseCmdline(argv)
  base = SetupBuild(options)

  with parallel.BackgroundTaskRunner(Dumper) as queue:
    RunBuild(options, base, options.target, queue)

    if options.write:
      WriteFirmware(options)

    if options.objdump:
      # Fixed typo in log message ('diasssembly').
      Log('Writing disassembly files')
diff --git a/scripts/dep_tracker.py b/scripts/dep_tracker.py
new file mode 100644
index 0000000..547e9e5
--- /dev/null
+++ b/scripts/dep_tracker.py
@@ -0,0 +1,293 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to discover dependencies and other file information from a build.
+
+Some files in the image are installed to provide some functionality, such as
+chrome, shill or bluetoothd provide different functionality that can be
+present or not on a given build. Many other files are dependencies from these
+files that need to be present in the image for them to work. These dependencies
+come from needed shared libraries, executed files and other configuration files
+read.
+
+This script currently discovers dependencies between ELF files for libraries
+required at load time (libraries loaded by the dynamic linker) but not
+libraries loaded at runtime with dlopen(). It also computes size and file type
+in several cases to help understand the contents of the built image.
+"""
+
+from __future__ import print_function
+
+import itertools
+import json
+import multiprocessing
+import os
+import stat
+
+from chromite.lib import commandline
+from chromite.lib import cros_logging as logging
+from chromite.lib import filetype
+from chromite.lib import parseelf
+from chromite.lib import portage_util
+from chromite.scripts import lddtree
+
+
# Regex to parse Gentoo atoms. This should match the following ebuild names,
# splitting the package name from the version.
# without version:
#   chromeos-base/tty
#   chromeos-base/libchrome-271506
#   sys-kernel/chromeos-kernel-3_8
# with version:
#   chromeos-base/tty-0.0.1-r4
#   chromeos-base/libchrome-271506-r5
#   sys-kernel/chromeos-kernel-3_8-3.8.11-r35
# NOTE(review): these patterns are not referenced anywhere in this file;
# presumably kept for the ebuild-dependency parsing TODO in
# ComputeEbuildDeps() - confirm before removing.
RE_EBUILD_WITHOUT_VERSION = r'^([a-z0-9\-]+/[a-zA-Z0-9\_\+\-]+)$'
RE_EBUILD_WITH_VERSION = (
    r'^=?([a-z0-9\-]+/[a-zA-Z0-9\_\+\-]+)\-([^\-]+(\-r\d+)?)$')
+
+
def ParseELFWithArgs(args):
  """Wrapper to parseelf.ParseELF accepting a single arg.

  This wrapper is required to use the multiprocessing.Pool.map function,
  which passes exactly one argument per work item.

  Args:
    args: Tuple of positional arguments for parseelf.ParseELF(); args[1] is
      the relative path of the file being parsed.

  Returns:
    A 2-tuple with the passed relative path and the result of ParseELF(). On
    error, when ParseELF() returns None, this function returns None.
  """
  parsed = parseelf.ParseELF(*args)
  if parsed is None:
    return None
  rel_path = args[1]
  return rel_path, parsed
+
+
class DepTracker(object):
  """Tracks dependencies and file information in a root directory.

  This class computes dependencies and other information related to the files
  in the root image.
  """

  def __init__(self, root, jobs=1):
    """Initialize the tracker.

    Args:
      root: Path to the root directory of the image to inspect.
      jobs: Number of parallel jobs used for the expensive ELF parsing.

    Raises:
      Exception: If |root| is not a directory.
    """
    root_st = os.lstat(root)
    if not stat.S_ISDIR(root_st.st_mode):
      raise Exception('root (%s) must be a directory' % root)
    self._root = root.rstrip('/') + '/'
    self._file_type_decoder = filetype.FileTypeDecoder(root)

    # A wrapper to the multiprocess map function. We avoid launching a pool
    # of processes when jobs is 1 so python exceptions kill the main process,
    # useful for debugging.
    if jobs > 1:
      self._pool = multiprocessing.Pool(jobs)
      self._imap = self._pool.map
    else:
      self._imap = itertools.imap

    # Maps each file's path relative to |root| to a dict of computed
    # metadata ('size', and later 'deps', 'ftype' and 'ebuild').
    self._files = {}
    # Maps "category/package-version" to ebuild metadata.
    self._ebuilds = {}

    # Mapping of rel_paths for symlinks and hardlinks. Hardlinks are assumed
    # to point to the lowest lexicographically file with the same inode.
    self._symlinks = {}
    self._hardlinks = {}

  def Init(self):
    """Generates the initial list of files."""
    # First iteration over all the files in root searching for symlinks and
    # non-regular files.
    seen_inodes = {}
    for basepath, _, filenames in sorted(os.walk(self._root)):
      for filename in sorted(filenames):
        full_path = os.path.join(basepath, filename)
        rel_path = full_path[len(self._root):]
        st = os.lstat(full_path)

        file_data = {
            'size': st.st_size,
        }
        self._files[rel_path] = file_data

        # Track symlinks.
        if stat.S_ISLNK(st.st_mode):
          link_path = os.readlink(full_path)
          # lddtree's normpath handles a little more cases than the os.path
          # version. In particular, it handles the '//' case.
          self._symlinks[rel_path] = (
              link_path.lstrip('/') if link_path and link_path[0] == '/' else
              lddtree.normpath(os.path.join(os.path.dirname(rel_path),
                                            link_path)))
          file_data['deps'] = {
              'symlink': [self._symlinks[rel_path]]
          }

        # Track hardlinks.
        if st.st_ino in seen_inodes:
          self._hardlinks[rel_path] = seen_inodes[st.st_ino]
          continue
        seen_inodes[st.st_ino] = rel_path

  def SaveJSON(self, filename):
    """Save the computed information to a JSON file.

    Args:
      filename: The destination JSON file.
    """
    data = {
        'files': self._files,
        'ebuilds': self._ebuilds,
    }
    # Use a context manager so the file is flushed and closed even on error;
    # the original passed an anonymous open() and leaked the handle.
    with open(filename, 'w') as out_file:
      json.dump(data, out_file)

  def ComputeEbuildDeps(self, sysroot):
    """Compute the dependencies between ebuilds and files.

    Iterates over the list of ebuilds in the database and annotates the files
    with the ebuilds they are in. For each ebuild installing a file in the root,
    also compute the direct dependencies. Stores the information internally.

    Args:
      sysroot: The path to the sysroot, for example "/build/link".
    """
    portage_db = portage_util.PortageDB(sysroot)
    if not os.path.exists(portage_db.db_path):
      logging.warning('PortageDB directory not found: %s', portage_db.db_path)
      return

    for pkg in portage_db.InstalledPackages():
      pkg_files = []
      pkg_size = 0
      cpf = '%s/%s' % (pkg.category, pkg.pf)
      for typ, rel_path in pkg.ListContents():
        # We ignore other entries like for example "dir".
        if typ not in (pkg.OBJ, pkg.SYM):
          continue
        # We ignore files installed in the SYSROOT that weren't copied to the
        # image.
        if rel_path not in self._files:
          continue
        pkg_files.append(rel_path)
        file_data = self._files[rel_path]
        if 'ebuild' in file_data:
          # Fixed format string: the original ended with a bare '%', an
          # invalid conversion specifier that broke this log message.
          logging.warning('Duplicated entry for %s: %s and %s',
                          rel_path, file_data['ebuild'], cpf)
        file_data['ebuild'] = cpf
        pkg_size += file_data['size']
      # Ignore packages that don't install any file.
      if not pkg_files:
        continue
      self._ebuilds[cpf] = {
          'size': pkg_size,
          'files': len(pkg_files),
          'atom': '%s/%s' % (pkg.category, pkg.package),
          'version': pkg.version,
      }
    # TODO(deymo): Parse dependencies between ebuilds.

  def ComputeELFFileDeps(self):
    """Computes the dependencies between files.

    Computes the dependencies between the files in the root directory passed
    during construction. The dependencies are inferred for ELF files.
    The list of dependencies for each file in the passed rootfs as a dict().
    The result's keys are the relative path of the files and the value of each
    file is a list of dependencies. A dependency is a tuple (dep_path,
    dep_type) where the dep_path is relative path from the passed root to the
    dependent file and dep_type is one the following strings stating how the
    dependency was discovered:
      'ldd': The dependent ELF file is listed as needed in the dynamic section.
      'symlink': The dependent file is a symlink to the depending.
    If there are dependencies of a given type whose target file wasn't
    determined, a tuple (None, dep_type) is included. This is the case for
    example is a program uses library that wasn't found.
    """
    ldpaths = lddtree.LoadLdpaths(self._root)

    # Select the regular files that need ELF parsing, skipping symlinks and
    # hardlink duplicates discovered by Init().
    parseelf_args = []
    for rel_path in self._files:
      if rel_path in self._symlinks or rel_path in self._hardlinks:
        continue

      full_path = os.path.join(self._root, rel_path)
      st = os.lstat(full_path)
      if not stat.S_ISREG(st.st_mode):
        continue
      parseelf_args.append((self._root, rel_path, ldpaths))

    # Parallelize the ELF lookup step since it is quite expensive.
    elfs = dict(x for x in self._imap(ParseELFWithArgs, parseelf_args)
                if x is not None)

    for rel_path, elf in elfs.iteritems():
      file_data = self._files[rel_path]
      # Fill in the ftype if not set yet. We complete this value at this point
      # to avoid re-parsing the ELF file later.
      if 'ftype' not in file_data:
        ftype = self._file_type_decoder.GetType(rel_path, elf=elf)
        if ftype:
          file_data['ftype'] = ftype

      file_deps = file_data.get('deps', {})
      # Dependencies based on the result of ldd.
      for lib in elf.get('needed', []):
        lib_path = elf['libs'][lib]['path']
        if 'ldd' not in file_deps:
          file_deps['ldd'] = []
        file_deps['ldd'].append(lib_path)

      if file_deps:
        file_data['deps'] = file_deps

  def ComputeFileTypes(self):
    """Computes all the missing file type for the files in the root."""
    for rel_path, file_data in self._files.iteritems():
      if 'ftype' in file_data:
        continue
      ftype = self._file_type_decoder.GetType(rel_path)
      if ftype:
        file_data['ftype'] = ftype
+
+
def ParseArgs(argv):
  """Parse the command line arguments.

  Args:
    argv: The raw program arguments.

  Returns:
    A frozen options object.
  """
  arg_parser = commandline.ArgumentParser()
  arg_parser.add_argument('-j', '--jobs', type=int,
                          default=multiprocessing.cpu_count(),
                          help='number of simultaneous jobs.')
  arg_parser.add_argument('--sysroot', type='path', metavar='SYSROOT',
                          help='parse portage DB for ebuild information '
                          'from the provided sysroot.')
  arg_parser.add_argument('--json', type='path',
                          help='store information in JSON file.')
  arg_parser.add_argument('root', type='path',
                          help='path to the directory where the rootfs '
                          'is mounted.')

  opts = arg_parser.parse_args(argv)
  opts.Freeze()
  return opts
+
+
def main(argv):
  """Main function to start the script."""
  opts = ParseArgs(argv)
  logging.debug('Options are %s', opts)

  tracker = DepTracker(opts.root, jobs=opts.jobs)
  tracker.Init()
  tracker.ComputeELFFileDeps()
  tracker.ComputeFileTypes()

  # Ebuild annotation needs a portage DB, so it is opt-in via --sysroot.
  if opts.sysroot:
    tracker.ComputeEbuildDeps(opts.sysroot)

  if opts.json:
    tracker.SaveJSON(opts.json)
diff --git a/scripts/dep_tracker_unittest b/scripts/dep_tracker_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/dep_tracker_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/dep_tracker_unittest.py b/scripts/dep_tracker_unittest.py
new file mode 100644
index 0000000..089fb7e
--- /dev/null
+++ b/scripts/dep_tracker_unittest.py
@@ -0,0 +1,70 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for dep_tracker.py."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import unittest_lib
+from chromite.scripts import dep_tracker
+
+# Allow access private members for testing:
+# pylint: disable=W0212
+
+
class MainTest(cros_test_lib.OutputTestCase):
  """Tests for the main() function."""

  def testHelp(self):
    """Test that --help is functioning."""
    with self.OutputCapturer() as captured:
      # Running with --help should exit with code==0.
      self.AssertFuncSystemExitZero(dep_tracker.main, ['--help'])

    # Verify that a message beginning with "usage: " was printed.
    self.assertTrue(captured.GetStdout().startswith('usage: '))
+
+
class DepTrackerTest(cros_test_lib.TempDirTestCase):
  """Tests for the DepTracker() class."""

  def testSimpleDep(self):
    """Tests that all generated files are discovered by Init()."""
    unittest_lib.BuildELF(os.path.join(self.tempdir, 'libabc.so'),
                          ['func_a', 'func_b', 'func_c'])
    unittest_lib.BuildELF(os.path.join(self.tempdir, 'abc_main'),
                          undefined_symbols=['func_b'],
                          used_libs=['abc'],
                          executable=True)
    dt = dep_tracker.DepTracker(self.tempdir)
    dt.Init()
    dt.ComputeELFFileDeps()

    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(sorted(dt._files.keys()), ['abc_main', 'libabc.so'])

  def testFiletypeSet(self):
    """Tests that the 'ftype' member is set for ELF files first."""
    unittest_lib.BuildELF(os.path.join(self.tempdir, 'libabc.so'),
                          ['func_a', 'func_b', 'func_c'])
    osutils.WriteFile(os.path.join(self.tempdir, 'pyscript'),
                      "#!/usr/bin/python\nimport sys\nsys.exit(42)\n")
    dt = dep_tracker.DepTracker(self.tempdir)
    dt.Init()

    # ComputeELFFileDeps() should compute the file type of ELF files so we
    # don't need to parse them again.
    dt.ComputeELFFileDeps()
    self.assertIn('ftype', dt._files['libabc.so'])
    self.assertNotIn('ftype', dt._files['pyscript'])

    # ComputeFileTypes() should compute the file type of every file.
    dt.ComputeFileTypes()
    self.assertIn('ftype', dt._files['libabc.so'])
    self.assertIn('ftype', dt._files['pyscript'])
diff --git a/scripts/deploy_chrome.py b/scripts/deploy_chrome.py
new file mode 100644
index 0000000..354379a
--- /dev/null
+++ b/scripts/deploy_chrome.py
@@ -0,0 +1,626 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script that deploys a Chrome build to a device.
+
+The script supports deploying Chrome from these sources:
+
+1. A local build output directory, such as chromium/src/out/[Debug|Release].
+2. A Chrome tarball uploaded by a trybot/official-builder to GoogleStorage.
+3. A Chrome tarball existing locally.
+
+The script copies the necessary contents of the source location (tarball or
+build directory) and rsyncs the contents of the staging directory onto your
+device's rootfs.
+"""
+
+from __future__ import print_function
+
+import argparse
+import collections
+import contextlib
+import functools
+import glob
+import multiprocessing
+import os
+import shlex
+import shutil
+import time
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import failures_lib
+from chromite.cli.cros import cros_chrome_sdk
+from chromite.lib import chrome_util
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import remote_access as remote
+from chromite.lib import stats
+from chromite.lib import timeout_util
+
+
# Kernel partition numbers passed to make_dev_ssd.sh when removing rootfs
# verification.
KERNEL_A_PARTITION = 2
KERNEL_B_PARTITION = 4

# Seconds to keep retrying the chrome-process kill loop before giving up.
KILL_PROC_MAX_WAIT = 10
# Seconds to wait after pkill for the processes to actually terminate.
POST_KILL_WAIT = 2

MOUNT_RW_COMMAND = 'mount -o remount,rw /'
# lsof on the deployed chrome binary; exit status 0 means it is in use.
LSOF_COMMAND = 'lsof %s/chrome'

_ANDROID_DIR = '/system/chrome'
_ANDROID_DIR_EXTRACT_PATH = 'system/chrome/*'

_CHROME_DIR = '/opt/google/chrome'
_CHROME_DIR_MOUNT = '/mnt/stateful_partition/deploy_rootfs/opt/google/chrome'

_UMOUNT_DIR_IF_MOUNTPOINT_CMD = (
    'if mountpoint -q %(dir)s; then umount %(dir)s; fi')
_BIND_TO_FINAL_DIR_CMD = 'mount --rbind %s %s'
_SET_MOUNT_FLAGS_CMD = 'mount -o remount,exec,suid %s'

# df in 1K blocks; used to measure free space on the target filesystem.
DF_COMMAND = 'df -k %s'
+
+
+def _UrlBaseName(url):
+  """Return the last component of the URL."""
+  return url.rstrip('/').rpartition('/')[-1]
+
+
class DeployFailure(failures_lib.StepFailure):
  """Raised whenever the deploy fails (device checks, space, rsync, etc.)."""
+
+
# Size of the target install dir and free space of its filesystem, as
# measured on the device ('du -ks' / 'df -k'; values nominally in KiB).
DeviceInfo = collections.namedtuple(
    'DeviceInfo', ['target_dir_size', 'target_fs_free'])
+
+
+class DeployChrome(object):
+  """Wraps the core deployment functionality."""
+
  def __init__(self, options, tempdir, staging_dir):
    """Initialize the class.

    Args:
      options: options object.
      tempdir: Scratch space for the class.  Caller has responsibility to clean
        it up.
      staging_dir: Directory to stage the files to.
    """
    self.tempdir = tempdir
    self.options = options
    self.staging_dir = staging_dir
    # In --staging-only mode no device connection is made (or needed).
    if not self.options.staging_only:
      self.device = remote.RemoteDevice(options.to, port=options.port,
                                        ping=options.ping)
    # Set by _MountRootfsAsWritable() when the remount failed and the target
    # dir is still not writable; presumably consulted later in the deploy
    # flow (the consumer is outside this view).
    self._target_dir_is_still_readonly = multiprocessing.Event()

    self.copy_paths = chrome_util.GetCopyPaths('chrome')
    self.chrome_dir = _CHROME_DIR
+
  def _GetRemoteMountFree(self, remote_dir):
    """Return free space on the filesystem holding |remote_dir|.

    Parses the 'Available' column of 'df -k' output. The value is in KiB
    unless df printed a G/M/K suffix, in which case it is scaled to bytes.
    NOTE(review): the mixed units look inconsistent but are preserved here;
    with 'df -k' the plain numeric (KiB) case is the one normally taken.
    """
    result = self.device.RunCommand(DF_COMMAND % remote_dir)
    # Skip the header line; take the 4th field (Available).
    line = result.output.splitlines()[1]
    value = line.split()[3]
    multipliers = {
        'G': 1024 * 1024 * 1024,
        'M': 1024 * 1024,
        'K': 1024,
    }
    return int(value.rstrip('GMK')) * multipliers.get(value[-1], 1)
+
  def _GetRemoteDirSize(self, remote_dir):
    """Return the size of |remote_dir| on the device, in KiB ('du -ks')."""
    result = self.device.RunCommand('du -ks %s' % remote_dir,
                                    capture_output=True)
    return int(result.output.split()[0])
+
  def _GetStagingDirSize(self):
    """Return the size of the local staging directory, in KiB ('du -ks')."""
    result = cros_build_lib.DebugRunCommand(['du', '-ks', self.staging_dir],
                                            redirect_stdout=True,
                                            capture_output=True)
    return int(result.output.split()[0])
+
  def _ChromeFileInUse(self):
    """Return True if the chrome binary in the target dir is held open."""
    result = self.device.RunCommand(LSOF_COMMAND % (self.options.target_dir,),
                                    error_code_ok=True, capture_output=True)
    # lsof exits 0 when at least one process has the file open.
    return result.returncode == 0
+
+  def _DisableRootfsVerification(self):
+    if not self.options.force:
+      logging.error('Detected that the device has rootfs verification enabled.')
+      logging.info('This script can automatically remove the rootfs '
+                   'verification, which requires that it reboot the device.')
+      logging.info('Make sure the device is in developer mode!')
+      logging.info('Skip this prompt by specifying --force.')
+      if not cros_build_lib.BooleanPrompt('Remove roots verification?', False):
+        # Since we stopped Chrome earlier, it's good form to start it up again.
+        if self.options.startui:
+          logging.info('Starting Chrome...')
+          self.device.RunCommand('start ui')
+        raise DeployFailure('Need rootfs verification to be disabled. '
+                            'Aborting.')
+
+    logging.info('Removing rootfs verification from %s', self.options.to)
+    # Running in VM's cause make_dev_ssd's firmware sanity checks to fail.
+    # Use --force to bypass the checks.
+    cmd = ('/usr/share/vboot/bin/make_dev_ssd.sh --partitions %d '
+           '--remove_rootfs_verification --force')
+    for partition in (KERNEL_A_PARTITION, KERNEL_B_PARTITION):
+      self.device.RunCommand(cmd % partition, error_code_ok=True)
+
+    # A reboot in developer mode takes a while (and has delays), so the user
+    # will have time to read and act on the USB boot instructions below.
+    logging.info('Please remember to press Ctrl-U if you are booting from USB.')
+    self.device.Reboot()
+
+    # Now that the machine has been rebooted, we need to kill Chrome again.
+    self._KillProcsIfNeeded()
+
+    # Make sure the rootfs is writable now.
+    self._MountRootfsAsWritable(error_code_ok=False)
+
  def _CheckUiJobStarted(self):
    """Return True if the upstart 'ui' job is currently in the 'start' goal.

    Returns:
      False when the job is not started, or when the device does not know
      the 'ui' job at all.
    """
    # status output is in the format:
    # <job_name> <status> ['process' <pid>].
    # <status> is in the format <goal>/<state>.
    try:
      result = self.device.RunCommand('status ui', capture_output=True)
    except cros_build_lib.RunCommandError as e:
      if 'Unknown job' in e.result.error:
        return False
      else:
        raise e

    return result.output.split()[1].split('/')[0] == 'start'
+
  def _KillProcsIfNeeded(self):
    """Stop the ui job and kill any remaining chrome processes.

    Raises:
      DeployFailure: If the chrome binary is still in use after
        KILL_PROC_MAX_WAIT seconds of retrying.
    """
    if self._CheckUiJobStarted():
      logging.info('Shutting down Chrome...')
      self.device.RunCommand('stop ui')

    # Developers sometimes run session_manager manually, in which case we'll
    # need to help shut the chrome processes down.
    try:
      with timeout_util.Timeout(KILL_PROC_MAX_WAIT):
        while self._ChromeFileInUse():
          logging.warning('The chrome binary on the device is in use.')
          logging.warning('Killing chrome and session_manager processes...\n')

          self.device.RunCommand("pkill 'chrome|session_manager'",
                                 error_code_ok=True)
          # Wait for processes to actually terminate
          time.sleep(POST_KILL_WAIT)
          logging.info('Rechecking the chrome binary...')
    except timeout_util.TimeoutError:
      msg = ('Could not kill processes after %s seconds.  Please exit any '
             'running chrome processes and try again.' % KILL_PROC_MAX_WAIT)
      raise DeployFailure(msg)
+
  def _MountRootfsAsWritable(self, error_code_ok=True):
    """Mount the rootfs as writable.

    If the command fails, and error_code_ok is True, and the target dir is not
    writable then this function sets self._target_dir_is_still_readonly.

    Args:
      error_code_ok: See remote.RemoteAccess.RemoteSh for details.
    """
    # TODO: Should migrate to use the remount functions in remote_access.
    result = self.device.RunCommand(MOUNT_RW_COMMAND,
                                    error_code_ok=error_code_ok,
                                    capture_output=True)
    # The remount can fail (e.g. rootfs verification still enabled); only
    # record the problem if the target dir is actually unwritable.
    if (result.returncode and
        not self.device.IsDirWritable(self.options.target_dir)):
      self._target_dir_is_still_readonly.set()
+
+  def _GetDeviceInfo(self):
+    """Gather disk-usage info about the target dir, in parallel.
+
+    Returns:
+      A DeviceInfo named tuple holding the current size of the target dir
+      and the free space of the filesystem containing it.
+    """
+    steps = [
+        functools.partial(self._GetRemoteDirSize, self.options.target_dir),
+        functools.partial(self._GetRemoteMountFree, self.options.target_dir)
+    ]
+    return_values = parallel.RunParallelSteps(steps, return_values=True)
+    return DeviceInfo(*return_values)
+
+  def _CheckDeviceFreeSpace(self, device_info):
+    """See if target device has enough space for Chrome.
+
+    Args:
+      device_info: A DeviceInfo named tuple.
+
+    Raises:
+      DeployFailure: if the effective free space (free space plus the space
+        reclaimed by overwriting the existing install) is too small.
+    """
+    # Sizes here appear to be in KiB (the '/ 1024' below yields MiB and the
+    # 100 * 1024 threshold matches the '100MB' warning) -- TODO confirm
+    # against _GetRemoteDirSize/_GetRemoteMountFree.
+    effective_free = device_info.target_dir_size + device_info.target_fs_free
+    staging_size = self._GetStagingDirSize()
+    if effective_free < staging_size:
+      raise DeployFailure(
+          'Not enough free space on the device.  Required: %s MiB, '
+          'actual: %s MiB.' % (staging_size / 1024, effective_free / 1024))
+    if device_info.target_fs_free < (100 * 1024):
+      logging.warning('The device has less than 100MB free.  deploy_chrome may '
+                      'hang during the transfer.')
+
+  def _Deploy(self):
+    """Rsync the staging dir to the device, fix modes, and restart the UI.
+
+    Raises:
+      DeployFailure: if rsync is not installed on the device.
+    """
+    logging.info('Copying Chrome to %s on device...', self.options.target_dir)
+    # Show the output (status) for this command.
+    # NOTE(review): chmod below uses _CHROME_DIR rather than
+    # options.target_dir -- confirm this is intended for non-default
+    # target dirs.
+    dest_path = _CHROME_DIR
+    if not self.device.HasRsync():
+      raise DeployFailure(
+          'rsync is not found on the device.\n'
+          'Run dev_install on the device to get rsync installed')
+    self.device.CopyToDevice('%s/' % os.path.abspath(self.staging_dir),
+                             self.options.target_dir,
+                             inplace=True, debug_level=logging.INFO,
+                             verbose=self.options.verbose)
+
+    for p in self.copy_paths:
+      if p.mode:
+        # Set mode if necessary.
+        self.device.RunCommand('chmod %o %s/%s' % (
+            p.mode, dest_path, p.src if not p.dest else p.dest))
+
+    if self.options.startui:
+      logging.info('Starting UI...')
+      self.device.RunCommand('start ui')
+
+  def _CheckConnection(self):
+    """Verify we can run a trivial command on the device.
+
+    Raises:
+      DeployFailure: if the device is unreachable.
+    """
+    try:
+      logging.info('Testing connection to the device...')
+      self.device.RunCommand('true')
+    except cros_build_lib.RunCommandError as ex:
+      logging.error('Error connecting to the test device.')
+      raise DeployFailure(ex)
+
+  def _CheckDeployType(self):
+    """Pick copy paths based on which binary exists in the build dir.
+
+    If the build dir lacks a 'chrome' binary, switch to the
+    envoy_shell/app_shell copy-path layout and skip restarting the UI.
+    """
+    if self.options.build_dir:
+      def BinaryExists(filename):
+        """Checks if the passed-in file is present in the build directory."""
+        return os.path.exists(os.path.join(self.options.build_dir, filename))
+
+      # Handle non-Chrome deployments.
+      if not BinaryExists('chrome'):
+        if BinaryExists('envoy_shell'):
+          self.copy_paths = chrome_util.GetCopyPaths('envoy')
+        elif BinaryExists('app_shell'):
+          self.copy_paths = chrome_util.GetCopyPaths('app_shell')
+
+        # TODO(derat): Update _Deploy() and remove this after figuring out how
+        # {app,envoy}_shell should be executed.
+        self.options.startui = False
+
+  def _PrepareStagingDir(self):
+    """Stage the Chrome artifacts into self.staging_dir."""
+    _PrepareStagingDir(self.options, self.tempdir, self.staging_dir,
+                       self.copy_paths, self.chrome_dir)
+
+  def _MountTarget(self):
+    """Bind-mount the deployed target dir onto options.mount_dir."""
+    logging.info('Mounting Chrome...')
+
+    # Create directory if does not exist
+    self.device.RunCommand('mkdir -p --mode 0775 %s' % (
+        self.options.mount_dir,))
+    # Umount the existing mount on mount_dir if present first
+    self.device.RunCommand(_UMOUNT_DIR_IF_MOUNTPOINT_CMD %
+                           {'dir': self.options.mount_dir})
+    self.device.RunCommand(_BIND_TO_FINAL_DIR_CMD % (self.options.target_dir,
+                                                     self.options.mount_dir))
+    # Chrome needs partition to have exec and suid flags set
+    self.device.RunCommand(_SET_MOUNT_FLAGS_CMD % (self.options.mount_dir,))
+
+  def Cleanup(self):
+    """Clean up RemoteDevice resources (skipped in staging-only mode)."""
+    if not self.options.staging_only:
+      self.device.Cleanup()
+
+  def Perform(self):
+    """Run the full deployment: stage, prepare the device, and deploy.
+
+    Returns:
+      0 when --staging-only was requested; None otherwise.
+    """
+    self._CheckDeployType()
+
+    # If requested, just do the staging step.
+    if self.options.staging_only:
+      self._PrepareStagingDir()
+      return 0
+
+    # Run setup steps in parallel. If any step fails, RunParallelSteps will
+    # stop printing output at that point, and halt any running steps.
+    steps = [self._GetDeviceInfo, self._CheckConnection,
+             self._KillProcsIfNeeded, self._MountRootfsAsWritable,
+             self._PrepareStagingDir]
+    ret = parallel.RunParallelSteps(steps, halt_on_error=True,
+                                    return_values=True)
+    # Only the first step (_GetDeviceInfo) produces a value we consume.
+    self._CheckDeviceFreeSpace(ret[0])
+
+    # If we're trying to deploy to a dir which is not writable and we failed
+    # to mark the rootfs as writable, try disabling rootfs verification.
+    if self._target_dir_is_still_readonly.is_set():
+      self._DisableRootfsVerification()
+
+    if self.options.mount_dir is not None:
+      self._MountTarget()
+
+    # Actually deploy Chrome to the device.
+    self._Deploy()
+
+
+def ValidateGypDefines(value):
+  """Convert GYP_DEFINES-formatted string to dictionary.
+
+  Args:
+    value: Raw GYP_DEFINES string, as passed on the command line.
+
+  Returns:
+    The parsed defines from chrome_util.ProcessGypDefines.
+  """
+  return chrome_util.ProcessGypDefines(value)
+
+
+def _CreateParser():
+  """Create our custom parser.
+
+  Returns:
+    A commandline.ArgumentParser configured with all deploy_chrome options.
+  """
+  parser = commandline.ArgumentParser(description=__doc__, caching=True)
+
+  # TODO(rcui): Have this use the UI-V2 format of having source and target
+  # device be specified as positional arguments.
+  parser.add_argument('--force', action='store_true', default=False,
+                      help='Skip all prompts (i.e., for disabling of rootfs '
+                           'verification).  This may result in the target '
+                           'machine being rebooted.')
+  sdk_board_env = os.environ.get(cros_chrome_sdk.SDKFetcher.SDK_BOARD_ENV)
+  parser.add_argument('--board', default=sdk_board_env,
+                      help="The board the Chrome build is targeted for.  When "
+                           "in a 'cros chrome-sdk' shell, defaults to the SDK "
+                           "board.")
+  parser.add_argument('--build-dir', type='path',
+                      help='The directory with Chrome build artifacts to '
+                           'deploy from. Typically of format '
+                           '<chrome_root>/out/Debug. When this option is used, '
+                           'the GYP_DEFINES environment variable must be set.')
+  parser.add_argument('--target-dir', type='path',
+                      default=None,
+                      help='Target directory on device to deploy Chrome into.')
+  parser.add_argument('-g', '--gs-path', type='gs_path',
+                      help='GS path that contains the chrome to deploy.')
+  parser.add_argument('--nostartui', action='store_false', dest='startui',
+                      default=True,
+                      help="Don't restart the ui daemon after deployment.")
+  parser.add_argument('--nostrip', action='store_false', dest='dostrip',
+                      default=True,
+                      help="Don't strip binaries during deployment.  Warning: "
+                           'the resulting binaries will be very large!')
+  parser.add_argument('-p', '--port', type=int, default=remote.DEFAULT_SSH_PORT,
+                      help='Port of the target device to connect to.')
+  parser.add_argument('-t', '--to',
+                      help='The IP address of the CrOS device to deploy to.')
+  parser.add_argument('-v', '--verbose', action='store_true', default=False,
+                      help='Show more debug output.')
+  parser.add_argument('--mount-dir', type='path', default=None,
+                      help='Deploy Chrome in target directory and bind it '
+                           'to the directory specified by this flag.'
+                           'Any existing mount on this directory will be '
+                           'umounted first.')
+  parser.add_argument('--mount', action='store_true', default=False,
+                      help='Deploy Chrome to default target directory and bind '
+                           'it to the default mount directory.'
+                           'Any existing mount on this directory will be '
+                           'umounted first.')
+
+  group = parser.add_argument_group('Advanced Options')
+  group.add_argument('-l', '--local-pkg-path', type='path',
+                     help='Path to local chrome prebuilt package to deploy.')
+  group.add_argument('--sloppy', action='store_true', default=False,
+                     help='Ignore when mandatory artifacts are missing.')
+  group.add_argument('--staging-flags', default=None, type=ValidateGypDefines,
+                     help=('Extra flags to control staging.  Valid flags are - '
+                           '%s' % ', '.join(chrome_util.STAGING_FLAGS)))
+  group.add_argument('--strict', action='store_true', default=False,
+                     help='Stage artifacts based on the GYP_DEFINES '
+                          'environment variable and --staging-flags, if set. '
+                          'Enforce that all optional artifacts are deployed.')
+  group.add_argument('--strip-flags', default=None,
+                     help="Flags to call the 'strip' binutil tool with.  "
+                          "Overrides the default arguments.")
+  group.add_argument('--ping', action='store_true', default=False,
+                     help='Ping the device before connection attempt.')
+
+  group = parser.add_argument_group(
+      'Metadata Overrides (Advanced)',
+      description='Provide all of these overrides in order to remove '
+                  'dependencies on metadata.json existence.')
+  group.add_argument('--target-tc', action='store', default=None,
+                     help='Override target toolchain name, e.g. '
+                          'x86_64-cros-linux-gnu')
+  group.add_argument('--toolchain-url', action='store', default=None,
+                     help='Override toolchain url format pattern, e.g. '
+                          '2014/04/%%(target)s-2014.04.23.220740.tar.xz')
+
+  # GYP_DEFINES that Chrome was built with.  Influences which files are staged
+  # when --build-dir is set.  Defaults to reading from the GYP_DEFINES
+  # environment variable.
+  parser.add_argument('--gyp-defines', default=None, type=ValidateGypDefines,
+                      help=argparse.SUPPRESS)
+  # Path of an empty directory to stage chrome artifacts to.  Defaults to a
+  # temporary directory that is removed when the script finishes. If the path
+  # is specified, then it will not be removed.
+  parser.add_argument('--staging-dir', type='path', default=None,
+                      help=argparse.SUPPRESS)
+  # Only prepare the staging directory, and skip deploying to the device.
+  parser.add_argument('--staging-only', action='store_true', default=False,
+                      help=argparse.SUPPRESS)
+  # Path to a binutil 'strip' tool to strip binaries with.  The passed-in path
+  # is used as-is, and not normalized.  Used by the Chrome ebuild to skip
+  # fetching the SDK toolchain.
+  parser.add_argument('--strip-bin', default=None, help=argparse.SUPPRESS)
+  return parser
+
+
+def _ParseCommandLine(argv):
+  """Parse args, and run environment-independent checks.
+
+  Args:
+    argv: The command-line arguments (excluding the program name).
+
+  Returns:
+    The parsed options, with target_dir/mount_dir defaults filled in.
+  """
+  parser = _CreateParser()
+  options = parser.parse_args(argv)
+
+  # Exactly one source of Chrome must be given, and --build-dir is mutually
+  # exclusive with the prebuilt-package sources.
+  if not any([options.gs_path, options.local_pkg_path, options.build_dir]):
+    parser.error('Need to specify either --gs-path, --local-pkg-path, or '
+                 '--build-dir')
+  if options.build_dir and any([options.gs_path, options.local_pkg_path]):
+    parser.error('Cannot specify both --build_dir and '
+                 '--gs-path/--local-pkg-patch')
+  if options.build_dir and not options.board:
+    parser.error('--board is required when --build-dir is specified.')
+  if options.gs_path and options.local_pkg_path:
+    parser.error('Cannot specify both --gs-path and --local-pkg-path')
+  if not (options.staging_only or options.to):
+    parser.error('Need to specify --to')
+  if (options.strict or options.staging_flags) and not options.build_dir:
+    parser.error('--strict and --staging-flags require --build-dir to be '
+                 'set.')
+  if options.staging_flags and not options.strict:
+    parser.error('--staging-flags requires --strict to be set.')
+  if options.sloppy and options.strict:
+    parser.error('Cannot specify both --strict and --sloppy.')
+
+  # Pick the default target dir: the mount-specific location when any mount
+  # option is in play, the normal Chrome dir otherwise.
+  if options.mount or options.mount_dir:
+    if not options.target_dir:
+      options.target_dir = _CHROME_DIR_MOUNT
+  else:
+    if not options.target_dir:
+      options.target_dir = _CHROME_DIR
+
+  if options.mount and not options.mount_dir:
+    options.mount_dir = _CHROME_DIR
+
+  return options
+
+
+def _PostParseCheck(options):
+  """Perform some usage validation (after we've parsed the arguments).
+
+  Args:
+    options: The options object returned by the argument parser.
+  """
+  if options.local_pkg_path and not os.path.isfile(options.local_pkg_path):
+    cros_build_lib.Die('%s is not a file.', options.local_pkg_path)
+
+  # Fall back to the GYP_DEFINES environment variable when the flag was not
+  # given explicitly.
+  if not options.gyp_defines:
+    gyp_env = os.getenv('GYP_DEFINES', None)
+    if gyp_env is not None:
+      options.gyp_defines = chrome_util.ProcessGypDefines(gyp_env)
+      logging.debug('GYP_DEFINES taken from environment: %s',
+                    options.gyp_defines)
+
+  if options.strict and not options.gyp_defines:
+    cros_build_lib.Die('When --strict is set, the GYP_DEFINES environment '
+                       'variable must be set.')
+
+
+def _FetchChromePackage(cache_dir, tempdir, gs_path):
+  """Get the chrome prebuilt tarball from GS.
+
+  Args:
+    cache_dir: Directory for GSContext to use as its cache.
+    tempdir: Directory to download the tarball into.
+    gs_path: GS path (file or directory) that contains the chrome package.
+
+  Returns:
+    Path to the fetched chrome tarball.
+
+  Raises:
+    Exception: if no chrome package is found at gs_path.
+  """
+  gs_ctx = gs.GSContext(cache_dir=cache_dir, init_boto=True)
+  files = gs_ctx.LS(gs_path)
+  files = [found for found in files if
+           _UrlBaseName(found).startswith('%s-' % constants.CHROME_PN)]
+  if not files:
+    raise Exception('No chrome package found at %s' % gs_path)
+  elif len(files) > 1:
+    # - Users should provide us with a direct link to either a stripped or
+    #   unstripped chrome package.
+    # - In the case of being provided with an archive directory, where both
+    #   stripped and unstripped chrome available, use the stripped chrome
+    #   package.
+    # - Stripped chrome pkg is chromeos-chrome-<version>.tar.gz
+    # - Unstripped chrome pkg is chromeos-chrome-<version>-unstripped.tar.gz.
+    files = [f for f in files if not 'unstripped' in f]
+    assert len(files) == 1
+    logging.warning('Multiple chrome packages found.  Using %s', files[0])
+
+  filename = _UrlBaseName(files[0])
+  logging.info('Fetching %s...', filename)
+  gs_ctx.Copy(files[0], tempdir, print_cmd=False)
+  chrome_path = os.path.join(tempdir, filename)
+  assert os.path.exists(chrome_path)
+  return chrome_path
+
+
+@contextlib.contextmanager
+def _StripBinContext(options):
+  """Context manager yielding the path of the 'strip' tool to use.
+
+  Yields None when stripping is disabled (--nostrip), the user-supplied
+  --strip-bin path when given, and otherwise the strip binary resolved from
+  the downloaded SDK toolchain (kept alive for the scope of the context).
+  """
+  if not options.dostrip:
+    yield None
+  elif options.strip_bin:
+    yield options.strip_bin
+  else:
+    sdk = cros_chrome_sdk.SDKFetcher(options.cache_dir, options.board)
+    components = (sdk.TARGET_TOOLCHAIN_KEY, constants.CHROME_ENV_TAR)
+    with sdk.Prepare(components=components, target_tc=options.target_tc,
+                     toolchain_url=options.toolchain_url) as ctx:
+      # The Chrome environment file names the toolchain's strip binary;
+      # resolve it inside the fetched toolchain directory.
+      env_path = os.path.join(ctx.key_map[constants.CHROME_ENV_TAR].path,
+                              constants.CHROME_ENV_FILE)
+      strip_bin = osutils.SourceEnvironment(env_path, ['STRIP'])['STRIP']
+      strip_bin = os.path.join(ctx.key_map[sdk.TARGET_TOOLCHAIN_KEY].path,
+                               'bin', os.path.basename(strip_bin))
+      yield strip_bin
+
+
+def _PrepareStagingDir(options, tempdir, staging_dir, copy_paths=None,
+                       chrome_dir=_CHROME_DIR):
+  """Place the necessary files in the staging directory.
+
+  The staging directory is the directory used to rsync the build artifacts over
+  to the device.  Only the necessary Chrome build artifacts are put into the
+  staging directory.
+
+  Args:
+    options: Parsed command-line options.
+    tempdir: Scratch directory for downloads.
+    staging_dir: Directory to populate; created if necessary.
+    copy_paths: Optional list of chrome_util.Path objects to stage when
+      deploying from a build dir.
+    chrome_dir: On-device Chrome directory whose layout the package mirrors.
+  """
+  osutils.SafeMakedirs(staging_dir)
+  os.chmod(staging_dir, 0o755)
+  if options.build_dir:
+    with _StripBinContext(options) as strip_bin:
+      strip_flags = (None if options.strip_flags is None else
+                     shlex.split(options.strip_flags))
+      chrome_util.StageChromeFromBuildDir(
+          staging_dir, options.build_dir, strip_bin, strict=options.strict,
+          sloppy=options.sloppy, gyp_defines=options.gyp_defines,
+          staging_flags=options.staging_flags,
+          strip_flags=strip_flags, copy_paths=copy_paths)
+  else:
+    pkg_path = options.local_pkg_path
+    if options.gs_path:
+      pkg_path = _FetchChromePackage(options.cache_dir, tempdir,
+                                     options.gs_path)
+
+    assert pkg_path
+    logging.info('Extracting %s...', pkg_path)
+    # Extract only the ./opt/google/chrome contents, directly into the staging
+    # dir, collapsing the directory hierarchy.
+    if pkg_path[-4:] == '.zip':
+      cros_build_lib.DebugRunCommand(
+          ['unzip', '-X', pkg_path, _ANDROID_DIR_EXTRACT_PATH, '-d',
+           staging_dir])
+      for filename in glob.glob(os.path.join(staging_dir, 'system/chrome/*')):
+        shutil.move(filename, staging_dir)
+      osutils.RmDir(os.path.join(staging_dir, 'system'), ignore_missing=True)
+    else:
+      cros_build_lib.DebugRunCommand(
+          ['tar', '--strip-components', '4', '--extract',
+           '--preserve-permissions', '--file', pkg_path, '.%s' % chrome_dir],
+          cwd=staging_dir)
+
+
+def main(argv):
+  """Entry point: parse options, stage Chrome, and deploy it to the device."""
+  options = _ParseCommandLine(argv)
+  _PostParseCheck(options)
+
+  # Set cros_build_lib debug level to hide RunCommand spew.
+  if options.verbose:
+    logging.getLogger().setLevel(logging.DEBUG)
+  else:
+    logging.getLogger().setLevel(logging.INFO)
+
+  with stats.UploadContext() as queue:
+    cmd_stats = stats.Stats.SafeInit(cmd_line=argv, cmd_base='deploy_chrome')
+    if cmd_stats:
+      queue.put([cmd_stats, stats.StatsUploader.URL, 1])
+
+    with osutils.TempDir(set_global=True) as tempdir:
+      # Use the user-specified staging dir if given; otherwise stage into a
+      # subdirectory of the temp dir, which is removed on exit.
+      staging_dir = options.staging_dir
+      if not staging_dir:
+        staging_dir = os.path.join(tempdir, 'chrome')
+
+      deploy = DeployChrome(options, tempdir, staging_dir)
+      try:
+        deploy.Perform()
+      except failures_lib.StepFailure as ex:
+        # Surface the failure message without a traceback.
+        raise SystemExit(str(ex).strip())
+      deploy.Cleanup()
diff --git a/scripts/deploy_chrome_unittest b/scripts/deploy_chrome_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/deploy_chrome_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/deploy_chrome_unittest.py b/scripts/deploy_chrome_unittest.py
new file mode 100644
index 0000000..151058b
--- /dev/null
+++ b/scripts/deploy_chrome_unittest.py
@@ -0,0 +1,357 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the deploy_chrome script."""
+
+from __future__ import print_function
+
+import mock
+import os
+import time
+
+from chromite.cli.cros import cros_chrome_sdk_unittest
+from chromite.lib import chrome_util
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import partial_mock
+from chromite.lib import remote_access
+from chromite.lib import remote_access_unittest
+from chromite.lib import stats
+from chromite.lib import stats_unittest
+from chromite.scripts import deploy_chrome
+
+
+# pylint: disable=W0212
+
+_REGULAR_TO = ('--to', 'monkey')
+_GS_PATH = 'gs://foon'
+
+
+def _ParseCommandLine(argv):
+  """Parse argv through deploy_chrome with debug logging enabled."""
+  return deploy_chrome._ParseCommandLine(['--log-level', 'debug'] + argv)
+
+
+class InterfaceTest(cros_test_lib.OutputTestCase):
+  """Tests the commandline interface of the script."""
+
+  BOARD = 'lumpy'
+
+  def testGsLocalPathUnSpecified(self):
+    """Test no chrome path specified."""
+    with self.OutputCapturer():
+      self.assertRaises2(SystemExit, _ParseCommandLine, list(_REGULAR_TO),
+                         check_attrs={'code': 2})
+
+  def testGsPathSpecified(self):
+    """Test case of GS path specified."""
+    argv = list(_REGULAR_TO) + ['--gs-path', _GS_PATH]
+    _ParseCommandLine(argv)
+
+  def testLocalPathSpecified(self):
+    """Test case of local path specified."""
+    argv = list(_REGULAR_TO) + ['--local-pkg-path', '/path/to/chrome']
+    _ParseCommandLine(argv)
+
+  def testNoTarget(self):
+    """Test no target specified."""
+    argv = ['--gs-path', _GS_PATH]
+    self.assertParseError(argv)
+
+  def assertParseError(self, argv):
+    """Assert that parsing argv exits with argparse's usage-error code 2."""
+    with self.OutputCapturer():
+      self.assertRaises2(SystemExit, _ParseCommandLine, argv,
+                         check_attrs={'code': 2})
+
+  def testStagingFlagsNoStrict(self):
+    """Errors out when --staging-flags is set without --strict."""
+    argv = ['--staging-only', '--build-dir=/path/to/nowhere',
+            '--board=%s' % self.BOARD, '--staging-flags=highdpi']
+    self.assertParseError(argv)
+
+  def testStrictNoBuildDir(self):
+    """Errors out when --strict is set without --build-dir."""
+    argv = ['--staging-only', '--strict', '--gs-path', _GS_PATH]
+    self.assertParseError(argv)
+
+  def testNoBoardBuildDir(self):
+    """Errors out when --build-dir is set without --board."""
+    argv = ['--staging-only', '--build-dir=/path/to/nowhere']
+    self.assertParseError(argv)
+
+  def testMountOptionSetsTargetDir(self):
+    """--mount fills in a default target dir."""
+    argv = list(_REGULAR_TO) + ['--gs-path', _GS_PATH, '--mount']
+    options = _ParseCommandLine(argv)
+    self.assertIsNot(options.target_dir, None)
+
+  def testMountOptionSetsMountDir(self):
+    """--mount fills in a default mount dir."""
+    argv = list(_REGULAR_TO) + ['--gs-path', _GS_PATH, '--mount']
+    options = _ParseCommandLine(argv)
+    self.assertIsNot(options.mount_dir, None)
+
+  def testMountOptionDoesNotOverrideTargetDir(self):
+    """An explicit --target-dir wins over the --mount default."""
+    argv = list(_REGULAR_TO) + ['--gs-path', _GS_PATH, '--mount',
+                                '--target-dir', '/foo/bar/cow']
+    options = _ParseCommandLine(argv)
+    self.assertEqual(options.target_dir, '/foo/bar/cow')
+
+  def testMountOptionDoesNotOverrideMountDir(self):
+    """An explicit --mount-dir wins over the --mount default."""
+    argv = list(_REGULAR_TO) + ['--gs-path', _GS_PATH, '--mount',
+                                '--mount-dir', '/foo/bar/cow']
+    options = _ParseCommandLine(argv)
+    self.assertEqual(options.mount_dir, '/foo/bar/cow')
+
+
+class DeployChromeMock(partial_mock.PartialMock):
+  """Deploy Chrome Mock Class."""
+
+  TARGET = 'chromite.scripts.deploy_chrome.DeployChrome'
+  ATTRS = ('_KillProcsIfNeeded', '_DisableRootfsVerification')
+
+  def __init__(self):
+    """Set up remote-device/shell mocks with rootfs verification enabled."""
+    partial_mock.PartialMock.__init__(self)
+    self.remote_device_mock = remote_access_unittest.RemoteDeviceMock()
+    # Target starts off as having rootfs verification enabled.
+    self.rsh_mock = remote_access_unittest.RemoteShMock()
+    self.rsh_mock.SetDefaultCmdResult(0)
+    self.MockMountCmd(1)
+    self.rsh_mock.AddCmdResult(
+        deploy_chrome.LSOF_COMMAND % (deploy_chrome._CHROME_DIR,), 1)
+
+  def MockMountCmd(self, returnvalue):
+    """Make the remount-rw command exit with the given return value."""
+    self.rsh_mock.AddCmdResult(deploy_chrome.MOUNT_RW_COMMAND,
+                               returnvalue)
+
+  def _DisableRootfsVerification(self, inst):
+    """Run the real method, but with time.sleep stubbed out for speed."""
+    with mock.patch.object(time, 'sleep'):
+      self.backup['_DisableRootfsVerification'](inst)
+
+  def PreStart(self):
+    """Start the nested device/shell mocks before patching begins."""
+    self.remote_device_mock.start()
+    self.rsh_mock.start()
+
+  def PreStop(self):
+    """Stop the nested device/shell mocks before unpatching."""
+    self.rsh_mock.stop()
+    self.remote_device_mock.stop()
+
+  def _KillProcsIfNeeded(self, _inst):
+    # Fully stub out for now.
+    pass
+
+
+class MainTest(cros_test_lib.MockLoggingTestCase):
+  """Main tests."""
+
+  def setUp(self):
+    # Stub out the deployment itself; these tests only cover main()'s
+    # stats-upload behavior.
+    self.PatchObject(deploy_chrome.DeployChrome, 'Perform', autospec=True)
+    self.stats_module_mock = stats_unittest.StatsModuleMock()
+    self.StartPatcher(self.stats_module_mock)
+
+  def testStatsUpload(self, call_count=1):
+    """The stats upload path."""
+    deploy_chrome.main(['--board=lumpy', '--staging-only',
+                        '--build-dir=/tmp/abc'])
+    self.assertEquals(stats.StatsUploader._Upload.call_count, call_count)
+
+  def testStatsUploadError(self):
+    """Don't upload stats if we fail to create it."""
+    self.stats_module_mock.stats_mock.init_exception = True
+    with cros_test_lib.LoggingCapturer():
+      self.testStatsUpload(call_count=0)
+
+
+class DeployTest(cros_test_lib.MockTempDirTestCase):
+  """Setup a deploy object with a GS-path for use in tests."""
+
+  def _GetDeployChrome(self, args):
+    """Build a DeployChrome instance from the given command-line args."""
+    options = _ParseCommandLine(args)
+    return deploy_chrome.DeployChrome(
+        options, self.tempdir, os.path.join(self.tempdir, 'staging'))
+
+  def setUp(self):
+    self.deploy_mock = self.StartPatcher(DeployChromeMock())
+    self.deploy = self._GetDeployChrome(
+        list(_REGULAR_TO) + ['--gs-path', _GS_PATH, '--force'])
+
+
+class TestDisableRootfsVerification(DeployTest):
+  """Testing disabling of rootfs verification and RO mode."""
+
+  def testDisableRootfsVerificationSuccess(self):
+    """Test the working case, disabling rootfs verification."""
+    # Remount succeeds (returncode 0) after verification is disabled.
+    self.deploy_mock.MockMountCmd(0)
+    self.deploy._DisableRootfsVerification()
+    self.assertFalse(self.deploy._target_dir_is_still_readonly.is_set())
+
+  def testDisableRootfsVerificationFailure(self):
+    """Test failure to disable rootfs verification."""
+    # The mock's default mount result (1) makes the final non-error_code_ok
+    # remount raise.
+    self.assertRaises(cros_build_lib.RunCommandError,
+                      self.deploy._DisableRootfsVerification)
+    self.assertFalse(self.deploy._target_dir_is_still_readonly.is_set())
+
+
+class TestMount(DeployTest):
+  """Testing mount success and failure."""
+
+  def testSuccess(self):
+    """Test case where we are able to mount as writable."""
+    self.assertFalse(self.deploy._target_dir_is_still_readonly.is_set())
+    self.deploy_mock.MockMountCmd(0)
+    self.deploy._MountRootfsAsWritable()
+    self.assertFalse(self.deploy._target_dir_is_still_readonly.is_set())
+
+  def testMountError(self):
+    """Test that mount failure doesn't raise an exception by default."""
+    self.assertFalse(self.deploy._target_dir_is_still_readonly.is_set())
+    # Mount fails (default mock returncode 1) and the target dir is not
+    # writable, so the readonly flag must be set.
+    self.PatchObject(remote_access.RemoteDevice, 'IsDirWritable',
+                     return_value=False, autospec=True)
+    self.deploy._MountRootfsAsWritable()
+    self.assertTrue(self.deploy._target_dir_is_still_readonly.is_set())
+
+  def testMountRwFailure(self):
+    """Test that mount failure raises an exception if error_code_ok=False."""
+    self.assertRaises(cros_build_lib.RunCommandError,
+                      self.deploy._MountRootfsAsWritable, error_code_ok=False)
+    self.assertFalse(self.deploy._target_dir_is_still_readonly.is_set())
+
+  def testMountTempDir(self):
+    """Test that mount succeeds if target dir is writable."""
+    self.assertFalse(self.deploy._target_dir_is_still_readonly.is_set())
+    # Even though the mount command fails, a writable target dir means no
+    # remount is needed and the readonly flag stays clear.
+    self.PatchObject(remote_access.RemoteDevice, 'IsDirWritable',
+                     return_value=True, autospec=True)
+    self.deploy._MountRootfsAsWritable()
+    self.assertFalse(self.deploy._target_dir_is_still_readonly.is_set())
+
+
+class TestUiJobStarted(DeployTest):
+  """Test detection of a running 'ui' job."""
+
+  def MockStatusUiCmd(self, **kwargs):
+    """Set the mocked result of the 'status ui' command."""
+    self.deploy_mock.rsh_mock.AddCmdResult('status ui', **kwargs)
+
+  def testUiJobStartedFalse(self):
+    """Correct results with a stopped job."""
+    self.MockStatusUiCmd(output='ui stop/waiting')
+    self.assertFalse(self.deploy._CheckUiJobStarted())
+
+  def testNoUiJob(self):
+    """Correct results when the job doesn't exist."""
+    self.MockStatusUiCmd(error='start: Unknown job: ui', returncode=1)
+    self.assertFalse(self.deploy._CheckUiJobStarted())
+
+  def testCheckRootfsWriteableTrue(self):
+    """Correct results with a running job."""
+    self.MockStatusUiCmd(output='ui start/running, process 297')
+    self.assertTrue(self.deploy._CheckUiJobStarted())
+
+
+class StagingTest(cros_test_lib.MockTempDirTestCase):
+  """Test user-mode and ebuild-mode staging functionality."""
+
+  def setUp(self):
+    self.staging_dir = os.path.join(self.tempdir, 'staging')
+    self.build_dir = os.path.join(self.tempdir, 'build_dir')
+    self.common_flags = ['--build-dir', self.build_dir,
+                         '--board=lumpy', '--staging-only', '--cache-dir',
+                         self.tempdir]
+    self.sdk_mock = self.StartPatcher(cros_chrome_sdk_unittest.SDKFetcherMock())
+    # Avoid reading a real Chrome environment file for the strip tool.
+    self.PatchObject(
+        osutils, 'SourceEnvironment', autospec=True,
+        return_value={'STRIP': 'x86_64-cros-linux-gnu-strip'})
+
+  def testSingleFileDeployFailure(self):
+    """Default staging enforces that mandatory files are copied"""
+    options = _ParseCommandLine(self.common_flags)
+    osutils.Touch(os.path.join(self.build_dir, 'chrome'), makedirs=True)
+    self.assertRaises(
+        chrome_util.MissingPathError, deploy_chrome._PrepareStagingDir,
+        options, self.tempdir, self.staging_dir, chrome_util._COPY_PATHS_CHROME)
+
+  def testSloppyDeployFailure(self):
+    """Sloppy staging enforces that at least one file is copied."""
+    options = _ParseCommandLine(self.common_flags + ['--sloppy'])
+    self.assertRaises(
+        chrome_util.MissingPathError, deploy_chrome._PrepareStagingDir,
+        options, self.tempdir, self.staging_dir, chrome_util._COPY_PATHS_CHROME)
+
+  def testSloppyDeploySuccess(self):
+    """Sloppy staging - stage one file."""
+    options = _ParseCommandLine(self.common_flags + ['--sloppy'])
+    osutils.Touch(os.path.join(self.build_dir, 'chrome'), makedirs=True)
+    deploy_chrome._PrepareStagingDir(options, self.tempdir, self.staging_dir,
+                                     chrome_util._COPY_PATHS_CHROME)
+
+  def testEmptyDeployStrict(self):
+    """Strict staging fails when there are no files."""
+    options = _ParseCommandLine(
+        self.common_flags + ['--gyp-defines', 'chromeos=1', '--strict'])
+
+    self.assertRaises(
+        chrome_util.MissingPathError, deploy_chrome._PrepareStagingDir,
+        options, self.tempdir, self.staging_dir, chrome_util._COPY_PATHS_CHROME)
+
+
+class DeployTestBuildDir(cros_test_lib.MockTempDirTestCase):
+  """Set up a deploy object with a build-dir for use in deployment type tests"""
+
+  def _GetDeployChrome(self, args):
+    """Build a DeployChrome instance from the given command-line args."""
+    options = _ParseCommandLine(args)
+    return deploy_chrome.DeployChrome(
+        options, self.tempdir, os.path.join(self.tempdir, 'staging'))
+
+  def setUp(self):
+    self.staging_dir = os.path.join(self.tempdir, 'staging')
+    self.build_dir = os.path.join(self.tempdir, 'build_dir')
+    self.deploy_mock = self.StartPatcher(DeployChromeMock())
+    self.deploy = self._GetDeployChrome(
+        list(_REGULAR_TO) + ['--build-dir', self.build_dir,
+                             '--board=lumpy', '--staging-only', '--cache-dir',
+                             self.tempdir, '--sloppy'])
+
+  def getCopyPath(self, source_path):
+    """Return a chrome_util.Path or None if not present."""
+    paths = [p for p in self.deploy.copy_paths if p.src == source_path]
+    return paths[0] if paths else None
+
+class TestDeploymentType(DeployTestBuildDir):
+  """Test detection of deployment type using build dir."""
+
+  def testEnvoyDetection(self):
+    """Check for an envoy deployment"""
+    osutils.Touch(os.path.join(self.deploy.options.build_dir, 'envoy_shell'),
+                  makedirs=True)
+    self.deploy._CheckDeployType()
+    self.assertTrue(self.getCopyPath('envoy_shell'))
+    self.assertFalse(self.getCopyPath('app_shell'))
+    self.assertFalse(self.getCopyPath('chrome'))
+
+  def testAppShellDetection(self):
+    """Check for an app_shell deployment"""
+    osutils.Touch(os.path.join(self.deploy.options.build_dir, 'app_shell'),
+                  makedirs=True)
+    self.deploy._CheckDeployType()
+    self.assertTrue(self.getCopyPath('app_shell'))
+    self.assertFalse(self.getCopyPath('chrome'))
+    self.assertFalse(self.getCopyPath('envoy_shell'))
+
+  def testChromeAndAppShellDetection(self):
+    """Check for a chrome deployment when app_shell/envoy_shell also exist."""
+    # A 'chrome' binary takes precedence over the shell variants.
+    osutils.Touch(os.path.join(self.deploy.options.build_dir, 'chrome'),
+                  makedirs=True)
+    osutils.Touch(os.path.join(self.deploy.options.build_dir, 'app_shell'),
+                  makedirs=True)
+    osutils.Touch(os.path.join(self.deploy.options.build_dir, 'envoy_shell'),
+                  makedirs=True)
+    self.deploy._CheckDeployType()
+    self.assertTrue(self.getCopyPath('chrome'))
+    self.assertFalse(self.getCopyPath('app_shell'))
+    self.assertFalse(self.getCopyPath('envoy_shell'))
+
+  def testChromeDetection(self):
+    """Check for a regular chrome deployment"""
+    osutils.Touch(os.path.join(self.deploy.options.build_dir, 'chrome'),
+                  makedirs=True)
+    self.deploy._CheckDeployType()
+    self.assertTrue(self.getCopyPath('chrome'))
+    self.assertFalse(self.getCopyPath('app_shell'))
+    self.assertFalse(self.getCopyPath('envoy_shell'))
diff --git a/scripts/diff_license_html.py b/scripts/diff_license_html.py
new file mode 100644
index 0000000..a95a593
--- /dev/null
+++ b/scripts/diff_license_html.py
@@ -0,0 +1,130 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compares the packages between 2 images by parsing the license file output."""
+
+from __future__ import print_function
+
+import re
+
+from chromite.lib import commandline
+
+
+def GetPackagesLicensesFromHtml(html_file):
+  """Get the list of packages and licenses in a ChromeOS license file.
+
+  Args:
+    html_file: which html license file to scan for packages.
+
+  Returns:
+    tuple of dictionary of packages and version numbers and set of licenses.
+
+  Raises:
+    AssertionError: if regex failed.
+  """
+
+  packages = {}
+  licenses = set()
+
+  pkg_rgx = re.compile(r'<span class="title">(.+)-(.+)</span>')
+  # Do not add <pre> in the regex or it would only show the first entry on
+  # a package that has multiple hits.
+  license_rgx1 = re.compile(r'Scanned (Source License .+):', re.IGNORECASE)
+  license_rgx2 = re.compile(r'(Custom License .+):', re.IGNORECASE)
+  license_rgx3 = re.compile(r'(Copyright Attribution .+):', re.IGNORECASE)
+  # This regex isn't as tight because it has to match these:
+  # Gentoo Package Stock License BZIP2:
+  # <a ... class="title">Gentoo Package Provided Stock License public-domain</a>
+  # <a ... class="title">Gentoo Package Stock License public-domain</a>
+  license_rgx4 = re.compile(r'(Stock License [^<:]+)', re.IGNORECASE)
+  license_rgx5 = re.compile(r'class="title">(Custom License .+)</a>',
+                            re.IGNORECASE)
+  with open(html_file, 'r') as f:
+    for line in f:
+      # Grep and turn
+      # <span class="title">ath6k-34</span>
+      # into
+      # ath6k 34
+      match = pkg_rgx.search(line)
+      if match:
+        packages[match.group(1)] = match.group(2)
+
+      match = license_rgx1.search(line)
+      if match:
+        # Turn Source license simplejson-2.5.0/LICENSE.txt
+        # into Source license simplejson/LICENSE.txt
+        # (we don't want to create diffs based on version numbers)
+        lic = re.sub(r'(.+)-([^/]+)/(.+)', r'\1/\3', match.group(1))
+        # Old files had this lowercased.
+        lic = re.sub(r'Source license', r'Source License', lic)
+        licenses.add(lic)
+
+      for rgx in (license_rgx2, license_rgx3, license_rgx4, license_rgx5):
+        match = rgx.search(line)
+        if match:
+          licenses.add(match.group(1))
+
+  return (packages, licenses)
+
+
+def ComparePkgLists(pkg_list1, pkg_list2):
+  """Compare the package list in 2 dictionaries and output the differences.
+
+  Args:
+    pkg_list1: dict from GetPackagesLicensesFromHtml.
+    pkg_list2: dict from GetPackagesLicensesFromHtml.
+
+  Returns:
+    N/A (outputs result on stdout).
+  """
+
+  for removed_package in sorted(set(pkg_list1) - set(pkg_list2)):
+    print('Package removed: %s-%s' % (
+        removed_package, pkg_list1[removed_package]))
+
+  print()
+  for added_package in sorted(set(pkg_list2) - set(pkg_list1)):
+    print('Package added: %s-%s' % (
+        added_package, pkg_list2[added_package]))
+
+  print()
+  for changed_package in sorted(set(pkg_list1) & set(pkg_list2)):
+    ver1 = pkg_list1[changed_package]
+    ver2 = pkg_list2[changed_package]
+    if ver1 != ver2:
+      print('Package updated: %s from %s to %s' % (changed_package, ver1, ver2))
+
+
+def CompareLicenseSets(set1, set2):
+  """Compare the license list in 2 sets and output the differences.
+
+  Args:
+    set1: set from GetPackagesLicensesFromHtml.
+    set2: set from GetPackagesLicensesFromHtml.
+
+  Returns:
+    N/A (outputs result on stdout).
+  """
+
+  for removed_license in sorted(set1 - set2):
+    print('License removed: %s' % (removed_license))
+
+  print()
+  for added_license in sorted(set2 - set1):
+    print('License added: %s' % (added_license))
+
+
+def main(args):
+  parser = commandline.ArgumentParser(usage=__doc__)
+  parser.add_argument('html1', metavar='license1.html', type='path',
+                      help='old html file')
+  parser.add_argument('html2', metavar='license2.html', type='path',
+                      help='new html file')
+  opts = parser.parse_args(args)
+
+  pkg_list1 = GetPackagesLicensesFromHtml(opts.html1)
+  pkg_list2 = GetPackagesLicensesFromHtml(opts.html2)
+  ComparePkgLists(pkg_list1[0], pkg_list2[0])
+  print()
+  CompareLicenseSets(pkg_list1[1], pkg_list2[1])
diff --git a/scripts/fwgdb.py b/scripts/fwgdb.py
new file mode 100644
index 0000000..aca9ace
--- /dev/null
+++ b/scripts/fwgdb.py
@@ -0,0 +1,279 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Connect to a DUT in firmware via remote GDB, install custom GDB commands."""
+
+from __future__ import print_function
+
+import errno
+import os
+import re
+import signal
+import socket
+import time
+
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+from chromite.lib import timeout_util
+
+# Need to do this before Servo import
+cros_build_lib.AssertInsideChroot()
+
+# pylint: disable=import-error
+from servo import client
+from servo import multiservo
+# pylint: enable=import-error
+
+
+_SRC_ROOT = os.path.join(constants.CHROOT_SOURCE_ROOT, 'src')
+_SRC_DC = os.path.join(_SRC_ROOT, 'platform/depthcharge')
+_SRC_VB = os.path.join(_SRC_ROOT, 'platform/vboot_reference')
+_SRC_LP = os.path.join(_SRC_ROOT, 'third_party/coreboot/payloads/libpayload')
+
+_PTRN_DEVMODE = 'Entering VbBootDeveloper()'
+_PTRN_GDB = 'Ready for GDB connection'
+_PTRN_BOARD = 'Starting(?: read-only| read/write)? depthcharge on ([a-z_]+)...'
+
+
+class TerminalFreezer(object):
+  """SIGSTOP all processes (and their parents) that have the TTY open."""
+
+  def __init__(self, tty):
+    self._tty = tty
+    self._processes = None
+    if 'cros_sdk' in osutils.ReadFile('/proc/1/cmdline'):
+      raise OSError('You must run this tool in a chroot that was entered with '
+                    '"cros_sdk --no-ns-pid" (see crbug.com/444931 for details)')
+
+  def __enter__(self):
+    lsof = cros_build_lib.RunCommand(
+        ['lsof', '-FR', self._tty],
+        capture_output=True, log_output=True, error_code_ok=True)
+    self._processes = re.findall(r'^(?:R|p)(\d+)$', lsof.output, re.MULTILINE)
+
+    # SIGSTOP parents before children
+    try:
+      for p in reversed(self._processes):
+        logging.info('Sending SIGSTOP to process %s!', p)
+        time.sleep(0.02)
+        os.kill(int(p), signal.SIGSTOP)
+    except OSError:
+      self.__exit__(None, None, None)
+      raise
+
+  def __exit__(self, _t, _v, _b):
+    # ...and wake 'em up again in reverse order
+    for p in self._processes:
+      logging.info('Sending SIGCONT to process %s!', p)
+      try:
+        os.kill(int(p), signal.SIGCONT)
+      except OSError as e:
+        logging.error('Error when trying to unfreeze process %s: %s', p, e)
+
+
+def ParsePortage(board):
+  """Parse some data from portage files. equery takes ages in comparison."""
+  with open(os.path.join('/build', board, 'packages/Packages'), 'r') as f:
+    chost = None
+    use = None
+    for line in f:
+      if line[:7] == 'CHOST: ':
+        chost = line[7:].strip()
+      if line[:5] == 'USE: ':
+        use = line[5:].strip()
+      if chost and use:
+        return (chost, use)
+
+
+def ParseArgs(argv):
+  """Parse and validate command line arguments."""
+  parser = commandline.ArgumentParser(default_log_level='warning')
+
+  parser.add_argument('-b', '--board',
+                      help='The board overlay name (auto-detect by default)')
+  parser.add_argument('-s', '--symbols',
+                      help='Root directory or complete path to symbolized ELF '
+                           '(defaults to /build/<BOARD>/firmware)')
+  parser.add_argument('-r', '--reboot', choices=['yes', 'no', 'auto'],
+                      help='Reboot the DUT before connect (default: reboot if '
+                           'the remote end is unreachable)', default='auto')
+  parser.add_argument('-e', '--execute', action='append', default=[],
+                      help='GDB command to run after connect (can be supplied '
+                           'multiple times)')
+
+  parser.add_argument('-n', '--servod-name', dest='name')
+  parser.add_argument('--servod-rcfile', default=multiservo.DEFAULT_RC_FILE)
+  parser.add_argument('--servod-server')
+  parser.add_argument('-p', '--servod-port', type=int, dest='port')
+  parser.add_argument('-t', '--tty',
+                      help='TTY file to connect to (defaults to cpu_uart_pty)')
+
+  opts = parser.parse_args(argv)
+  multiservo.get_env_options(logging, opts)
+  if opts.name:
+    rc = multiservo.parse_rc(logging, opts.servod_rcfile)
+    if opts.name not in rc:
+      parser.error('%s not in %s' % (opts.name, opts.servod_rcfile))
+    if not opts.servod_server:
+      opts.servod_server = rc[opts.name]['sn']
+    if not opts.port:
+      opts.port = rc[opts.name].get('port', client.DEFAULT_PORT)
+    if not opts.board and 'board' in rc[opts.name]:
+      opts.board = rc[opts.name]['board']
+      logging.warning('Inferring board %s from %s; make sure this is correct!',
+                      opts.board, opts.servod_rcfile)
+
+  if not opts.servod_server:
+    opts.servod_server = client.DEFAULT_HOST
+  if not opts.port:
+    opts.port = client.DEFAULT_PORT
+
+  return opts
+
+
+def FindSymbols(firmware_dir, board, use):
+  """Find the symbolized depthcharge ELF (may be supplied by -s flag)."""
+  if not firmware_dir:
+    firmware_dir = os.path.join(cros_build_lib.GetSysroot(board), 'firmware')
+  # Allow overriding the file directly just in case our detection screws up
+  if firmware_dir.endswith('.elf'):
+    return firmware_dir
+
+  if 'unified_depthcharge' in use:
+    basename = 'dev.elf'
+  else:
+    basename = 'dev.ro.elf'
+
+  path = os.path.join(firmware_dir, 'depthcharge', basename)
+  if not os.path.exists(path):
+    path = os.path.join(firmware_dir, basename)
+
+  if os.path.exists(path):
+    logging.warning('Auto-detected symbol file at %s... make sure that this '
+                    'matches the image on your DUT!', path)
+    return path
+
+  raise ValueError('Could not find %s symbol file!' % basename)
+
+
+# TODO(jwerner): Fine tune |wait| delay or maybe even make it configurable if
+# this causes problems due to load on the host. The callers where this is
+# critical should all have their own timeouts now, though, so it's questionable
+# whether the delay here is even needed at all anymore.
+def ReadAll(fd, wait=0.03):
+  """Read from |fd| until no more data has come for at least |wait| seconds."""
+  data = ''
+  try:
+    while True:
+      time.sleep(wait)
+      data += os.read(fd, 4096)
+  except OSError as e:
+    if e.errno == errno.EAGAIN:
+      logging.debug(data)
+      return data
+    raise
+
+
+def GdbChecksum(message):
+  """Calculate a remote-GDB style checksum."""
+  chksum = sum([ord(x) for x in message])
+  return ('%.2x' % chksum)[-2:]
+
+
+def TestConnection(fd):
+  """Return True iff there is a responsive GDB stub on the other end of 'fd'."""
+  cmd = 'vUnknownCommand'
+  for _ in xrange(3):
+    os.write(fd, '$%s#%s\n' % (cmd, GdbChecksum(cmd)))
+    reply = ReadAll(fd)
+    if '+$#00' in reply:
+      os.write(fd, '+')
+      logging.info('TestConnection: Could successfully connect to remote end.')
+      return True
+  logging.info('TestConnection: Remote end does not respond.')
+  return False
+
+
+def main(argv):
+  opts = ParseArgs(argv)
+  servo = client.ServoClient(host=opts.servod_server, port=opts.port)
+
+  if not opts.tty:
+    try:
+      opts.tty = servo.get('cpu_uart_pty')
+    except (client.ServoClientError, socket.error):
+      logging.error('Cannot auto-detect TTY file without servod. Use the --tty '
+                    'option.')
+      raise
+  with TerminalFreezer(opts.tty):
+    fd = os.open(opts.tty, os.O_RDWR | os.O_NONBLOCK)
+
+    data = ReadAll(fd)
+    if opts.reboot == 'auto':
+      if TestConnection(fd):
+        opts.reboot = 'no'
+      else:
+        opts.reboot = 'yes'
+
+    if opts.reboot == 'yes':
+      logging.info('Rebooting DUT...')
+      try:
+        servo.set('warm_reset', 'on')
+        time.sleep(0.1)
+        servo.set('warm_reset', 'off')
+      except (client.ServoClientError, socket.error):
+        logging.error('Cannot reboot without a Servo board. You have to boot '
+                      'into developer mode and press CTRL+G manually before '
+                      'running fwgdb.')
+        raise
+
+      # Throw away old data to avoid confusion from messages before the reboot
+      data = ''
+      msg = ('Could not reboot into developer mode! '
+             '(Confirm that you have GBB_FLAG_FORCE_DEV_SWITCH_ON (0x8) set.)')
+      with timeout_util.Timeout(10, msg):
+        while _PTRN_DEVMODE not in data:
+          data += ReadAll(fd)
+
+      # Send a CTRL+G
+      logging.info('Developer mode detected, pressing CTRL+G...')
+      os.write(fd, chr(ord('G') & 0x1f))
+
+      msg = ('Could not enter GDB mode with CTRL+G! '
+             '(Confirm that you flashed an "image.dev.bin" image to this DUT.)')
+      with timeout_util.Timeout(1, msg):
+        while _PTRN_GDB not in data:
+          data += ReadAll(fd)
+
+    if not opts.board:
+      matches = re.findall(_PTRN_BOARD, data)
+      if not matches:
+        raise ValueError('Could not auto-detect board! Please use -b option.')
+      opts.board = matches[-1]
+      logging.info('Auto-detected board as %s from DUT console output.',
+                   opts.board)
+
+    if not TestConnection(fd):
+      raise IOError('Could not connect to remote end! Confirm that your DUT is '
+                    'running in GDB mode on %s.' % opts.tty)
+
+    # Eat up leftover data or it will spill back to terminal
+    ReadAll(fd)
+    os.close(fd)
+
+    opts.execute.insert(0, 'target remote %s' % opts.tty)
+    ex_args = sum([['--ex', cmd] for cmd in opts.execute], [])
+
+    chost, use = ParsePortage(opts.board)
+    logging.info('Launching GDB...')
+    cros_build_lib.RunCommand(
+        [chost + '-gdb',
+         '--symbols', FindSymbols(opts.symbols, opts.board, use),
+         '--directory', _SRC_DC,
+         '--directory', _SRC_VB,
+         '--directory', _SRC_LP] + ex_args,
+        ignore_sigint=True, debug_level=logging.WARNING)
diff --git a/scripts/gconv_strip.py b/scripts/gconv_strip.py
new file mode 100644
index 0000000..406c807
--- /dev/null
+++ b/scripts/gconv_strip.py
@@ -0,0 +1,330 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to remove unused gconv charset modules from a build."""
+
+from __future__ import print_function
+
+import ahocorasick
+import glob
+import lddtree
+import operator
+import os
+import stat
+
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+
+
+# Path pattern to search for the gconv-modules file.
+GCONV_MODULES_PATH = 'usr/*/gconv/gconv-modules'
+
+# Sticky modules. These charsets modules are always included even if they
+# aren't used. You can specify any charset name as supported by 'iconv_open',
+# for example, 'LATIN1' or 'ISO-8859-1'.
+STICKY_MODULES = ('UTF-16', 'UTF-32', 'UNICODE')
+
+# List of function names (symbols) known to use a charset as a parameter.
+GCONV_SYMBOLS = (
+    # glibc
+    'iconv_open',
+    'iconv',
+    # glib
+    'g_convert',
+    'g_convert_with_fallback',
+    'g_iconv',
+    'g_locale_to_utf8',
+    'g_get_charset',
+)
+
+
+class GconvModules(object):
+  """Class to manipulate the gconv/gconv-modules file and referenced modules.
+
+  This class parses the contents of the gconv-modules file installed by glibc
+  which provides the definition of the charsets supported by iconv_open(3). It
+  allows to load the current gconv-modules file and rewrite it to include only
+  a subset of the supported modules, removing the other modules.
+
+  Each charset is involved on some transformation between that charset and an
+  internal representation. This transformation is defined on a .so file loaded
+  dynamically with dlopen(3) when the charset defined in this file is requested
+  to iconv_open(3).
+
+  See the comments on gconv-modules file for syntax details.
+  """
+
+  def __init__(self, gconv_modules_file):
+    """Initialize the class.
+
+    Args:
+      gconv_modules_file: Path to gconv/gconv-modules file.
+    """
+    self._filename = gconv_modules_file
+
+    # An alias map of charsets. The key (fromcharset) is the alias name and
+    # the value (tocharset) is the real charset name. We also support a value
+    # that is an alias for another charset.
+    self._alias = {}
+
+    # The modules dict goes from charset to module names (the filenames without
+    # the .so extension). Since several transformations involving the same
+    # charset could be defined in different files, the values of this dict are
+    # a set of module names.
+    self._modules = {}
+
+  def Load(self):
+    """Load the charsets from gconv-modules."""
+    for line in open(self._filename):
+      line = line.split('#', 1)[0].strip()
+      if not line:  # Skip blank lines and comment-only lines.
+        continue
+
+      lst = line.split()
+      if lst[0] == 'module':
+        _, fromset, toset, filename = lst[:4]
+        for charset in (fromset, toset):
+          charset = charset.rstrip('/')
+          mods = self._modules.get(charset, set())
+          mods.add(filename)
+          self._modules[charset] = mods
+      elif lst[0] == 'alias':
+        _, fromset, toset = lst
+        fromset = fromset.rstrip('/')
+        toset = toset.rstrip('/')
+        # Warn if the same charset is defined as two different aliases.
+        if self._alias.get(fromset, toset) != toset:
+          logging.error('charset "%s" already defined as "%s".', fromset,
+                        self._alias[fromset])
+        self._alias[fromset] = toset
+      else:
+        cros_build_lib.Die('Unknown line: %s', line)
+
+    logging.debug('Found %d modules and %d alias in %s', len(self._modules),
+                  len(self._alias), self._filename)
+    charsets = sorted(self._alias.keys() + self._modules.keys())
+    # Remove the 'INTERNAL' charset from the list, since it is not a charset
+    # but an internal representation used to convert to and from other charsets.
+    if 'INTERNAL' in charsets:
+      charsets.remove('INTERNAL')
+    return charsets
+
+  def Rewrite(self, used_charsets, dry_run=False):
+    """Rewrite gconv-modules file with only the used charsets.
+
+    Args:
+      used_charsets: A list of used charsets. This should be a subset of the
+                     list returned by Load().
+      dry_run: Whether this function should not change any file.
+    """
+
+    # Compute the used modules.
+    used_modules = set()
+    for charset in used_charsets:
+      while charset in self._alias:
+        charset = self._alias[charset]
+      used_modules.update(self._modules[charset])
+    unused_modules = reduce(set.union, self._modules.values()) - used_modules
+
+    modules_dir = os.path.dirname(self._filename)
+
+    all_modules = set.union(used_modules, unused_modules)
+    # The list of charsets that depend on a given library. For example,
+    # libdeps['libCNS.so'] is the set of all the modules that require that
+    # library. These libraries live in the same directory as the modules.
+    libdeps = {}
+    for module in all_modules:
+      deps = lddtree.ParseELF(os.path.join(modules_dir, '%s.so' % module),
+                              modules_dir, [])
+      if not 'needed' in deps:
+        continue
+      for lib in deps['needed']:
+        # Ignore the libs without a path defined (outside the modules_dir).
+        if deps['libs'][lib]['path']:
+          libdeps[lib] = libdeps.get(lib, set()).union([module])
+
+    used_libdeps = set(lib for lib, deps in libdeps.iteritems()
+                       if deps.intersection(used_modules))
+    unused_libdeps = set(libdeps).difference(used_libdeps)
+
+    logging.debug('Used modules: %s', ', '.join(sorted(used_modules)))
+    logging.debug('Used dependency libs: %s', ', '.join(sorted(used_libdeps)))
+
+    unused_size = 0
+    for module in sorted(unused_modules):
+      module_path = os.path.join(modules_dir, '%s.so' % module)
+      unused_size += os.lstat(module_path).st_size
+      logging.debug('rm %s', module_path)
+      if not dry_run:
+        os.unlink(module_path)
+
+    unused_libdeps_size = 0
+    for lib in sorted(unused_libdeps):
+      lib_path = os.path.join(modules_dir, lib)
+      unused_libdeps_size += os.lstat(lib_path).st_size
+      logging.debug('rm %s', lib_path)
+      if not dry_run:
+        os.unlink(lib_path)
+
+    logging.info('Done. Using %d gconv modules. Removed %d unused modules'
+                 ' (%.1f KiB) and %d unused dependencies (%.1f KiB)',
+                 len(used_modules), len(unused_modules), unused_size / 1024.,
+                 len(unused_libdeps), unused_libdeps_size / 1024.)
+
+    # Recompute the gconv-modules file with only the included gconv modules.
+    result = []
+    for line in open(self._filename):
+      lst = line.split('#', 1)[0].strip().split()
+
+      if not lst:
+        result.append(line)  # Keep comments and copyright headers.
+      elif lst[0] == 'module':
+        _, _, _, filename = lst[:4]
+        if filename in used_modules:
+          result.append(line)  # Used module
+      elif lst[0] == 'alias':
+        _, charset, _ = lst
+        charset = charset.rstrip('/')
+        while charset in self._alias:
+          charset = self._alias[charset]
+        if used_modules.intersection(self._modules[charset]):
+          result.append(line)  # Alias to an used module
+      else:
+        cros_build_lib.Die('Unknown line: %s', line)
+
+    if not dry_run:
+      osutils.WriteFile(self._filename, ''.join(result))
+
+
+def MultipleStringMatch(patterns, corpus):
+  """Search a list of strings in a corpus string.
+
+  Args:
+    patterns: A list of strings.
+    corpus: The text where to search for the strings.
+
+  Result:
+    A list of Booleans stating whether each pattern string was found in the
+    corpus or not.
+  """
+  tree = ahocorasick.KeywordTree()
+  for word in patterns:
+    tree.add(word)
+  tree.make()
+
+  result = [False] * len(patterns)
+  for i, j in tree.findall(corpus):
+    match = corpus[i:j]
+    result[patterns.index(match)] = True
+
+  return result
+
+
+def GconvStrip(opts):
+  """Process gconv-modules and remove unused modules.
+
+  Args:
+    opts: The command-line args passed to the script.
+
+  Returns:
+    The exit code number indicating whether the process succeeded.
+  """
+  root_st = os.lstat(opts.root)
+  if not stat.S_ISDIR(root_st.st_mode):
+    cros_build_lib.Die('root (%s) must be a directory.' % opts.root)
+
+  # Detect the possible locations of the gconv-modules file.
+  gconv_modules_files = glob.glob(os.path.join(opts.root, GCONV_MODULES_PATH))
+
+  if not gconv_modules_files:
+    logging.warning('gconv-modules file not found.')
+    return 1
+
+  # Only one gconv-modules files should be present, either on /usr/lib or
+  # /usr/lib64, but not both.
+  if len(gconv_modules_files) > 1:
+    cros_build_lib.Die('Found several gconv-modules files.')
+
+  gconv_modules_file = gconv_modules_files[0]
+  logging.info('Searching for unused gconv files defined in %s',
+               gconv_modules_file)
+
+  gmods = GconvModules(gconv_modules_file)
+  charsets = gmods.Load()
+
+  # Use scanelf to search for all the binary files on the rootfs that require
+  # or define the symbol iconv_open. We also include the binaries that define
+  # it since there could be internal calls to it from other functions.
+  files = set()
+  for symbol in GCONV_SYMBOLS:
+    cmd = ['scanelf', '--mount', '--quiet', '--recursive', '--format', '#s%F',
+           '--symbol', symbol, opts.root]
+    result = cros_build_lib.RunCommand(cmd, redirect_stdout=True,
+                                       print_cmd=False)
+    symbol_files = result.output.splitlines()
+    logging.debug('Symbol %s found on %d files.', symbol, len(symbol_files))
+    files.update(symbol_files)
+
+  # The charsets are represented as nul-terminated strings in the binary files,
+  # so we append the '\0' to each string. This prevents some false positives
+  # when the name of the charset is a substring of some other string. It doesn't
+  # prevent false positives when the charset name is the suffix of another
+  # string, for example a binary with the string "DON'T DO IT\0" will match the
+  # 'IT' charset. Empirical test on ChromeOS images suggests that only 4
+# charsets could fall in this category.
+  strings = [s + '\0' for s in charsets]
+  logging.info('Will search for %d strings in %d files', len(strings),
+               len(files))
+
+  # Charsets listed in STICKY_MODULES are initialized as used. Note that those
+  # strings should be listed in the gconv-modules file.
+  unknown_sticky_modules = set(STICKY_MODULES) - set(charsets)
+  if unknown_sticky_modules:
+    logging.warning(
+        'The following charsets were explicitly requested in STICKY_MODULES '
+        'even though they don\'t exist: %s',
+        ', '.join(unknown_sticky_modules))
+  global_used = [charset in STICKY_MODULES for charset in charsets]
+
+  for filename in files:
+    used_filename = MultipleStringMatch(strings,
+                                        osutils.ReadFile(filename, mode='rb'))
+
+    global_used = map(operator.or_, global_used, used_filename)
+    # Check the debug flag to avoid running a useless loop.
+    if opts.debug and any(used_filename):
+      logging.debug('File %s:', filename)
+      for i in range(len(used_filename)):
+        if used_filename[i]:
+          logging.debug(' - %s', strings[i])
+
+  used_charsets = [cs for cs, used in zip(charsets, global_used) if used]
+  gmods.Rewrite(used_charsets, opts.dry_run)
+  return 0
+
+
+def ParseArgs(argv):
+  """Return parsed commandline arguments."""
+
+  parser = commandline.ArgumentParser()
+  parser.add_argument(
+      '--dry-run', action='store_true', default=False,
+      help='process but don\'t modify any file.')
+  parser.add_argument(
+      'root', type='path',
+      help='path to the directory where the rootfs is mounted.')
+
+  opts = parser.parse_args(argv)
+  opts.Freeze()
+  return opts
+
+
+def main(argv):
+  """Main function to start the script."""
+  opts = ParseArgs(argv)
+  logging.debug('Options are %s', opts)
+
+  return GconvStrip(opts)
diff --git a/scripts/generate_container_spec.py b/scripts/generate_container_spec.py
new file mode 100644
index 0000000..335c61e
--- /dev/null
+++ b/scripts/generate_container_spec.py
@@ -0,0 +1,38 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script generates brick runtime environment metadata from appc manifests.
+
+On the device image we encode the outlines of the brick runtime environment in
+SandboxSpecs, a protocol buffer understood by somad.  Brick developers specify
+the information that goes into a SandboxSpec in the form of an appc pod
+manifest, which is a JSON blob adhering to an open standard.  This script maps
+from pod manifests to SandboxSpecs.
+"""
+
+from __future__ import print_function
+
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import sandbox_spec_generator
+
+
+def main(argv):
+  parser = commandline.ArgumentParser(description=__doc__)
+  parser.add_argument('--sysroot', type='path',
+                      help='The sysroot to use for brick metadata validation.')
+  parser.add_argument('appc_pod_manifest_path', type='path',
+                      help='path to appc pod manifest')
+  parser.add_argument('sandbox_spec_path', type='path',
+                      help='path to file to write resulting SandboxSpec to. '
+                           'Must not exist.')
+  options = parser.parse_args(argv)
+  options.Freeze()
+
+  cros_build_lib.AssertInsideChroot()
+
+  generator = sandbox_spec_generator.SandboxSpecGenerator(options.sysroot)
+  generator.WriteSandboxSpec(options.appc_pod_manifest_path,
+                             options.sandbox_spec_path)
+  return 0
diff --git a/scripts/generate_delta_sysroot.py b/scripts/generate_delta_sysroot.py
new file mode 100644
index 0000000..4243969
--- /dev/null
+++ b/scripts/generate_delta_sysroot.py
@@ -0,0 +1,155 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to generate a sysroot tarball.
+
+Script that generates a tarball containing changes that are needed to create a
+complete sysroot from extracted prebuilt packages.
+"""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import osutils
+from chromite.lib import portage_util
+from chromite.lib import sysroot_lib
+
+_CREATE_BATCH_CMD = ('rsync',)
+_CREATE_BATCH_EXCLUDE = ('--exclude=/tmp/', '--exclude=/var/cache/',
+                         '--exclude=/usr/local/autotest/packages**',
+                         '--exclude=/packages/', '--exclude=**.pyc',
+                         '--exclude=**.pyo')
+# rsync is used in archive mode with no --times.
+# --checksum is used to ensure 100% accuracy.
+# --delete is used to account for files that may be deleted during emerge.
+# Short version: rsync -rplgoDc --delete
+_CREATE_BATCH_ARGS = ('--recursive', '--links', '--perms', '--group',
+                      '--owner', '--devices', '--specials', '--checksum',
+                      '--delete')
+
+# We want to ensure that we use only binary packages. However,
+# build_packages will try to rebuild any unbuilt packages. Ignore those through
+# --norebuild.
+_BUILD_PKGS_CMD = (os.path.join(constants.CROSUTILS_DIR, 'build_packages'),
+                   '--skip_chroot_upgrade', '--norebuild', '--usepkgonly')
+
+
+def CreateBatchFile(build_dir, out_dir, batch_file):
+  """Creates a batch file using rsync between build_dir and out_dir.
+
+  This batch file can be applied to any directory identical to out_dir, to make
+  it identical to build_dir.
+
+  Args:
+    build_dir: Directory to rsync from.
+    out_dir: Directory to rsync to.
+    batch_file: Batch file to be created.
+  """
+  cmd = list(_CREATE_BATCH_CMD)
+  cmd.extend(list(_CREATE_BATCH_EXCLUDE))
+  cmd.extend(list(_CREATE_BATCH_ARGS))
+  cmd.extend(['--only-write-batch=' + batch_file, build_dir + '/', out_dir])
+  cros_build_lib.SudoRunCommand(cmd)
+
+
+def _ParseCommandLine(argv):
+  """Parse args, and run environment-independent checks."""
+  parser = commandline.ArgumentParser(description=__doc__)
+  parser.add_argument('--board', required=True,
+                      help='The board to generate the sysroot for.')
+  parser.add_argument('--out-dir', type='path', required=True,
+                      help='Directory to place the generated tarball.')
+  parser.add_argument('--out-batch', default=constants.DELTA_SYSROOT_BATCH,
+                      help=('The name to give to the batch file. Defaults to '
+                            '%r.' % constants.DELTA_SYSROOT_BATCH))
+  parser.add_argument('--out-file', default=constants.DELTA_SYSROOT_TAR,
+                      help=('The name to give to the tarball. Defaults to %r.'
+                            % constants.DELTA_SYSROOT_TAR))
+  parser.add_argument('--skip-tests', action='store_false', default=True,
+                      dest='build_tests',
+                      help='If we should not build the autotests packages.')
+  options = parser.parse_args(argv)
+
+  return options
+
+
+def FinishParsing(options):
+  """Run environment dependent checks on parsed args."""
+  target = os.path.join(options.out_dir, options.out_file)
+  if os.path.exists(target):
+    cros_build_lib.Die('Output file %r already exists.' % target)
+
+  if not os.path.isdir(options.out_dir):
+    cros_build_lib.Die(
+        'Non-existent directory %r specified for --out-dir' % options.out_dir)
+
+
+def GenerateSysroot(sysroot_path, board, build_tests, unpack_only=False):
+  """Create a sysroot using only binary packages from local binhost.
+
+  Args:
+    sysroot_path: Where we want to place the sysroot.
+    board: Board we want to build for.
+    build_tests: If we should include autotest packages.
+    unpack_only: If we only want to unpack the binary packages, and not build
+                 them.
+  """
+  osutils.SafeMakedirs(sysroot_path)
+  if not unpack_only:
+    # Generate the sysroot configuration.
+    sysroot = sysroot_lib.Sysroot(sysroot_path)
+    sysroot.WriteConfig(sysroot.GenerateBoardConfiguration(board))
+    cros_build_lib.RunCommand(
+        [os.path.join(constants.CROSUTILS_DIR, 'install_toolchain'),
+         '--noconfigure', '--sysroot', sysroot_path])
+  cmd = list(_BUILD_PKGS_CMD)
+  cmd.extend(['--board_root', sysroot_path, '--board', board])
+  if unpack_only:
+    cmd.append('--unpackonly')
+  if not build_tests:
+    cmd.append('--nowithautotest')
+  env = {'USE': os.environ.get('USE', ''),
+         'PORTAGE_BINHOST': 'file://%s' % portage_util.GetBinaryPackageDir(
+             sysroot=cros_build_lib.GetSysroot(board))}
+  cros_build_lib.RunCommand(cmd, extra_env=env)
+
+
+def main(argv):
+  """Generate the delta sysroot
+
+  Create a tarball containing a sysroot that can be patched over extracted
+  prebuilt package contents to create a complete sysroot.
+
+  1. Unpack all packages for a board into an unpack_only sysroot directory.
+  2. Emerge all packages for a board into a build sysroot directory.
+  3. Create a batch file using:
+    rsync -rplgoDc --delete --write-batch=<batch> <build_sys> <unpackonly_sys>
+  4. Put the batch file inside a tarball.
+  """
+  options = _ParseCommandLine(argv)
+  FinishParsing(options)
+
+  cros_build_lib.AssertInsideChroot()
+
+  with osutils.TempDir(set_global=False, sudo_rm=True) as tmp_dir:
+    build_sysroot = os.path.join(tmp_dir, 'build-sys')
+    unpackonly_sysroot = os.path.join(tmp_dir, 'tmp-sys')
+    batch_filename = options.out_batch
+
+    GenerateSysroot(unpackonly_sysroot, options.board, options.build_tests,
+                    unpack_only=True)
+    GenerateSysroot(build_sysroot, options.board, options.build_tests,
+                    unpack_only=False)
+
+    # Finally create batch file.
+    CreateBatchFile(build_sysroot, unpackonly_sysroot,
+                    os.path.join(tmp_dir, batch_filename))
+
+    cros_build_lib.CreateTarball(
+        os.path.join(options.out_dir, options.out_file), tmp_dir, sudo=True,
+        inputs=[batch_filename])
diff --git a/scripts/generate_delta_sysroot_unittest b/scripts/generate_delta_sysroot_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/generate_delta_sysroot_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/generate_delta_sysroot_unittest.py b/scripts/generate_delta_sysroot_unittest.py
new file mode 100644
index 0000000..f723e0a
--- /dev/null
+++ b/scripts/generate_delta_sysroot_unittest.py
@@ -0,0 +1,68 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for generate_delta_sysroot."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.scripts import generate_delta_sysroot as gds
+
+
+# pylint: disable=W0212
+def _Parse(argv):
+  return gds._ParseCommandLine(argv)
+
+
+class InterfaceTest(cros_test_lib.OutputTestCase,
+                    cros_test_lib.TempDirTestCase):
+  """Test the commandline interface of the script"""
+
+  def testNoBoard(self):
+    """Test no board specified."""
+    argv = ['--out-dir', '/path/to/nowhere']
+    self.assertParseError(argv)
+
+  def testNoOutDir(self):
+    """Test no out dir specified."""
+    argv = ['--board', 'link']
+    self.assertParseError(argv)
+
+  def testCorrectArgv(self):
+    """Test successful parsing"""
+    argv = ['--board', 'link', '--out-dir', self.tempdir]
+    options = _Parse(argv)
+    gds.FinishParsing(options)
+
+  def testTestsSet(self):
+    """Test successful parsing"""
+    argv = ['--board', 'link', '--out-dir', self.tempdir]
+    options = _Parse(argv)
+    self.assertTrue(options.build_tests)
+
+  def testNoTestsSet(self):
+    """Test successful parsing"""
+    argv = ['--board', 'link', '--out-dir', self.tempdir, '--skip-tests']
+    options = _Parse(argv)
+    self.assertFalse(options.build_tests)
+
+  def assertParseError(self, argv):
+    """Helper to assert parsing error, given argv."""
+    with self.OutputCapturer():
+      self.assertRaises2(SystemExit, _Parse, argv)
+
+
+class TestCreateBatchFile(cros_test_lib.TempDirTestCase):
+  """Test the batch file creation."""
+
+  def testSourceDirDoesNotExist(self):
+    """Test error is raised if there is no source directory."""
+    no_source = os.path.join(self.tempdir, 'foo/bar/cow')
+
+    self.assertRaises2(
+        cros_build_lib.RunCommandError, gds.CreateBatchFile,
+        no_source, self.tempdir, os.path.join(self.tempdir, 'batch'))
diff --git a/scripts/gerrit.py b/scripts/gerrit.py
new file mode 100644
index 0000000..deefe4d
--- /dev/null
+++ b/scripts/gerrit.py
@@ -0,0 +1,527 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A command line interface to Gerrit-on-borg instances.
+
+Internal Note:
+To expose a function directly to the command line interface, name your function
+with the prefix "UserAct".
+"""
+
+from __future__ import print_function
+
+import inspect
+import pprint
+import re
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gerrit
+from chromite.lib import git
+from chromite.lib import gob_util
+from chromite.lib import terminal
+
+
+site_config = config_lib.GetConfig()
+
+
+COLOR = None
+
+# Map the internal names to the ones we normally show on the web ui.
+GERRIT_APPROVAL_MAP = {
+    'COMR': ['CQ', 'Commit Queue   ',],
+    'CRVW': ['CR', 'Code Review    ',],
+    'SUBM': ['S ', 'Submitted      ',],
+    'TRY':  ['T ', 'Trybot Ready   ',],
+    'VRIF': ['V ', 'Verified       ',],
+}
+
+# Order is important -- matches the web ui.  This also controls the short
+# entries that we summarize in non-verbose mode.
+GERRIT_SUMMARY_CATS = ('CR', 'CQ', 'V',)
+
+
+def red(s):
+  return COLOR.Color(terminal.Color.RED, s)
+
+
+def green(s):
+  return COLOR.Color(terminal.Color.GREEN, s)
+
+
+def blue(s):
+  return COLOR.Color(terminal.Color.BLUE, s)
+
+
+def limits(cls):
+  """Given a dict of fields, calculate the longest string lengths
+
+  This allows you to easily format the output of many results so that the
+  various cols all line up correctly.
+  """
+  lims = {}
+  for cl in cls:
+    for k in cl.keys():
+      # Use %s rather than str() to avoid codec issues.
+      # We also do this so we can format integers.
+      lims[k] = max(lims.get(k, 0), len('%s' % cl[k]))
+  return lims
+
+
+# TODO: This func really needs to be merged into the core gerrit logic.
+def GetGerrit(opts, cl=None):
+  """Auto pick the right gerrit instance based on the |cl|
+
+  Args:
+    opts: The general options object.
+    cl: A CL taking one of the forms: 1234 *1234 chromium:1234
+
+  Returns:
+    A tuple of a gerrit object and a sanitized CL #.
+  """
+  gob = opts.gob
+  if cl is not None:
+    if cl.startswith('*'):
+      gob = site_config.params.INTERNAL_GOB_INSTANCE
+      cl = cl[1:]
+    elif ':' in cl:
+      gob, cl = cl.split(':', 1)
+
+  if not gob in opts.gerrit:
+    opts.gerrit[gob] = gerrit.GetGerritHelper(gob=gob, print_cmd=opts.debug)
+
+  return (opts.gerrit[gob], cl)
+
+
+def GetApprovalSummary(_opts, cls):
+  """Return a dict of the most important approvals"""
+  approvs = dict([(x, '') for x in GERRIT_SUMMARY_CATS])
+  if 'approvals' in cls['currentPatchSet']:
+    for approver in cls['currentPatchSet']['approvals']:
+      cats = GERRIT_APPROVAL_MAP.get(approver['type'])
+      if not cats:
+        logging.warning('unknown gerrit approval type: %s', approver['type'])
+        continue
+      cat = cats[0].strip()
+      val = int(approver['value'])
+      if not cat in approvs:
+        # Ignore the extended categories in the summary view.
+        continue
+      elif approvs[cat] == '':
+        approvs[cat] = val
+      elif val < 0:
+        approvs[cat] = min(approvs[cat], val)
+      else:
+        approvs[cat] = max(approvs[cat], val)
+  return approvs
+
+
+def PrintCl(opts, cls, lims, show_approvals=True):
+  """Pretty print a single result"""
+  if opts.raw:
+    # Special case internal Chrome GoB as that is what most devs use.
+    # They can always redirect the list elsewhere via the -g option.
+    if opts.gob == site_config.params.INTERNAL_GOB_INSTANCE:
+      print(site_config.params.INTERNAL_CHANGE_PREFIX, end='')
+    print(cls['number'])
+    return
+
+  if not lims:
+    lims = {'url': 0, 'project': 0}
+
+  status = ''
+  if show_approvals and not opts.verbose:
+    approvs = GetApprovalSummary(opts, cls)
+    for cat in GERRIT_SUMMARY_CATS:
+      if approvs[cat] == '':
+        functor = lambda x: x
+      elif approvs[cat] < 0:
+        functor = red
+      else:
+        functor = green
+      status += functor('%s:%2s ' % (cat, approvs[cat]))
+
+  print('%s %s%-*s %s' % (blue('%-*s' % (lims['url'], cls['url'])), status,
+                          lims['project'], cls['project'], cls['subject']))
+
+  if show_approvals and opts.verbose:
+    for approver in cls['currentPatchSet'].get('approvals', []):
+      functor = red if int(approver['value']) < 0 else green
+      n = functor('%2s' % approver['value'])
+      t = GERRIT_APPROVAL_MAP.get(approver['type'], [approver['type'],
+                                                     approver['type']])[1]
+      print('      %s %s %s' % (n, t, approver['by']['email']))
+
+
+def _MyUserInfo():
+  email = git.GetProjectUserEmail(constants.CHROMITE_DIR)
+  [username, _, domain] = email.partition('@')
+  if domain in ('google.com', 'chromium.org'):
+    emails = ['%s@%s' % (username, domain)
+              for domain in ('google.com', 'chromium.org')]
+  else:
+    emails = [email]
+  reviewers = ['reviewer:%s' % x for x in emails]
+  owners = ['owner:%s' % x for x in emails]
+  return emails, reviewers, owners
+
+
+def _Query(opts, query, raw=True):
+  """Queries Gerrit with a query string built from the commandline options"""
+  if opts.branch is not None:
+    query += ' branch:%s' % opts.branch
+  if opts.project is not None:
+    query += ' project: %s' % opts.project
+  if opts.topic is not None:
+    query += ' topic: %s' % opts.topic
+
+  helper, _ = GetGerrit(opts)
+  return helper.Query(query, raw=raw, bypass_cache=False)
+
+
+def FilteredQuery(opts, query):
+  """Query gerrit and filter/clean up the results"""
+  ret = []
+
+  for cl in _Query(opts, query, raw=True):
+    # Gerrit likes to return a stats record too.
+    if not 'project' in cl:
+      continue
+
+    # Strip off common leading names since the result is still
+    # unique over the whole tree.
+    if not opts.verbose:
+      for pfx in ('chromeos', 'chromiumos', 'overlays', 'platform',
+                  'third_party'):
+        if cl['project'].startswith('%s/' % pfx):
+          cl['project'] = cl['project'][len(pfx) + 1:]
+
+    ret.append(cl)
+
+  if opts.sort == 'number':
+    key = lambda x: int(x[opts.sort])
+  else:
+    key = lambda x: x[opts.sort]
+  return sorted(ret, key=key)
+
+
+def IsApprover(cl, users):
+  """See if the approvers in |cl| is listed in |users|"""
+  # See if we are listed in the approvals list.  We have to parse
+  # this by hand as the gerrit query system doesn't support it :(
+  # http://code.google.com/p/gerrit/issues/detail?id=1235
+  if 'approvals' not in cl['currentPatchSet']:
+    return False
+
+  if isinstance(users, basestring):
+    users = (users,)
+
+  for approver in cl['currentPatchSet']['approvals']:
+    if (approver['by']['email'] in users and
+        approver['type'] == 'CRVW' and
+        int(approver['value']) != 0):
+      return True
+
+  return False
+
+
+def UserActTodo(opts):
+  """List CLs needing your review"""
+  emails, reviewers, owners = _MyUserInfo()
+  cls = FilteredQuery(opts, ('( %s ) status:open NOT ( %s )' %
+                             (' OR '.join(reviewers), ' OR '.join(owners))))
+  cls = [x for x in cls if not IsApprover(x, emails)]
+  lims = limits(cls)
+  for cl in cls:
+    PrintCl(opts, cl, lims)
+
+
+def UserActSearch(opts, query):
+  """List CLs matching the Gerrit <search query>"""
+  cls = FilteredQuery(opts, query)
+  lims = limits(cls)
+  for cl in cls:
+    PrintCl(opts, cl, lims)
+
+
+def UserActMine(opts):
+  """List your CLs with review statuses"""
+  _, _, owners = _MyUserInfo()
+  if opts.draft:
+    rule = 'is:draft'
+  else:
+    rule = 'status:new'
+  UserActSearch(opts, '( %s ) %s' % (' OR '.join(owners), rule))
+
+
+def _BreadthFirstSearch(to_visit, children, visited_key=lambda x: x):
+  """Runs breadth first search starting from the nodes in |to_visit|
+
+  Args:
+    to_visit: the starting nodes
+    children: a function which takes a node and returns the nodes adjacent to it
+    visited_key: a function for deduplicating node visits. Defaults to the
+      identity function (lambda x: x)
+
+  Returns:
+    A list of nodes which are reachable from any node in |to_visit| by calling
+    |children| any number of times.
+  """
+  to_visit = list(to_visit)
+  seen = set(map(visited_key, to_visit))
+  for node in to_visit:
+    for child in children(node):
+      key = visited_key(child)
+      if key not in seen:
+        seen.add(key)
+        to_visit.append(child)
+  return to_visit
+
+
+def UserActDeps(opts, query):
+  """List CLs matching a query, and all transitive dependencies of those CLs"""
+  cls = _Query(opts, query, raw=False)
+
+  @cros_build_lib.Memoize
+  def _QueryChange(cl):
+    return _Query(opts, cl, raw=False)
+
+  def _Children(cl):
+    """Returns the Gerrit and CQ-Depends dependencies of a patch"""
+    cq_deps = cl.PaladinDependencies(None)
+    direct_deps = cl.GerritDependencies() + cq_deps
+    # We need to query the change to guarantee that we have a .gerrit_number
+    for dep in direct_deps:
+      # TODO(phobbs) this should maybe catch network errors.
+      change = _QueryChange(dep.ToGerritQueryText())[-1]
+      if change.status == 'NEW':
+        yield change
+
+  transitives = _BreadthFirstSearch(
+      cls, _Children,
+      visited_key=lambda cl: cl.gerrit_number)
+
+  transitives_raw = [cl.patch_dict for cl in transitives]
+  lims = limits(transitives_raw)
+  for cl in transitives_raw:
+    PrintCl(opts, cl, lims)
+
+
+def UserActInspect(opts, *args):
+  """Inspect CL number <n> [n ...]"""
+  for arg in args:
+    cl = FilteredQuery(opts, arg)
+    if cl:
+      PrintCl(opts, cl[0], None)
+    else:
+      print('no results found for CL %s' % arg)
+
+
+def UserActReview(opts, *args):
+  """Mark CL <n> [n ...] with code review status <-2,-1,0,1,2>"""
+  num = args[-1]
+  for arg in args[:-1]:
+    helper, cl = GetGerrit(opts, arg)
+    helper.SetReview(cl, labels={'Code-Review': num}, dryrun=opts.dryrun)
+UserActReview.arg_min = 2
+
+
+def UserActVerify(opts, *args):
+  """Mark CL <n> [n ...] with verify status <-1,0,1>"""
+  num = args[-1]
+  for arg in args[:-1]:
+    helper, cl = GetGerrit(opts, arg)
+    helper.SetReview(cl, labels={'Verified': num}, dryrun=opts.dryrun)
+UserActVerify.arg_min = 2
+
+
+def UserActReady(opts, *args):
+  """Mark CL <n> [n ...] with ready status <0,1,2>"""
+  num = args[-1]
+  for arg in args[:-1]:
+    helper, cl = GetGerrit(opts, arg)
+    helper.SetReview(cl, labels={'Commit-Queue': num}, dryrun=opts.dryrun)
+UserActReady.arg_min = 2
+
+
+def UserActTrybotready(opts, *args):
+  """Mark CL <n> [n ...] with trybot-ready status <0,1>"""
+  num = args[-1]
+  for arg in args[:-1]:
+    helper, cl = GetGerrit(opts, arg)
+    helper.SetReview(cl, labels={'Trybot-Ready': num}, dryrun=opts.dryrun)
+UserActTrybotready.arg_min = 2
+
+
+def UserActSubmit(opts, *args):
+  """Submit CL <n> [n ...]"""
+  for arg in args:
+    helper, cl = GetGerrit(opts, arg)
+    helper.SubmitChange(cl, dryrun=opts.dryrun)
+
+
+def UserActAbandon(opts, *args):
+  """Abandon CL <n> [n ...]"""
+  for arg in args:
+    helper, cl = GetGerrit(opts, arg)
+    helper.AbandonChange(cl, dryrun=opts.dryrun)
+
+
+def UserActRestore(opts, *args):
+  """Restore CL <n> [n ...] that was abandoned"""
+  for arg in args:
+    helper, cl = GetGerrit(opts, arg)
+    helper.RestoreChange(cl, dryrun=opts.dryrun)
+
+
+def UserActReviewers(opts, cl, *args):
+  """Add/remove reviewers' emails for CL <n> (prepend with '~' to remove)"""
+  emails = args
+  # Allow for optional leading '~'.
+  email_validator = re.compile(r'^[~]?%s$' % constants.EMAIL_REGEX)
+  add_list, remove_list, invalid_list = [], [], []
+
+  for x in emails:
+    if not email_validator.match(x):
+      invalid_list.append(x)
+    elif x[0] == '~':
+      remove_list.append(x[1:])
+    else:
+      add_list.append(x)
+
+  if invalid_list:
+    cros_build_lib.Die(
+        'Invalid email address(es): %s' % ', '.join(invalid_list))
+
+  if add_list or remove_list:
+    helper, cl = GetGerrit(opts, cl)
+    helper.SetReviewers(cl, add=add_list, remove=remove_list,
+                        dryrun=opts.dryrun)
+
+
+def UserActMessage(opts, cl, message):
+  """Add a message to CL <n>"""
+  helper, cl = GetGerrit(opts, cl)
+  helper.SetReview(cl, msg=message, dryrun=opts.dryrun)
+
+
+def UserActTopic(opts, topic, *args):
+  """Set |topic| for CL number <n> [n ...]"""
+  for arg in args:
+    helper, arg = GetGerrit(opts, arg)
+    helper.SetTopic(arg, topic, dryrun=opts.dryrun)
+
+
+def UserActDeletedraft(opts, *args):
+  """Delete draft patch set <n> [n ...]"""
+  for arg in args:
+    helper, cl = GetGerrit(opts, arg)
+    helper.DeleteDraft(cl, dryrun=opts.dryrun)
+
+
+def UserActAccount(opts):
+  """Get user account information."""
+  helper, _ = GetGerrit(opts)
+  pprint.PrettyPrinter().pprint(helper.GetAccount())
+
+
+def main(argv):
+  # Locate actions that are exposed to the user.  All functions that start
+  # with "UserAct" are fair game.
+  act_pfx = 'UserAct'
+  actions = [x for x in globals() if x.startswith(act_pfx)]
+
+  usage = """%(prog)s [options] <action> [action args]
+
+There is no support for doing line-by-line code review via the command line.
+This helps you manage various bits and CL status.
+
+For general Gerrit documentation, see:
+  https://gerrit-review.googlesource.com/Documentation/
+The Searching Changes page covers the search query syntax:
+  https://gerrit-review.googlesource.com/Documentation/user-search.html
+
+Example:
+  $ gerrit todo             # List all the CLs that await your review.
+  $ gerrit mine             # List all of your open CLs.
+  $ gerrit inspect 28123    # Inspect CL 28123 on the public gerrit.
+  $ gerrit inspect *28123   # Inspect CL 28123 on the internal gerrit.
+  $ gerrit verify 28123 1   # Mark CL 28123 as verified (+1).
+Scripting:
+  $ gerrit ready `gerrit --raw mine` 1      # Mark *ALL* of your public CLs \
+ready.
+  $ gerrit ready `gerrit --raw -i mine` 1   # Mark *ALL* of your internal CLs \
+ready.
+
+Actions:"""
+  indent = max([len(x) - len(act_pfx) for x in actions])
+  for a in sorted(actions):
+    cmd = a[len(act_pfx):]
+    # Sanity check for devs adding new commands.  Should be quick.
+    if cmd != cmd.lower().capitalize():
+      raise RuntimeError('callback "%s" is misnamed; should be "%s"' %
+                         (cmd, cmd.lower().capitalize()))
+    usage += '\n  %-*s: %s' % (indent, cmd.lower(), globals()[a].__doc__)
+
+  parser = commandline.ArgumentParser(usage=usage)
+  parser.add_argument('-i', '--internal', dest='gob', action='store_const',
+                      default=site_config.params.EXTERNAL_GOB_INSTANCE,
+                      const=site_config.params.INTERNAL_GOB_INSTANCE,
+                      help='Query internal Chromium Gerrit instance')
+  parser.add_argument('-g', '--gob',
+                      default=site_config.params.EXTERNAL_GOB_INSTANCE,
+                      help=('Gerrit (on borg) instance to query (default: %s)' %
+                            (site_config.params.EXTERNAL_GOB_INSTANCE)))
+  parser.add_argument('--sort', default='number',
+                      help='Key to sort on (number, project)')
+  parser.add_argument('--raw', default=False, action='store_true',
+                      help='Return raw results (suitable for scripting)')
+  parser.add_argument('-n', '--dry-run', default=False, action='store_true',
+                      dest='dryrun',
+                      help='Show what would be done, but do not make changes')
+  parser.add_argument('-v', '--verbose', default=False, action='store_true',
+                      help='Be more verbose in output')
+  parser.add_argument('-b', '--branch',
+                      help='Limit output to the specific branch')
+  parser.add_argument('--draft', default=False, action='store_true',
+                      help="Show draft changes (applicable to 'mine' only)")
+  parser.add_argument('-p', '--project',
+                      help='Limit output to the specific project')
+  parser.add_argument('-t', '--topic',
+                      help='Limit output to the specific topic')
+  parser.add_argument('args', nargs='+')
+  opts = parser.parse_args(argv)
+
+  # A cache of gerrit helpers we'll load on demand.
+  opts.gerrit = {}
+  opts.Freeze()
+
+  # pylint: disable=W0603
+  global COLOR
+  COLOR = terminal.Color(enabled=opts.color)
+
+  # Now look up the requested user action and run it.
+  cmd = opts.args[0].lower()
+  args = opts.args[1:]
+  functor = globals().get(act_pfx + cmd.capitalize())
+  if functor:
+    argspec = inspect.getargspec(functor)
+    if argspec.varargs:
+      arg_min = getattr(functor, 'arg_min', len(argspec.args))
+      if len(args) < arg_min:
+        parser.error('incorrect number of args: %s expects at least %s' %
+                     (cmd, arg_min))
+    elif len(argspec.args) - 1 != len(args):
+      parser.error('incorrect number of args: %s expects %s' %
+                   (cmd, len(argspec.args) - 1))
+    try:
+      functor(opts, *args)
+    except (cros_build_lib.RunCommandError, gerrit.GerritException,
+            gob_util.GOBError) as e:
+      cros_build_lib.Die(e.message)
+  else:
+    parser.error('unknown action: %s' % (cmd,))
diff --git a/scripts/gs_fetch_binpkg.py b/scripts/gs_fetch_binpkg.py
new file mode 100644
index 0000000..a6c02be
--- /dev/null
+++ b/scripts/gs_fetch_binpkg.py
@@ -0,0 +1,55 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Download a binpkg from Google Storage.
+
+This is needed for two reasons:
+  1) In the case where a binpkg is left over in the packages dir,
+     portage doesn't handle retries well and reports an error.
+  2) gsutil retries when a download is interrupted, but it doesn't
+     handle the case where we are unable to resume a transfer and the
+     transfer needs to be restarted from scratch. Ensuring that the
+     file is deleted between each retry helps handle that eventuality.
+"""
+
+from __future__ import print_function
+
+import shutil
+
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import gs
+from chromite.lib import osutils
+
+
+def GetParser():
+  """Creates the argparse parser."""
+  parser = commandline.ArgumentParser(description=__doc__)
+  parser.add_argument('--boto', type='path', help='Path to boto auth file.')
+  parser.add_argument('uri', help='Google Storage URI to download')
+  parser.add_argument('filename', help='Location to store the file.')
+  return parser
+
+
+def Copy(ctx, uri, filename):
+  """Run the copy using a temp file."""
+  temp_path = '%s.tmp' % filename
+  osutils.SafeUnlink(temp_path)
+  try:
+    ctx.Copy(uri, temp_path)
+    shutil.move(temp_path, filename)
+  finally:
+    osutils.SafeUnlink(temp_path)
+
+
+def main(argv):
+  parser = GetParser()
+  options = parser.parse_args(argv)
+  options.Freeze()
+  ctx = gs.GSContext(boto_file=options.boto)
+  try:
+    Copy(ctx, options.uri, options.filename)
+  except gs.GSContextException as ex:
+    # Hide the stack trace using Die.
+    cros_build_lib.Die('%s', ex)
diff --git a/scripts/lddtree.py b/scripts/lddtree.py
new file mode 120000
index 0000000..0f0e4ab
--- /dev/null
+++ b/scripts/lddtree.py
@@ -0,0 +1 @@
+../third_party/lddtree.py
\ No newline at end of file
diff --git a/scripts/loman.py b/scripts/loman.py
new file mode 100644
index 0000000..d068c4e
--- /dev/null
+++ b/scripts/loman.py
@@ -0,0 +1,253 @@
+# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module allows adding and deleting of projects to the local manifest."""
+
+from __future__ import print_function
+
+import platform
+import optparse
+import os
+import sys
+import xml.etree.ElementTree as ElementTree
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+
+
class Manifest(object):
  """Class which provides an abstraction for manipulating the local manifest."""

  @classmethod
  def FromPath(cls, path, empty_if_missing=False):
    """Load a Manifest from the XML file at |path|.

    Args:
      path: Path to a manifest XML file.
      empty_if_missing: If True, return an empty Manifest when |path|
        does not exist; otherwise a missing file is a fatal error.

    Returns:
      A Manifest instance.
    """
    if os.path.isfile(path):
      with open(path) as f:
        return cls(f.read())
    elif not empty_if_missing:
      # Bug fix: the condition was inverted -- it previously Died exactly
      # when the caller asked for an empty fallback, and silently returned
      # an empty manifest when the file was required.
      cros_build_lib.Die('Manifest file, %r, not found' % path)
    return cls()

  def __init__(self, text=None):
    # Default to a minimal, empty manifest document.
    self._text = text or '<manifest>\n</manifest>'
    self.nodes = ElementTree.fromstring(self._text)

  def AddNonWorkonProject(self, name, path, remote=None, revision=None):
    """Add a new nonworkon project element to the manifest tree.

    Args:
      name: Project name.
      path: Checkout path for the project.
      remote: Optional remote name.
      revision: Optional revision override; omitted from the element
        when None.

    Returns:
      The newly created (and already appended) ElementTree element.
    """
    element = ElementTree.Element('project', name=name, path=path,
                                  remote=remote)
    element.attrib['workon'] = 'False'
    if revision is not None:
      element.attrib['revision'] = revision
    self.nodes.append(element)
    return element

  def GetProject(self, name, path=None):
    """Accessor method for getting a project node from the manifest tree.

    A project matches if its name equals |name|, or (when |path| is
    given) if its path equals |path|.

    Returns:
      project element node from ElementTree, otherwise, None
    """
    if path is None:
      # Use a unique value that can't ever match.
      path = object()
    for project in self.nodes.findall('project'):
      # .get() keeps this robust against project elements lacking a
      # path attribute (attrib['path'] would raise KeyError).
      if project.attrib['name'] == name or project.attrib.get('path') == path:
        return project
    return None

  def ToString(self):
    """Serialize the manifest back to XML text with tidy whitespace."""
    # Reset the tail for each node, then just do a hacky replace.
    project = None
    for project in self.nodes.findall('project'):
      project.tail = '\n  '
    if project is not None:
      # Tweak the last project to not have the trailing space.
      project.tail = '\n'
    # Fix manifest tag text and tail.
    self.nodes.text = '\n  '
    self.nodes.tail = '\n'
    return ElementTree.tostring(self.nodes)

  def GetProjects(self):
    """Return a list of all project element nodes in the manifest."""
    return list(self.nodes.findall('project'))
+
+
def _AddProjectsToManifestGroups(options, *args):
  """Enable the given manifest groups for the configured repository."""
  new_group_names = ['name:%s' % project for project in args]

  config_path = options.git_config

  existing = git.RunGit(
      '.', ['config', '-f', config_path, '--get', 'manifest.groups'],
      error_code_ok=True).output.split(',')

  # Ordering matters to repo.  Lead with 'minilayout' and the platform
  # group so a default-manifest checkout converts cleanly to a limited
  # one, then re-emit whatever was already enabled (scrubbing duplicates
  # and past screwups along the way), and finally append the newly
  # requested groups.
  candidates = (['minilayout', 'platform-%s' % (platform.system().lower(),)] +
                existing + new_group_names)

  seen = set()
  ordered = []
  for group in candidates:
    if group in seen:
      continue
    seen.add(group)
    ordered.append(group)

  git.RunGit('.', ['config', '-f', config_path, 'manifest.groups',
                   ','.join(ordered)])
+
+
def _UpgradeMinilayout(options):
  """Convert a repo checkout away from minilayout.xml to default.xml.

  Projects named in the local manifest are migrated to manifest groups:
  every such project name is enabled as a group via
  _AddProjectsToManifestGroups, and any project that also exists in the
  default manifest is removed from the local manifest (it would
  otherwise duplicate the main manifest's entry).  Finally the
  manifest.xml symlink is repointed at manifests/default.xml.

  Args:
    options: Parsed options object; must provide default_manifest_path,
      local_manifest_path, git_config, and manifest_sym_path attributes.
  """
  full_tree = Manifest.FromPath(options.default_manifest_path)
  local_manifest_exists = os.path.exists(options.local_manifest_path)

  new_groups = []
  if local_manifest_exists:
    local_tree = Manifest.FromPath(options.local_manifest_path)
    # Identify which projects need to be transferred across.
    projects = local_tree.GetProjects()
    new_groups = [x.attrib['name'] for x in projects]
    allowed = set(x.attrib['name'] for x in full_tree.GetProjects())
    transferred = [x for x in projects if x.attrib['name'] in allowed]
    for project in transferred:
      # Mangle local_manifest object, removing those projects;
      # note we'll still be adding those projects to the default groups,
      # including those that didn't intersect the main manifest.
      local_tree.nodes.remove(project)

  _AddProjectsToManifestGroups(options, *new_groups)

  if local_manifest_exists:
    # Rewrite the local_manifest now; if there are no settings left in
    # the local_manifest, wipe it.
    if local_tree.nodes.getchildren():
      with open(options.local_manifest_path, 'w') as f:
        f.write(local_tree.ToString())
    else:
      os.unlink(options.local_manifest_path)

  # Finally, move the symlink.
  os.unlink(options.manifest_sym_path)
  os.symlink('manifests/default.xml', options.manifest_sym_path)
  logging.info("Converted the checkout to manifest groups based minilayout.")
+
+
def main(argv):
  """Entry point for loman: add projects to the local manifest.

  Supports one subcommand, 'add', in two modes: --workon (enable an
  existing main-manifest project via manifest groups) and non-workon
  (append a new <project> element to local_manifest.xml).  Also handles
  upgrading legacy minilayout.xml checkouts, both explicitly
  (--upgrade-minilayout) and automatically when one is detected.

  Args:
    argv: Command line arguments, excluding the program name.

  Returns:
    0 on success (parser.error / Die exit the process on failure).
  """
  parser = optparse.OptionParser(usage='usage: %prog add [options] <name> '
                                       '<--workon | <path> --remote <remote> >')
  parser.add_option('-w', '--workon', action='store_true', dest='workon',
                    default=False, help='Is this a workon package?')
  parser.add_option('-r', '--remote', dest='remote',
                    default=None)
  parser.add_option('-v', '--revision', dest='revision',
                    default=None,
                    help="Use to override the manifest defined default "
                    "revision used for a given project.")
  parser.add_option('--upgrade-minilayout', default=False, action='store_true',
                    help="Upgrade a minilayout checkout into a full.xml "
                    "checkout utilizing manifest groups.")
  (options, args) = parser.parse_args(argv)

  repo_dir = git.FindRepoDir(os.getcwd())
  if not repo_dir:
    parser.error("This script must be invoked from within a repository "
                 "checkout.")

  # Stash derived paths on the options object so helpers only need one arg.
  options.git_config = os.path.join(repo_dir, 'manifests.git', 'config')
  options.repo_dir = repo_dir
  options.local_manifest_path = os.path.join(repo_dir, 'local_manifest.xml')
  # This constant is used only when we're doing an upgrade away from
  # minilayout.xml to default.xml.
  options.default_manifest_path = os.path.join(repo_dir, 'manifests',
                                               'default.xml')
  options.manifest_sym_path = os.path.join(repo_dir, 'manifest.xml')

  # The active manifest is whatever manifest.xml currently points at.
  active_manifest = os.path.basename(os.readlink(options.manifest_sym_path))
  upgrade_required = active_manifest == 'minilayout.xml'

  if options.upgrade_minilayout:
    if args:
      parser.error("--upgrade-minilayout takes no arguments.")
    if not upgrade_required:
      print("This repository checkout isn't using minilayout.xml; "
            "nothing to do")
    else:
      _UpgradeMinilayout(options)
    return 0
  elif upgrade_required:
    # Auto-upgrade legacy checkouts by re-invoking ourselves in upgrade mode.
    logging.warning(
        "Your repository checkout is using the old minilayout.xml workflow; "
        "auto-upgrading it.")
    cros_build_lib.RunCommand(
        [sys.argv[0], '--upgrade-minilayout'], cwd=os.getcwd(), print_cmd=False)

  if not args:
    parser.error("No command specified.")
  elif args[0] != 'add':
    parser.error("Only supported subcommand is add right now.")
  elif options.workon:
    if len(args) != 2:
      parser.error(
          "Argument count is wrong for --workon; must be add <project>")
    name, path = args[1], None
  else:
    if options.remote is None:
      parser.error('Adding non-workon projects requires a remote.')
    elif len(args) != 3:
      parser.error(
          "Argument count is wrong for non-workon mode; "
          "must be add <project> <path> --remote <remote-arg>")
    name, path = args[1:]

  # Normalize the revision unless it is a tag ref or a raw SHA1.
  revision = options.revision
  if revision is not None:
    if (not git.IsRefsTags(revision) and
        not git.IsSHA1(revision)):
      revision = git.StripRefsHeads(revision, False)

  main_manifest = Manifest.FromPath(options.manifest_sym_path,
                                    empty_if_missing=False)
  local_manifest = Manifest.FromPath(options.local_manifest_path)

  main_element = main_manifest.GetProject(name, path=path)

  if options.workon:
    if main_element is None:
      parser.error('No project named %r in the default manifest.' % name)
    _AddProjectsToManifestGroups(options, main_element.attrib['name'])

  elif main_element is not None:
    if options.remote is not None:
      # Likely this project wasn't meant to be remote, so workon main element
      print("Project already exists in manifest. Using that as workon project.")
      _AddProjectsToManifestGroups(options, main_element.attrib['name'])
    else:
      # Conflict will occur; complain.
      parser.error("Requested project name=%r path=%r will conflict with "
                   "your current manifest %s" % (name, path, active_manifest))

  elif local_manifest.GetProject(name, path=path) is not None:
    parser.error("Requested project name=%r path=%r conflicts with "
                 "your local_manifest.xml" % (name, path))

  else:
    # Brand-new non-workon project: record it in local_manifest.xml and
    # enable its manifest group.
    element = local_manifest.AddNonWorkonProject(name=name, path=path,
                                                 remote=options.remote,
                                                 revision=revision)
    _AddProjectsToManifestGroups(options, element.attrib['name'])

    with open(options.local_manifest_path, 'w') as f:
      f.write(local_manifest.ToString())
  return 0
diff --git a/scripts/merge_package_status.py b/scripts/merge_package_status.py
new file mode 100644
index 0000000..018f8e3
--- /dev/null
+++ b/scripts/merge_package_status.py
@@ -0,0 +1,275 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Merge multiple package status CSV files into one csv file.
+
+This simplifies uploading to a Google Docs spreadsheet.
+"""
+
+from __future__ import print_function
+
+import optparse
+import os
+import re
+
+from chromite.lib import operation
+from chromite.lib import table
+from chromite.lib import upgrade_table as utable
+
# Convenient local aliases for the shared UpgradeTable column names.
COL_PACKAGE = utable.UpgradeTable.COL_PACKAGE
COL_SLOT = utable.UpgradeTable.COL_SLOT
COL_TARGET = utable.UpgradeTable.COL_TARGET
COL_OVERLAY = utable.UpgradeTable.COL_OVERLAY
# Columns that together uniquely identify a row (package plus slot).
ID_COLS = [COL_PACKAGE, COL_SLOT]

# Shared operation object used for console output throughout this module.
oper = operation.Operation('merge_package_status')
+
# A bit of hard-coding with knowledge of how cros targets work.
CHROMEOS_TARGET_ORDER = [
    'virtual/target-os',
    'virtual/target-os-dev',
    'virtual/target-os-test',
]


def _GetCrosTargetRank(target):
  """Hard-coded ranking of known/expected OS root targets for sorting.

  The lower the ranking, the earlier in the target list it falls by
  convention.  In other words, in the typical target combination
  "virtual/target-os virtual/target-os-dev", "virtual/target-os" has
  a lower ranking than "virtual/target-os-dev".

  All valid rankings are greater than zero.

  Args:
    target: Target name, e.g. 'virtual/target-os'.

  Returns:
    Valid ranking for target or a false value if target is unrecognized.
  """
  for ix, targ in enumerate(CHROMEOS_TARGET_ORDER):
    if target == targ:
      return ix + 1  # Avoid a 0 (non-true) result
  return None


def ProcessTargets(targets, reverse_cros=False):
  """Process a list of |targets| to smaller, sorted list.

  For example:
  virtual/target-os virtual/target-os-dev -> virtual/target-os-dev
  virtual/target-os virtual/target-os-dev world -> virtual/target-os-dev world
  world virtual/target-sdk -> virtual/target-sdk world

  The one virtual/target-os target always comes back first, with targets
  otherwise sorted alphabetically.  The virtual/target-os target that is
  kept will be the one with the highest 'ranking', as decided
  by _GetCrosTargetRank.  To reverse the ranking sense, specify
  |reverse_cros| as True.

  These rules are specific to how we want the information to appear
  in the final spreadsheet.

  Args:
    targets: List of target name strings (may be empty or None).
    reverse_cros: If True, keep the lowest- rather than highest-ranked
      cros target.

  Returns:
    The processed list of targets; empty input yields an empty list.
  """
  if not targets:
    # Bug fix: this previously fell through and implicitly returned None,
    # which made callers that do ' '.join(result) raise TypeError when
    # both merged cells were empty.  An empty list is the safe equivalent.
    return []

  # Sort cros targets according to "rank", keeping only one of them.
  cros_targets = [t for t in targets if _GetCrosTargetRank(t)]
  cros_targets.sort(key=_GetCrosTargetRank, reverse=reverse_cros)

  # Don't condense non-cros targets.
  other_targets = [t for t in targets if not _GetCrosTargetRank(t)]
  other_targets.sort()

  # Assemble final target list, with single cros target first.
  final_targets = []
  if cros_targets:
    final_targets.append(cros_targets[-1])
  final_targets.extend(other_targets)

  return final_targets
+
+
def LoadTable(filepath):
  """Load the csv file at |filepath| into a table.Table object.

  The table is named after the file's basename, minus any '.csv' suffix.
  """
  base = os.path.basename(filepath)
  suffix = '.csv'
  if base.endswith(suffix):
    base = base[:-len(suffix)]
  return table.Table.LoadFromCSV(filepath, name=base)
+
+
def MergeTables(tables):
  """Merge all |tables| into one merged table.  Return table.

  The first table is merged into in place; subsequent tables are folded
  into it one at a time using the column-specific merge rules below.

  Args:
    tables: Non-empty list of table.Table objects.

  Returns:
    The merged table.Table, sorted by package name then slot.
  """
  def TargetMerger(_col, val, other_val):
    """Function to merge two values in Root Target column from two tables."""
    targets = []
    if val:
      targets.extend(val.split())
    if other_val:
      targets.extend(other_val.split())

    processed_targets = ProcessTargets(targets, reverse_cros=True)
    return ' '.join(processed_targets)

  def DefaultMerger(col, val, other_val):
    """Merge |val| and |other_val| in column |col| for some row."""
    # This function is registered as the default merge function,
    # so verify that the column is a supported one.
    prfx = utable.UpgradeTable.COL_DEPENDS_ON.replace('ARCH', '')
    if col.startswith(prfx):
      # Merge dependencies by taking the superset.
      return MergeToSuperset(col, val, other_val)

    prfx = utable.UpgradeTable.COL_USED_BY.replace('ARCH', '')
    if col.startswith(prfx):
      # Merge users by taking the superset.
      return MergeToSuperset(col, val, other_val)

    regexp = utable.UpgradeTable.COL_UPGRADED.replace('ARCH', r'\S+')
    if re.search(regexp, col):
      return MergeWithAND(col, val, other_val)

    # For any column, if one value is missing just accept the other value.
    # For example, when one table has an entry for 'arm version' but
    # the other table does not.
    if val == table.Table.EMPTY_CELL and other_val != table.Table.EMPTY_CELL:
      return other_val
    if other_val == table.Table.EMPTY_CELL and val != table.Table.EMPTY_CELL:
      return val

    # Raise a generic ValueError, which MergeTable function will clarify.
    # The effect should be the same as having no merge_rule for this column.
    raise ValueError

  def MergeToSuperset(_col, val, other_val):
    """Merge |col| values as superset of tokens in |val| and |other_val|."""
    tokens = set(val.split())
    other_tokens = set(other_val.split())
    all_tokens = tokens.union(other_tokens)
    return ' '.join(sorted(all_tokens))

  # This is only needed because the automake-wrapper package is coming from
  # different overlays for different boards right now!
  def MergeWithAND(_col, val, other_val):
    """For merging columns that might have differences but should not!."""
    if not val:
      return '"" AND ' + other_val
    if not other_val:
      # Bug fix: this originally read |if not other_val + ' AND ""':|.
      # Because + binds tighter than not, that tested a never-empty string
      # (so the branch never fired) and an empty |other_val| produced
      # 'foo AND ' with a dangling separator.  The quoted empty marker
      # belongs in the result, not the test.
      return val + ' AND ""'
    return val + ' AND ' + other_val

  # Prepare merge_rules with the defined functions.
  merge_rules = {COL_TARGET: TargetMerger,
                 COL_OVERLAY: MergeWithAND,
                 '__DEFAULT__': DefaultMerger}

  # Merge each table one by one.
  csv_table = tables[0]
  if len(tables) > 1:
    oper.Notice('Merging tables into one.')
    for tmp_table in tables[1:]:
      oper.Notice('Merging "%s" and "%s".' %
                  (csv_table.GetName(), tmp_table.GetName()))
      csv_table.MergeTable(tmp_table, ID_COLS,
                           merge_rules=merge_rules, allow_new_columns=True)

  # Sort the table by package name, then slot.
  def IdSort(row):
    return tuple(row[col] for col in ID_COLS)
  csv_table.Sort(IdSort)

  return csv_table
+
+
def LoadAndMergeTables(args):
  """Load all csv files in |args| into one merged table.  Return table."""
  def _LoadOne(path):
    """Announce and load a single csv file."""
    oper.Notice('Loading csv table from "%s".' % path)
    return LoadTable(path)

  return MergeTables([_LoadOne(path) for path in args])
+
+
+# Used by upload_package_status.
+def FinalizeTable(csv_table):
+  """Process the table to prepare it for upload to online spreadsheet."""
+  oper.Notice('Processing final table to prepare it for upload.')
+
+  col_ver = utable.UpgradeTable.COL_CURRENT_VER
+  col_arm_ver = utable.UpgradeTable.GetColumnName(col_ver, 'arm')
+  col_x86_ver = utable.UpgradeTable.GetColumnName(col_ver, 'x86')
+
+  # Insert new columns
+  col_cros_target = 'ChromeOS Root Target'
+  col_host_target = 'Host Root Target'
+  col_cmp_arch = 'Comparing arm vs x86 Versions'
+  csv_table.AppendColumn(col_cros_target)
+  csv_table.AppendColumn(col_host_target)
+  csv_table.AppendColumn(col_cmp_arch)
+
+  # Row by row processing
+  for row in csv_table:
+    # If the row is not unique when just the package
+    # name is considered, then add a ':<slot>' suffix to the package name.
+    id_values = {COL_PACKAGE: row[COL_PACKAGE]}
+    matching_rows = csv_table.GetRowsByValue(id_values)
+    if len(matching_rows) > 1:
+      for mr in matching_rows:
+        mr[COL_PACKAGE] += ':' + mr[COL_SLOT]
+
+    # Split target column into cros_target and host_target columns
+    target_str = row.get(COL_TARGET, None)
+    if target_str:
+      targets = target_str.split()
+      cros_targets = []
+      host_targets = []
+      for target in targets:
+        if _GetCrosTargetRank(target):
+          cros_targets.append(target)
+        else:
+          host_targets.append(target)
+
+      row[col_cros_target] = ' '.join(cros_targets)
+      row[col_host_target] = ' '.join(host_targets)
+
+    # Compare x86 vs. arm version, add result to col_cmp_arch.
+    x86_ver = row.get(col_x86_ver)
+    arm_ver = row.get(col_arm_ver)
+    if x86_ver and arm_ver:
+      if x86_ver != arm_ver:
+        row[col_cmp_arch] = 'different'
+      else:
+        row[col_cmp_arch] = 'same'
+
+
def WriteTable(csv_table, outpath):
  """Write |csv_table| out to |outpath| as csv.

  Args:
    csv_table: A table object providing WriteCSV(file_handle).
    outpath: Path of the file to create or overwrite.

  Raises:
    IOError: If |outpath| cannot be opened for writing (logged first).
  """
  try:
    # Fix: use a context manager so the file handle is always closed;
    # the original opened the file and never closed it.
    with open(outpath, 'w') as fh:
      csv_table.WriteCSV(fh)
    oper.Notice('Wrote merged table to "%s"' % outpath)
  except IOError as ex:
    oper.Error('Unable to open %s for write: %s' % (outpath, ex))
    raise
+
+
def main(argv):
  """Parse arguments, merge the input csv files, and write the result."""
  parser = optparse.OptionParser(
      usage='Usage: %prog --out=merged_csv_file input_csv_files...')
  parser.add_option('--out', dest='outpath', type='string',
                    action='store', default=None,
                    help='File to write merged results to')
  options, args = parser.parse_args(argv)

  # Both an output path and at least one input file are mandatory.
  if not options.outpath:
    parser.print_help()
    oper.Die('The --out option is required.')
  if not args:
    parser.print_help()
    oper.Die('At least one input_csv_file is required.')

  WriteTable(LoadAndMergeTables(args), options.outpath)
diff --git a/scripts/merge_package_status_unittest b/scripts/merge_package_status_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/merge_package_status_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/merge_package_status_unittest.py b/scripts/merge_package_status_unittest.py
new file mode 100644
index 0000000..067f905
--- /dev/null
+++ b/scripts/merge_package_status_unittest.py
@@ -0,0 +1,276 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for cros_portage_upgrade.py."""
+
+from __future__ import print_function
+
+import exceptions
+import os
+import tempfile
+
+from chromite.lib import cros_test_lib
+from chromite.lib import table
+from chromite.scripts import merge_package_status as mps
+
+
+# pylint: disable=protected-access
+
+
class MergeTest(cros_test_lib.OutputTestCase, cros_test_lib.TempDirTestCase):
  """Test the functionality of merge_package_status."""

  # These taken from cros_portage_upgrade column names.
  COL_VER_x86 = 'Current x86 Version'
  COL_VER_arm = 'Current arm Version'

  # Columns that FinalizeTable is expected to append.
  COL_CROS_TARGET = 'ChromeOS Root Target'
  COL_HOST_TARGET = 'Host Root Target'
  COL_CMP_ARCH = 'Comparing arm vs x86 Versions'

  COLUMNS = [mps.COL_PACKAGE,
             mps.COL_SLOT,
             mps.COL_OVERLAY,
             COL_VER_x86,
             COL_VER_arm,
             mps.COL_TARGET]

  # ROW0 and ROW2 deliberately share a package name (different slots), so
  # FinalizeTable should suffix both with ':<slot>'.  Each ROWn_FINAL is
  # the expected post-FinalizeTable form of ROWn.
  ROW0 = {mps.COL_PACKAGE: 'lib/foo',
          mps.COL_SLOT: '0',
          mps.COL_OVERLAY: 'portage',
          COL_VER_x86: '1.2.3',
          COL_VER_arm: '1.2.3',
          mps.COL_TARGET: 'virtual/target-os-dev virtual/target-sdk'}
  ROW0_FINAL = dict(ROW0)
  ROW0_FINAL[mps.COL_PACKAGE] = ROW0[mps.COL_PACKAGE] + ':' + ROW0[mps.COL_SLOT]
  ROW0_FINAL[COL_CROS_TARGET] = 'virtual/target-os-dev'
  ROW0_FINAL[COL_HOST_TARGET] = 'virtual/target-sdk'
  ROW0_FINAL[COL_CMP_ARCH] = 'same'

  ROW1 = {mps.COL_PACKAGE: 'dev/bar',
          mps.COL_SLOT: '0',
          mps.COL_OVERLAY: 'chromiumos-overlay',
          COL_VER_x86: '1.2.3',
          COL_VER_arm: '1.2.3-r1',
          mps.COL_TARGET: 'virtual/target-os'}
  ROW1_FINAL = dict(ROW1)
  ROW1_FINAL[COL_CROS_TARGET] = 'virtual/target-os'
  ROW1_FINAL[COL_HOST_TARGET] = ''
  ROW1_FINAL[COL_CMP_ARCH] = 'different'

  ROW2 = {mps.COL_PACKAGE: 'lib/foo',
          mps.COL_SLOT: '1',
          mps.COL_OVERLAY: 'portage',
          COL_VER_x86: '1.2.3',
          COL_VER_arm: '',
          mps.COL_TARGET: 'virtual/target-os-dev world'}
  ROW2_FINAL = dict(ROW2)
  ROW2_FINAL[mps.COL_PACKAGE] = ROW2[mps.COL_PACKAGE] + ':' + ROW2[mps.COL_SLOT]
  ROW2_FINAL[COL_CROS_TARGET] = 'virtual/target-os-dev'
  ROW2_FINAL[COL_HOST_TARGET] = 'world'
  ROW2_FINAL[COL_CMP_ARCH] = ''

  def setUp(self):
    # Fresh three-row fixture table for every test.
    self._table = self._CreateTableWithRows(self.COLUMNS,
                                            [self.ROW0, self.ROW1, self.ROW2])

  def _CreateTableWithRows(self, cols, rows):
    """Build a table.Table with |cols| and copies of each dict in |rows|."""
    mytable = table.Table(list(cols))
    if rows:
      for row in rows:
        mytable.AppendRow(dict(row))
    return mytable

  def _CreateTmpCsvFile(self, table_obj):
    """Write |table_obj| to a temp csv file; caller must unlink the path."""
    _fd, path = tempfile.mkstemp(text=True)
    tmpfile = open(path, 'w')
    table_obj.WriteCSV(tmpfile)
    tmpfile.close()
    return path

  def _GetFullRowFor(self, row, cols):
    # Missing columns are normalized to '' so rows can be compared.
    return dict((col, row.get(col, '')) for col in cols)

  def assertRowsEqual(self, row1, row2):
    """Assert rows are equal over the union of their columns."""
    # Determine column superset
    # NOTE(review): keys() concatenation is Python 2 only (py3 returns views).
    cols = set(row1.keys() + row2.keys())
    self.assertEquals(self._GetFullRowFor(row1, cols),
                      self._GetFullRowFor(row2, cols))

  def testGetCrosTargetRank(self):
    cros_rank = mps._GetCrosTargetRank('virtual/target-os')
    crosdev_rank = mps._GetCrosTargetRank('virtual/target-os-dev')
    crostest_rank = mps._GetCrosTargetRank('virtual/target-os-test')
    other_rank = mps._GetCrosTargetRank('foobar')

    # Known targets rank truthy and in os < dev < test order; unknown
    # targets get a false value.
    self.assertTrue(cros_rank)
    self.assertTrue(crosdev_rank)
    self.assertTrue(crostest_rank)
    self.assertFalse(other_rank)
    self.assertTrue(cros_rank < crosdev_rank)
    self.assertTrue(crosdev_rank < crostest_rank)

  def testProcessTargets(self):
    # Each test_in entry pairs with the expected default-order output in
    # test_out and the reverse_cros=True output in test_rev_out.
    test_in = [
        ['virtual/target-os', 'virtual/target-os-dev'],
        ['world', 'virtual/target-os', 'virtual/target-os-dev',
         'virtual/target-os-test'],
        ['world', 'virtual/target-sdk', 'virtual/target-os-dev',
         'virtual/target-os-test'],
    ]
    test_out = [
        ['virtual/target-os-dev'],
        ['virtual/target-os-test', 'world'],
        ['virtual/target-os-test', 'virtual/target-sdk', 'world'],
    ]
    test_rev_out = [
        ['virtual/target-os'],
        ['virtual/target-os', 'world'],
        ['virtual/target-os-dev', 'virtual/target-sdk', 'world'],
    ]

    for targets, good_out, rev_out in zip(test_in, test_out, test_rev_out):
      output = mps.ProcessTargets(targets)
      self.assertEquals(output, good_out)
      output = mps.ProcessTargets(targets, reverse_cros=True)
      self.assertEquals(output, rev_out)

  def testLoadTable(self):
    # Round-trip the fixture table through a csv file.
    path = self._CreateTmpCsvFile(self._table)
    csv_table = mps.LoadTable(path)
    self.assertEquals(self._table, csv_table)
    os.unlink(path)

  def testLoadAndMergeTables(self):
    # Create a second table to merge with standard table.
    # It omits the x86 version column to exercise allow_new_columns merging.
    row0_2 = {mps.COL_PACKAGE: 'lib/foo',
              mps.COL_SLOT: '1',
              mps.COL_OVERLAY: 'portage',
              self.COL_VER_arm: '1.2.4',
              mps.COL_TARGET: 'virtual/target-os-dev world'}
    row1_2 = {mps.COL_PACKAGE: 'dev/bar',
              mps.COL_SLOT: '0',
              mps.COL_OVERLAY: 'chromiumos-overlay',
              self.COL_VER_arm: '1.2.3-r1',
              mps.COL_TARGET: 'virtual/target-os-test'}
    row2_2 = {mps.COL_PACKAGE: 'dev/newby',
              mps.COL_SLOT: '2',
              mps.COL_OVERLAY: 'chromiumos-overlay',
              self.COL_VER_arm: '3.2.1',
              mps.COL_TARGET: 'virtual/target-os virtual/target-sdk'}
    cols = [col for col in self.COLUMNS if col != self.COL_VER_x86]
    table_2 = self._CreateTableWithRows(cols,
                                        [row0_2, row1_2, row2_2])

    # Minor patch to main table for this test.
    self._table.GetRowByIndex(2)[self.COL_VER_arm] = '1.2.4'

    with self.OutputCapturer():
      path1 = self._CreateTmpCsvFile(self._table)
      path2 = self._CreateTmpCsvFile(table_2)

      # Both entry points should produce the same merged result.
      combined_table1 = mps.MergeTables([self._table, table_2])
      combined_table2 = mps.LoadAndMergeTables([path1, path2])

    # Expected merged rows, in sorted (package, slot) order.
    final_row0 = {mps.COL_PACKAGE: 'dev/bar',
                  mps.COL_SLOT: '0',
                  mps.COL_OVERLAY: 'chromiumos-overlay',
                  self.COL_VER_x86: '1.2.3',
                  self.COL_VER_arm: '1.2.3-r1',
                  mps.COL_TARGET: 'virtual/target-os'}
    final_row1 = {mps.COL_PACKAGE: 'dev/newby',
                  mps.COL_SLOT: '2',
                  mps.COL_OVERLAY: 'chromiumos-overlay',
                  self.COL_VER_x86: '',
                  self.COL_VER_arm: '3.2.1',
                  mps.COL_TARGET: 'virtual/target-os virtual/target-sdk'}
    final_row2 = {mps.COL_PACKAGE: 'lib/foo',
                  mps.COL_SLOT: '0',
                  mps.COL_OVERLAY: 'portage',
                  self.COL_VER_x86: '1.2.3',
                  self.COL_VER_arm: '1.2.3',
                  mps.COL_TARGET: 'virtual/target-os-dev virtual/target-sdk'}
    final_row3 = {mps.COL_PACKAGE: 'lib/foo',
                  mps.COL_SLOT: '1',
                  mps.COL_OVERLAY: 'portage',
                  self.COL_VER_x86: '1.2.3',
                  self.COL_VER_arm: '1.2.4',
                  mps.COL_TARGET: 'virtual/target-os-dev world'}

    final_rows = (final_row0, final_row1, final_row2, final_row3)
    for ix, row_out in enumerate(final_rows):
      self.assertRowsEqual(row_out, combined_table1[ix])
      self.assertRowsEqual(row_out, combined_table2[ix])

    os.unlink(path1)
    os.unlink(path2)

  def testFinalizeTable(self):
    self.assertEquals(3, self._table.GetNumRows())
    self.assertEquals(len(self.COLUMNS), self._table.GetNumColumns())

    with self.OutputCapturer():
      mps.FinalizeTable(self._table)

    # FinalizeTable appends exactly three columns and adds no rows.
    self.assertEquals(3, self._table.GetNumRows())
    self.assertEquals(len(self.COLUMNS) + 3, self._table.GetNumColumns())

    final_rows = (self.ROW0_FINAL, self.ROW1_FINAL, self.ROW2_FINAL)
    for ix, row_out in enumerate(final_rows):
      self.assertRowsEqual(row_out, self._table[ix])
+
+
class MainTest(cros_test_lib.MockOutputTestCase):
  """Test argument handling at the main method level."""

  def testHelp(self):
    """Test that --help is functioning"""
    with self.OutputCapturer() as output:
      # Running with --help should exit with code==0
      # NOTE(review): |exceptions| is the Python 2 built-in exceptions
      # module; exceptions.SystemExit is the builtin SystemExit.
      try:
        mps.main(['--help'])
      except exceptions.SystemExit as e:
        self.assertEquals(e.args[0], 0)

    # Verify that a message beginning with "Usage: " was printed
    stdout = output.GetStdout()
    self.assertTrue(stdout.startswith('Usage: '),
                    msg='Expected output starting with "Usage: " but got:\n%s' %
                    stdout)

  def testMissingOut(self):
    """Test that running without --out exits with an error."""
    with self.OutputCapturer():
      # Running without --out should exit with code!=0
      try:
        mps.main([])
      except exceptions.SystemExit as e:
        self.assertNotEquals(e.args[0], 0)

    # Verify that output ends in error.
    self.AssertOutputEndsInError()

  def testMissingPackage(self):
    """Test that running without a package argument exits with an error."""
    with self.OutputCapturer():
      # Running without a package should exit with code!=0
      try:
        mps.main(['--out=any-out'])
      except exceptions.SystemExit as e:
        self.assertNotEquals(e.args[0], 0)

    # Verify that output ends in error.
    self.AssertOutputEndsInError()

  def testMain(self):
    """Verify that running main method runs expected functions.

    Expected: LoadAndMergeTables, WriteTable.
    """
    # Stub out the heavy lifting; we only verify the plumbing between
    # LoadAndMergeTables and WriteTable.
    self.PatchObject(mps, 'LoadAndMergeTables', return_value='csv_table')
    m = self.PatchObject(mps, 'WriteTable')

    mps.main(['--out=any-out', 'any-package'])

    m.assert_called_with('csv_table', 'any-out')
diff --git a/scripts/parallel_emerge.py b/scripts/parallel_emerge.py
new file mode 100644
index 0000000..176ef34
--- /dev/null
+++ b/scripts/parallel_emerge.py
@@ -0,0 +1,1926 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Program to run emerge in parallel, for significant speedup.
+
+Usage:
+ ./parallel_emerge [--board=BOARD] [--workon=PKGS]
+                   [--force-remote-binary=PKGS] [emerge args] package
+
+This script runs multiple emerge processes in parallel, using appropriate
+Portage APIs. It is faster than standard emerge because it has a
+multiprocess model instead of an asynchronous model.
+"""
+
+from __future__ import print_function
+
+import codecs
+import copy
+import errno
+import gc
+import heapq
+import multiprocessing
+import os
+try:
+  import Queue
+except ImportError:
+  # Python-3 renamed to "queue".  We still use Queue to avoid collisions
+  # with naming variables as "queue".  Maybe we'll transition at some point.
+  # pylint: disable=F0401
+  import queue as Queue
+import signal
+import subprocess
+import sys
+import tempfile
+import threading
+import time
+import traceback
+
+from chromite.lib import cros_build_lib
+from chromite.lib import process_util
+from chromite.lib import proctitle
+
+# If PORTAGE_USERNAME isn't specified, scrape it from the $HOME variable. On
+# Chromium OS, the default "portage" user doesn't have the necessary
+# permissions. It'd be easier if we could default to $USERNAME, but $USERNAME
+# is "root" here because we get called through sudo.
+#
+# We need to set this before importing any portage modules, because portage
+# looks up "PORTAGE_USERNAME" at import time.
+#
+# NOTE: .bashrc sets PORTAGE_USERNAME = $USERNAME, so most people won't
+# encounter this case unless they have an old chroot or blow away the
+# environment by running sudo without the -E specifier.
+if "PORTAGE_USERNAME" not in os.environ:
+  homedir = os.environ.get("HOME")
+  if homedir:
+    os.environ["PORTAGE_USERNAME"] = os.path.basename(homedir)
+
+# Wrap Popen with a lock to ensure no two Popen are executed simultaneously in
+# the same process.
+# Two Popen call at the same time might be the cause for crbug.com/433482.
+_popen_lock = threading.Lock()
+# Keep a reference to the real Popen so the wrapper can delegate to it.
+_old_popen = subprocess.Popen
+
+def _LockedPopen(*args, **kwargs):
+  """Drop-in replacement for subprocess.Popen that serializes process spawns.
+
+  Holds a module-wide lock around process creation so at most one Popen runs
+  at a time in this process (suspected race; see crbug.com/433482 above).
+  """
+  with _popen_lock:
+    return _old_popen(*args, **kwargs)
+
+# Monkey-patch subprocess module-wide: every later Popen caller in this
+# process (including library code) goes through the serialized wrapper.
+subprocess.Popen = _LockedPopen
+
+# Portage doesn't expose dependency trees in its public API, so we have to
+# make use of some private APIs here. These modules are found under
+# /usr/lib/portage/pym/.
+#
+# TODO(davidjames): Update Portage to expose public APIs for these features.
+# pylint: disable=F0401
+from _emerge.actions import adjust_configs
+from _emerge.actions import load_emerge_config
+from _emerge.create_depgraph_params import create_depgraph_params
+from _emerge.depgraph import backtrack_depgraph
+from _emerge.main import emerge_main
+from _emerge.main import parse_opts
+from _emerge.Package import Package
+from _emerge.post_emerge import clean_logs
+from _emerge.Scheduler import Scheduler
+from _emerge.stdout_spinner import stdout_spinner
+from portage._global_updates import _global_updates
+import portage
+import portage.debug
+# pylint: enable=F0401
+
+
+def Usage():
+  """Print usage."""
+  print("Usage:")
+  print(" ./parallel_emerge [--board=BOARD] [--workon=PKGS]")
+  print("                   [--rebuild] [emerge args] package")
+  print()
+  print("Packages specified as workon packages are always built from source.")
+  print()
+  print("The --workon argument is mainly useful when you want to build and")
+  print("install packages that you are working on unconditionally, but do not")
+  print("to have to rev the package to indicate you want to build it from")
+  print("source. The build_packages script will automatically supply the")
+  print("workon argument to emerge, ensuring that packages selected using")
+  print("cros-workon are rebuilt.")
+  print()
+  print("The --rebuild option rebuilds packages whenever their dependencies")
+  print("are changed. This ensures that your build is correct.")
+
+
+# Global start time, captured at module import so elapsed times can be
+# reported relative to process startup.
+GLOBAL_START = time.time()
+
+# Whether process has been killed by a signal.  (Presumably set by a signal
+# handler elsewhere in this file -- verify at the KILLED.set() call site.)
+KILLED = multiprocessing.Event()
+
+
+class EmergeData(object):
+  """This simple struct holds various emerge variables.
+
+  This struct helps us easily pass emerge variables around as a unit.
+  These variables are used for calculating dependencies and installing
+  packages.
+  """
+
+  __slots__ = ["action", "cmdline_packages", "depgraph", "favorites",
+               "mtimedb", "opts", "root_config", "scheduler_graph",
+               "settings", "spinner", "trees"]
+
+  def __init__(self):
+    # The action the user requested. If the user is installing packages, this
+    # is None. If the user is doing anything other than installing packages,
+    # this will contain the action name, which will map exactly to the
+    # long-form name of the associated emerge option.
+    #
+    # Example: If you call parallel_emerge --unmerge package, the action name
+    #          will be "unmerge"
+    self.action = None
+
+    # The list of packages the user passed on the command-line.
+    self.cmdline_packages = None
+
+    # The emerge dependency graph. It'll contain all the packages involved in
+    # this merge, along with their versions.
+    self.depgraph = None
+
+    # The list of candidates to add to the world file.
+    self.favorites = None
+
+    # A dict of the options passed to emerge. This dict has been cleaned up
+    # a bit by parse_opts, so that it's a bit easier for the emerge code to
+    # look at the options.
+    #
+    # Emerge takes a few shortcuts in its cleanup process to make parsing of
+    # the options dict easier. For example, if you pass in "--usepkg=n", the
+    # "--usepkg" flag is just left out of the dictionary altogether. Because
+    # --usepkg=n is the default, this makes parsing easier, because emerge
+    # can just assume that if "--usepkg" is in the dictionary, it's enabled.
+    #
+    # These cleanup processes aren't applied to all options. For example, the
+    # --with-bdeps flag is passed in as-is.  For a full list of the cleanups
+    # applied by emerge, see the parse_opts function in the _emerge.main
+    # package.
+    self.opts = None
+
+    # A dictionary used by portage to maintain global state. This state is
+    # loaded from disk when portage starts up, and saved to disk whenever we
+    # call mtimedb.commit().
+    #
+    # This database contains information about global updates (i.e., what
+    # version of portage we have) and what we're currently doing. Portage
+    # saves what it is currently doing in this database so that it can be
+    # resumed when you call it with the --resume option.
+    #
+    # parallel_emerge does not save what it is currently doing in the mtimedb,
+    # so we do not support the --resume option.
+    self.mtimedb = None
+
+    # The portage configuration for our current root. This contains the portage
+    # settings (see below) and the three portage trees for our current root.
+    # (The three portage trees are explained below, in the documentation for
+    #  the "trees" member.)
+    self.root_config = None
+
+    # The scheduler graph is used by emerge to calculate what packages to
+    # install. We don't actually install any deps, so this isn't really used,
+    # but we pass it in to the Scheduler object anyway.
+    self.scheduler_graph = None
+
+    # Portage settings for our current session. Most of these settings are set
+    # in make.conf inside our current install root.
+    self.settings = None
+
+    # The spinner, which spews stuff to stdout to indicate that portage is
+    # doing something. We maintain our own spinner, so we set the portage
+    # spinner to "silent" mode.
+    self.spinner = None
+
+    # The portage trees. There are separate portage trees for each root. To get
+    # the portage tree for the current root, you can look in self.trees[root],
+    # where root = self.settings["ROOT"].
+    #
+    # In each root, there are three trees: vartree, porttree, and bintree.
+    #  - vartree: A database of the currently-installed packages.
+    #  - porttree: A database of ebuilds, that can be used to build packages.
+    #  - bintree: A database of binary packages.
+    self.trees = None
+
+
+class DepGraphGenerator(object):
+  """Grab dependency information about packages from portage.
+
+  Typical usage:
+    deps = DepGraphGenerator()
+    deps.Initialize(sys.argv[1:])
+    deps_tree, deps_info = deps.GenDependencyTree()
+    deps_graph = deps.GenDependencyGraph(deps_tree, deps_info)
+    deps.PrintTree(deps_tree)
+    PrintDepsMap(deps_graph)
+  """
+
+  __slots__ = ["board", "emerge", "package_db", "show_output", "sysroot",
+               "unpack_only", "max_retries"]
+
+  def __init__(self):
+    # Board name from --board, or None when targeting the host.
+    self.board = None
+    # Shared EmergeData struct holding parsed emerge/portage state.
+    self.emerge = EmergeData()
+    # Maps CPV string -> portage Package object; filled by GenDependencyTree.
+    self.package_db = {}
+    # Set by --show-output; presumably streams job output -- confirm at use.
+    self.show_output = False
+    # Install root from --sysroot (or derived from --board in Initialize).
+    self.sysroot = None
+    # Set by --unpackonly: fetch/unpack sources instead of building.
+    self.unpack_only = False
+    # From --retries: attempts per package -- confirm at the retry loop.
+    self.max_retries = 1
+
+  def ParseParallelEmergeArgs(self, argv):
+    """Read the parallel emerge arguments from the command-line.
+
+    We need to be compatible with emerge arg format.  We scrape arguments that
+    are specific to parallel_emerge, and pass through the rest directly to
+    emerge.
+
+    Args:
+      argv: arguments list
+
+    Returns:
+      Arguments that don't belong to parallel_emerge
+    """
+    emerge_args = []
+    for arg in argv:
+      # Specifically match arguments that are specific to parallel_emerge, and
+      # pass through the rest.
+      if arg.startswith("--board="):
+        self.board = arg.replace("--board=", "")
+      elif arg.startswith("--sysroot="):
+        self.sysroot = arg.replace("--sysroot=", "")
+      elif arg.startswith("--workon="):
+        workon_str = arg.replace("--workon=", "")
+        emerge_args.append("--reinstall-atoms=%s" % workon_str)
+        emerge_args.append("--usepkg-exclude=%s" % workon_str)
+      elif arg.startswith("--force-remote-binary="):
+        force_remote_binary = arg.replace("--force-remote-binary=", "")
+        emerge_args.append("--useoldpkg-atoms=%s" % force_remote_binary)
+      elif arg.startswith("--retries="):
+        self.max_retries = int(arg.replace("--retries=", ""))
+      elif arg == "--show-output":
+        self.show_output = True
+      elif arg == "--rebuild":
+        emerge_args.append("--rebuild-if-unbuilt")
+      elif arg == "--unpackonly":
+        emerge_args.append("--fetchonly")
+        self.unpack_only = True
+      else:
+        # Not one of our options, so pass through to emerge.
+        emerge_args.append(arg)
+
+    # These packages take a really long time to build, so, for expediency, we
+    # are blacklisting them from automatic rebuilds because one of their
+    # dependencies needs to be recompiled.
+    for pkg in ("chromeos-base/chromeos-chrome",):
+      emerge_args.append("--rebuild-exclude=%s" % pkg)
+
+    return emerge_args
+
+  def Initialize(self, args):
+    """Initializer. Parses arguments and sets up portage state.
+
+    Args:
+      args: Full command-line argument list; parallel_emerge-specific flags
+        are stripped out and the remainder is parsed as emerge options.
+    """
+
+    # Parse and strip out args that are just intended for parallel_emerge.
+    emerge_args = self.ParseParallelEmergeArgs(args)
+
+    if self.sysroot and self.board:
+      cros_build_lib.Die("--sysroot and --board are incompatible.")
+
+    # Setup various environment variables based on our current board. These
+    # variables are normally setup inside emerge-${BOARD}, but since we don't
+    # call that script, we have to set it up here. These variables serve to
+    # point our tools at /build/BOARD and to setup cross compiles to the
+    # appropriate board as configured in toolchain.conf.
+    if self.board:
+      self.sysroot = os.environ.get('SYSROOT',
+                                    cros_build_lib.GetSysroot(self.board))
+
+    if self.sysroot:
+      os.environ["PORTAGE_CONFIGROOT"] = self.sysroot
+      os.environ["SYSROOT"] = self.sysroot
+
+    # Turn off interactive delays
+    os.environ["EBEEP_IGNORE"] = "1"
+    os.environ["EPAUSE_IGNORE"] = "1"
+    os.environ["CLEAN_DELAY"] = "0"
+
+    # Parse the emerge options.
+    # Note: these opts are re-parsed below once EMERGE_DEFAULT_OPTS has been
+    # merged in; this first pass only seeds the environment variables.
+    action, opts, cmdline_packages = parse_opts(emerge_args, silent=True)
+
+    # Set environment variables based on options. Portage normally sets these
+    # environment variables in emerge_main, but we can't use that function,
+    # because it also does a bunch of other stuff that we don't want.
+    # TODO(davidjames): Patch portage to move this logic into a function we can
+    # reuse here.
+    if "--debug" in opts:
+      os.environ["PORTAGE_DEBUG"] = "1"
+    if "--config-root" in opts:
+      os.environ["PORTAGE_CONFIGROOT"] = opts["--config-root"]
+    if "--root" in opts:
+      os.environ["ROOT"] = opts["--root"]
+    if "--accept-properties" in opts:
+      os.environ["ACCEPT_PROPERTIES"] = opts["--accept-properties"]
+
+    # If we're installing packages to the board, we can disable vardb locks.
+    # This is safe because we only run up to one instance of parallel_emerge in
+    # parallel.
+    # TODO(davidjames): Enable this for the host too.
+    if self.sysroot:
+      os.environ.setdefault("PORTAGE_LOCKS", "false")
+
+    # Now that we've setup the necessary environment variables, we can load the
+    # emerge config from disk.
+    # pylint: disable=unpacking-non-sequence
+    settings, trees, mtimedb = load_emerge_config()
+
+    # Add in EMERGE_DEFAULT_OPTS, if specified.
+    tmpcmdline = []
+    if "--ignore-default-opts" not in opts:
+      tmpcmdline.extend(settings["EMERGE_DEFAULT_OPTS"].split())
+    tmpcmdline.extend(emerge_args)
+    action, opts, cmdline_packages = parse_opts(tmpcmdline)
+
+    # If we're installing to the board, we want the --root-deps option so that
+    # portage will install the build dependencies to that location as well.
+    if self.sysroot:
+      opts.setdefault("--root-deps", True)
+
+    # Check whether our portage tree is out of date. Typically, this happens
+    # when you're setting up a new portage tree, such as in setup_board and
+    # make_chroot. In that case, portage applies a bunch of global updates
+    # here. Once the updates are finished, we need to commit any changes
+    # that the global update made to our mtimedb, and reload the config.
+    #
+    # Portage normally handles this logic in emerge_main, but again, we can't
+    # use that function here.
+    if _global_updates(trees, mtimedb["updates"]):
+      mtimedb.commit()
+      # pylint: disable=unpacking-non-sequence
+      settings, trees, mtimedb = load_emerge_config(trees=trees)
+
+    # Setup implied options. Portage normally handles this logic in
+    # emerge_main.
+    if "--buildpkgonly" in opts or "buildpkg" in settings.features:
+      opts.setdefault("--buildpkg", True)
+    if "--getbinpkgonly" in opts:
+      opts.setdefault("--usepkgonly", True)
+      opts.setdefault("--getbinpkg", True)
+    if "getbinpkg" in settings.features:
+      # Per emerge_main, FEATURES=getbinpkg overrides --getbinpkg=n
+      opts["--getbinpkg"] = True
+    if "--getbinpkg" in opts or "--usepkgonly" in opts:
+      opts.setdefault("--usepkg", True)
+    if "--fetch-all-uri" in opts:
+      opts.setdefault("--fetchonly", True)
+    if "--skipfirst" in opts:
+      opts.setdefault("--resume", True)
+    if "--buildpkgonly" in opts:
+      # --buildpkgonly will not merge anything, so it overrides all binary
+      # package options.
+      for opt in ("--getbinpkg", "--getbinpkgonly",
+                  "--usepkg", "--usepkgonly"):
+        opts.pop(opt, None)
+    if (settings.get("PORTAGE_DEBUG", "") == "1" and
+        "python-trace" in settings.features):
+      portage.debug.set_trace(True)
+
+    # Complain about unsupported options
+    for opt in ("--ask", "--ask-enter-invalid", "--resume", "--skipfirst"):
+      if opt in opts:
+        print("%s is not supported by parallel_emerge" % opt)
+        sys.exit(1)
+
+    # Make emerge specific adjustments to the config (e.g. colors!)
+    adjust_configs(opts, trees)
+
+    # Save our configuration so far in the emerge object
+    emerge = self.emerge
+    emerge.action, emerge.opts = action, opts
+    emerge.settings, emerge.trees, emerge.mtimedb = settings, trees, mtimedb
+    emerge.cmdline_packages = cmdline_packages
+    root = settings["ROOT"]
+    emerge.root_config = trees[root]["root_config"]
+
+    if "--usepkg" in opts:
+      # Load the binary package database so --usepkg can resolve against it.
+      emerge.trees[root]["bintree"].populate("--getbinpkg" in opts)
+
+  def CreateDepgraph(self, emerge, packages):
+    """Create an emerge depgraph object."""
+    # Setup emerge options.
+    emerge_opts = emerge.opts.copy()
+
+    # Ask portage to build a dependency graph. with the options we specified
+    # above.
+    params = create_depgraph_params(emerge_opts, emerge.action)
+    success, depgraph, favorites = backtrack_depgraph(
+        emerge.settings, emerge.trees, emerge_opts, params, emerge.action,
+        packages, emerge.spinner)
+    emerge.depgraph = depgraph
+
+    # Is it impossible to honor the user's request? Bail!
+    if not success:
+      depgraph.display_problems()
+      sys.exit(1)
+
+    emerge.depgraph = depgraph
+    emerge.favorites = favorites
+
+    # Prime and flush emerge caches.
+    root = emerge.settings["ROOT"]
+    vardb = emerge.trees[root]["vartree"].dbapi
+    if "--pretend" not in emerge.opts:
+      vardb.counter_tick()
+    vardb.flush_cache()
+
+  def GenDependencyTree(self):
+    """Get dependency tree info from emerge.
+
+    Returns:
+      A (deps_tree, deps_info) tuple.  deps_tree maps each CPV string to its
+      action and per-dependency details; deps_info maps each CPV to its
+      install-order index ("idx") from portage's own install plan.
+    """
+    start = time.time()
+
+    emerge = self.emerge
+
+    # Create a list of packages to merge
+    packages = set(emerge.cmdline_packages[:])
+
+    # Tell emerge to be quiet. We print plenty of info ourselves so we don't
+    # need any extra output from portage.
+    portage.util.noiselimit = -1
+
+    # My favorite feature: The silent spinner. It doesn't spin. Ever.
+    # I'd disable the colors by default too, but they look kind of cool.
+    emerge.spinner = stdout_spinner()
+    emerge.spinner.update = emerge.spinner.update_quiet
+
+    if "--quiet" not in emerge.opts:
+      print("Calculating deps...")
+
+    self.CreateDepgraph(emerge, packages)
+    depgraph = emerge.depgraph
+
+    # Build our own tree from the emerge digraph.
+    deps_tree = {}
+    # pylint: disable=W0212
+    digraph = depgraph._dynamic_config.digraph
+    root = emerge.settings["ROOT"]
+    final_db = depgraph._dynamic_config._filtered_trees[root]['graph_db']
+    for node, node_deps in digraph.nodes.items():
+      # Calculate dependency packages that need to be installed first. Each
+      # child on the digraph is a dependency. The "operation" field specifies
+      # what we're doing (e.g. merge, uninstall, etc.). The "priorities" array
+      # contains the type of dependency (e.g. build, runtime, runtime_post,
+      # etc.)
+      #
+      # Portage refers to the identifiers for packages as a CPV. This acronym
+      # stands for Component/Path/Version.
+      #
+      # Here's an example CPV: chromeos-base/power_manager-0.0.1-r1
+      # Split up, this CPV would be:
+      #   C -- Component: chromeos-base
+      #   P -- Path:      power_manager
+      #   V -- Version:   0.0.1-r1
+      #
+      # We just refer to CPVs as packages here because it's easier.
+      deps = {}
+      for child, priorities in node_deps[0].items():
+        if isinstance(child, Package) and child.root == root:
+          cpv = str(child.cpv)
+          action = str(child.operation)
+
+          # If we're uninstalling a package, check whether Portage is
+          # installing a replacement. If so, just depend on the installation
+          # of the new package, because the old package will automatically
+          # be uninstalled at that time.
+          if action == "uninstall":
+            for pkg in final_db.match_pkgs(child.slot_atom):
+              cpv = str(pkg.cpv)
+              action = "merge"
+              break
+
+          deps[cpv] = dict(action=action,
+                           deptypes=[str(x) for x in priorities],
+                           deps={})
+
+      # We've built our list of deps, so we can add our package to the tree.
+      if isinstance(node, Package) and node.root == root:
+        deps_tree[str(node.cpv)] = dict(action=str(node.operation),
+                                        deps=deps)
+
+    # Ask portage for its install plan, so that we can only throw out
+    # dependencies that portage throws out.
+    deps_info = {}
+    for pkg in depgraph.altlist():
+      if isinstance(pkg, Package):
+        assert pkg.root == root
+        self.package_db[pkg.cpv] = pkg
+
+        # Save off info about the package
+        deps_info[str(pkg.cpv)] = {"idx": len(deps_info)}
+
+    seconds = time.time() - start
+    if "--quiet" not in emerge.opts:
+      print("Deps calculated in %dm%.1fs" % (seconds / 60, seconds % 60))
+
+    return deps_tree, deps_info
+
+  def PrintTree(self, deps, depth=""):
+    """Print the deps we have seen in the emerge output.
+
+    Args:
+      deps: Dependency tree structure.
+      depth: Allows printing the tree recursively, with indentation.
+    """
+    for entry in sorted(deps):
+      action = deps[entry]["action"]
+      print("%s %s (%s)" % (depth, entry, action))
+      self.PrintTree(deps[entry]["deps"], depth=depth + "  ")
+
+  def GenDependencyGraph(self, deps_tree, deps_info):
+    """Generate a doubly linked dependency graph.
+
+    Args:
+      deps_tree: Dependency tree structure.
+      deps_info: More details on the dependencies.
+
+    Returns:
+      Deps graph in the form of a dict of packages, with each package
+      specifying a "needs" list and "provides" list.
+    """
+    emerge = self.emerge
+
+    # deps_map is the actual dependency graph.
+    #
+    # Each package specifies a "needs" list and a "provides" list. The "needs"
+    # list indicates which packages we depend on. The "provides" list
+    # indicates the reverse dependencies -- what packages need us.
+    #
+    # We also provide some other information in the dependency graph:
+    #  - action: What we're planning on doing with this package. Generally,
+    #            "merge", "nomerge", or "uninstall"
+    deps_map = {}
+
+    def ReverseTree(packages):
+      """Convert tree to digraph.
+
+      Take the tree of package -> requirements and reverse it to a digraph of
+      buildable packages -> packages they unblock.
+
+      Args:
+        packages: Tree(s) of dependencies.
+
+      Returns:
+        Unsanitized digraph.
+      """
+      binpkg_phases = set(["setup", "preinst", "postinst"])
+      needed_dep_types = set(["blocker", "buildtime", "buildtime_slot_op",
+                              "runtime", "runtime_slot_op"])
+      ignored_dep_types = set(["ignored", "optional", "runtime_post", "soft"])
+      all_dep_types = ignored_dep_types | needed_dep_types
+      for pkg in packages:
+
+        # Create an entry for the package
+        action = packages[pkg]["action"]
+        default_pkg = {"needs": {}, "provides": set(), "action": action,
+                       "nodeps": False, "binary": False}
+        this_pkg = deps_map.setdefault(pkg, default_pkg)
+
+        if pkg in deps_info:
+          this_pkg["idx"] = deps_info[pkg]["idx"]
+
+        # If a package doesn't have any defined phases that might use the
+        # dependent packages (i.e. pkg_setup, pkg_preinst, or pkg_postinst),
+        # we can install this package before its deps are ready.
+        emerge_pkg = self.package_db.get(pkg)
+        if emerge_pkg and emerge_pkg.type_name == "binary":
+          this_pkg["binary"] = True
+          defined_phases = emerge_pkg.defined_phases
+          defined_binpkg_phases = binpkg_phases.intersection(defined_phases)
+          if not defined_binpkg_phases:
+            this_pkg["nodeps"] = True
+
+        # Create entries for dependencies of this package first.
+        ReverseTree(packages[pkg]["deps"])
+
+        # Add dependencies to this package.
+        for dep, dep_item in packages[pkg]["deps"].iteritems():
+          # We only need to enforce strict ordering of dependencies if the
+          # dependency is a blocker, or is a buildtime or runtime dependency.
+          # (I.e., ignored, optional, and runtime_post dependencies don't
+          # depend on ordering.)
+          dep_types = dep_item["deptypes"]
+          if needed_dep_types.intersection(dep_types):
+            deps_map[dep]["provides"].add(pkg)
+            this_pkg["needs"][dep] = "/".join(dep_types)
+
+          # Verify we processed all appropriate dependency types.
+          unknown_dep_types = set(dep_types) - all_dep_types
+          if unknown_dep_types:
+            print("Unknown dependency types found:")
+            print("  %s -> %s (%s)" % (pkg, dep, "/".join(unknown_dep_types)))
+            sys.exit(1)
+
+          # If there's a blocker, Portage may need to move files from one
+          # package to another, which requires editing the CONTENTS files of
+          # both packages. To avoid race conditions while editing this file,
+          # the two packages must not be installed in parallel, so we can't
+          # safely ignore dependencies. See http://crosbug.com/19328
+          if "blocker" in dep_types:
+            this_pkg["nodeps"] = False
+
+    def FindCycles():
+      """Find cycles in the dependency tree.
+
+      Returns:
+        A dict mapping cyclic packages to a dict of the deps that cause
+        cycles. For each dep that causes cycles, it returns an example
+        traversal of the graph that shows the cycle.
+      """
+
+      def FindCyclesAtNode(pkg, cycles, unresolved, resolved):
+        """Find cycles in cyclic dependencies starting at specified package.
+
+        Args:
+          pkg: Package identifier.
+          cycles: A dict mapping cyclic packages to a dict of the deps that
+                  cause cycles. For each dep that causes cycles, it returns an
+                  example traversal of the graph that shows the cycle.
+          unresolved: Nodes that have been visited but are not fully processed.
+          resolved: Nodes that have been visited and are fully processed.
+        """
+        pkg_cycles = cycles.get(pkg)
+        if pkg in resolved and not pkg_cycles:
+          # If we already looked at this package, and found no cyclic
+          # dependencies, we can stop now.
+          return
+        unresolved.append(pkg)
+        for dep in deps_map[pkg]["needs"]:
+          if dep in unresolved:
+            idx = unresolved.index(dep)
+            mycycle = unresolved[idx:] + [dep]
+            for i in xrange(len(mycycle) - 1):
+              pkg1, pkg2 = mycycle[i], mycycle[i+1]
+              cycles.setdefault(pkg1, {}).setdefault(pkg2, mycycle)
+          elif not pkg_cycles or dep not in pkg_cycles:
+            # Looks like we haven't seen this edge before.
+            FindCyclesAtNode(dep, cycles, unresolved, resolved)
+        unresolved.pop()
+        resolved.add(pkg)
+
+      cycles, unresolved, resolved = {}, [], set()
+      for pkg in deps_map:
+        FindCyclesAtNode(pkg, cycles, unresolved, resolved)
+      return cycles
+
+    def RemoveUnusedPackages():
+      """Remove installed packages, propagating dependencies."""
+      # Schedule packages that aren't on the install list for removal
+      rm_pkgs = set(deps_map.keys()) - set(deps_info.keys())
+
+      # Remove the packages we don't want, simplifying the graph and making
+      # it easier for us to crack cycles.
+      for pkg in sorted(rm_pkgs):
+        this_pkg = deps_map[pkg]
+        needs = this_pkg["needs"]
+        provides = this_pkg["provides"]
+        for dep in needs:
+          dep_provides = deps_map[dep]["provides"]
+          dep_provides.update(provides)
+          dep_provides.discard(pkg)
+          dep_provides.discard(dep)
+        for target in provides:
+          target_needs = deps_map[target]["needs"]
+          target_needs.update(needs)
+          target_needs.pop(pkg, None)
+          target_needs.pop(target, None)
+        del deps_map[pkg]
+
+    def PrintCycleBreak(basedep, dep, mycycle):
+      """Print details about a cycle that we are planning on breaking.
+
+      We are breaking a cycle where dep needs basedep. mycycle is an
+      example cycle which contains dep -> basedep.
+      """
+
+      needs = deps_map[dep]["needs"]
+      depinfo = needs.get(basedep, "deleted")
+
+      # It's OK to swap install order for blockers, as long as the two
+      # packages aren't installed in parallel. If there is a cycle, then
+      # we know the packages depend on each other already, so we can drop the
+      # blocker safely without printing a warning.
+      if depinfo == "blocker":
+        return
+
+      # Notify the user that we're breaking a cycle.
+      print("Breaking %s -> %s (%s)" % (dep, basedep, depinfo))
+
+      # Show cycle.
+      for i in xrange(len(mycycle) - 1):
+        pkg1, pkg2 = mycycle[i], mycycle[i+1]
+        needs = deps_map[pkg1]["needs"]
+        depinfo = needs.get(pkg2, "deleted")
+        if pkg1 == dep and pkg2 == basedep:
+          depinfo = depinfo + ", deleting"
+        print("  %s -> %s (%s)" % (pkg1, pkg2, depinfo))
+
+    def SanitizeTree():
+      """Remove circular dependencies.
+
+      We prune all dependencies involved in cycles that go against the emerge
+      ordering. This has a nice property: we're guaranteed to merge
+      dependencies in the same order that portage does.
+
+      Because we don't treat any dependencies as "soft" unless they're killed
+      by a cycle, we pay attention to a larger number of dependencies when
+      merging. This hurts performance a bit, but helps reliability.
+      """
+      start = time.time()
+      cycles = FindCycles()
+      while cycles:
+        for dep, mycycles in cycles.iteritems():
+          for basedep, mycycle in mycycles.iteritems():
+            if deps_info[basedep]["idx"] >= deps_info[dep]["idx"]:
+              if "--quiet" not in emerge.opts:
+                PrintCycleBreak(basedep, dep, mycycle)
+              del deps_map[dep]["needs"][basedep]
+              deps_map[basedep]["provides"].remove(dep)
+        cycles = FindCycles()
+      seconds = time.time() - start
+      if "--quiet" not in emerge.opts and seconds >= 0.1:
+        print("Tree sanitized in %dm%.1fs" % (seconds / 60, seconds % 60))
+
+    def FindRecursiveProvides(pkg, seen):
+      """Find all nodes that require a particular package.
+
+      Assumes that graph is acyclic.
+
+      Args:
+        pkg: Package identifier.
+        seen: Nodes that have been visited so far.
+      """
+      if pkg in seen:
+        return
+      seen.add(pkg)
+      info = deps_map[pkg]
+      info["tprovides"] = info["provides"].copy()
+      for dep in info["provides"]:
+        FindRecursiveProvides(dep, seen)
+        info["tprovides"].update(deps_map[dep]["tprovides"])
+
+    ReverseTree(deps_tree)
+
+    # We need to remove unused packages so that we can use the dependency
+    # ordering of the install process to show us what cycles to crack.
+    RemoveUnusedPackages()
+    SanitizeTree()
+    seen = set()
+    for pkg in deps_map:
+      FindRecursiveProvides(pkg, seen)
+    return deps_map
+
+  def PrintInstallPlan(self, deps_map):
+    """Print an emerge-style install plan.
+
+    The install plan lists what packages we're installing, in order.
+    It's useful for understanding what parallel_emerge is doing.
+
+    Args:
+      deps_map: The dependency graph.
+    """
+
+    def InstallPlanAtNode(target, deps_map):
+      nodes = []
+      nodes.append(target)
+      for dep in deps_map[target]["provides"]:
+        del deps_map[dep]["needs"][target]
+        if not deps_map[dep]["needs"]:
+          nodes.extend(InstallPlanAtNode(dep, deps_map))
+      return nodes
+
+    deps_map = copy.deepcopy(deps_map)
+    install_plan = []
+    plan = set()
+    for target, info in deps_map.iteritems():
+      if not info["needs"] and target not in plan:
+        for item in InstallPlanAtNode(target, deps_map):
+          plan.add(item)
+          install_plan.append(self.package_db[item])
+
+    for pkg in plan:
+      del deps_map[pkg]
+
+    if deps_map:
+      print("Cyclic dependencies:", " ".join(deps_map))
+      PrintDepsMap(deps_map)
+      sys.exit(1)
+
+    self.emerge.depgraph.display(install_plan)
+
+
def PrintDepsMap(deps_map):
  """Print the dependency graph: each package followed by its prerequisites."""
  for pkg in sorted(deps_map):
    print("%s: (%s) needs" % (pkg, deps_map[pkg]["action"]))
    prereqs = sorted(deps_map[pkg]["needs"])
    for prereq in prereqs:
      print("    %s" % (prereq))
    if not prereqs:
      print("    no dependencies")
+
+
class EmergeJobState(object):
  """Snapshot of a single emerge job's progress and bookkeeping."""

  __slots__ = ["done", "filename", "last_notify_timestamp", "last_output_seek",
               "last_output_timestamp", "pkgname", "retcode", "start_timestamp",
               "target", "fetch_only", "unpack_only"]

  def __init__(self, target, pkgname, done, filename, start_timestamp,
               retcode=None, fetch_only=False, unpack_only=False):
    # Full name of the target being built, e.g. virtual/target-os-1-r60.
    self.target = target
    # Short name of the target, e.g. target-os-1-r60.
    self.pkgname = pkgname
    # True once the job has finished.
    self.done = done
    # Log file currently receiving this job's output.
    self.filename = filename
    # When the job started.
    self.start_timestamp = start_timestamp
    # Last time the log file name was announced; the announcement happens
    # at job start, so this begins at start_timestamp.
    self.last_notify_timestamp = start_timestamp
    # Byte offset just past the last complete output line already printed;
    # used to resume printing from the right place on later dumps.
    self.last_output_seek = 0
    # Last time any output was printed for this job (0 = never).
    self.last_output_timestamp = 0
    # Exit code, once the job has actually finished.
    self.retcode = retcode
    # True if this job only fetches the package.
    self.fetch_only = fetch_only
    # True if this job only unpacks the package (no emerge).
    self.unpack_only = unpack_only
+
+
def KillHandler(_signum, _frame):
  """Signal handler of last resort: SIGKILL our entire process group.

  Killing process group 0 takes down this process together with every
  subprocess in the same group (workers, emerge children).
  """
  # Kill self and all subprocesses.
  os.killpg(0, signal.SIGKILL)
+
+
def SetupWorkerSignals():
  """Install quiet-exit SIGINT/SIGTERM handlers for a worker process."""
  def ExitHandler(_signum, _frame):
    # Set KILLED flag so the worker loop can notice and bail out.
    KILLED.set()

    # Remove our signal handlers so we don't get called recursively;
    # a second signal falls through to KillHandler and nukes the group.
    signal.signal(signal.SIGINT, KillHandler)
    signal.signal(signal.SIGTERM, KillHandler)

  # Ensure that we exit quietly and cleanly, if possible, when we receive
  # SIGTERM or SIGINT signals. By default, when the user hits CTRL-C, all
  # of the child processes will print details about KeyboardInterrupt
  # exceptions, which isn't very helpful.
  signal.signal(signal.SIGINT, ExitHandler)
  signal.signal(signal.SIGTERM, ExitHandler)
+
+
def EmergeProcess(output, target, *args, **kwargs):
  """Merge a package in a subprocess.

  Args:
    output: Temporary file to write output.
    target: The package we'll be processing (for display purposes).
    *args: Arguments to pass to Scheduler constructor.
    **kwargs: Keyword arguments to pass to Scheduler constructor.

  Returns:
    The raw wait status from os.waitpid() for the child process (zero
    if and only if the merge exited cleanly; NOT a plain exit code).
  """
  pid = os.fork()
  if pid == 0:
    try:
      proctitle.settitle('EmergeProcess', target)

      # Sanity checks.
      if sys.stdout.fileno() != 1:
        raise Exception("sys.stdout.fileno() != 1")
      if sys.stderr.fileno() != 2:
        raise Exception("sys.stderr.fileno() != 2")

      # - Redirect 1 (stdout) and 2 (stderr) at our temporary file.
      # - Redirect 0 to point at sys.stdin. In this case, sys.stdin
      #   points at a file reading os.devnull, because multiprocessing mucks
      #   with sys.stdin.
      # - Leave the sys.stdin and output filehandles alone.
      fd_pipes = {0: sys.stdin.fileno(),
                  1: output.fileno(),
                  2: output.fileno(),
                  sys.stdin.fileno(): sys.stdin.fileno(),
                  output.fileno(): output.fileno()}
      # pylint: disable=W0212
      portage.process._setup_pipes(fd_pipes, close_fds=False)

      # Portage doesn't like when sys.stdin.fileno() != 0, so point sys.stdin
      # at the filehandle we just created in _setup_pipes.
      if sys.stdin.fileno() != 0:
        sys.__stdin__ = sys.stdin = os.fdopen(0, "r")

      scheduler = Scheduler(*args, **kwargs)

      # Enable blocker handling even though we're in --nodeps mode. This
      # allows us to unmerge the blocker after we've merged the replacement.
      scheduler._opts_ignore_blockers = frozenset()

      # Actually do the merge.
      retval = scheduler.merge()

    # We catch all exceptions here (including SystemExit, KeyboardInterrupt,
    # etc) so as to ensure that we don't confuse the multiprocessing module,
    # which expects that all forked children exit with os._exit().
    # pylint: disable=W0702
    except:
      traceback.print_exc(file=output)
      retval = 1
    # Flush any buffered output before the hard exit so nothing is lost.
    sys.stdout.flush()
    sys.stderr.flush()
    output.flush()
    # pylint: disable=W0212
    os._exit(retval)
  else:
    # Parent: block until the child finishes; return the encoded wait
    # status from waitpid (see docstring).
    return os.waitpid(pid, 0)[1]
+
+
def UnpackPackage(pkg_state):
  """Unpack the binary package described by pkg_state into $ROOT.

  Args:
    pkg_state: EmergeJobState object describing target.

  Returns:
    Exit code returned by subprocess.
  """
  sysroot = os.environ["SYSROOT"]
  pkgdir = os.environ.get("PKGDIR", os.path.join(sysroot, "packages"))
  root = os.environ.get("ROOT", sysroot)
  tbz2_path = os.path.join(pkgdir, pkg_state.target + ".tbz2")

  comp = cros_build_lib.FindCompressor(cros_build_lib.COMP_BZIP2)
  decompress_cmd = [comp, "-dc"]
  if comp.endswith("pbzip2"):
    decompress_cmd.append("--ignore-trailing-garbage=1")
  decompress_cmd.append(tbz2_path)

  result = cros_build_lib.RunCommand(decompress_cmd, cwd=root,
                                     stdout_to_pipe=True, print_cmd=False,
                                     error_code_ok=True)
  # If decompression failed, return now and don't attempt the untar.
  if result.returncode:
    return result.returncode

  untar_cmd = ["sudo", "tar", "-xf", "-", "-C", root]
  result = cros_build_lib.RunCommand(untar_cmd, cwd=root, input=result.output,
                                     print_cmd=False, error_code_ok=True)

  return result.returncode
+
+
def EmergeWorker(task_queue, job_queue, emerge, package_db, fetch_only=False,
                 unpack_only=False):
  """This worker emerges any packages given to it on the task_queue.

  Args:
    task_queue: The queue of tasks for this worker to do.
    job_queue: The queue of results from the worker.
    emerge: An EmergeData() object.
    package_db: A dict, mapping package ids to portage Package objects.
    fetch_only: A bool, indicating if we should just fetch the target.
    unpack_only: A bool, indicating if we should just unpack the target.

  It expects package identifiers to be passed to it via task_queue. When
  a task is started, it pushes the (target, filename) to the started_queue.
  The output is stored in filename. When a merge starts or finishes, we push
  EmergeJobState objects to the job_queue.
  """
  if fetch_only:
    mode = 'fetch'
  elif unpack_only:
    mode = 'unpack'
  else:
    mode = 'emerge'
  proctitle.settitle('EmergeWorker', mode, '[idle]')

  SetupWorkerSignals()
  settings, trees, mtimedb = emerge.settings, emerge.trees, emerge.mtimedb

  # Disable flushing of caches to save on I/O.
  root = emerge.settings["ROOT"]
  vardb = emerge.trees[root]["vartree"].dbapi
  vardb._flush_cache_enabled = False  # pylint: disable=protected-access
  bindb = emerge.trees[root]["bintree"].dbapi
  # Might be a set, might be a list, might be None; no clue, just use shallow
  # copy to ensure we can roll it back.
  # pylint: disable=W0212
  original_remotepkgs = copy.copy(bindb.bintree._remotepkgs)

  opts, spinner = emerge.opts, emerge.spinner
  opts["--nodeps"] = True
  if fetch_only:
    opts["--fetchonly"] = True

  while True:
    # Wait for a new item to show up on the queue. This is a blocking wait,
    # so if there's nothing to do, we just sit here.
    pkg_state = task_queue.get()
    if pkg_state is None:
      # If target is None, this means that the main thread wants us to quit.
      # The other workers need to exit too, so we'll push the message back on
      # to the queue so they'll get it too.
      task_queue.put(None)
      return
    if KILLED.is_set():
      return

    target = pkg_state.target
    proctitle.settitle('EmergeWorker', mode, target)

    db_pkg = package_db[target]

    if db_pkg.type_name == "binary":
      if not fetch_only and pkg_state.fetched_successfully:
        # Ensure portage doesn't think our pkg is remote- else it'll force
        # a redownload of it (even if the on-disk file is fine).  In-memory
        # caching basically, implemented dumbly.
        bindb.bintree._remotepkgs = None
    else:
      # Restore the original remote package list so portage behaves normally
      # for non-binary packages.  BUG FIX: this previously assigned to a
      # misspelled "bintree_remotepkgs" attribute, so the rollback silently
      # never happened.
      bindb.bintree._remotepkgs = original_remotepkgs

    db_pkg.root_config = emerge.root_config
    install_list = [db_pkg]
    pkgname = db_pkg.pf
    output = tempfile.NamedTemporaryFile(prefix=pkgname + "-", delete=False)
    # Make the log world-readable (rw-r--r--).  BUG FIX: this must be an
    # octal literal; the previous decimal 644 is 0o1204, which set the
    # sticky bit and dropped group/other read permission.
    os.chmod(output.name, 0o644)
    start_timestamp = time.time()
    job = EmergeJobState(target, pkgname, False, output.name, start_timestamp,
                         fetch_only=fetch_only, unpack_only=unpack_only)
    job_queue.put(job)
    if "--pretend" in opts:
      retcode = 0
    else:
      try:
        emerge.scheduler_graph.mergelist = install_list
        if unpack_only:
          retcode = UnpackPackage(pkg_state)
        else:
          retcode = EmergeProcess(output, target, settings, trees, mtimedb,
                                  opts, spinner, favorites=emerge.favorites,
                                  graph_config=emerge.scheduler_graph)
      except Exception:
        traceback.print_exc(file=output)
        retcode = 1
      output.close()

    if KILLED.is_set():
      return

    job = EmergeJobState(target, pkgname, True, output.name, start_timestamp,
                         retcode, fetch_only=fetch_only,
                         unpack_only=unpack_only)
    job_queue.put(job)

    # Set the title back to idle as the multiprocess pool won't destroy us;
    # when another job comes up, it'll re-use this process.
    proctitle.settitle('EmergeWorker', mode, '[idle]')
+
+
class LinePrinter(object):
  """Print job that emits exactly one preformatted line to stdout."""

  def __init__(self, line):
    # The text to emit, without a trailing newline.
    self.line = line

  def Print(self, _seek_locations):
    # Seek locations are irrelevant for plain lines; the parameter exists
    # only to match the JobPrinter.Print interface.
    print(self.line)
+
+
class JobPrinter(object):
  """Print job that dumps the accumulated log output of an emerge job."""

  def __init__(self, job, unlink=False):
    """Remember which job to print.

    If unlink is True, unlink the job output file when done.
    """
    self.current_time = time.time()
    self.job = job
    self.unlink = unlink

  def Print(self, seek_locations):
    job = self.job

    # How long has this job been running?
    elapsed = self.current_time - job.start_timestamp

    # Record that the job's output has been shown as of now.
    job.last_output_timestamp = self.current_time

    # Banner describing the job and its runtime.
    info = "job %s (%dm%.1fs)" % (job.pkgname, elapsed / 60, elapsed % 60)
    resume_at = seek_locations.get(job.filename, 0)
    if resume_at:
      print("=== Continue output for %s ===" % info)
    else:
      print("=== Start output for %s ===" % info)

    # Dump the log from where the previous Print left off.
    log = codecs.open(job.filename, encoding='utf-8', errors='replace')
    log.seek(resume_at)
    prefix = job.pkgname + ":"
    for line in log:
      # Only advance the resume point past complete lines, so a partial
      # trailing line is re-printed whole next time.
      if line and line[-1] == "\n":
        resume_at = log.tell()
        line = line[:-1]
      print(prefix, line.encode('utf-8', 'replace'))
    log.close()

    # Persist the resume point so the same text isn't printed twice.
    seek_locations[job.filename] = resume_at

    # Note end of output section.
    if job.done:
      print("=== Complete: %s ===" % info)
    else:
      print("=== Still running: %s ===" % info)

    if self.unlink:
      os.unlink(job.filename)
+
+
def PrintWorker(queue):
  """A worker that prints stuff to the screen as requested.

  Runs as its own process. Pulls printer objects (LinePrinter/JobPrinter)
  off |queue| and invokes their Print() method; a falsy entry (None) tells
  the worker to exit.
  """
  proctitle.settitle('PrintWorker')

  def ExitHandler(_signum, _frame):
    # Set KILLED flag.
    KILLED.set()

    # Switch to default signal handlers so that we'll die after two signals.
    signal.signal(signal.SIGINT, KillHandler)
    signal.signal(signal.SIGTERM, KillHandler)

  # Don't exit on the first SIGINT / SIGTERM, because the parent worker will
  # handle it and tell us when we need to exit.
  signal.signal(signal.SIGINT, ExitHandler)
  signal.signal(signal.SIGTERM, ExitHandler)

  # seek_locations is a map indicating the position we are at in each file.
  # It starts off empty, but is set by the various Print jobs as we go along
  # to indicate where we left off in each file.
  seek_locations = {}
  while True:
    try:
      job = queue.get()
      if job:
        job.Print(seek_locations)
        # Flush so output is visible immediately, not when buffers fill.
        sys.stdout.flush()
      else:
        break
    except IOError as ex:
      if ex.errno == errno.EINTR:
        # Looks like we received a signal. Keep printing.
        continue
      raise
+
+
class TargetState(object):
  """Scheduling state for a single package target."""

  __slots__ = ("target", "info", "score", "prefetched", "fetched_successfully")

  def __init__(self, target, info):
    self.target = target
    self.info = info
    self.fetched_successfully = False
    self.prefetched = False
    self.score = None
    self.update_score()

  def __cmp__(self, other):
    # Python 2 comparison protocol: order states by their score tuple.
    return cmp(self.score, other.score)

  def update_score(self):
    """Recompute the scheduling priority tuple (lower sorts first)."""
    info = self.info
    self.score = (
        -len(info["tprovides"]),  # more transitive dependents first
        len(info["needs"]),       # fewer outstanding prerequisites first
        not info["binary"],       # binary packages first
        -len(info["provides"]),   # more direct dependents first
        info["idx"],              # stable install-order tiebreak
        self.target,              # final deterministic tiebreak
        )
+
+
class ScoredHeap(object):
  """A min-heap of TargetState items with O(1) membership tests by target."""

  __slots__ = ("heap", "_heap_set")

  def __init__(self, initial=()):
    self.heap = []
    self._heap_set = set()
    if initial:
      self.multi_put(initial)

  def get(self):
    """Pop and return the lowest-scored item."""
    best = heapq.heappop(self.heap)
    self._heap_set.remove(best.target)
    return best

  def put(self, item):
    """Push a single TargetState onto the heap."""
    if not isinstance(item, TargetState):
      raise ValueError("Item %r isn't a TargetState" % (item,))
    heapq.heappush(self.heap, item)
    self._heap_set.add(item.target)

  def multi_put(self, sequence):
    """Push many items at once, then restore the heap invariant."""
    items = list(sequence)
    self.heap.extend(items)
    self._heap_set.update(item.target for item in items)
    self.sort()

  def sort(self):
    # Re-establish the heap invariant after bulk insertion or rescoring.
    heapq.heapify(self.heap)

  def __contains__(self, target):
    return target in self._heap_set

  def __nonzero__(self):
    # Python 2 truth protocol: non-empty heap is truthy.
    return bool(self.heap)

  def __len__(self):
    return len(self.heap)
+
+
+class EmergeQueue(object):
+  """Class to schedule emerge jobs according to a dependency graph."""
+
  def __init__(self, deps_map, emerge, package_db, show_output, unpack_only,
               max_retries):
    """Set up worker pools, queues, and scheduling state.

    Args:
      deps_map: Dependency graph (package id -> info dict with "action",
        "needs", "provides", etc.).
      emerge: EmergeData-style object carrying portage opts/depgraph state.
      package_db: A dict, mapping package ids to portage Package objects.
      show_output: Whether to stream job output as it is produced.
      unpack_only: Whether this run only unpacks packages (no building).
      max_retries: How many times a failed build may be retried.
    """
    # Store the dependency graph.
    self._deps_map = deps_map
    self._state_map = {}
    # Initialize the running queue to empty
    self._build_jobs = {}
    self._build_ready = ScoredHeap()
    self._fetch_jobs = {}
    self._fetch_ready = ScoredHeap()
    self._unpack_jobs = {}
    self._unpack_ready = ScoredHeap()
    # List of total package installs represented in deps_map.
    install_jobs = [x for x in deps_map if deps_map[x]["action"] == "merge"]
    self._total_jobs = len(install_jobs)
    self._show_output = show_output
    self._unpack_only = unpack_only
    self._max_retries = max_retries

    if "--pretend" in emerge.opts:
      print("Skipping merge because of --pretend mode.")
      sys.exit(0)

    # Set up a session so we can easily terminate all children.
    self._SetupSession()

    # Setup scheduler graph object. This is used by the child processes
    # to help schedule jobs.
    emerge.scheduler_graph = emerge.depgraph.schedulerGraph()

    # Calculate how many jobs we can run in parallel. We don't want to pass
    # the --jobs flag over to emerge itself, because that'll tell emerge to
    # hide its output, and said output is quite useful for debugging hung
    # jobs.
    procs = min(self._total_jobs,
                emerge.opts.pop("--jobs", multiprocessing.cpu_count()))
    self._build_procs = self._unpack_procs = max(1, procs)
    # Fetch is IO bound, we can use more processes.
    self._fetch_procs = max(4, procs)
    self._load_avg = emerge.opts.pop("--load-average", None)
    self._job_queue = multiprocessing.Queue()
    self._print_queue = multiprocessing.Queue()

    self._fetch_queue = multiprocessing.Queue()
    args = (self._fetch_queue, self._job_queue, emerge, package_db, True)
    self._fetch_pool = multiprocessing.Pool(self._fetch_procs, EmergeWorker,
                                            args)

    self._build_queue = multiprocessing.Queue()
    args = (self._build_queue, self._job_queue, emerge, package_db)
    self._build_pool = multiprocessing.Pool(self._build_procs, EmergeWorker,
                                            args)

    if self._unpack_only:
      # Unpack pool only required on unpack_only jobs.
      self._unpack_queue = multiprocessing.Queue()
      args = (self._unpack_queue, self._job_queue, emerge, package_db, False,
              True)
      self._unpack_pool = multiprocessing.Pool(self._unpack_procs, EmergeWorker,
                                               args)

    # Dedicated process that serializes all console output.
    self._print_worker = multiprocessing.Process(target=PrintWorker,
                                                 args=[self._print_queue])
    self._print_worker.start()

    # Initialize the failed queue to empty.
    self._retry_queue = []
    self._failed_count = dict()

    # Setup an exit handler so that we print nice messages if we are
    # terminated.
    self._SetupExitHandler()

    # Schedule our jobs.
    self._state_map.update(
        (pkg, TargetState(pkg, data)) for pkg, data in deps_map.iteritems())
    self._fetch_ready.multi_put(self._state_map.itervalues())
+
  def _SetupSession(self):
    """Set up a session so we can easily terminate all children."""
    # When we call os.setsid(), this sets up a session / process group for this
    # process and all children. These session groups are needed so that we can
    # easily kill all children (including processes launched by emerge) before
    # we exit.
    #
    # One unfortunate side effect of os.setsid() is that it blocks CTRL-C from
    # being received. To work around this, we only call os.setsid() in a forked
    # process, so that the parent can still watch for CTRL-C. The parent will
    # just sit around, watching for signals and propagating them to the child,
    # until the child exits.
    #
    # TODO(davidjames): It would be nice if we could replace this with cgroups.
    pid = os.fork()
    if pid == 0:
      # Child: become a session leader and return; the rest of __init__
      # (and the whole scheduler) continues in this process.
      os.setsid()
    else:
      # Parent: never returns from this method; it only forwards signals to
      # the child and mirrors the child's exit status.
      proctitle.settitle('SessionManager')

      def PropagateToChildren(signum, _frame):
        # Just propagate the signals down to the child. We'll exit when the
        # child does.
        try:
          os.kill(pid, signum)
        except OSError as ex:
          if ex.errno != errno.ESRCH:
            raise
      signal.signal(signal.SIGINT, PropagateToChildren)
      signal.signal(signal.SIGTERM, PropagateToChildren)

      def StopGroup(_signum, _frame):
        # When we get stopped, stop the children.
        try:
          os.killpg(pid, signal.SIGSTOP)
          os.kill(0, signal.SIGSTOP)
        except OSError as ex:
          if ex.errno != errno.ESRCH:
            raise
      signal.signal(signal.SIGTSTP, StopGroup)

      def ContinueGroup(_signum, _frame):
        # Launch the children again after being stopped.
        try:
          os.killpg(pid, signal.SIGCONT)
        except OSError as ex:
          if ex.errno != errno.ESRCH:
            raise
      signal.signal(signal.SIGCONT, ContinueGroup)

      # Loop until the children exit. We exit with os._exit to be sure we
      # don't run any finalizers (those will be run by the child process.)
      # pylint: disable=W0212
      while True:
        try:
          # Wait for the process to exit. When it does, exit with the return
          # value of the subprocess.
          os._exit(process_util.GetExitStatus(os.waitpid(pid, 0)[1]))
        except OSError as ex:
          if ex.errno == errno.EINTR:
            continue
          traceback.print_exc()
          os._exit(1)
        except BaseException:
          traceback.print_exc()
          os._exit(1)
+
  def _SetupExitHandler(self):
    """Install SIGINT/SIGTERM handlers that dump job status before dying."""

    def ExitHandler(signum, _frame):
      # Set KILLED flag.
      KILLED.set()

      # Kill our signal handlers so we don't get called recursively
      signal.signal(signal.SIGINT, KillHandler)
      signal.signal(signal.SIGTERM, KillHandler)

      # Print our current job status
      for job in self._build_jobs.itervalues():
        if job:
          self._print_queue.put(JobPrinter(job, unlink=True))

      # Notify the user that we are exiting
      self._Print("Exiting on signal %s" % signum)
      # None tells the print worker to shut down; join so the messages
      # actually land on the screen before we kill the process group.
      self._print_queue.put(None)
      self._print_worker.join()

      # Kill child threads, then exit.
      os.killpg(0, signal.SIGKILL)
      sys.exit(1)

    # Print out job status when we are killed
    signal.signal(signal.SIGINT, ExitHandler)
    signal.signal(signal.SIGTERM, ExitHandler)
+
  def _ScheduleUnpack(self, pkg_state):
    """Hand pkg_state to the unpack worker pool and mark it in-flight."""
    # None means "scheduled, but no status update received from the worker
    # yet"; the EmergeJobState replaces it once the job reports in.
    self._unpack_jobs[pkg_state.target] = None
    self._unpack_queue.put(pkg_state)
+
  def _Schedule(self, pkg_state):
    """Schedule a package build, or finish it immediately if it's a no-op.

    Args:
      pkg_state: TargetState for the package to schedule.

    Returns:
      True if a build job was actually queued; None otherwise (the node
      was a "nomerge" placeholder, already in flight, or had no info).
    """
    # We maintain a tree of all deps, if this doesn't need
    # to be installed just free up its children and continue.
    # It is possible to reinstall deps of deps, without reinstalling
    # first level deps, like so:
    # virtual/target-os (merge) -> eselect (nomerge) -> python (merge)
    this_pkg = pkg_state.info
    target = pkg_state.target
    if pkg_state.info is not None:
      if this_pkg["action"] == "nomerge":
        self._Finish(target)
      elif target not in self._build_jobs:
        # Kick off the build if it's marked to be built.
        self._build_jobs[target] = None
        self._build_queue.put(pkg_state)
        return True
+
+  def _ScheduleLoop(self, unpack_only=False):
+    if unpack_only:
+      ready_queue = self._unpack_ready
+      jobs_queue = self._unpack_jobs
+      procs = self._unpack_procs
+    else:
+      ready_queue = self._build_ready
+      jobs_queue = self._build_jobs
+      procs = self._build_procs
+
+    # If the current load exceeds our desired load average, don't schedule
+    # more than one job.
+    if self._load_avg and os.getloadavg()[0] > self._load_avg:
+      needed_jobs = 1
+    else:
+      needed_jobs = procs
+
+    # Schedule more jobs.
+    while ready_queue and len(jobs_queue) < needed_jobs:
+      state = ready_queue.get()
+      if unpack_only:
+        self._ScheduleUnpack(state)
+      else:
+        if state.target not in self._failed_count:
+          self._Schedule(state)
+
  def _Print(self, line):
    """Print a single line (via the print worker, to avoid interleaving)."""
    self._print_queue.put(LinePrinter(line))
+
  def _Status(self):
    """Print a periodic status update about running and pending jobs."""
    current_time = time.time()
    no_output = True

    # Print interim output every minute if --show-output is used. Otherwise,
    # print notifications about running packages every 2 minutes, and print
    # full output for jobs that have been running for 60 minutes or more.
    if self._show_output:
      interval = 60
      notify_interval = 0
    else:
      interval = 60 * 60
      notify_interval = 60 * 2
    for job in self._build_jobs.itervalues():
      if job:
        last_timestamp = max(job.start_timestamp, job.last_output_timestamp)
        if last_timestamp + interval < current_time:
          self._print_queue.put(JobPrinter(job))
          job.last_output_timestamp = current_time
          no_output = False
        elif (notify_interval and
              job.last_notify_timestamp + notify_interval < current_time):
          job_seconds = current_time - job.start_timestamp
          args = (job.pkgname, job_seconds / 60, job_seconds % 60, job.filename)
          info = "Still building %s (%dm%.1fs). Logs in %s" % args
          job.last_notify_timestamp = current_time
          self._Print(info)
          no_output = False

    # If we haven't printed any messages yet, print a general status message
    # here.
    if no_output:
      seconds = current_time - GLOBAL_START
      fjobs, fready = len(self._fetch_jobs), len(self._fetch_ready)
      ujobs, uready = len(self._unpack_jobs), len(self._unpack_ready)
      bjobs, bready = len(self._build_jobs), len(self._build_ready)
      retries = len(self._retry_queue)
      # Anything not actively fetching or building still counts as pending.
      pending = max(0, len(self._deps_map) - fjobs - bjobs)
      line = "Pending %s/%s, " % (pending, self._total_jobs)
      if fjobs or fready:
        line += "Fetching %s/%s, " % (fjobs, fready + fjobs)
      if ujobs or uready:
        line += "Unpacking %s/%s, " % (ujobs, uready + ujobs)
      if bjobs or bready or retries:
        line += "Building %s/%s, " % (bjobs, bready + bjobs)
        if retries:
          line += "Retrying %s, " % (retries,)
      load = " ".join(str(x) for x in os.getloadavg())
      line += ("[Time %dm%.1fs Load %s]" % (seconds / 60, seconds % 60, load))
      self._Print(line)
+
  def _Finish(self, target):
    """Mark a target as completed and unblock dependencies."""
    this_pkg = self._deps_map[target]
    if this_pkg["needs"] and this_pkg["nodeps"]:
      # We got installed, but our deps have not been installed yet. Dependent
      # packages should only be installed when our needs have been fully met.
      this_pkg["action"] = "nomerge"
    else:
      for dep in this_pkg["provides"]:
        dep_pkg = self._deps_map[dep]
        state = self._state_map[dep]
        # The dependent no longer waits on us; its priority may change.
        del dep_pkg["needs"][target]
        state.update_score()
        if not state.prefetched:
          if dep in self._fetch_ready:
            # If it's not currently being fetched, update the prioritization
            self._fetch_ready.sort()
        elif not dep_pkg["needs"]:
          # Already prefetched and all prerequisites are met: either the
          # node is a finished nodeps placeholder (recurse), or it is now
          # ready to build.
          if dep_pkg["nodeps"] and dep_pkg["action"] == "nomerge":
            self._Finish(dep)
          else:
            self._build_ready.put(self._state_map[dep])
      self._deps_map.pop(target)
+
+  def _Retry(self):
+    while self._retry_queue:
+      state = self._retry_queue.pop(0)
+      if self._Schedule(state):
+        self._Print("Retrying emerge of %s." % state.target)
+        break
+
  def _Shutdown(self):
    """Shut down worker pools and queues in dependency order.

    Workers exit when 'None' is pushed onto their task queue. Order
    matters: workers first, then the job queue they feed results into,
    and the print queue last so late messages still get printed.
    """
    # Tell emerge workers to exit. They all exit when 'None' is pushed
    # to the queue.

    # Shutdown the workers first; then jobs (which is how they feed things back)
    # then finally the print queue.

    def _stop(queue, pool):
      # Best-effort graceful stop; terminate() in the finally clause makes
      # sure the pool is gone even if close/join raises.
      if pool is None:
        return
      try:
        queue.put(None)
        pool.close()
        pool.join()
      finally:
        pool.terminate()

    _stop(self._fetch_queue, self._fetch_pool)
    self._fetch_queue = self._fetch_pool = None

    _stop(self._build_queue, self._build_pool)
    self._build_queue = self._build_pool = None

    if self._unpack_only:
      _stop(self._unpack_queue, self._unpack_pool)
      self._unpack_queue = self._unpack_pool = None

    if self._job_queue is not None:
      self._job_queue.close()
      self._job_queue = None

    # Now that our workers are finished, we can kill the print queue.
    if self._print_worker is not None:
      try:
        self._print_queue.put(None)
        self._print_queue.close()
        self._print_worker.join()
      finally:
        self._print_worker.terminate()
    self._print_queue = self._print_worker = None
+
+  def Run(self):
+    """Run through the scheduled ebuilds.
+
+    Keep running so long as we have uninstalled packages in the
+    dependency graph to merge.
+    """
+    if not self._deps_map:
+      return
+
+    # Start the fetchers.
+    for _ in xrange(min(self._fetch_procs, len(self._fetch_ready))):
+      state = self._fetch_ready.get()
+      self._fetch_jobs[state.target] = None
+      self._fetch_queue.put(state)
+
+    # Print an update, then get going.
+    self._Status()
+
+    while self._deps_map:
+      # Check here that we are actually waiting for something.
+      if (self._build_queue.empty() and
+          self._job_queue.empty() and
+          not self._fetch_jobs and
+          not self._fetch_ready and
+          not self._unpack_jobs and
+          not self._unpack_ready and
+          not self._build_jobs and
+          not self._build_ready and
+          self._deps_map):
+        # If we have failed on a package, retry it now.
+        if self._retry_queue:
+          self._Retry()
+        else:
+          # Tell the user why we're exiting.
+          if self._failed_count:
+            print('Packages failed:\n\t%s' %
+                  '\n\t'.join(self._failed_count.iterkeys()))
+            status_file = os.environ.get("PARALLEL_EMERGE_STATUS_FILE")
+            if status_file:
+              failed_pkgs = set(portage.versions.cpv_getkey(x)
+                                for x in self._failed_count.iterkeys())
+              with open(status_file, "a") as f:
+                f.write("%s\n" % " ".join(failed_pkgs))
+          else:
+            print("Deadlock! Circular dependencies!")
+          sys.exit(1)
+
+      for _ in xrange(12):
+        try:
+          job = self._job_queue.get(timeout=5)
+          break
+        except Queue.Empty:
+          # Check if any more jobs can be scheduled.
+          self._ScheduleLoop()
+      else:
+        # Print an update every 60 seconds.
+        self._Status()
+        continue
+
+      target = job.target
+
+      if job.fetch_only:
+        if not job.done:
+          self._fetch_jobs[job.target] = job
+        else:
+          state = self._state_map[job.target]
+          state.prefetched = True
+          state.fetched_successfully = (job.retcode == 0)
+          del self._fetch_jobs[job.target]
+          self._Print("Fetched %s in %2.2fs"
+                      % (target, time.time() - job.start_timestamp))
+
+          if self._show_output or job.retcode != 0:
+            self._print_queue.put(JobPrinter(job, unlink=True))
+          else:
+            os.unlink(job.filename)
+          # Failure or not, let build work with it next.
+          if not self._deps_map[job.target]["needs"]:
+            self._build_ready.put(state)
+            self._ScheduleLoop()
+
+          if self._unpack_only and job.retcode == 0:
+            self._unpack_ready.put(state)
+            self._ScheduleLoop(unpack_only=True)
+
+          if self._fetch_ready:
+            state = self._fetch_ready.get()
+            self._fetch_queue.put(state)
+            self._fetch_jobs[state.target] = None
+          else:
+            # Minor optimization; shut down fetchers early since we know
+            # the queue is empty.
+            self._fetch_queue.put(None)
+        continue
+
+      if job.unpack_only:
+        if not job.done:
+          self._unpack_jobs[target] = job
+        else:
+          del self._unpack_jobs[target]
+          self._Print("Unpacked %s in %2.2fs"
+                      % (target, time.time() - job.start_timestamp))
+          if self._show_output or job.retcode != 0:
+            self._print_queue.put(JobPrinter(job, unlink=True))
+          else:
+            os.unlink(job.filename)
+          if self._unpack_ready:
+            state = self._unpack_ready.get()
+            self._unpack_queue.put(state)
+            self._unpack_jobs[state.target] = None
+        continue
+
+      if not job.done:
+        self._build_jobs[target] = job
+        self._Print("Started %s (logged in %s)" % (target, job.filename))
+        continue
+
+      # Print output of job
+      if self._show_output or job.retcode != 0:
+        self._print_queue.put(JobPrinter(job, unlink=True))
+      else:
+        os.unlink(job.filename)
+      del self._build_jobs[target]
+
+      seconds = time.time() - job.start_timestamp
+      details = "%s (in %dm%.1fs)" % (target, seconds / 60, seconds % 60)
+
+      # Complain if necessary.
+      if job.retcode != 0:
+        # Handle job failure.
+        failed_count = self._failed_count.get(target, 0)
+        if failed_count >= self._max_retries:
+          # If this job has failed and can't be retried, give up.
+          self._Print("Failed %s. Your build has failed." % details)
+        else:
+          # Queue up this build to try again after a long while.
+          self._retry_queue.append(self._state_map[target])
+          self._failed_count[target] = failed_count + 1
+          self._Print("Failed %s, retrying later." % details)
+      else:
+        self._Print("Completed %s" % details)
+
+        # Mark as completed and unblock waiting ebuilds.
+        self._Finish(target)
+
+        if target in self._failed_count and self._retry_queue:
+          # If we have successfully retried a failed package, and there
+          # are more failed packages, try the next one. We will only have
+          # one retrying package actively running at a time.
+          self._Retry()
+
+
+      # Schedule pending jobs and print an update.
+      self._ScheduleLoop()
+      self._Status()
+
+    # If packages were retried, output a warning.
+    if self._failed_count:
+      self._Print("")
+      self._Print("WARNING: The following packages failed once or more,")
+      self._Print("but succeeded upon retry. This might indicate incorrect")
+      self._Print("dependencies.")
+      for pkg in self._failed_count.iterkeys():
+        self._Print("  %s" % pkg)
+      self._Print("@@@STEP_WARNINGS@@@")
+      self._Print("")
+
+    # Tell child threads to exit.
+    self._Print("Merge complete")
+
+
+def main(argv):
+  """Entry point: run real_main, then force-clean multiprocessing threads.
+
+  Args:
+    argv: Command line arguments, passed through to real_main.
+
+  Returns:
+    The exit status from real_main.
+  """
+  try:
+    return real_main(argv)
+  finally:
+    # Work around multiprocessing sucking and not cleaning up after itself.
+    # http://bugs.python.org/issue4106
+    # Step one; ensure GC is run *prior* to the VM starting shutdown.
+    gc.collect()
+    # Step two; go looking for those threads and try to manually reap
+    # them if we can.
+    for x in threading.enumerate():
+      # Filter on the name, and ident; if ident is None, the thread
+      # wasn't started.
+      if x.name == 'QueueFeederThread' and x.ident is not None:
+        x.join(1)
+
+
+def real_main(argv):
+  """Build the dependency graph and run the parallel merge.
+
+  Args:
+    argv: Raw command line arguments for parallel_emerge.
+
+  Returns:
+    An exit status (0 on success).  May instead re-exec itself (and never
+    return) when portage itself must be upgraded before the other packages.
+  """
+  parallel_emerge_args = argv[:]
+  deps = DepGraphGenerator()
+  deps.Initialize(parallel_emerge_args)
+  emerge = deps.emerge
+
+  if emerge.action is not None:
+    # An explicit emerge action was requested; delegate to stock emerge.
+    argv = deps.ParseParallelEmergeArgs(argv)
+    return emerge_main(argv)
+  elif not emerge.cmdline_packages:
+    Usage()
+    return 1
+
+  # Unless we're in pretend mode, there's not much point running without
+  # root access. We need to be able to install packages.
+  #
+  # NOTE: Even if you're running --pretend, it's a good idea to run
+  #       parallel_emerge with root access so that portage can write to the
+  #       dependency cache. This is important for performance.
+  if "--pretend" not in emerge.opts and portage.data.secpass < 2:
+    print("parallel_emerge: superuser access is required.")
+    return 1
+
+  if "--quiet" not in emerge.opts:
+    cmdline_packages = " ".join(emerge.cmdline_packages)
+    print("Starting fast-emerge.")
+    print(" Building package %s on %s" % (cmdline_packages,
+                                          deps.sysroot or "root"))
+
+  deps_tree, deps_info = deps.GenDependencyTree()
+
+  # You want me to be verbose? I'll give you two trees! Twice as much value.
+  if "--tree" in emerge.opts and "--verbose" in emerge.opts:
+    deps.PrintTree(deps_tree)
+
+  deps_graph = deps.GenDependencyGraph(deps_tree, deps_info)
+
+  # OK, time to print out our progress so far.
+  deps.PrintInstallPlan(deps_graph)
+  if "--tree" in emerge.opts:
+    PrintDepsMap(deps_graph)
+
+  # Are we upgrading portage? If so, and there are more packages to merge,
+  # schedule a restart of parallel_emerge to merge the rest. This ensures that
+  # we pick up all updates to portage settings before merging any more
+  # packages.
+  portage_upgrade = False
+  root = emerge.settings["ROOT"]
+  # pylint: disable=W0212
+  if root == "/":
+    final_db = emerge.depgraph._dynamic_config._filtered_trees[root]['graph_db']
+    for db_pkg in final_db.match_pkgs("sys-apps/portage"):
+      portage_pkg = deps_graph.get(db_pkg.cpv)
+      if portage_pkg:
+        portage_upgrade = True
+        if "--quiet" not in emerge.opts:
+          print("Upgrading portage first, then restarting...")
+
+  # Upgrade Portage first, then the rest of the packages.
+  #
+  # In order to grant the child permission to run setsid, we need to run sudo
+  # again. We preserve SUDO_USER here in case an ebuild depends on it.
+  if portage_upgrade:
+    # Calculate what arguments to use when re-invoking.
+    args = ["sudo", "-E", "SUDO_USER=%s" % os.environ.get("SUDO_USER", "")]
+    args += [os.path.abspath(sys.argv[0])] + parallel_emerge_args
+    args += ["--exclude=sys-apps/portage"]
+
+    # First upgrade Portage.
+    passthrough_args = ("--quiet", "--pretend", "--verbose")
+    emerge_args = [k for k in emerge.opts if k in passthrough_args]
+    ret = emerge_main(emerge_args + ["portage"])
+    if ret != 0:
+      return ret
+
+    # Now upgrade the rest.
+    os.execvp(args[0], args)
+
+  # Attempt to solve crbug.com/433482
+  # The file descriptor error appears only when getting userpriv_groups
+  # (lazily generated). Loading userpriv_groups here will reduce the number of
+  # calls from few hundreds to one.
+  portage.data._get_global('userpriv_groups')
+
+  # Run the queued emerges.
+  scheduler = EmergeQueue(deps_graph, emerge, deps.package_db, deps.show_output,
+                          deps.unpack_only, deps.max_retries)
+  try:
+    scheduler.Run()
+  finally:
+    # pylint: disable=W0212
+    scheduler._Shutdown()
+  scheduler = None
+
+  clean_logs(emerge.settings)
+
+  print("Done")
+  return 0
diff --git a/scripts/pushimage.py b/scripts/pushimage.py
new file mode 100644
index 0000000..58c0ca2
--- /dev/null
+++ b/scripts/pushimage.py
@@ -0,0 +1,506 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""ChromeOS image pusher (from cbuildbot to signer).
+
+This pushes files from the archive bucket to the signer bucket and marks
+artifacts for signing (which a signing process will look for).
+"""
+
+from __future__ import print_function
+
+import ConfigParser
+import cStringIO
+import getpass
+import os
+import re
+import tempfile
+import textwrap
+
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import signing
+
+
+# This will split a fully qualified ChromeOS version string up.
+# R34-5126.0.0 will break into "34" and "5126.0.0".
+VERSION_REGEX = r'^R([0-9]+)-([^-]+)'
+
+# The test signers will scan this dir looking for test work.
+# Keep it in sync with the signer config files [gs_test_buckets].
+TEST_SIGN_BUCKET_BASE = 'gs://chromeos-throw-away-bucket/signer-tests'
+
+# Keysets that are only valid in the above test bucket.
+# At runtime these names are combined with TEST_KEYSET_PREFIX,
+# e.g. "test-keys-mp" (see force_keysets handling in main()).
+TEST_KEYSET_PREFIX = 'test-keys'
+TEST_KEYSETS = set((
+    'mp',
+    'premp',
+    'nvidia-premp',
+))
+
+# Supported image types for signing (valid values for --sign-types).
+_SUPPORTED_IMAGE_TYPES = (
+    constants.IMAGE_TYPE_RECOVERY,
+    constants.IMAGE_TYPE_FACTORY,
+    constants.IMAGE_TYPE_FIRMWARE,
+    constants.IMAGE_TYPE_NV_LP0_FIRMWARE,
+    constants.IMAGE_TYPE_BASE,
+)
+
+
+class PushError(Exception):
+  """When an (unknown) error happened while trying to push artifacts.
+
+  Raised at the end of PushImage (after all uploads were attempted) so that
+  as many files as possible get pushed; its args include the dict of
+  instruction URLs that were successfully flagged for signing.
+  """
+
+
+class MissingBoardInstructions(Exception):
+  """Raised when a board lacks any signer instructions.
+
+  Thrown by InputInsns.__init__ when the board's base (recovery)
+  instruction file does not exist.
+  """
+
+
+class InputInsns(object):
+  """Object to hold settings for a signable board.
+
+  Note: The format of the instruction file pushimage outputs (and the signer
+  reads) is not exactly the same as the instruction file pushimage reads.
+  """
+
+  def __init__(self, board):
+    """Load the layered instruction config for |board|.
+
+    Args:
+      board: The board whose signing instruction files should be loaded.
+
+    Raises:
+      MissingBoardInstructions: If the board has no base (recovery)
+        instruction file.
+    """
+    self.board = board
+
+    config = ConfigParser.ConfigParser()
+    # Load the common defaults first; board settings are layered on top.
+    config.readfp(open(self.GetInsnFile('DEFAULT')))
+    # What pushimage internally refers to as 'recovery', are the basic signing
+    # instructions in practice, and other types are stacked on top.
+    input_insns = self.GetInsnFile(constants.IMAGE_TYPE_RECOVERY)
+    if not os.path.exists(input_insns):
+      # This board doesn't have any signing instructions.
+      raise MissingBoardInstructions(self.board)
+    config.readfp(open(input_insns))
+    self.cfg = config
+
+  def GetInsnFile(self, image_type):
+    """Find the signer instruction files for this board/image type.
+
+    Args:
+      image_type: The type of instructions to load.  It can be a common file
+        (like "DEFAULT"), or one of the --sign-types.
+
+    Returns:
+      Full path to the instruction file using |image_type| and |self.board|.
+    """
+    if image_type == image_type.upper():
+      # Common files (e.g. "DEFAULT") are looked up by their all-caps name.
+      name = image_type
+    elif image_type in (constants.IMAGE_TYPE_RECOVERY,
+                        constants.IMAGE_TYPE_BASE):
+      # The base/recovery insns live in the plain board-named file.
+      name = self.board
+    else:
+      name = '%s.%s' % (self.board, image_type)
+
+    return os.path.join(signing.INPUT_INSN_DIR, '%s.instructions' % name)
+
+  @staticmethod
+  def SplitCfgField(val):
+    """Split a string into multiple elements.
+
+    This centralizes our convention for multiple elements in the input files
+    being delimited by either a space or comma.
+
+    Args:
+      val: The string to split.
+
+    Returns:
+      The list of elements from having done split the string.
+    """
+    return val.replace(',', ' ').split()
+
+  def GetChannels(self):
+    """Return the list of channels to sign for this board.
+
+    If the board-specific config doesn't specify a preference, we'll use the
+    common settings.
+    """
+    return self.SplitCfgField(self.cfg.get('insns', 'channel'))
+
+  def GetKeysets(self):
+    """Return the list of keysets to sign for this board."""
+    return self.SplitCfgField(self.cfg.get('insns', 'keyset'))
+
+  def OutputInsns(self, image_type, output_file, sect_insns, sect_general):
+    """Generate the output instruction file for sending to the signer.
+
+    Note: The format of the instruction file pushimage outputs (and the signer
+    reads) is not exactly the same as the instruction file pushimage reads.
+
+    Args:
+      image_type: The type of image we will be signing (see --sign-types).
+      output_file: The file to write the new instruction file to.
+      sect_insns: Items to set/override in the [insns] section.
+      sect_general: Items to set/override in the [general] section.
+    """
+    config = ConfigParser.ConfigParser()
+    config.readfp(open(self.GetInsnFile(image_type)))
+
+    # Clear channel entry in instructions file, ensuring we only get
+    # one channel for the signer to look at.  Then provide all the
+    # other details for this signing request to avoid any ambiguity
+    # and to avoid relying on encoding data into filenames.
+    for sect, fields in zip(('insns', 'general'), (sect_insns, sect_general)):
+      if not config.has_section(sect):
+        config.add_section(sect)
+      for k, v in fields.iteritems():
+        config.set(sect, k, v)
+
+    output = cStringIO.StringIO()
+    config.write(output)
+    data = output.getvalue()
+    osutils.WriteFile(output_file, data)
+    logging.debug('generated insns file for %s:\n%s', image_type, data)
+
+
+def MarkImageToBeSigned(ctx, tbs_base, insns_path, priority):
+  """Mark an instructions file for signing.
+
+  This will upload a file to the GS bucket flagging an image for signing by
+  the signers.
+
+  Args:
+    ctx: A viable gs.GSContext.
+    tbs_base: The full path to where the tobesigned directory lives.
+    insns_path: The path (relative to |tbs_base|) of the file to sign.
+    priority: Set the signing priority (lower == higher prio).
+
+  Returns:
+    The full path to the remote tobesigned file.
+
+  Raises:
+    ValueError: If |priority| is outside the [0, 99] range.
+  """
+  if priority < 0 or priority > 99:
+    raise ValueError('priority must be [0, 99] inclusive')
+
+  # Accept absolute insns paths by making them relative to |tbs_base|.
+  if insns_path.startswith(tbs_base):
+    insns_path = insns_path[len(tbs_base):].lstrip('/')
+
+  # Encode the priority and path into the filename; '/' is not valid in a
+  # single object name component, so it's replaced with ','.
+  tbs_path = '%s/tobesigned/%02i,%s' % (tbs_base, priority,
+                                        insns_path.replace('/', ','))
+
+  # Copy from stdin ('-') so the uploaded file's contents are just the
+  # machine details string.
+  # The caller will catch gs.GSContextException for us.
+  ctx.Copy('-', tbs_path, input=cros_build_lib.MachineDetails())
+
+  return tbs_path
+
+
+def PushImage(src_path, board, versionrev=None, profile=None, priority=50,
+              sign_types=None, dry_run=False, mock=False, force_keysets=()):
+  """Push the image from the archive bucket to the release bucket.
+
+  Args:
+    src_path: Where to copy the files from; can be a local path or gs:// URL.
+      Should be a full path to the artifacts in either case.
+    board: The board we're uploading artifacts for (e.g. $BOARD).
+    versionrev: The full Chromium OS version string (e.g. R34-5126.0.0).
+    profile: The board profile in use (e.g. "asan").
+    priority: Set the signing priority (lower == higher prio).
+    sign_types: If set, a set of types which we'll restrict ourselves to
+      signing.  See the --sign-types option for more details.
+    dry_run: Show what would be done, but do not upload anything.
+    mock: Upload to a testing bucket rather than the real one.
+    force_keysets: Set of keysets to use rather than what the inputs say.
+
+  Returns:
+    A dictionary that maps 'channel' -> ['gs://signer_instruction_uri1',
+                                         'gs://signer_instruction_uri2',
+                                         ...]
+
+  Raises:
+    PushError: If any unknown GS error was hit; raised only after all
+      uploads have been attempted.
+    ValueError: If |versionrev| does not match VERSION_REGEX.
+  """
+  # Whether we hit an unknown error.  If so, we'll throw an error, but only
+  # at the end (so that we still upload as many files as possible).
+  # It's implemented using a list to deal with variable scopes in nested
+  # functions below.
+  unknown_error = [False]
+
+  if versionrev is None:
+    # Extract milestone/version from the directory name.
+    versionrev = os.path.basename(src_path)
+
+  # We only support the latest format here.  Older releases can use pushimage
+  # from the respective branch which deals with legacy cruft.
+  m = re.match(VERSION_REGEX, versionrev)
+  if not m:
+    raise ValueError('version %s does not match %s' %
+                     (versionrev, VERSION_REGEX))
+  milestone = m.group(1)
+  version = m.group(2)
+
+  # Normalize board to always use dashes not underscores.  This is mostly a
+  # historical artifact at this point, but we can't really break it since the
+  # value is used in URLs.
+  boardpath = board.replace('_', '-')
+  if profile is not None:
+    boardpath += '-%s' % profile.replace('_', '-')
+
+  ctx = gs.GSContext(dry_run=dry_run)
+
+  try:
+    input_insns = InputInsns(board)
+  except MissingBoardInstructions as e:
+    logging.warning('board "%s" is missing base instruction file: %s', board, e)
+    logging.warning('not uploading anything for signing')
+    # NOTE(review): this returns None while the docstring's Returns section
+    # promises a dict; callers must be prepared to handle None here.
+    return
+  channels = input_insns.GetChannels()
+
+  # We want force_keysets as a set, and keysets as a list.
+  force_keysets = set(force_keysets)
+  keysets = list(force_keysets) if force_keysets else input_insns.GetKeysets()
+
+  if mock:
+    logging.info('Upload mode: mock; signers will not process anything')
+    tbs_base = gs_base = os.path.join(constants.TRASH_BUCKET, 'pushimage-tests',
+                                      getpass.getuser())
+  elif set(['%s-%s' % (TEST_KEYSET_PREFIX, x)
+            for x in TEST_KEYSETS]) & force_keysets:
+    logging.info('Upload mode: test; signers will process test keys')
+    # We need the tbs_base to be in the place the signer will actually scan.
+    tbs_base = TEST_SIGN_BUCKET_BASE
+    gs_base = os.path.join(tbs_base, getpass.getuser())
+  else:
+    logging.info('Upload mode: normal; signers will process the images')
+    tbs_base = gs_base = constants.RELEASE_BUCKET
+
+  sect_general = {
+      'config_board': board,
+      'board': boardpath,
+      'version': version,
+      'versionrev': versionrev,
+      'milestone': milestone,
+  }
+  sect_insns = {}
+
+  if dry_run:
+    logging.info('DRY RUN MODE ACTIVE: NOTHING WILL BE UPLOADED')
+  logging.info('Signing for channels: %s', ' '.join(channels))
+  logging.info('Signing for keysets : %s', ' '.join(keysets))
+
+  instruction_urls = {}
+
+  def _ImageNameBase(image_type=None):
+    lmid = ('%s-' % image_type) if image_type else ''
+    return 'ChromeOS-%s%s-%s' % (lmid, versionrev, boardpath)
+
+  # These variables are defined outside the loop so that the nested functions
+  # below can access them without 'cell-var-from-loop' linter warning.
+  dst_path = ""
+  files_to_sign = []
+  for channel in channels:
+    logging.debug('\n\n#### CHANNEL: %s ####\n', channel)
+    sect_insns['channel'] = channel
+    sub_path = '%s-channel/%s/%s' % (channel, boardpath, version)
+    dst_path = '%s/%s' % (gs_base, sub_path)
+    logging.info('Copying images to %s', dst_path)
+
+    recovery_basename = _ImageNameBase(constants.IMAGE_TYPE_RECOVERY)
+    factory_basename = _ImageNameBase(constants.IMAGE_TYPE_FACTORY)
+    firmware_basename = _ImageNameBase(constants.IMAGE_TYPE_FIRMWARE)
+    nv_lp0_firmware_basename = _ImageNameBase(
+        constants.IMAGE_TYPE_NV_LP0_FIRMWARE)
+    test_basename = _ImageNameBase(constants.IMAGE_TYPE_TEST)
+    base_basename = _ImageNameBase(constants.IMAGE_TYPE_BASE)
+    hwqual_tarball = 'chromeos-hwqual-%s-%s.tar.bz2' % (board, versionrev)
+
+    # The following build artifacts, if present, are always copied regardless of
+    # requested signing types.
+    files_to_copy_only = (
+        # (<src>, <dst>, <suffix>),
+        ('image.zip', _ImageNameBase(), 'zip'),
+        (constants.TEST_IMAGE_TAR, test_basename, 'tar.xz'),
+        ('debug.tgz', 'debug-%s' % boardpath, 'tgz'),
+        (hwqual_tarball, '', ''),
+        ('au-generator.zip', '', ''),
+        ('stateful.tgz', '', ''),
+    )
+
+    # The following build artifacts, if present, are always copied.
+    # If |sign_types| is None, all of them are marked for signing, otherwise
+    # only the image types specified in |sign_types| are marked for signing.
+    files_to_copy_and_maybe_sign = (
+        # (<src>, <dst>, <suffix>, <signing type>),
+        (constants.RECOVERY_IMAGE_TAR, recovery_basename, 'tar.xz',
+         constants.IMAGE_TYPE_RECOVERY),
+
+        ('factory_image.zip', factory_basename, 'zip',
+         constants.IMAGE_TYPE_FACTORY),
+
+        ('firmware_from_source.tar.bz2', firmware_basename, 'tar.bz2',
+         constants.IMAGE_TYPE_FIRMWARE),
+
+        ('firmware_from_source.tar.bz2', nv_lp0_firmware_basename, 'tar.bz2',
+         constants.IMAGE_TYPE_NV_LP0_FIRMWARE),
+    )
+
+    # The following build artifacts are copied and marked for signing, if
+    # they are present *and* if the image type is specified via |sign_types|.
+    files_to_maybe_copy_and_sign = (
+        # (<src>, <dst>, <suffix>, <signing type>),
+        (constants.BASE_IMAGE_TAR, base_basename, 'tar.xz',
+         constants.IMAGE_TYPE_BASE),
+    )
+
+    def _CopyFileToGS(src, dst, suffix):
+      """Returns |dst| file name if the copying was successful."""
+      if not dst:
+        dst = src
+      elif suffix:
+        dst = '%s.%s' % (dst, suffix)
+      success = False
+      try:
+        ctx.Copy(os.path.join(src_path, src), os.path.join(dst_path, dst))
+        success = True
+      except gs.GSNoSuchKey:
+        # Missing artifacts are expected for some boards; not an error.
+        logging.warning('Skipping %s as it does not exist', src)
+      except gs.GSContextException:
+        unknown_error[0] = True
+        logging.error('Skipping %s due to unknown GS error', src, exc_info=True)
+      return dst if success else None
+
+    for src, dst, suffix in files_to_copy_only:
+      _CopyFileToGS(src, dst, suffix)
+
+    # Clear the list of files to sign before adding new artifacts.
+    files_to_sign = []
+
+    def _AddToFilesToSign(image_type, dst, suffix):
+      assert dst.endswith('.' + suffix), (
+          'dst: %s, suffix: %s' % (dst, suffix))
+      dst_base = dst[:-(len(suffix) + 1)]
+      files_to_sign.append([image_type, dst_base, suffix])
+
+    for src, dst, suffix, image_type in files_to_copy_and_maybe_sign:
+      dst = _CopyFileToGS(src, dst, suffix)
+      if dst and (not sign_types or image_type in sign_types):
+        _AddToFilesToSign(image_type, dst, suffix)
+
+    for src, dst, suffix, image_type in files_to_maybe_copy_and_sign:
+      if sign_types and image_type in sign_types:
+        dst = _CopyFileToGS(src, dst, suffix)
+        if dst:
+          _AddToFilesToSign(image_type, dst, suffix)
+
+    logging.debug('Files to sign: %s', files_to_sign)
+    # Now go through the subset for signing.
+    for keyset in keysets:
+      logging.debug('\n\n#### KEYSET: %s ####\n', keyset)
+      sect_insns['keyset'] = keyset
+      for image_type, dst_name, suffix in files_to_sign:
+        dst_archive = '%s.%s' % (dst_name, suffix)
+        sect_general['archive'] = dst_archive
+        sect_general['type'] = image_type
+
+        # In the default/automatic mode, only flag files for signing if the
+        # archives were actually uploaded in a previous stage. This additional
+        # check can be removed in future once |sign_types| becomes a required
+        # argument.
+        # TODO: Make |sign_types| a required argument.
+        gs_artifact_path = os.path.join(dst_path, dst_archive)
+        exists = False
+        try:
+          exists = ctx.Exists(gs_artifact_path)
+        except gs.GSContextException:
+          unknown_error[0] = True
+          logging.error('Unknown error while checking %s', gs_artifact_path,
+                        exc_info=True)
+        if not exists:
+          logging.info('%s does not exist.  Nothing to sign.',
+                       gs_artifact_path)
+          continue
+
+        input_insn_path = input_insns.GetInsnFile(image_type)
+        if not os.path.exists(input_insn_path):
+          logging.info('%s does not exist.  Nothing to sign.', input_insn_path)
+          continue
+
+        # Generate the insn file for this artifact that the signer will use,
+        # and flag it for signing.
+        with tempfile.NamedTemporaryFile(
+            bufsize=0, prefix='pushimage.insns.') as insns_path:
+          input_insns.OutputInsns(image_type, insns_path.name, sect_insns,
+                                  sect_general)
+
+          # Only the first keyset's insns file keeps the bare name; later
+          # keysets get a "-<keyset>" suffix to avoid collisions.
+          gs_insns_path = '%s/%s' % (dst_path, dst_name)
+          if keyset != keysets[0]:
+            gs_insns_path += '-%s' % keyset
+          gs_insns_path += '.instructions'
+
+          try:
+            ctx.Copy(insns_path.name, gs_insns_path)
+          except gs.GSContextException:
+            unknown_error[0] = True
+            logging.error('Unknown error while uploading insns %s',
+                          gs_insns_path, exc_info=True)
+            continue
+
+          try:
+            MarkImageToBeSigned(ctx, tbs_base, gs_insns_path, priority)
+          except gs.GSContextException:
+            unknown_error[0] = True
+            logging.error('Unknown error while marking for signing %s',
+                          gs_insns_path, exc_info=True)
+            continue
+          logging.info('Signing %s image %s', image_type, gs_insns_path)
+          instruction_urls.setdefault(channel, []).append(gs_insns_path)
+
+  if unknown_error[0]:
+    raise PushError('hit some unknown error(s)', instruction_urls)
+
+  return instruction_urls
+
+
+def main(argv):
+  parser = commandline.ArgumentParser(description=__doc__)
+
+  # The type of image_dir will strip off trailing slashes (makes later
+  # processing simpler and the display prettier).
+  parser.add_argument('image_dir', default=None, type='local_or_gs_path',
+                      help='full path of source artifacts to upload')
+  parser.add_argument('--board', default=None, required=True,
+                      help='board to generate symbols for')
+  parser.add_argument('--profile', default=None,
+                      help='board profile in use (e.g. "asan")')
+  parser.add_argument('--version', default=None,
+                      help='version info (normally extracted from image_dir)')
+  parser.add_argument('-n', '--dry-run', default=False, action='store_true',
+                      help='show what would be done, but do not upload')
+  parser.add_argument('-M', '--mock', default=False, action='store_true',
+                      help='upload things to a testing bucket (dev testing)')
+  parser.add_argument('--test-sign', default=[], action='append',
+                      choices=TEST_KEYSETS,
+                      help='mung signing behavior to sign w/ test keys')
+  parser.add_argument('--priority', type=int, default=50,
+                      help='set signing priority (lower == higher prio)')
+  parser.add_argument('--sign-types', default=None, nargs='+',
+                      choices=_SUPPORTED_IMAGE_TYPES,
+                      help='only sign specified image types')
+  parser.add_argument('--yes', action='store_true', default=False,
+                      help='answer yes to all prompts')
+
+  opts = parser.parse_args(argv)
+  opts.Freeze()
+
+  force_keysets = set(['%s-%s' % (TEST_KEYSET_PREFIX, x)
+                       for x in opts.test_sign])
+
+  # If we aren't using mock or test or dry run mode, then let's prompt the user
+  # to make sure they actually want to do this.  It's rare that people want to
+  # run this directly and hit the release bucket.
+  if not (opts.mock or force_keysets or opts.dry_run) and not opts.yes:
+    prolog = '\n'.join(textwrap.wrap(textwrap.dedent(
+        'Uploading images for signing to the *release* bucket is not something '
+        'you generally should be doing yourself.'), 80)).strip()
+    if not cros_build_lib.BooleanPrompt(
+        prompt='Are you sure you want to sign these images',
+        default=False, prolog=prolog):
+      cros_build_lib.Die('better safe than sorry')
+
+  PushImage(opts.image_dir, opts.board, versionrev=opts.version,
+            profile=opts.profile, priority=opts.priority,
+            sign_types=opts.sign_types, dry_run=opts.dry_run, mock=opts.mock,
+            force_keysets=force_keysets)
diff --git a/scripts/pushimage_unittest b/scripts/pushimage_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/pushimage_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/pushimage_unittest.py b/scripts/pushimage_unittest.py
new file mode 100644
index 0000000..ae6f6c8
--- /dev/null
+++ b/scripts/pushimage_unittest.py
@@ -0,0 +1,292 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for pushimage.py"""
+
+from __future__ import print_function
+
+import mock
+import os
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import gs
+from chromite.lib import gs_unittest
+from chromite.lib import osutils
+from chromite.lib import partial_mock
+from chromite.lib import signing
+from chromite.scripts import pushimage
+
+
+class InputInsnsTest(cros_test_lib.MockTestCase):
+  """Tests for InputInsns"""
+
+  def setUp(self):
+    # Mock out GS access so no real network/gsutil calls are made.
+    self.StartPatcher(gs_unittest.GSContextMock())
+
+  def testBasic(self):
+    """Simple smoke test"""
+    insns = pushimage.InputInsns('test.board')
+    insns.GetInsnFile('recovery')
+    self.assertEqual(insns.GetChannels(), ['dev', 'canary'])
+    self.assertEqual(insns.GetKeysets(), ['stumpy-mp-v3'])
+
+  def testGetInsnFile(self):
+    """Verify various inputs result in right insns path"""
+    # Pairs of (image_type, expected basename without ".instructions").
+    testdata = (
+        ('UPPER_CAPS', 'UPPER_CAPS'),
+        ('recovery', 'test.board'),
+        ('firmware', 'test.board.firmware'),
+        ('factory', 'test.board.factory'),
+    )
+    insns = pushimage.InputInsns('test.board')
+    for image_type, filename in testdata:
+      ret = insns.GetInsnFile(image_type)
+      self.assertEqual(os.path.basename(ret), '%s.instructions' % (filename))
+
+  def testSplitCfgField(self):
+    """Verify splitting behavior behaves"""
+    # Pairs of (input string, expected element list).
+    testdata = (
+        ('', []),
+        ('a b c', ['a', 'b', 'c']),
+        ('a, b', ['a', 'b']),
+        ('a,b', ['a', 'b']),
+        ('a,\tb', ['a', 'b']),
+        ('a\tb', ['a', 'b']),
+    )
+    for val, exp in testdata:
+      ret = pushimage.InputInsns.SplitCfgField(val)
+      self.assertEqual(ret, exp)
+
+  def testOutputInsnsBasic(self):
+    """Verify output instructions are sane"""
+    exp_content = """[insns]
+keyset = stumpy-mp-v3
+channel = dev canary
+chromeos_shell = false
+ensure_no_password = true
+firmware_update = true
+security_checks = true
+create_nplusone = true
+
+[general]
+"""
+
+    insns = pushimage.InputInsns('test.board')
+    m = self.PatchObject(osutils, 'WriteFile')
+    insns.OutputInsns('recovery', '/bogus', {}, {})
+    self.assertTrue(m.called)
+    # WriteFile(path, data): the generated data is the 2nd positional arg.
+    content = m.call_args_list[0][0][1]
+    self.assertEqual(content.rstrip(), exp_content.rstrip())
+
+  def testOutputInsnsReplacements(self):
+    """Verify output instructions can be updated"""
+    exp_content = """[insns]
+keyset = batman
+channel = dev
+chromeos_shell = false
+ensure_no_password = true
+firmware_update = true
+security_checks = true
+create_nplusone = true
+
+[general]
+board = board
+config_board = test.board
+"""
+    sect_insns = {
+        'channel': 'dev',
+        'keyset': 'batman',
+    }
+    sect_general = {
+        'config_board': 'test.board',
+        'board': 'board',
+    }
+
+    insns = pushimage.InputInsns('test.board')
+    m = self.PatchObject(osutils, 'WriteFile')
+    insns.OutputInsns('recovery', '/a/file', sect_insns, sect_general)
+    self.assertTrue(m.called)
+    # WriteFile(path, data): the generated data is the 2nd positional arg.
+    content = m.call_args_list[0][0][1]
+    self.assertEqual(content.rstrip(), exp_content.rstrip())
+
+
+class MarkImageToBeSignedTest(gs_unittest.AbstractGSContextTest):
+  """Tests for MarkImageToBeSigned()"""
+
+  def setUp(self):
+    # Minor optimization -- we call this for logging purposes in the main
+    # code, but don't really care about it for testing.  It just slows us.
+    self.PatchObject(cros_build_lib, 'MachineDetails', return_value='1234\n')
+
+  def testBasic(self):
+    """Simple smoke test"""
+    tbs_base = 'gs://some-bucket'
+    insns_path = 'chan/board/ver/file.instructions'
+    # The '/' separators get encoded as ',' with the priority prepended.
+    tbs_file = '%s/tobesigned/90,chan,board,ver,file.instructions' % tbs_base
+    ret = pushimage.MarkImageToBeSigned(self.ctx, tbs_base, insns_path, 90)
+    self.assertEqual(ret, tbs_file)
+
+  def testPriority(self):
+    """Verify diff priority values get used correctly"""
+    # With empty tbs_base/insns_path, the result is just the zero-padded
+    # '/tobesigned/NN,' path fragment.
+    for prio, sprio in ((0, '00'), (9, '09'), (35, '35'), (99, '99')):
+      ret = pushimage.MarkImageToBeSigned(self.ctx, '', '', prio)
+      self.assertEquals(ret, '/tobesigned/%s,' % sprio)
+
+  def testBadPriority(self):
+    """Verify we reject bad priority values"""
+    for prio in (-10, -1, 100, 91239):
+      self.assertRaises(ValueError, pushimage.MarkImageToBeSigned, self.ctx,
+                        '', '', prio)
+
+  def testTbsUpload(self):
+    """Make sure we actually try to upload the file"""
+    pushimage.MarkImageToBeSigned(self.ctx, '', '', 50)
+    self.gs_mock.assertCommandContains(['cp', '--'])
+
+
+class PushImageTests(gs_unittest.AbstractGSContextTest):
+  """Tests for PushImage()"""
+
+  def setUp(self):
+    # Stub the marking step; these tests only check whether it was invoked.
+    self.mark_mock = self.PatchObject(pushimage, 'MarkImageToBeSigned')
+
+  def testBasic(self):
+    """Simple smoke test"""
+    EXPECTED = {
+        'canary': [
+            ('gs://chromeos-releases/canary-channel/test.board-hi/5126.0.0/'
+             'ChromeOS-recovery-R34-5126.0.0-test.board-hi.instructions')],
+        'dev': [
+            ('gs://chromeos-releases/dev-channel/test.board-hi/5126.0.0/'
+             'ChromeOS-recovery-R34-5126.0.0-test.board-hi.instructions')],
+    }
+    with mock.patch.object(gs.GSContext, 'Exists', return_value=True):
+      urls = pushimage.PushImage('/src', 'test.board', 'R34-5126.0.0',
+                                 profile='hi')
+
+    self.assertEqual(urls, EXPECTED)
+
+  def testBasic_SignTypesEmptyList(self):
+    """Tests PushImage behavior when |sign_types| is empty instead of None.
+
+    As part of the buildbots, PushImage function always receives a tuple for
+    |sign_types| argument.  This test checks the behavior for empty tuple.
+    """
+    EXPECTED = {
+        'canary': [
+            ('gs://chromeos-releases/canary-channel/test.board-hi/5126.0.0/'
+             'ChromeOS-recovery-R34-5126.0.0-test.board-hi.instructions')],
+        'dev': [
+            ('gs://chromeos-releases/dev-channel/test.board-hi/5126.0.0/'
+             'ChromeOS-recovery-R34-5126.0.0-test.board-hi.instructions')],
+    }
+    with mock.patch.object(gs.GSContext, 'Exists', return_value=True):
+      urls = pushimage.PushImage('/src', 'test.board', 'R34-5126.0.0',
+                                 profile='hi', sign_types=())
+
+    self.assertEqual(urls, EXPECTED)
+
+  def testBasic_RealBoardName(self):
+    """Runs a simple smoke test using a real board name."""
+    EXPECTED = {
+        'canary': [
+            ('gs://chromeos-releases/canary-channel/x86-alex/5126.0.0/'
+             'ChromeOS-recovery-R34-5126.0.0-x86-alex.instructions')],
+        'dev': [
+            ('gs://chromeos-releases/dev-channel/x86-alex/5126.0.0/'
+             'ChromeOS-recovery-R34-5126.0.0-x86-alex.instructions')],
+    }
+    with mock.patch.object(gs.GSContext, 'Exists', return_value=True):
+      urls = pushimage.PushImage('/src', 'x86-alex', 'R34-5126.0.0')
+
+    self.assertEqual(urls, EXPECTED)
+
+  def testBasicMock(self):
+    """Simple smoke test in mock mode"""
+    with mock.patch.object(gs.GSContext, 'Exists', return_value=True):
+      pushimage.PushImage('/src', 'test.board', 'R34-5126.0.0',
+                          dry_run=True, mock=True)
+
+  def testBadVersion(self):
+    """Make sure we barf on bad version strings"""
+    self.assertRaises(ValueError, pushimage.PushImage, '', '', 'asdf')
+
+  def testNoInsns(self):
+    """Boards w/out insn files should get skipped"""
+    urls = pushimage.PushImage('/src', 'a bad bad board', 'R34-5126.0.0')
+    self.assertEqual(self.gs_mock.call_count, 0)
+    self.assertEqual(urls, None)
+
+  def testSignTypesRecovery(self):
+    """Only sign the requested recovery type"""
+    EXPECTED = {
+        'canary': [
+            ('gs://chromeos-releases/canary-channel/test.board/5126.0.0/'
+             'ChromeOS-recovery-R34-5126.0.0-test.board.instructions')],
+        'dev': [
+            ('gs://chromeos-releases/dev-channel/test.board/5126.0.0/'
+             'ChromeOS-recovery-R34-5126.0.0-test.board.instructions')],
+    }
+
+    with mock.patch.object(gs.GSContext, 'Exists', return_value=True):
+      urls = pushimage.PushImage('/src', 'test.board', 'R34-5126.0.0',
+                                 sign_types=['recovery'])
+    # NOTE(review): the call counts below are empirical for the current
+    # PushImage flow; they must be updated alongside PushImage changes.
+    self.assertEqual(self.gs_mock.call_count, 22)
+    self.assertTrue(self.mark_mock.called)
+    self.assertEqual(urls, EXPECTED)
+
+  def testSignTypesBase(self):
+    """Only sign the requested base type"""
+    EXPECTED = {
+        'canary': [
+            ('gs://chromeos-releases/canary-channel/test.board/5126.0.0/'
+             'ChromeOS-base-R34-5126.0.0-test.board.instructions')],
+        'dev': [
+            ('gs://chromeos-releases/dev-channel/test.board/5126.0.0/'
+             'ChromeOS-base-R34-5126.0.0-test.board.instructions')],
+    }
+
+    with mock.patch.object(gs.GSContext, 'Exists', return_value=True):
+      urls = pushimage.PushImage('/src', 'test.board', 'R34-5126.0.0',
+                                 sign_types=['base'])
+    self.assertEqual(self.gs_mock.call_count, 24)
+    self.assertTrue(self.mark_mock.called)
+    self.assertEqual(urls, EXPECTED)
+
+  def testSignTypesNone(self):
+    """Verify nothing is signed when we request an unavailable type"""
+    urls = pushimage.PushImage('/src', 'test.board', 'R34-5126.0.0',
+                               sign_types=['nononononono'])
+    self.assertEqual(self.gs_mock.call_count, 20)
+    self.assertFalse(self.mark_mock.called)
+    self.assertEqual(urls, {})
+
+  def testGsError(self):
+    """Verify random GS errors don't make us blow up entirely"""
+    # Force every 'stat' to fail; PushImage should surface a PushError
+    # rather than an unhandled exception.
+    self.gs_mock.AddCmdResult(partial_mock.In('stat'), returncode=1,
+                              output='gobblety gook\n')
+    with cros_test_lib.LoggingCapturer('chromite'):
+      self.assertRaises(pushimage.PushError, pushimage.PushImage, '/src',
+                        'test.board', 'R34-5126.0.0')
+
+
+class MainTests(cros_test_lib.MockTestCase):
+  """Tests for main()"""
+
+  def setUp(self):
+    # Stub PushImage; main() should parse args and delegate, nothing more.
+    self.PatchObject(pushimage, 'PushImage')
+
+  def testBasic(self):
+    """Simple smoke test"""
+    pushimage.main(['--board', 'test.board', '/src', '--yes'])
+
+
+def main(_argv):
+  """Entry point for running this unittest module (argv is unused)."""
+  # Use our local copy of insns for testing as the main one is not
+  # available in the public manifest.
+  signing.INPUT_INSN_DIR = signing.TEST_INPUT_INSN_DIR
+
+  # Run the tests.
+  cros_test_lib.main(level='info', module=__name__)
diff --git a/scripts/refresh_package_status.py b/scripts/refresh_package_status.py
new file mode 100644
index 0000000..51423e8
--- /dev/null
+++ b/scripts/refresh_package_status.py
@@ -0,0 +1,166 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Refresh online Portage package status spreadsheet.
+
+This must be run inside the chroot.
+
+This script encapsulates the steps involved in updating the online Portage
+package status spreadsheet.
+
+It was created for use by a buildbot, but can be run manually.
+"""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import config_lib
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import operation
+from chromite.lib import osutils
+
+
+# Site config supplies the external GOB URL used for PRTG_GIT_URL below.
+site_config = config_lib.GetConfig()
+
+
+# Shared operation object; all status output in this script goes through it.
+oper = operation.Operation('refresh_package_status')
+oper.verbose = True  # Without verbose Info messages don't show up.
+
+TMP_ROOT = '/tmp'
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+# TODO(mtennant): Remove these two and replace with variables in gdata_lib.
+GDATA_CRED_FILE = os.path.expanduser('~/.gdata_cred.txt')
+GDATA_TOKEN_FILE = os.path.expanduser('~/.gdata_token')
+# NOTE(review): GENTOO_DIR, PRTG_GIT_URL and FUNTOO_GIT_URL are not
+# referenced anywhere in this file -- confirm external use before removing.
+GENTOO_DIR = 'gentoo-portage'
+PRTG_GIT_URL = (
+    '%s/chromiumos/overlays/portage.git' % site_config.params.EXTERNAL_GOB_URL)
+FUNTOO_GIT_URL = 'git://github.com/funtoo/portage.git'
+
+
+def RunGit(cwd, cmd, args=()):
+  """Run the git |cmd| with |args| in the |cwd| directory."""
+  cmdline = ['git', cmd] + list(args)
+  cros_build_lib.RunCommand(cmdline, cwd=cwd)
+
+
+def PrepareBoards(boards):
+  """Run setup_board for any boards that need it.
+
+  Args:
+    boards: Colon-separated string of board names.
+  """
+  scripts_dir = os.path.join(SCRIPT_DIR, '..', '..', 'src', 'scripts')
+  for board in boards.split(':'):
+    # Only set up boards whose sysroot does not exist yet.
+    if not os.path.isdir(cros_build_lib.GetSysroot(board=board)):
+      oper.Info('Running setup_board for board=%s' % board)
+      cros_build_lib.RunCommand(['./setup_board', '--board=%s' % board],
+                                cwd=scripts_dir)
+
+
+def PrepareCSVRoot():
+  """Prepare CSV output directory for use."""
+  csv_root = '%s/%s' % (TMP_ROOT, 'csv')
+  osutils.RmDir(csv_root, ignore_missing=True)
+
+  os.mkdir(csv_root)
+
+  return csv_root
+
+
+def RefreshPackageStatus(board, csv_root, test,
+                         token_file, cred_file):
+  """Run through steps to refresh package status spreadsheet.
+
+  Args:
+    board: A colon-separated list of chromeos boards to use.
+    csv_root: Put all csv files under this directory.
+    test: If True, upload to test spreadsheet instead of real one.
+    token_file: Use as auth token file for spreadsheet upload if it exists.
+    cred_file: If |token_file| is missing, use this as credentials file for
+      spreadsheet upload.
+  """
+
+  # Run all chromeos targets for all boards.
+  oper.Info('Getting package status for all chromeos targets on all boards.')
+  cpu_cmd_baseline = ['cros_portage_upgrade', '--board=%s' % board]
+
+  cros_csv = '%s/chromeos-boards.csv' % csv_root
+  cros_build_lib.RunCommand(cpu_cmd_baseline +
+                            ['--to-csv=%s' % cros_csv,
+                             'virtual/target-os'])
+
+  # Each successive csv widens the target set (os -> +dev -> +test).
+  crosdev_csv = '%s/chromeos-dev-boards.csv' % csv_root
+  cros_build_lib.RunCommand(cpu_cmd_baseline +
+                            ['--to-csv=%s' % crosdev_csv,
+                             'virtual/target-os', 'virtual/target-os-dev'])
+
+  crostest_csv = '%s/chromeos-test-boards.csv' % csv_root
+  cros_build_lib.RunCommand(cpu_cmd_baseline +
+                            ['--to-csv=%s' % crostest_csv,
+                             'virtual/target-os', 'virtual/target-os-dev',
+                             'virtual/target-os-test'])
+
+  # Run all host targets for the sdk.
+  oper.Info('Getting package status for all host (sdk) targets.')
+  cpu_host_baseline = ['cros_portage_upgrade', '--host']
+
+  hostworld_csv = '%s/world-host.csv' % csv_root
+  cros_build_lib.RunCommand(cpu_host_baseline +
+                            ['--to-csv=%s' % hostworld_csv,
+                             'world'])
+
+  hosthhd_csv = '%s/hhd-host.csv' % csv_root
+  cros_build_lib.RunCommand(cpu_host_baseline +
+                            ['--to-csv=%s' % hosthhd_csv,
+                             'virtual/target-sdk'])
+
+  # Merge all csv tables into one.  Board csvs and host csvs are merged
+  # separately first, then combined into the final csv.
+  oper.Info('Merging all package status files into one.')
+  allboards_csv = '%s/all-boards.csv' % csv_root
+  cros_build_lib.RunCommand(['merge_package_status',
+                             '--out=%s' % allboards_csv,
+                             cros_csv, crosdev_csv, crostest_csv])
+
+  allhost_csv = '%s/all-host.csv' % csv_root
+  cros_build_lib.RunCommand(['merge_package_status',
+                             '--out=%s' % allhost_csv,
+                             hostworld_csv, hosthhd_csv])
+
+  allfinal_csv = '%s/all-final.csv' % csv_root
+  cros_build_lib.RunCommand(['merge_package_status',
+                             '--out=%s' % allfinal_csv,
+                             allboards_csv, allhost_csv])
+
+  # Upload the final csv file to the online spreadsheet.
+  oper.Info('Uploading package status to online spreadsheet.')
+  upload_cmdline = ['upload_package_status', '--verbose',
+                    '--cred-file=%s' % cred_file,
+                    '--auth-token-file=%s' % token_file]
+  if test:
+    upload_cmdline.append('--test-spreadsheet')
+
+  upload_cmdline.append(allfinal_csv)
+  cros_build_lib.RunCommand(upload_cmdline)
+
+
+def main(argv):
+  """Main function."""
+  parser = commandline.ArgumentParser(description=__doc__)
+  parser.add_argument('--token-file', type='path', default=GDATA_TOKEN_FILE,
+                      help='Path to gdata auth token file '
+                           '[default: %(default)s]')
+  parser.add_argument('--board', type=str, required=True,
+                      help='Target board(s), colon-separated')
+  parser.add_argument('--cred-file', type='path', default=GDATA_CRED_FILE,
+                      help='Path to gdata credentials file '
+                           '[default: %(default)s]')
+  parser.add_argument('--test-spreadsheet', dest='test',
+                      action='store_true', default=False,
+                      help='Upload changes to test spreadsheet instead')
+  options = parser.parse_args(argv)
+
+  csv_root = PrepareCSVRoot()
+  # Boards must be set up before cros_portage_upgrade can query them.
+  PrepareBoards(options.board)
+
+  RefreshPackageStatus(board=options.board, csv_root=csv_root,
+                       test=options.test, token_file=options.token_file,
+                       cred_file=options.cred_file)
diff --git a/scripts/summarize_build_stats.py b/scripts/summarize_build_stats.py
new file mode 100644
index 0000000..404a18d
--- /dev/null
+++ b/scripts/summarize_build_stats.py
@@ -0,0 +1,530 @@
+# Copyright 2015 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to summarize stats for different builds in prod."""
+
+from __future__ import print_function
+
+import datetime
+import numpy
+import re
+import sys
+
+from chromite.cbuildbot import constants
+from chromite.lib import cidb
+from chromite.lib import commandline
+from chromite.lib import cros_logging as logging
+
+
+# These are the preferred base URLs we use to canonicalize bugs/CLs.
+BUGANIZER_BASE_URL = 'b/'
+GUTS_BASE_URL = 't/'
+CROS_BUG_BASE_URL = 'crbug.com/'
+INTERNAL_CL_BASE_URL = 'crosreview.com/i/'
+EXTERNAL_CL_BASE_URL = 'crosreview.com/'
+CHROMIUM_CL_BASE_URL = 'codereview.chromium.org/'
+
+class CLStatsEngine(object):
+  """Engine to generate stats about CL actions taken by the Commit Queue."""
+
+  def __init__(self, db):
+    """Initialize the engine.
+
+    Args:
+      db: cidb connection used for all build/CL-action queries.
+    """
+    self.db = db
+    # Build rows fetched by Gather().
+    self.builds = []
+    # CL action history for the gathered date range; set by Gather().
+    self.claction_history = None
+    # Maps build_number -> list of failure-category strings.
+    self.reasons = {}
+    # Maps build_number -> list of canonicalized blame URLs.
+    self.blames = {}
+    # Intended to hold the result of Summarize() (see its docstring).
+    self.summary = {}
+    # Maps build id -> build row for all gathered builds.
+    self.builds_by_build_id = {}
+
+  def GatherBuildAnnotations(self):
+    """Gather the failure annotations for builds from cidb.
+
+    Populates self.reasons and self.blames, keyed by build number.
+    """
+    annotations_by_builds = self.db.GetAnnotationsForBuilds(
+        [b['id'] for b in self.builds])
+    for b in self.builds:
+      build_id = b['id']
+      build_number = b['build_number']
+      annotations = annotations_by_builds.get(build_id, [])
+      if not annotations:
+        # Unannotated builds get a sentinel 'None' reason and no blames.
+        self.reasons[build_number] = ['None']
+        self.blames[build_number] = []
+      else:
+        # TODO(pprabhu) crbug.com/458275
+        # We currently squash together multiple annotations into one to ease
+        # co-existence with the spreadsheet based logic. Once we've moved off of
+        # using the spreadsheet, we should update all uses of the annotations to
+        # expect one or more annotations.
+        self.reasons[build_number] = [
+            a['failure_category'] for a in annotations]
+        self.blames[build_number] = []
+        for annotation in annotations:
+          self.blames[build_number] += self.ProcessBlameString(
+              annotation['blame_url'])
+
+  @staticmethod
+  def ProcessBlameString(blame_string):
+    """Parse a human-created |blame_string| from the spreadsheet.
+
+    Args:
+      blame_string: Free-form whitespace-separated text containing bug/CL
+        references in any of several URL or shorthand forms.
+
+    Returns:
+      A list of canonicalized URLs for bugs or CLs that appear in the blame
+      string. Canonicalized form will be 'crbug.com/1234',
+      'crosreview.com/1234', 'b/1234', 't/1234', or 'crosreview.com/i/1234' as
+      applicable.
+    """
+    urls = []
+    tokens = blame_string.split()
+
+    # Format to generate the regex patterns. Matches one of provided domain
+    # names, followed by lazy wildcard, followed by greedy digit wildcard,
+    # followed by optional slash and optional comma and optional (# +
+    # alphanum wildcard).
+    general_regex = r'^.*(%s).*?([0-9]+)/?,?(#\S*)?$'
+
+    crbug = general_regex % r'crbug.com|code.google.com'
+    internal_review = general_regex % (
+        r'chrome-internal-review.googlesource.com|crosreview.com/i')
+    external_review = general_regex % (
+        r'crosreview.com|chromium-review.googlesource.com')
+    guts = general_regex % r't/|gutsv\d.corp.google.com/#ticket/'
+    chromium_review = general_regex % r'codereview.chromium.org'
+
+    # Buganizer regex is different, as buganizer urls do not end with the bug
+    # number.
+    buganizer = r'^.*(b/|b.corp.google.com/issue\?id=)([0-9]+).*$'
+
+    # Patterns need to be tried in a specific order -- internal review needs
+    # to be tried before external review, otherwise urls like crosreview.com/i
+    # will be incorrectly parsed as external.
+    patterns = [crbug,
+                internal_review,
+                external_review,
+                buganizer,
+                guts,
+                chromium_review]
+    # Must stay index-aligned with |patterns| above.
+    url_patterns = [CROS_BUG_BASE_URL,
+                    INTERNAL_CL_BASE_URL,
+                    EXTERNAL_CL_BASE_URL,
+                    BUGANIZER_BASE_URL,
+                    GUTS_BASE_URL,
+                    CHROMIUM_CL_BASE_URL]
+
+    # First pattern to match a token wins; group(2) is the numeric id.
+    for t in tokens:
+      for p, u in zip(patterns, url_patterns):
+        m = re.match(p, t)
+        if m:
+          urls.append(u + m.group(2))
+          break
+
+    return urls
+
+  def Gather(self, start_date, end_date,
+             master_config=constants.CQ_MASTER,
+             sort_by_build_number=True,
+             starting_build_number=None):
+    """Fetches build data and failure reasons.
+
+    Populates self.builds, self.claction_history, self.reasons, self.blames
+    and self.builds_by_build_id as a side effect.
+
+    Args:
+      start_date: A datetime.date instance for the earliest build to
+          examine.
+      end_date: A datetime.date instance for the latest build to
+          examine.
+      master_config: Config name of master to gather data for.
+                     Default to CQ_MASTER.
+      sort_by_build_number: Optional boolean. If True, builds will be
+          sorted by build number.
+      starting_build_number: (optional) The lowest build number to
+          include in the results.
+    """
+    logging.info('Gathering data for %s from %s until %s', master_config,
+                 start_date, end_date)
+    self.builds = self.db.GetBuildHistory(
+        master_config,
+        start_date=start_date,
+        end_date=end_date,
+        starting_build_number=starting_build_number,
+        num_results=self.db.NUM_RESULTS_NO_LIMIT)
+    if self.builds:
+      logging.info('Fetched %d builds (build_id: %d to %d)', len(self.builds),
+                   self.builds[0]['id'], self.builds[-1]['id'])
+    else:
+      logging.info('Fetched no builds.')
+    if sort_by_build_number:
+      logging.info('Sorting by build number.')
+      self.builds.sort(key=lambda x: x['build_number'])
+
+    self.claction_history = self.db.GetActionHistory(start_date, end_date)
+    self.GatherBuildAnnotations()
+
+    self.builds_by_build_id.update(
+        {b['id'] : b for b in self.builds})
+
+  def _PrintCounts(self, reasons, fmt):
+    """Print a sorted list of reasons in descending order of frequency.
+
+    Args:
+      reasons: A key/value mapping mapping the reason to the count.
+      fmt: A format string for our log message, containing %(cnt)d
+        and %(reason)s.
+    """
+    d = reasons
+    for cnt, reason in sorted(((v, k) for (k, v) in d.items()), reverse=True):
+      logging.info(fmt, dict(cnt=cnt, reason=reason))
+    if not d:
+      logging.info('  None')
+
+  def FalseRejectionRate(self, good_patch_count, false_rejection_count):
+    """Calculate the false rejection ratio.
+
+    This is the chance that a good patch will be rejected by the Pre-CQ or CQ
+    in a given run.
+
+    Args:
+      good_patch_count: The number of good patches in the run.
+      false_rejection_count: A dict containing the number of false rejections
+          for the CQ and PRE_CQ.
+
+    Returns:
+      A dict containing the false rejection ratios for CQ, PRE_CQ, and combined.
+    """
+    # NOTE: iteritems() is Python 2 only, consistent with the rest of this
+    # module.
+    false_rejection_rate = dict()
+    for bot, rejection_count in false_rejection_count.iteritems():
+      # Percentage of (rejections / all attempts) for this bot type.
+      false_rejection_rate[bot] = (
+          rejection_count * 100. / (rejection_count + good_patch_count)
+      )
+    false_rejection_rate['combined'] = 0
+    if good_patch_count:
+      rejection_count = sum(false_rejection_count.values())
+      false_rejection_rate['combined'] = (
+          rejection_count * 100. / (good_patch_count + rejection_count)
+      )
+    return false_rejection_rate
+
+  def Summarize(self, build_type):
+    """Process, print, and return a summary of statistics.
+
+    As a side effect, save summary to self.summary.
+
+    Args:
+      build_type: 'cq' to summarize CQ stats; any other value summarizes PFQ.
+
+    Returns:
+      A dictionary summarizing the statistics.
+    """
+    if build_type == 'cq':
+      return self.SummarizeCQ()
+    else:
+      return self.SummarizePFQ()
+
+  def SummarizeCQ(self):
+    """Process, print, and return a summary of cl action statistics.
+
+    As a side effect, save summary to self.summary.
+
+    Returns:
+      A dictionary summarizing the statistics.
+    """
+    if self.builds:
+      logging.info('%d total runs included, from build %d to %d.',
+                   len(self.builds), self.builds[-1]['build_number'],
+                   self.builds[0]['build_number'])
+      total_passed = len([b for b in self.builds
+                          if b['status'] == constants.BUILDER_STATUS_PASSED])
+      logging.info('%d of %d runs passed.', total_passed, len(self.builds))
+    else:
+      logging.info('No runs included.')
+
+    build_reason_counts = {}
+    for reasons in self.reasons.values():
+      for reason in reasons:
+        if reason != 'None':
+          build_reason_counts[reason] = build_reason_counts.get(reason, 0) + 1
+
+    unique_blames = set()
+    for blames in self.blames.itervalues():
+      unique_blames.update(blames)
+    unique_cl_blames = {blame for blame in unique_blames if
+                        EXTERNAL_CL_BASE_URL in blame}
+
+    # Shortcuts to some time aggregates about action history.
+    patch_handle_times = self.claction_history.GetPatchHandlingTimes().values()
+    pre_cq_handle_times = self.claction_history.GetPreCQHandlingTimes().values()
+    cq_wait_times = self.claction_history.GetCQWaitingTimes().values()
+    cq_handle_times = self.claction_history.GetCQHandlingTimes().values()
+
+    # Calculate how many good patches were falsely rejected and why.
+    good_patch_rejections = self.claction_history.GetFalseRejections()
+    patch_reason_counts = {}
+    patch_blame_counts = {}
+    for k, v in good_patch_rejections.iteritems():
+      for a in v:
+        build = self.builds_by_build_id.get(a.build_id)
+        if a.bot_type == constants.CQ and build is not None:
+          build_number = build['build_number']
+          reasons = self.reasons.get(build_number, ['None'])
+          blames = self.blames.get(build_number, ['None'])
+          for x in reasons:
+            patch_reason_counts[x] = patch_reason_counts.get(x, 0) + 1
+          for x in blames:
+            patch_blame_counts[x] = patch_blame_counts.get(x, 0) + 1
+
+    good_patch_count = len(self.claction_history.GetSubmittedPatches(False))
+    false_rejection_count = {}
+    bad_cl_candidates = {}
+    for bot_type in [constants.CQ, constants.PRE_CQ]:
+      rejections = self.claction_history.GetFalseRejections(bot_type)
+      false_rejection_count[bot_type] = sum(map(len,
+                                                rejections.values()))
+
+      rejections = self.claction_history.GetTrueRejections(bot_type)
+      rejected_cls = set([x.GetChangeTuple() for x in rejections.keys()])
+      bad_cl_candidates[bot_type] = self.claction_history.SortBySubmitTimes(
+          rejected_cls)
+
+    false_rejection_rate = self.FalseRejectionRate(good_patch_count,
+                                                   false_rejection_count)
+
+    # This list counts how many times each good patch was rejected.
+    rejection_counts = [0] * (good_patch_count - len(good_patch_rejections))
+    rejection_counts += [len(x) for x in good_patch_rejections.values()]
+
+    # Break down the frequency of how many times each patch is rejected.
+    good_patch_rejection_breakdown = []
+    if rejection_counts:
+      for x in range(max(rejection_counts) + 1):
+        good_patch_rejection_breakdown.append((x, rejection_counts.count(x)))
+
+    summary = {
+        'total_cl_actions': len(self.claction_history),
+        'unique_cls': len(self.claction_history.affected_cls),
+        'unique_patches': len(self.claction_history.affected_patches),
+        'submitted_patches': len(self.claction_history.submit_actions),
+        'rejections': len(self.claction_history.reject_actions),
+        'submit_fails': len(self.claction_history.submit_fail_actions),
+        'good_patch_rejections': sum(rejection_counts),
+        'mean_good_patch_rejections': numpy.mean(rejection_counts),
+        'good_patch_rejection_breakdown': good_patch_rejection_breakdown,
+        'good_patch_rejection_count': false_rejection_count,
+        'false_rejection_rate': false_rejection_rate,
+        'median_handling_time': numpy.median(patch_handle_times),
+        'patch_handling_time': patch_handle_times,
+        'bad_cl_candidates': bad_cl_candidates,
+        'unique_blames_change_count': len(unique_cl_blames),
+    }
+
+    logging.info('CQ committed %s changes', summary['submitted_patches'])
+    logging.info('CQ correctly rejected %s unique changes',
+                 summary['unique_blames_change_count'])
+    logging.info('pre-CQ and CQ incorrectly rejected %s changes a total of '
+                 '%s times (pre-CQ: %s; CQ: %s)',
+                 len(good_patch_rejections),
+                 sum(false_rejection_count.values()),
+                 false_rejection_count[constants.PRE_CQ],
+                 false_rejection_count[constants.CQ])
+
+    logging.info('      Total CL actions: %d.', summary['total_cl_actions'])
+    logging.info('    Unique CLs touched: %d.', summary['unique_cls'])
+    logging.info('Unique patches touched: %d.', summary['unique_patches'])
+    logging.info('   Total CLs submitted: %d.', summary['submitted_patches'])
+    logging.info('      Total rejections: %d.', summary['rejections'])
+    logging.info(' Total submit failures: %d.', summary['submit_fails'])
+    logging.info(' Good patches rejected: %d.',
+                 len(good_patch_rejections))
+    logging.info('   Mean rejections per')
+    logging.info('            good patch: %.2f',
+                 summary['mean_good_patch_rejections'])
+    logging.info(' False rejection rate for CQ: %.1f%%',
+                 summary['false_rejection_rate'].get(constants.CQ, 0))
+    logging.info(' False rejection rate for Pre-CQ: %.1f%%',
+                 summary['false_rejection_rate'].get(constants.PRE_CQ, 0))
+    logging.info(' Combined false rejection rate: %.1f%%',
+                 summary['false_rejection_rate']['combined'])
+
+    for x, p in summary['good_patch_rejection_breakdown']:
+      logging.info('%d good patches were rejected %d times.', p, x)
+    logging.info('')
+    logging.info('Good patch handling time:')
+    logging.info('  10th percentile: %.2f hours',
+                 numpy.percentile(patch_handle_times, 10) / 3600.0)
+    logging.info('  25th percentile: %.2f hours',
+                 numpy.percentile(patch_handle_times, 25) / 3600.0)
+    logging.info('  50th percentile: %.2f hours',
+                 summary['median_handling_time'] / 3600.0)
+    logging.info('  75th percentile: %.2f hours',
+                 numpy.percentile(patch_handle_times, 75) / 3600.0)
+    logging.info('  90th percentile: %.2f hours',
+                 numpy.percentile(patch_handle_times, 90) / 3600.0)
+    logging.info('')
+    logging.info('Time spent in Pre-CQ:')
+    logging.info('  10th percentile: %.2f hours',
+                 numpy.percentile(pre_cq_handle_times, 10) / 3600.0)
+    logging.info('  25th percentile: %.2f hours',
+                 numpy.percentile(pre_cq_handle_times, 25) / 3600.0)
+    logging.info('  50th percentile: %.2f hours',
+                 numpy.percentile(pre_cq_handle_times, 50) / 3600.0)
+    logging.info('  75th percentile: %.2f hours',
+                 numpy.percentile(pre_cq_handle_times, 75) / 3600.0)
+    logging.info('  90th percentile: %.2f hours',
+                 numpy.percentile(pre_cq_handle_times, 90) / 3600.0)
+    logging.info('')
+    logging.info('Time spent waiting for CQ:')
+    logging.info('  10th percentile: %.2f hours',
+                 numpy.percentile(cq_wait_times, 10) / 3600.0)
+    logging.info('  25th percentile: %.2f hours',
+                 numpy.percentile(cq_wait_times, 25) / 3600.0)
+    logging.info('  50th percentile: %.2f hours',
+                 numpy.percentile(cq_wait_times, 50) / 3600.0)
+    logging.info('  75th percentile: %.2f hours',
+                 numpy.percentile(cq_wait_times, 75) / 3600.0)
+    logging.info('  90th percentile: %.2f hours',
+                 numpy.percentile(cq_wait_times, 90) / 3600.0)
+    logging.info('')
+    logging.info('Time spent in CQ:')
+    logging.info('  10th percentile: %.2f hours',
+                 numpy.percentile(cq_handle_times, 10) / 3600.0)
+    logging.info('  25th percentile: %.2f hours',
+                 numpy.percentile(cq_handle_times, 25) / 3600.0)
+    logging.info('  50th percentile: %.2f hours',
+                 numpy.percentile(cq_handle_times, 50) / 3600.0)
+    logging.info('  75th percentile: %.2f hours',
+                 numpy.percentile(cq_handle_times, 75) / 3600.0)
+    logging.info('  90th percentile: %.2f hours',
+                 numpy.percentile(cq_handle_times, 90) / 3600.0)
+    logging.info('')
+
+    for bot_type, patches in summary['bad_cl_candidates'].items():
+      logging.info('%d bad patch candidates were rejected by the %s',
+                   len(patches), bot_type)
+      for k in patches:
+        logging.info('Bad patch candidate in: %s', k)
+
+    fmt_fai = '  %(cnt)d failures in %(reason)s'
+    fmt_rej = '  %(cnt)d rejections due to %(reason)s'
+
+    logging.info('Reasons why good patches were rejected:')
+    self._PrintCounts(patch_reason_counts, fmt_rej)
+
+    logging.info('Bugs or CLs responsible for good patches rejections:')
+    self._PrintCounts(patch_blame_counts, fmt_rej)
+
+    logging.info('Reasons why builds failed:')
+    self._PrintCounts(build_reason_counts, fmt_fai)
+
+    return summary
+
+  # TODO(akeshet): some of this logic is copied directly from SummarizeCQ.
+  # Refactor to reuse that code instead.
+  def SummarizePFQ(self):
+    """Process, print, and return a summary of pfq bug and failure statistics.
+
+    As a side effect, save summary to self.summary.
+
+    Returns:
+      A dictionary summarizing the statistics.
+    """
+    # NOTE(review): the docstring promises self.summary is saved, but nothing
+    # is assigned here and an empty dict is returned -- confirm intent.
+    if self.builds:
+      logging.info('%d total runs included, from build %d to %d.',
+                   len(self.builds), self.builds[-1]['build_number'],
+                   self.builds[0]['build_number'])
+      total_passed = len([b for b in self.builds
+                          if b['status'] == constants.BUILDER_STATUS_PASSED])
+      logging.info('%d of %d runs passed.', total_passed, len(self.builds))
+    else:
+      logging.info('No runs included.')
+
+    # TODO(akeshet): This is the end of the verbatim copied code.
+
+    # Count the number of times each particular (canonicalized) blame url was
+    # given.
+    unique_blame_counts = {}
+    for blames in self.blames.itervalues():
+      for b in blames:
+        unique_blame_counts[b] = unique_blame_counts.get(b, 0) + 1
+
+    # Most-blamed issues first (sorted by count, descending).
+    top_blames = sorted([(count, blame) for
+                         blame, count in unique_blame_counts.iteritems()],
+                        reverse=True)
+    logging.info('Top blamed issues:')
+    if top_blames:
+      for tb in top_blames:
+        logging.info('   %s x %s', tb[0], tb[1])
+    else:
+      logging.info('None!')
+
+    return {}
+
+
+def _CheckOptions(options):
+  # Ensure that specified start date is in the past.
+  now = datetime.datetime.now()
+  if options.start_date and now.date() < options.start_date:
+    logging.error('Specified start date is in the future: %s',
+                  options.start_date)
+    return False
+
+  return True
+
+
def GetParser():
  """Creates the argparse parser.

  Returns:
    A commandline.ArgumentParser for this script.
  """
  parser = commandline.ArgumentParser(description=__doc__)

  # Exactly one of these time-scope options must be supplied.
  ex_group = parser.add_mutually_exclusive_group(required=True)
  ex_group.add_argument('--start-date', action='store', type='date',
                        default=None,
                        help='Limit scope to a start date in the past.')
  ex_group.add_argument('--past-month', action='store_true', default=False,
                        help='Limit scope to the past 30 days up to now.')
  ex_group.add_argument('--past-week', action='store_true', default=False,
                        help='Limit scope to the past week up to now.')
  ex_group.add_argument('--past-day', action='store_true', default=False,
                        help='Limit scope to the past day up to now.')

  parser.add_argument('--cred-dir', action='store', required=True,
                      metavar='CIDB_CREDENTIALS_DIR',
                      help='Database credentials directory with certificates '
                           'and other connection information. Obtain your '
                           'credentials at go/cros-cidb-admin .')
  # Fixed help text: the original adjacent string literals concatenated
  # without a space, rendering as "given number(inclusive).".
  parser.add_argument('--starting-build', action='store', type=int,
                      default=None,
                      help='Filter to builds after given number (inclusive).')
  parser.add_argument('--end-date', action='store', type='date', default=None,
                      help='Limit scope to an end date in the past.')

  parser.add_argument('--build-type', choices=['cq', 'chrome-pfq'],
                      default='cq',
                      help='Build type to summarize. Default: cq.')
  return parser
+
+
def main(argv):
  """Entry point: parse arguments, gather build stats, and print a summary."""
  options = GetParser().parse_args(argv)

  if not _CheckOptions(options):
    sys.exit(1)

  db = cidb.CIDBConnection(options.cred_dir)

  # Dates are always truthy, so `or` falls back to today only when the
  # option was left unset.
  end_date = options.end_date or datetime.datetime.now().date()

  # Determine the start date to use, which is required.
  if options.start_date:
    start_date = options.start_date
  else:
    assert options.past_month or options.past_week or options.past_day
    if options.past_month:
      days_back = 30
    elif options.past_week:
      days_back = 7
    else:
      days_back = 1
    start_date = end_date - datetime.timedelta(days=days_back)

  master_config = (constants.CQ_MASTER if options.build_type == 'cq'
                   else constants.PFQ_MASTER)

  engine = CLStatsEngine(db)
  engine.Gather(start_date, end_date, master_config,
                starting_build_number=options.starting_build)
  engine.Summarize(options.build_type)
diff --git a/scripts/summarize_build_stats_unittest b/scripts/summarize_build_stats_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/summarize_build_stats_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/summarize_build_stats_unittest.py b/scripts/summarize_build_stats_unittest.py
new file mode 100644
index 0000000..c6a417b
--- /dev/null
+++ b/scripts/summarize_build_stats_unittest.py
@@ -0,0 +1,228 @@
+# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for summarize_build_stats."""
+
+from __future__ import print_function
+
+import datetime
+import itertools
+import mock
+import random
+
+from chromite.lib import clactions
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import fake_cidb
+from chromite.scripts import summarize_build_stats
+from chromite.cbuildbot import metadata_lib
+from chromite.cbuildbot import constants
+
+
# Short aliases for the bot-type keys used throughout these tests.
CQ = constants.CQ
PRE_CQ = constants.PRE_CQ
+
+
class TestCLActionLogic(cros_test_lib.TestCase):
  """Ensures that CL action analysis logic is correct."""

  def setUp(self):
    # Every test starts from a fresh, empty in-memory CIDB fake.
    self.fake_db = fake_cidb.FakeCIDBConnection()

  def _PopulateFakeCidbWithTestData(self, cq):
    """Generate test data and insert it in the fake cidb object.

    Args:
      cq: Whether this is a CQ run. If False, this is a Pre-CQ run.
    """
    # Mock patches for test data.
    c1p1 = metadata_lib.GerritPatchTuple(1, 1, False)
    c2p1 = metadata_lib.GerritPatchTuple(2, 1, True)
    c2p2 = metadata_lib.GerritPatchTuple(2, 2, True)
    c3p1 = metadata_lib.GerritPatchTuple(3, 1, True)
    c3p2 = metadata_lib.GerritPatchTuple(3, 2, True)
    c4p1 = metadata_lib.GerritPatchTuple(4, 1, True)
    c4p2 = metadata_lib.GerritPatchTuple(4, 2, True)

    # Mock builder status dictionaries
    passed_status = {'status': constants.FINAL_STATUS_PASSED}
    failed_status = {'status': constants.FINAL_STATUS_FAILED}

    # Monotonically increasing fake timestamps for recorded CL actions.
    t = itertools.count()
    bot_config = (constants.CQ_MASTER if cq
                  else constants.PRE_CQ_DEFAULT_CONFIGS[0])

    # pylint: disable=bad-continuation
    test_metadata = [
      # Build 1 picks up no patches.
      metadata_lib.CBuildbotMetadata(
          ).UpdateWithDict({'build-number' : 1,
                            'bot-config' : bot_config,
                            'results' : [],
                            'status' : passed_status}),
      # Build 2 picks up c1p1 and does nothing.
      metadata_lib.CBuildbotMetadata(
          ).UpdateWithDict({'build-number' : 2,
                            'bot-config' : bot_config,
                            'results' : [],
                            'status' : failed_status,
                            'changes': [c1p1._asdict()]}
          ).RecordCLAction(c1p1, constants.CL_ACTION_PICKED_UP, t.next()),
      # Build 3 picks up c1p1 and c2p1 and rejects both.
      # c3p1 is not included in the run because it fails to apply.
      metadata_lib.CBuildbotMetadata(
          ).UpdateWithDict({'build-number' : 3,
                            'bot-config' : bot_config,
                            'results' : [],
                            'status' : failed_status,
                            'changes': [c1p1._asdict(),
                                        c2p1._asdict()]}
          ).RecordCLAction(c1p1, constants.CL_ACTION_PICKED_UP, t.next()
          ).RecordCLAction(c2p1, constants.CL_ACTION_PICKED_UP, t.next()
          ).RecordCLAction(c1p1, constants.CL_ACTION_KICKED_OUT, t.next()
          ).RecordCLAction(c2p1, constants.CL_ACTION_KICKED_OUT, t.next()
          ).RecordCLAction(c3p1, constants.CL_ACTION_KICKED_OUT, t.next()),
      # Build 4 picks up c4p1 and does nothing with it.
      # c4p2 isn't picked up because it fails to apply.
      # NOTE(review): this entry reuses 'build-number' 3 although the comment
      # above says "Build 4" -- possibly a typo; the fake db keys builds by
      # build_id rather than build number, but worth confirming intent.
      metadata_lib.CBuildbotMetadata(
          ).UpdateWithDict({'build-number' : 3,
                            'bot-config' : bot_config,
                            'results' : [],
                            'status' : failed_status,
                            'changes': [c4p1._asdict()]}
          ).RecordCLAction(c4p1, constants.CL_ACTION_PICKED_UP, t.next()
          ).RecordCLAction(c4p2, constants.CL_ACTION_KICKED_OUT, t.next()),
    ]
    if cq:
      test_metadata += [
        # Build 4 picks up c1p1, c2p2, c3p2, c4p1 and submits the first three.
        # c4p2 is submitted without being tested.
        # So  c1p1 should be detected as a 1-time rejected good patch,
        # and c2p1 should be detected as a possibly bad patch.
        metadata_lib.CBuildbotMetadata(
            ).UpdateWithDict({'build-number' : 4,
                              'bot-config' : bot_config,
                              'results' : [],
                              'status' : passed_status,
                              'changes': [c1p1._asdict(),
                                          c2p2._asdict()]}
            ).RecordCLAction(c1p1, constants.CL_ACTION_PICKED_UP, t.next()
            ).RecordCLAction(c2p2, constants.CL_ACTION_PICKED_UP, t.next()
            ).RecordCLAction(c3p2, constants.CL_ACTION_PICKED_UP, t.next()
            ).RecordCLAction(c4p1, constants.CL_ACTION_PICKED_UP, t.next()
            ).RecordCLAction(c1p1, constants.CL_ACTION_SUBMITTED, t.next()
            ).RecordCLAction(c2p2, constants.CL_ACTION_SUBMITTED, t.next()
            ).RecordCLAction(c3p2, constants.CL_ACTION_SUBMITTED, t.next()
            ).RecordCLAction(c4p2, constants.CL_ACTION_SUBMITTED, t.next()),
      ]
    else:
      # Pre-CQ only: two extra failing runs that each reject a patch.
      test_metadata += [
        metadata_lib.CBuildbotMetadata(
            ).UpdateWithDict({'build-number' : 5,
                              'bot-config' : bot_config,
                              'results' : [],
                              'status' : failed_status,
                              'changes': [c4p1._asdict()]}
            ).RecordCLAction(c4p1, constants.CL_ACTION_PICKED_UP, t.next()
            ).RecordCLAction(c4p1, constants.CL_ACTION_KICKED_OUT, t.next()),
        metadata_lib.CBuildbotMetadata(
            ).UpdateWithDict({'build-number' : 6,
                              'bot-config' : bot_config,
                              'results' : [],
                              'status' : failed_status,
                              'changes': [c4p1._asdict()]}
            ).RecordCLAction(c1p1, constants.CL_ACTION_PICKED_UP, t.next()
            ).RecordCLAction(c1p1, constants.CL_ACTION_KICKED_OUT, t.next())
      ]
    # pylint: enable=bad-continuation

    # test_metadata should not be guaranteed to be ordered by build number
    # so shuffle it, but use the same seed each time so that unit test is
    # deterministic.
    random.seed(0)
    random.shuffle(test_metadata)

    # Insert each fake build and its recorded CL actions into the fake db.
    for m in test_metadata:
      build_id = self.fake_db.InsertBuild(
          m.GetValue('bot-config'), constants.WATERFALL_INTERNAL,
          m.GetValue('build-number'), m.GetValue('bot-config'),
          'bot-hostname')
      m.UpdateWithDict({'build_id': build_id})
      actions = []
      for action_metadata in m.GetDict()['cl_actions']:
        actions.append(clactions.CLAction.FromMetadataEntry(action_metadata))
      self.fake_db.InsertCLActions(build_id, actions)

  def testCLStatsEngineSummary(self):
    """End-to-end check of Summarize() over CQ and Pre-CQ fake data."""
    with cros_build_lib.ContextManagerStack() as stack:
      self._PopulateFakeCidbWithTestData(cq=False)
      self._PopulateFakeCidbWithTestData(cq=True)
      # Stub out annotation gathering; reasons/blames are injected below.
      stack.Add(mock.patch.object, summarize_build_stats.CLStatsEngine,
                'GatherBuildAnnotations')
      cl_stats = summarize_build_stats.CLStatsEngine(self.fake_db)
      cl_stats.Gather(datetime.date.today(), datetime.date.today())
      cl_stats.reasons = {1: '', 2: '', 3: constants.FAILURE_CATEGORY_BAD_CL,
                          4: constants.FAILURE_CATEGORY_BAD_CL}
      cl_stats.blames = {1: '', 2: '', 3: 'crosreview.com/1',
                         4: 'crosreview.com/1'}
      summary = cl_stats.Summarize('cq')

      expected = {
          'mean_good_patch_rejections': 0.5,
          'unique_patches': 7,
          'unique_blames_change_count': 0,
          'total_cl_actions': 28,
          'good_patch_rejection_breakdown': [(0, 3), (1, 0), (2, 1)],
          'good_patch_rejection_count': {CQ: 1, PRE_CQ: 1},
          'good_patch_rejections': 2,
          'false_rejection_rate': {CQ: 20., PRE_CQ: 20., 'combined': 100. / 3},
          'submitted_patches': 4,
          'submit_fails': 0,
          'unique_cls': 4,
          'median_handling_time': -1,  # This will be ignored in comparison
          'patch_handling_time': -1,  # This will be ignored in comparison
          'bad_cl_candidates': {
              CQ: [metadata_lib.GerritChangeTuple(gerrit_number=2,
                                                  internal=True)],
              PRE_CQ: [metadata_lib.GerritChangeTuple(gerrit_number=2,
                                                      internal=True),
                       metadata_lib.GerritChangeTuple(gerrit_number=4,
                                                      internal=True)],
          },
          'rejections': 10}
      # Ignore handling times in comparison, since these are not fully
      # reproducible from run to run of the unit test.
      summary['median_handling_time'] = expected['median_handling_time']
      summary['patch_handling_time'] = expected['patch_handling_time']
      self.maxDiff = None
      self.assertEqual(summary, expected)

  def testProcessBlameString(self):
    """Tests that bug and CL links are correctly parsed."""
    blame = ('some words then crbug.com/1234, then other junk and '
             'https://code.google.com/p/chromium/issues/detail?id=4321 '
             'then some stuff and other stuff and b/2345 and also '
             'https://b.corp.google.com/issue?id=5432&query=5432 '
             'and then some crosreview.com/3456 or some '
             'https://chromium-review.googlesource.com/#/c/6543/ and '
             'then crosreview.com/i/9876 followed by '
             'https://chrome-internal-review.googlesource.com/#/c/6789/ '
             'blah https://gutsv3.corp.google.com/#ticket/1234 t/4321 and '
             'https://code.google.com/p/chromium/issues/detail?id=522555#c58'
             ' and https://codereview.chromium.org/1216423002 ')
    expected = ['crbug.com/1234',
                'crbug.com/4321',
                'b/2345',
                'b/5432',
                'crosreview.com/3456',
                'crosreview.com/6543',
                'crosreview.com/i/9876',
                'crosreview.com/i/6789',
                't/1234',
                't/4321',
                'crbug.com/522555',
                'codereview.chromium.org/1216423002']
    self.assertEqual(
        summarize_build_stats.CLStatsEngine.ProcessBlameString(blame),
        expected)
diff --git a/scripts/sync_chrome.py b/scripts/sync_chrome.py
new file mode 100644
index 0000000..d01da26
--- /dev/null
+++ b/scripts/sync_chrome.py
@@ -0,0 +1,66 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Sync the Chrome source code used by Chrome OS to the specified directory."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import gclient
+from chromite.lib import osutils
+
+
def GetParser():
  """Build and return the command-line argument parser for this script."""
  parser = commandline.ArgumentParser(description=__doc__)

  # --tag and --revision both store into |version| and so are mutually
  # exclusive ways of naming the Chrome source to sync to.
  version = parser.add_mutually_exclusive_group()
  version.add_argument('--tag', dest='version',
                       help='Sync to specified Chrome release')
  version.add_argument('--revision', dest='version',
                       help='Sync to specified git revision')

  parser.add_argument('--internal', action='store_true', default=False,
                      help='Sync internal version of Chrome')
  parser.add_argument('--reset', action='store_true', default=False,
                      help='Revert local changes')
  parser.add_argument('--gclient', default=None,
                      help=commandline.argparse.SUPPRESS)
  parser.add_argument('--gclient_template', help='Template gclient input file')
  parser.add_argument('--skip_cache', dest='use_cache', action='store_false',
                      help='Skip using git cache')
  parser.add_argument('chrome_root', help='Directory to sync chrome in')

  return parser
+
+
def main(argv):
  """Script entry point: locate gclient, then check out and sync Chrome."""
  parser = GetParser()
  options = parser.parse_args(argv)

  # Fixed: the original used "options.gclient is ''", an identity comparison
  # against a string literal that only works by CPython interning accident.
  if options.gclient == '':
    parser.error('--gclient can not be an empty string!')
  gclient_path = options.gclient or osutils.Which('gclient')
  if not gclient_path:
    gclient_path = os.path.join(constants.DEPOT_TOOLS_DIR, 'gclient')

  # Revert any lingering local changes. SafeMakedirs returns False when the
  # directory already existed, i.e. there may be an old checkout to reset.
  if not osutils.SafeMakedirs(options.chrome_root) and options.reset:
    try:
      gclient.Revert(gclient_path, options.chrome_root)
    except cros_build_lib.RunCommandError:
      # A busted checkout can make the revert fail; start from scratch.
      osutils.RmDir(options.chrome_root)
      osutils.SafeMakedirs(options.chrome_root)

  # Sync new Chrome.
  gclient.WriteConfigFile(gclient_path, options.chrome_root,
                          options.internal, options.version,
                          options.gclient_template, options.use_cache)
  gclient.Sync(gclient_path, options.chrome_root, reset=options.reset)

  return 0
diff --git a/scripts/sync_package_status.py b/scripts/sync_package_status.py
new file mode 100644
index 0000000..d5f10be
--- /dev/null
+++ b/scripts/sync_package_status.py
@@ -0,0 +1,557 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Synchronize issues in Package Status spreadsheet with Issue Tracker."""
+
+from __future__ import print_function
+
+import optparse
+import os
+import sys
+
+from chromite.lib import cros_build_lib
+from chromite.lib import gdata_lib
+from chromite.lib import operation
+from chromite.lib import upgrade_table as utable
+from chromite.scripts import upload_package_status as ups
+
+
# Issue Tracker project that issues are filed against.
TRACKER_PROJECT_NAME = 'chromium'

# Name of the worksheet holding package rows in the status spreadsheet.
PKGS_WS_NAME = 'Packages'

CR_ORG = 'chromium.org'
CHROMIUMOS_SITE = 'http://www.%s/chromium-os' % CR_ORG
PKG_UPGRADE_PAGE = '%s/gentoo-package-upgrade-process' % CHROMIUMOS_SITE
DOCS_SITE = 'https://docs.google.com/a'

# Spreadsheet column names, canonicalized for spreadsheet access.
COL_PACKAGE = gdata_lib.PrepColNameForSS(utable.UpgradeTable.COL_PACKAGE)
COL_TEAM = gdata_lib.PrepColNameForSS('Team/Lead')
COL_OWNER = gdata_lib.PrepColNameForSS('Owner')
COL_TRACKER = gdata_lib.PrepColNameForSS('Tracker')

ARCHES = ('amd64', 'arm', 'x86')

# Tracker-column values (lowercased, spaces stripped) that opt a package
# out of automatic tracker syncing.
DISABLING_TRACKER_VALUES = set(['n/a', 'disable', 'disabled'])

# Module-wide operation object used for user-facing logging.
oper = operation.Operation('sync_package_status')
+
+
def _GetPkgSpreadsheetURL(ss_key):
  """Return the full package-status spreadsheet URL for |ss_key|."""
  return '{}/{}/spreadsheet/ccc?key={}'.format(DOCS_SITE, CR_ORG, ss_key)
+
+
class SyncError(RuntimeError):
  """Extend RuntimeError for use in this module.

  Raised when a spreadsheet/Tracker synchronization step fails.
  """
+
+
class PackageBlacklisted(RuntimeError):
  """Raised when package has disabled automatic tracker syncing.

  See DISABLING_TRACKER_VALUES for the Tracker-column values that trigger it.
  """
+
+
class Syncer(object):
  """Class to manage synchronizing between spreadsheet and Tracker."""

  # Map spreadsheet team names to Tracker team labels.
  VALID_TEAMS = {'build': 'Build',
                 'kernel': 'Cr-OS-Kernel',
                 'security': 'Security',
                 'system': 'Cr-OS-Systems',
                 'ui': 'Cr-UI'}
  # Spreadsheet package states that warrant a Tracker issue.
  UPGRADE_STATES = set(
      [utable.UpgradeTable.STATE_NEEDS_UPGRADE,
       utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_PATCHED,
       utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_DUPLICATED])

  __slots__ = (
      'default_owner',  # Default owner to use when creating issues
      'owners',         # Set of owners to select (None means no filter)
      'pretend',        # If True, make no real changes
      'scomm',          # SpreadsheetComm
      'tcomm',          # TrackerComm
      'teams',          # Set of teams to select (None means no filter)
      'tracker_col_ix',  # Index of Tracker column in spreadsheet
      'verbose',        # Verbose boolean
  )

  def __init__(self, tcomm, scomm, pretend=False, verbose=False):
    """Initialize the syncer.

    Args:
      tcomm: TrackerComm object for creating/updating Tracker issues.
      scomm: SpreadsheetComm object for reading/writing the spreadsheet.
      pretend: If True, log what would happen but make no real changes.
      verbose: Verbose boolean.
    """
    self.tcomm = tcomm
    self.scomm = scomm

    self.tracker_col_ix = None

    self.teams = None
    self.owners = None
    self.default_owner = None

    self.pretend = pretend
    self.verbose = verbose

  def _ReduceTeamName(self, team):
    """Translate |team| from spreadsheet/commandline name to short name.

    For example:  build/bdavirro --> build, build --> build
    """
    if team:
      return team.lower().split('/')[0]
    return None

  def SetTeamFilter(self, teamarg):
    """Set team filter using colon-separated team names in |teamarg|

    Resulting filter in self.teams is set of "reduced" team names.
    Examples:
      'build:system:ui' --> set(['build', 'system', 'ui'])
      'Build:system:UI' --> set(['build', 'system', 'ui'])

    If an invalid team name is given oper.Die is called with explanation.
    """
    if teamarg:
      teamlist = []
      for team in teamarg.split(':'):
        t = self._ReduceTeamName(team)
        if t in self.VALID_TEAMS:
          teamlist.append(t)
        else:
          oper.Die('Invalid team name "%s".  Choose from: %s' %
                   (team, ','.join(self.VALID_TEAMS.keys())))
      self.teams = set(teamlist)
    else:
      self.teams = None

  def _ReduceOwnerName(self, owner):
    """Translate |owner| from spreadsheet/commandline name to short name.

    For example:  joe@chromium.org -> joe, joe --> joe
    """
    if owner:
      return owner.lower().split('@')[0]
    return None

  def SetOwnerFilter(self, ownerarg):
    """Set owner filter using colon-separated owner names in |ownerarg|."""
    if ownerarg:
      self.owners = set([self._ReduceOwnerName(o) for o in ownerarg.split(':')])
    else:
      self.owners = None

  def SetDefaultOwner(self, default_owner):
    """Use |default_owner| as issue owner when none set in spreadsheet."""
    if default_owner and default_owner == 'me':
      self.default_owner = os.environ['USER']
    else:
      self.default_owner = default_owner

  def _RowPassesFilters(self, row):
    """Return true if |row| passes any team/owner filters."""
    if self.teams:
      team = self._ReduceTeamName(row[COL_TEAM])
      if team not in self.teams:
        return False

    if self.owners:
      owner = self._ReduceOwnerName(row[COL_OWNER])
      if owner not in self.owners:
        return False

    return True

  def Sync(self):
    """Do synchronization between Spreadsheet and Tracker.

    Raises:
      SyncError: If the Tracker column is missing or any row failed.
    """
    self.tracker_col_ix = self.scomm.GetColumnIndex(COL_TRACKER)
    if self.tracker_col_ix is None:
      raise SyncError('Unable to find "Tracker" column in spreadsheet')

    errors = []

    # Go over each row in Spreadsheet.  Row index starts at 2
    # because spreadsheet rows start at 1 and we don't want the header row.
    rows = self.scomm.GetRows()
    for rowIx, row in enumerate(rows, start=self.scomm.ROW_NUMBER_OFFSET):
      if not self._RowPassesFilters(row):
        oper.Info('\nSkipping row %d, pkg: %r (team=%s, owner=%s) ...' %
                  (rowIx, row[COL_PACKAGE], row[COL_TEAM], row[COL_OWNER]))
        continue

      oper.Info('\nProcessing row %d, pkg: %r (team=%s, owner=%s) ...' %
                (rowIx, row[COL_PACKAGE], row[COL_TEAM], row[COL_OWNER]))

      try:
        new_issue = self._GenIssueForRow(row)
        old_issue_id = self._GetRowTrackerId(row)

        if new_issue and not old_issue_id:
          self._CreateRowIssue(rowIx, row, new_issue)
        elif not new_issue and old_issue_id:
          self._ClearRowIssue(rowIx, row)
        else:
          # Nothing to do for this package.
          reason = 'already has issue' if old_issue_id else 'no upgrade needed'
          oper.Notice('Nothing to do for row %d, package %r: %s.' %
                      (rowIx, row[COL_PACKAGE], reason))
      except PackageBlacklisted:
        oper.Notice('Tracker sync disabled for row %d, package %r: skipped.' %
                    (rowIx, row[COL_PACKAGE]))
      except SyncError:
        # Keep going; collect per-row failures and report them all at the end.
        errors.append('Error processing row %d, pkg: %r.  See above.' %
                      (rowIx, row[COL_PACKAGE]))

    if errors:
      raise SyncError('\n'.join(errors))

  def _GetRowValue(self, row, colName, arch=None):
    """Get value from |row| at |colName|, adjusted for |arch|"""
    if arch:
      colName = utable.UpgradeTable.GetColumnName(colName, arch=arch)
    colName = gdata_lib.PrepColNameForSS(colName)
    return row[colName]

  def _GenIssueForRow(self, row):
    """Generate an Issue object for |row| if applicable

    Returns:
      A gdata_lib.Issue, or None if no platform needs an upgrade.

    Raises:
      SyncError: If the row needs an issue but has no team value.
    """
    # Row needs an issue if it "needs upgrade" on any platform.
    statuses = {}
    needs_issue = False
    for arch in ARCHES:
      state = self._GetRowValue(row, utable.UpgradeTable.COL_STATE, arch)
      statuses[arch] = state
      if state in self.UPGRADE_STATES:
        needs_issue = True

    if not needs_issue:
      return None

    pkg = row[COL_PACKAGE]
    team = self._ReduceTeamName(row[COL_TEAM])
    if not team:
      oper.Error('Unable to create Issue for package "%s" because no '
                 'team value is specified.' % pkg)
      raise SyncError()

    labels = ['Type-Bug',
              'OS-Chrome',
              'Cr-OS-Packages',
              'Pri-2',
              self.VALID_TEAMS[team]]

    owner = self._ReduceOwnerName(row[COL_OWNER])
    status = 'Untriaged'
    if owner:
      owner = owner + '@chromium.org'
      status = 'Available'
    elif self.default_owner:
      # Fixed: the domain previously had a trailing period
      # ('@chromium.org.'), which produced an invalid owner address.
      owner = self.default_owner + '@chromium.org'
    else:
      owner = None  # Rather than ''

    title = '%s package needs upgrade from upstream Portage' % pkg

    lines = ['The %s package can be upgraded from upstream Portage' % pkg,
             '',
             'At this moment the status on each arch is as follows:']

    for arch in sorted(statuses):
      arch_status = statuses[arch]
      if arch_status:
        # Get all versions for this arch right now.
        curr_ver_col = utable.UpgradeTable.COL_CURRENT_VER
        curr_ver = self._GetRowValue(row, curr_ver_col, arch)
        stable_upst_ver_col = utable.UpgradeTable.COL_STABLE_UPSTREAM_VER
        stable_upst_ver = self._GetRowValue(row, stable_upst_ver_col, arch)
        latest_upst_ver_col = utable.UpgradeTable.COL_LATEST_UPSTREAM_VER
        latest_upst_ver = self._GetRowValue(row, latest_upst_ver_col, arch)

        arch_vers = ['Current version: %s' % curr_ver,
                     'Stable upstream version: %s' % stable_upst_ver,
                     'Latest upstream version: %s' % latest_upst_ver]
        lines.append('  On %s: %s' % (arch, arch_status))
        lines.append('    %s' % ', '.join(arch_vers))
      else:
        lines.append('  On %s: not used' % arch)

    lines.append('')
    lines.append('Check the latest status for this package, including '
                 'which upstream versions are available, at:\n  %s' %
                 _GetPkgSpreadsheetURL(self.scomm.ss_key))
    lines.append('For help upgrading see: %s' % PKG_UPGRADE_PAGE)

    summary = '\n'.join(lines)

    issue = gdata_lib.Issue(title=title,
                            summary=summary,
                            status=status,
                            owner=owner,
                            labels=labels,)
    return issue

  def _GetRowTrackerId(self, row):
    """Get the tracker issue id in |row| if it exists, return None otherwise.

    Raises:
      PackageBlacklisted if package has Tracker column value to disable syncing.
    """
    tracker_val = row[COL_TRACKER]
    if tracker_val:
      try:
        return int(tracker_val)
      except ValueError:
        # See if the unexpected value is one that disables tracker syncing.
        if tracker_val.replace(' ', '').lower() in DISABLING_TRACKER_VALUES:
          raise PackageBlacklisted()

        raise

    return None

  def _CreateRowIssue(self, rowIx, row, issue):
    """Create a Tracker issue for |issue|, insert into |row| at |rowIx|

    Raises:
      SyncError: If the spreadsheet cannot be written to.
    """

    pkg = row[COL_PACKAGE]
    if not self.pretend:
      oper.Info('Creating Tracker issue for package %s with details:\n%s' %
                (pkg, issue))

      # Before actually creating the Tracker issue, confirm that writing
      # to this spreadsheet row is going to work.
      try:
        self.scomm.ClearCellValue(rowIx, self.tracker_col_ix)
      except gdata_lib.SpreadsheetError as ex:
        oper.Error('Unable to write to row %d, package %r.  Aborting issue'
                   ' creation.  Error was:\n%s' % (rowIx, pkg, ex))
        raise SyncError

      try:
        issue_id = self.tcomm.CreateTrackerIssue(issue)
      except gdata_lib.TrackerInvalidUserError as ex:
        # Retry without an owner; record the bad owner in the issue text.
        oper.Warning('%s.  Ignoring owner field for issue %d, package %r.' %
                     (ex, rowIx, pkg))
        issue.summary += ('\n\nNote that the row for this package in'
                          ' the spreadsheet at go/crospkgs has an "owner"\n'
                          'value that is not a valid Tracker user: "%s".' %
                          issue.owner)
        issue.owner = None
        issue_id = self.tcomm.CreateTrackerIssue(issue)

      oper.Info('Inserting new Tracker issue %d for package %s' %
                (issue_id, pkg))
      ss_issue_val = self._GenSSLinkToIssue(issue_id)

      # This really should not fail since write access was checked before.
      try:
        self.scomm.ReplaceCellValue(rowIx, self.tracker_col_ix, ss_issue_val)
        oper.Notice('Created Tracker issue %d for row %d, package %r' %
                    (issue_id, rowIx, pkg))
      except gdata_lib.SpreadsheetError as ex:
        oper.Error('Failed to write link to new issue %d into'
                   ' row %d, package %r:\n%s' %
                   (issue_id, rowIx, pkg, ex))
        oper.Error('This means that the spreadsheet will have no record of'
                   ' this Tracker Issue and will create one again next time'
                   ' unless the spreadsheet is edited by hand!')
        raise SyncError

    else:
      oper.Notice('Would create and insert issue for row %d, package %r' %
                  (rowIx, pkg))
      oper.Info('Issue would be as follows:\n%s' % issue)

  def _GenSSLinkToIssue(self, issue_id):
    """Create the spreadsheet hyperlink format for |issue_id|"""
    return '=hyperlink("crbug.com/%d";"%d")' % (issue_id, issue_id)

  def _ClearRowIssue(self, rowIx, row):
    """Clear the Tracker cell for row at |rowIx|

    Raises:
      SyncError: If the spreadsheet cell cannot be cleared.
    """

    pkg = row[COL_PACKAGE]
    if not self.pretend:
      try:
        self.scomm.ClearCellValue(rowIx, self.tracker_col_ix)
        oper.Notice('Cleared Tracker issue from row %d, package %r' %
                    (rowIx, pkg))
      except gdata_lib.SpreadsheetError as ex:
        oper.Error('Error while clearing Tracker issue for'
                   ' row %d, package %r:\n%s' % (rowIx, pkg, ex))
        raise SyncError

    else:
      oper.Notice('Would clear Tracker issue from row %d, package %r' %
                  (rowIx, pkg))
+
+
+def PrepareCreds(cred_file, token_file, email):
+  """Return a Creds object from given credentials.
+
+  If |email| is given, the Creds object will contain that |email|
+  and a password entered at a prompt.
+
+  Otherwise, if |token_file| is given then the Creds object will have
+  the auth_token from that file.
+
+  Otherwise, if |cred_file| is given then the Creds object will have
+  the email/password from that file.
+  """
+
+  creds = gdata_lib.Creds()
+
+  if email:
+    creds.SetCreds(email)
+  elif token_file and os.path.exists(token_file):
+    creds.LoadAuthToken(token_file)
+  elif cred_file and os.path.exists(cred_file):
+    creds.LoadCreds(cred_file)
+
+  return creds
+
+
+def _CreateOptParser():
+  """Create the optparser.parser object for command-line args."""
+  usage = 'Usage: %prog [options]'
+  epilog = ("""
+Use this script to synchronize tracker issues between the package status
+spreadsheet and the chromium-os Tracker.  It uses the "Tracker" column of the
+package spreadsheet.  If a package needs an upgrade and has no tracker issue in
+that column then a tracker issue is created.  If it does not need an upgrade
+then that column is cleared.
+
+Credentials must be specified using --auth-token-file, --cred-file or --email.
+The first two have default values which you can rely on if valid, the latter
+will prompt for your password.  If you specify --email you will be given a
+chance to save your email/password out as a credentials file for next time.
+
+Uses spreadsheet key %(ss_key)s, worksheet "%(ws_name)s".
+(if --test-spreadsheet is set then spreadsheet
+%(test_ss_key)s is used).
+
+Use the --team and --owner options to operate only on packages assigned to
+particular owners or teams.  Generally, running without a team or owner filter
+is not intended, so use --team=all and/or --owner=all.
+
+Issues will be assigned to the owner of the package in the spreadsheet, if
+available.  If not, the owner defaults to value given to --default-owner.
+
+The --owner and --default-owner options accept "me" as an argument, which is
+only useful if your username matches your chromium account name.
+
+""" %
+            {'ss_key': ups.REAL_SS_KEY, 'ws_name': PKGS_WS_NAME,
+             'test_ss_key': ups.TEST_SS_KEY})
+
+  class MyOptParser(optparse.OptionParser):
+    """Override default epilog formatter, which strips newlines."""
+
+    def format_epilog(self, formatter):
+      return self.epilog
+
+  teamhelp = '[%s]' % ', '.join(Syncer.VALID_TEAMS.keys())
+
+  parser = MyOptParser(usage=usage, epilog=epilog)
+  parser.add_option('--auth-token-file', dest='token_file', type='string',
+                    action='store', default=gdata_lib.TOKEN_FILE,
+                    help='File for reading/writing Docs auth token.'
+                    ' [default: "%default"]')
+  parser.add_option('--cred-file', dest='cred_file', type='string',
+                    action='store', default=gdata_lib.CRED_FILE,
+                    help='Path to gdata credentials file [default: "%default"]')
+  parser.add_option('--email', dest='email', type='string',
+                    action='store', default=None,
+                    help='Email for Google Doc/Tracker user')
+  parser.add_option('--pretend', dest='pretend', action='store_true',
+                    default=False,
+                    help='Do not make any actual changes.')
+  parser.add_option('--team', dest='team', type='string', action='store',
+                    default=None,
+                    help='Filter by team; colon-separated %s' % teamhelp)
+  parser.add_option('--default-owner', dest='default_owner',
+                    type='string', action='store', default=None,
+                    help='Specify issue owner to use when package has no owner')
+  parser.add_option('--owner', dest='owner', type='string', action='store',
+                    default=None,
+                    help='Filter by package owner;'
+                    ' colon-separated chromium.org accounts')
+  parser.add_option('--test-spreadsheet', dest='test_ss',
+                    action='store_true', default=False,
+                    help='Sync to the testing spreadsheet (implies --pretend).')
+  parser.add_option('--verbose', dest='verbose', action='store_true',
+                    default=False,
+                    help='Enable verbose output (for debugging)')
+
+  return parser
+
+
+def _CheckOptions(options):
+  """Vet the options."""
+  me = os.environ['USER']
+
+  if not options.email and not os.path.exists(options.cred_file):
+    options.email = me
+    oper.Notice('Assuming your chromium email is %s@chromium.org.'
+                '  Override with --email.' % options.email)
+
+  if not options.team and not options.owner:
+    oper.Notice('Without --owner or --team filters this will run for all'
+                ' packages in the spreadsheet (same as --team=all).')
+    if not cros_build_lib.BooleanPrompt(
+        'Are you sure you want to run for all packages?', False):
+      sys.exit(0)
+
+  if options.team and options.team == 'all':
+    options.team = None
+
+  if options.owner and options.owner == 'all':
+    options.owner = None
+
+  if options.owner and options.owner == 'me':
+    options.owner = me
+    oper.Notice('Using %r for owner filter (from $USER envvar)' % options.owner)
+
+  if options.test_ss and not options.pretend:
+    oper.Notice('Running in --pretend mode because of --test-spreadsheet')
+    options.pretend = True
+
+
+def main(argv):
+  """Main function."""
+  parser = _CreateOptParser()
+  (options, _args) = parser.parse_args(argv)
+
+  oper.verbose = options.verbose
+
+  _CheckOptions(options)
+
+  ss_key = ups.TEST_SS_KEY if options.test_ss else ups.REAL_SS_KEY
+
+  # Prepare credentials for Docs and Tracker access.
+  creds = PrepareCreds(options.cred_file, options.token_file, options.email)
+
+  scomm = gdata_lib.SpreadsheetComm()
+  scomm.Connect(creds, ss_key, PKGS_WS_NAME, source='Sync Package Status')
+  tcomm = gdata_lib.TrackerComm()
+  tcomm.Connect(creds, TRACKER_PROJECT_NAME, source='Sync Package Status')
+
+  oper.Notice('Syncing between Tracker and spreadsheet %s' % ss_key)
+  syncer = Syncer(tcomm, scomm,
+                  pretend=options.pretend, verbose=options.verbose)
+
+  if options.team:
+    syncer.SetTeamFilter(options.team)
+  if options.owner:
+    syncer.SetOwnerFilter(options.owner)
+  if options.default_owner:
+    syncer.SetDefaultOwner(options.default_owner)
+
+  try:
+    syncer.Sync()
+  except SyncError as ex:
+    oper.Die(str(ex))
+
+  # If --email, which is only effective when run interactively (because the
+  # password must be entered), give the option of saving to a creds file for
+  # next time.
+  if options.email and options.cred_file:
+    prompt = ('Do you want to save credentials for next time to %r?' %
+              options.cred_file)
+    if cros_build_lib.BooleanPrompt(prompt, False):
+      creds.StoreCreds(options.cred_file)
+      oper.Notice('Be sure to save the creds file to the same location'
+                  ' outside your chroot so it will also be used with'
+                  ' future chroots.')
diff --git a/scripts/sync_package_status_unittest b/scripts/sync_package_status_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/sync_package_status_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/sync_package_status_unittest.py b/scripts/sync_package_status_unittest.py
new file mode 100644
index 0000000..11b0803
--- /dev/null
+++ b/scripts/sync_package_status_unittest.py
@@ -0,0 +1,540 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for sync_package_status.py."""
+
+from __future__ import print_function
+
+from chromite.lib import cros_test_lib
+from chromite.lib import gdata_lib
+from chromite.lib import upgrade_table as utable
+from chromite.scripts import sync_package_status as sps
+
+# pylint: disable=W0212,R0904
+
+
+class SyncerTest(cros_test_lib.MoxOutputTestCase):
+  """Tests for sync_package_status.Syncer."""
+
+  col_amd64 = utable.UpgradeTable.GetColumnName(utable.UpgradeTable.COL_STATE,
+                                                'amd64')
+  col_amd64 = gdata_lib.PrepColNameForSS(col_amd64)
+  col_arm = utable.UpgradeTable.GetColumnName(utable.UpgradeTable.COL_STATE,
+                                              'arm')
+  col_arm = gdata_lib.PrepColNameForSS(col_arm)
+  col_x86 = utable.UpgradeTable.GetColumnName(utable.UpgradeTable.COL_STATE,
+                                              'x86')
+  col_x86 = gdata_lib.PrepColNameForSS(col_x86)
+
+  def testInit(self):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+    tcomm, scomm = 'TComm', 'SComm'
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    sps.Syncer.__init__(mocked_syncer, tcomm, scomm)
+    self.mox.VerifyAll()
+    self.assertEquals(scomm, mocked_syncer.scomm)
+    self.assertEquals(tcomm, mocked_syncer.tcomm)
+    self.assertEquals(None, mocked_syncer.teams)
+    self.assertEquals(None, mocked_syncer.owners)
+    self.assertEquals(False, mocked_syncer.pretend)
+    self.assertEquals(False, mocked_syncer.verbose)
+
+  def testReduceTeamName(self):
+    syncer = sps.Syncer('tcomm_obj', 'scomm_obj')
+
+    tests = {
+        'build/bdavirro': 'build',
+        'build/rtc': 'build',
+        'build': 'build',
+        'UI/zel': 'ui',
+        'UI': 'ui',
+        'Build': 'build',
+        None: None,
+    }
+
+    # Verify
+    for key in tests:
+      result = syncer._ReduceTeamName(key)
+      self.assertEquals(tests[key], result)
+
+  def testReduceOwnerName(self):
+    syncer = sps.Syncer('tcomm_obj', 'scomm_obj')
+
+    tests = {
+        'joe': 'joe',
+        'Joe': 'joe',
+        'joe@chromium.org': 'joe',
+        'Joe@chromium.org': 'joe',
+        'Joe.Bob@chromium.org': 'joe.bob',
+        None: None,
+    }
+
+    # Verify
+    for key in tests:
+      result = syncer._ReduceOwnerName(key)
+      self.assertEquals(tests[key], result)
+
+  def testSetTeamFilterOK(self):
+    syncer = sps.Syncer('tcomm_obj', 'scomm_obj')
+
+    tests = {
+        'build:system:ui': set(['build', 'system', 'ui']),
+        'Build:system:UI': set(['build', 'system', 'ui']),
+        'kernel': set(['kernel']),
+        'KERNEL': set(['kernel']),
+        None: None,
+        '': None,
+    }
+
+    # Verify
+    for test in tests:
+      syncer.SetTeamFilter(test)
+      self.assertEquals(tests[test], syncer.teams)
+
+  def testSetTeamFilterError(self):
+    syncer = sps.Syncer('tcomm_obj', 'scomm_obj')
+
+    # "systems" is not valid (should be "system")
+    teamarg = 'build:systems'
+
+    # Verify
+    with self.OutputCapturer():
+      self.assertRaises(SystemExit, sps.Syncer.SetTeamFilter,
+                        syncer, teamarg)
+
+  def testSetOwnerFilter(self):
+    syncer = sps.Syncer('tcomm_obj', 'scomm_obj')
+
+    tests = {
+        'joe:bill:bob': set(['joe', 'bill', 'bob']),
+        'Joe:Bill:BOB': set(['joe', 'bill', 'bob']),
+        'joe@chromium.org:bill:bob': set(['joe', 'bill', 'bob']),
+        'joe': set(['joe']),
+        'joe@chromium.org': set(['joe']),
+        '': None,
+        None: None,
+    }
+
+    # Verify
+    for test in tests:
+      syncer.SetOwnerFilter(test)
+      self.assertEquals(tests[test], syncer.owners)
+
+  def testRowPassesFilters(self):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+
+    row1 = {sps.COL_TEAM: 'build', sps.COL_OWNER: 'joe'}
+    row2 = {sps.COL_TEAM: 'build', sps.COL_OWNER: 'bob'}
+    row3 = {sps.COL_TEAM: 'build', sps.COL_OWNER: None}
+    row4 = {sps.COL_TEAM: None, sps.COL_OWNER: None}
+
+    teams1 = set(['build'])
+    teams2 = set(['kernel'])
+    teams3 = set(['build', 'ui'])
+
+    owners1 = set(['joe'])
+    owners2 = set(['bob'])
+    owners3 = set(['joe', 'dan'])
+
+    tests = [
+        {'row': row1, 'teams': None, 'owners': None, 'result': True},
+        {'row': row1, 'teams': teams1, 'owners': None, 'result': True},
+        {'row': row1, 'teams': teams2, 'owners': None, 'result': False},
+        {'row': row1, 'teams': teams3, 'owners': None, 'result': True},
+        {'row': row1, 'teams': teams1, 'owners': owners1, 'result': True},
+        {'row': row1, 'teams': None, 'owners': owners2, 'result': False},
+        {'row': row1, 'teams': None, 'owners': owners3, 'result': True},
+
+        {'row': row2, 'teams': None, 'owners': None, 'result': True},
+        {'row': row2, 'teams': teams1, 'owners': None, 'result': True},
+        {'row': row2, 'teams': teams2, 'owners': None, 'result': False},
+        {'row': row2, 'teams': teams3, 'owners': None, 'result': True},
+        {'row': row2, 'teams': teams1, 'owners': owners1, 'result': False},
+        {'row': row2, 'teams': None, 'owners': owners2, 'result': True},
+        {'row': row2, 'teams': None, 'owners': owners3, 'result': False},
+
+        {'row': row3, 'teams': None, 'owners': None, 'result': True},
+        {'row': row3, 'teams': teams1, 'owners': None, 'result': True},
+        {'row': row3, 'teams': teams2, 'owners': None, 'result': False},
+        {'row': row3, 'teams': teams3, 'owners': None, 'result': True},
+        {'row': row3, 'teams': teams1, 'owners': owners1, 'result': False},
+        {'row': row3, 'teams': None, 'owners': owners2, 'result': False},
+        {'row': row3, 'teams': None, 'owners': owners3, 'result': False},
+
+        {'row': row4, 'teams': None, 'owners': None, 'result': True},
+        {'row': row4, 'teams': teams1, 'owners': None, 'result': False},
+        {'row': row4, 'teams': teams1, 'owners': owners1, 'result': False},
+        {'row': row4, 'teams': None, 'owners': owners2, 'result': False},
+    ]
+
+    # Replay script
+    for test in tests:
+      done = False
+
+      if test['teams']:
+        row_team = test['row'][sps.COL_TEAM]
+        mocked_syncer._ReduceTeamName(row_team).AndReturn(row_team)
+        done = row_team not in test['teams']
+
+      if not done and test['owners']:
+        row_owner = test['row'][sps.COL_OWNER]
+        mocked_syncer._ReduceOwnerName(row_owner).AndReturn(row_owner)
+    self.mox.ReplayAll()
+
+    # Verify
+    for test in tests:
+      mocked_syncer.teams = test['teams']
+      mocked_syncer.owners = test['owners']
+      result = sps.Syncer._RowPassesFilters(mocked_syncer, test['row'])
+
+      msg = ('Expected following row to %s filter, but it did not:\n%r' %
+             ('pass' if test['result'] else 'fail', test['row']))
+      msg += '\n  Using teams filter : %r' % mocked_syncer.teams
+      msg += '\n  Using owners filter: %r' % mocked_syncer.owners
+      self.assertEquals(test['result'], result, msg)
+    self.mox.VerifyAll()
+
+  def testSyncMissingTrackerColumn(self):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+    mocked_scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
+    mocked_tcomm = self.mox.CreateMock(gdata_lib.TrackerComm)
+    mocked_syncer.scomm = mocked_scomm
+    mocked_syncer.tcomm = mocked_tcomm
+
+    # Replay script
+    mocked_scomm.GetColumnIndex(sps.COL_TRACKER).AndReturn(None)
+    self.mox.ReplayAll()
+
+    # Verify
+    self.assertRaises(sps.SyncError, sps.Syncer.Sync, mocked_syncer)
+    self.mox.VerifyAll()
+
+  def testSyncNewIssues(self):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+    mocked_scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
+    mocked_tcomm = self.mox.CreateMock(gdata_lib.TrackerComm)
+    mocked_syncer.scomm = mocked_scomm
+    mocked_syncer.tcomm = mocked_tcomm
+
+    rows = [
+        {sps.COL_PACKAGE: 'd/f', sps.COL_TEAM: 'build', sps.COL_OWNER: None},
+        {sps.COL_PACKAGE: 'd/f', sps.COL_TEAM: 'build', sps.COL_OWNER: 'joe'},
+    ]
+
+    # Replay script
+    mocked_scomm.GetColumnIndex(sps.COL_TRACKER).AndReturn(1) # Any index ok.
+    mocked_scomm.GetRows().AndReturn(rows)
+
+    for ix in xrange(len(rows)):
+      mocked_syncer._RowPassesFilters(rows[ix]).AndReturn(True)
+      mocked_syncer._GenIssueForRow(rows[ix]).AndReturn('NewIssue%d' % ix)
+      mocked_syncer._GetRowTrackerId(rows[ix]).AndReturn(None)
+      mocked_syncer._CreateRowIssue(ix + 2, rows[ix], 'NewIssue%d' % ix)
+    self.mox.ReplayAll()
+
+    # Verify
+    sps.Syncer.Sync(mocked_syncer)
+    self.mox.VerifyAll()
+
+  def testSyncClearIssues(self):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+    mocked_scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
+    mocked_tcomm = self.mox.CreateMock(gdata_lib.TrackerComm)
+    mocked_syncer.scomm = mocked_scomm
+    mocked_syncer.tcomm = mocked_tcomm
+
+    rows = [
+        {sps.COL_PACKAGE: 'd/f', sps.COL_TEAM: 'build', sps.COL_OWNER: None},
+        {sps.COL_PACKAGE: 'd/f', sps.COL_TEAM: 'build', sps.COL_OWNER: 'joe'},
+    ]
+
+    # Replay script
+    mocked_scomm.GetColumnIndex(sps.COL_TRACKER).AndReturn(1) # Any index ok.
+    mocked_scomm.GetRows().AndReturn(rows)
+
+    for ix in xrange(len(rows)):
+      mocked_syncer._RowPassesFilters(rows[ix]).AndReturn(True)
+      mocked_syncer._GenIssueForRow(rows[ix]).AndReturn(None)
+      mocked_syncer._GetRowTrackerId(rows[ix]).AndReturn(123 + ix)
+      mocked_syncer._ClearRowIssue(ix + 2, rows[ix])
+    self.mox.ReplayAll()
+
+    # Verify
+    sps.Syncer.Sync(mocked_syncer)
+    self.mox.VerifyAll()
+
+  def testSyncFilteredOut(self):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+    mocked_scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
+    mocked_tcomm = self.mox.CreateMock(gdata_lib.TrackerComm)
+    mocked_syncer.scomm = mocked_scomm
+    mocked_syncer.tcomm = mocked_tcomm
+
+    rows = [
+        {sps.COL_PACKAGE: 'd/f', sps.COL_TEAM: 'build', sps.COL_OWNER: None},
+        {sps.COL_PACKAGE: 'd/f', sps.COL_TEAM: 'build', sps.COL_OWNER: 'joe'},
+    ]
+
+    # Replay script
+    mocked_scomm.GetColumnIndex(sps.COL_TRACKER).AndReturn(1) # Any index ok.
+    mocked_scomm.GetRows().AndReturn(rows)
+
+    for ix in xrange(len(rows)):
+      mocked_syncer._RowPassesFilters(rows[ix]).AndReturn(False)
+    self.mox.ReplayAll()
+
+    # Verify
+    sps.Syncer.Sync(mocked_syncer)
+    self.mox.VerifyAll()
+
+  def testGetRowValue(self):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+
+    row = {
+        self.col_amd64: 'ABC',
+        self.col_arm: 'XYZ',
+        self.col_x86: 'FooBar',
+        sps.COL_TEAM: 'build',
+    }
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    result = sps.Syncer._GetRowValue(mocked_syncer, row,
+                                     'stateonamd64', 'amd64')
+    self.assertEquals('ABC', result)
+    result = sps.Syncer._GetRowValue(mocked_syncer, row,
+                                     'stateonarm', 'arm')
+    self.assertEquals('XYZ', result)
+    result = sps.Syncer._GetRowValue(mocked_syncer, row,
+                                     'stateonamd64', 'amd64')
+    self.assertEquals('ABC', result)
+    result = sps.Syncer._GetRowValue(mocked_syncer, row, sps.COL_TEAM)
+    self.assertEquals('build', result)
+    self.mox.VerifyAll()
+
+  def _TestGenIssueForRowNeedsUpgrade(self, row):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+    mocked_syncer.default_owner = None
+    mocked_syncer.scomm = cros_test_lib.EasyAttr(ss_key='SomeSSKey')
+
+    # Replay script
+    for arch in sps.ARCHES:
+      state = sps.Syncer._GetRowValue(mocked_syncer, row,
+                                      utable.UpgradeTable.COL_STATE, arch)
+      mocked_syncer._GetRowValue(row, utable.UpgradeTable.COL_STATE,
+                                 arch).AndReturn(state)
+    red_team = sps.Syncer._ReduceTeamName(mocked_syncer, row[sps.COL_TEAM])
+    mocked_syncer._ReduceTeamName(row[sps.COL_TEAM]).AndReturn(red_team)
+    red_owner = sps.Syncer._ReduceOwnerName(mocked_syncer, row[sps.COL_OWNER])
+    mocked_syncer._ReduceOwnerName(row[sps.COL_OWNER]).AndReturn(red_owner)
+    for arch in sps.ARCHES:
+      mocked_syncer._GetRowValue(row, utable.UpgradeTable.COL_CURRENT_VER,
+                                 arch).AndReturn('1')
+      mocked_syncer._GetRowValue(row,
+                                 utable.UpgradeTable.COL_STABLE_UPSTREAM_VER,
+                                 arch).AndReturn('2')
+      mocked_syncer._GetRowValue(row,
+                                 utable.UpgradeTable.COL_LATEST_UPSTREAM_VER,
+                                 arch).AndReturn('3')
+    self.mox.ReplayAll()
+
+    # Verify
+    result = sps.Syncer._GenIssueForRow(mocked_syncer, row)
+    self.mox.VerifyAll()
+    return result
+
+  def testGenIssueForRowNeedsUpgrade1(self):
+    row = {
+        self.col_amd64: utable.UpgradeTable.STATE_NEEDS_UPGRADE,
+        self.col_arm: 'Not important',
+        self.col_x86: 'Not important',
+        sps.COL_TEAM: 'build',
+        sps.COL_OWNER: None,
+        sps.COL_PACKAGE: 'dev/foo',
+    }
+
+    result = self._TestGenIssueForRowNeedsUpgrade(row)
+    self.assertEquals(None, result.owner)
+    self.assertEquals(0, result.id)
+    self.assertEquals('Untriaged', result.status)
+
+  def testGenIssueForRowNeedsUpgrade2(self):
+    row = {
+        self.col_amd64: utable.UpgradeTable.STATE_NEEDS_UPGRADE,
+        self.col_arm: utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_PATCHED,
+        self.col_x86: 'Not important',
+        sps.COL_TEAM: 'build',
+        sps.COL_OWNER: 'joe',
+        sps.COL_PACKAGE: 'dev/foo',
+    }
+
+    result = self._TestGenIssueForRowNeedsUpgrade(row)
+    self.assertEquals('joe@chromium.org', result.owner)
+    self.assertEquals(0, result.id)
+    self.assertEquals('Available', result.status)
+
+  def testGenIssueForRowNeedsUpgrade3(self):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+
+    row = {
+        self.col_amd64: utable.UpgradeTable.STATE_NEEDS_UPGRADE,
+        self.col_arm: utable.UpgradeTable.STATE_NEEDS_UPGRADE_AND_PATCHED,
+        self.col_x86: 'Not important',
+        sps.COL_TEAM: None,
+        sps.COL_OWNER: 'joe',
+        sps.COL_PACKAGE: 'dev/foo',
+    }
+
+    # Replay script
+    for arch in sps.ARCHES:
+      state = sps.Syncer._GetRowValue(mocked_syncer, row,
+                                      utable.UpgradeTable.COL_STATE, arch)
+      mocked_syncer._GetRowValue(row, utable.UpgradeTable.COL_STATE,
+                                 arch).AndReturn(state)
+    reduced = sps.Syncer._ReduceTeamName(mocked_syncer, row[sps.COL_TEAM])
+    mocked_syncer._ReduceTeamName(row[sps.COL_TEAM]).AndReturn(reduced)
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      self.assertRaises(RuntimeError, sps.Syncer._GenIssueForRow,
+                        mocked_syncer, row)
+    self.mox.VerifyAll()
+
+  def testGenIssueForRowNoUpgrade(self):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+
+    row = {
+        self.col_amd64: 'Not important',
+        self.col_arm: 'Not important',
+        self.col_x86: 'Not important',
+        sps.COL_TEAM: None,
+        sps.COL_OWNER: 'joe',
+        sps.COL_PACKAGE: 'dev/foo',
+    }
+
+    # Replay script
+    for arch in sps.ARCHES:
+      state = sps.Syncer._GetRowValue(mocked_syncer, row,
+                                      utable.UpgradeTable.COL_STATE, arch)
+      mocked_syncer._GetRowValue(row, utable.UpgradeTable.COL_STATE,
+                                 arch).AndReturn(state)
+    self.mox.ReplayAll()
+
+    # Verify
+    result = sps.Syncer._GenIssueForRow(mocked_syncer, row)
+    self.mox.VerifyAll()
+    self.assertEquals(None, result)
+
+  def testGetRowTrackerId(self):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+
+    row = {sps.COL_TRACKER: '321'}
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      result = sps.Syncer._GetRowTrackerId(mocked_syncer, row)
+    self.mox.VerifyAll()
+    self.assertEquals(321, result)
+
+  def testCreateRowIssuePretend(self):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+    mocked_syncer.pretend = True
+
+    row = {sps.COL_PACKAGE: 'dev/foo'}
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      sps.Syncer._CreateRowIssue(mocked_syncer, 5, row, 'some_issue')
+    self.mox.VerifyAll()
+
+  def testCreateRowIssue(self):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+    mocked_scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
+    mocked_tcomm = self.mox.CreateMock(gdata_lib.TrackerComm)
+    mocked_syncer.scomm = mocked_scomm
+    mocked_syncer.tcomm = mocked_tcomm
+    mocked_syncer.tracker_col_ix = 8
+    mocked_syncer.pretend = False
+
+    row_ix = 5
+    row = {sps.COL_PACKAGE: 'dev/foo'}
+    issue = 'SomeIssue'
+    issue_id = 234
+    ss_issue_val = 'Hyperlink%d' % issue_id
+
+    # Replay script
+    mocked_scomm.ClearCellValue(row_ix, mocked_syncer.tracker_col_ix)
+    mocked_tcomm.CreateTrackerIssue(issue).AndReturn(issue_id)
+    mocked_syncer._GenSSLinkToIssue(issue_id).AndReturn(ss_issue_val)
+    mocked_scomm.ReplaceCellValue(row_ix, mocked_syncer.tracker_col_ix,
+                                  ss_issue_val)
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      sps.Syncer._CreateRowIssue(mocked_syncer, row_ix, row, issue)
+    self.mox.VerifyAll()
+
+  def testGenSSLinkToIssue(self):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+
+    issue_id = 123
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    result = sps.Syncer._GenSSLinkToIssue(mocked_syncer, issue_id)
+    self.mox.VerifyAll()
+    self.assertEquals('=hyperlink("crbug.com/123";"123")', result)
+
+  def testClearRowIssue(self):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+    mocked_scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
+    mocked_syncer.scomm = mocked_scomm
+    mocked_syncer.tracker_col_ix = 8
+    mocked_syncer.pretend = False
+
+    row_ix = 44
+    row = {sps.COL_PACKAGE: 'dev/foo'}
+
+    # Replay script
+    mocked_scomm.ClearCellValue(row_ix, mocked_syncer.tracker_col_ix)
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      sps.Syncer._ClearRowIssue(mocked_syncer, row_ix, row)
+    self.mox.VerifyAll()
+
+  def testClearRowIssuePretend(self):
+    mocked_syncer = self.mox.CreateMock(sps.Syncer)
+    mocked_scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
+    mocked_syncer.scomm = mocked_scomm
+    mocked_syncer.tracker_col_ix = 8
+    mocked_syncer.pretend = True
+
+    row_ix = 44
+    row = {sps.COL_PACKAGE: 'dev/foo'}
+
+    # Replay script
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      sps.Syncer._ClearRowIssue(mocked_syncer, row_ix, row)
+    self.mox.VerifyAll()
diff --git a/scripts/test_image.py b/scripts/test_image.py
new file mode 100644
index 0000000..23b8eb9
--- /dev/null
+++ b/scripts/test_image.py
@@ -0,0 +1,97 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to mount a built image and run tests on it."""
+
+from __future__ import print_function
+
+import os
+import unittest
+
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+from chromite.lib import cros_logging as logging
+from chromite.lib import image_test_lib
+from chromite.lib import osutils
+from chromite.lib import path_util
+
+
+def ParseArgs(args):
+  """Return parsed commandline arguments."""
+
+  parser = commandline.ArgumentParser()
+  parser.add_argument('--test_results_root', type='path',
+                      help='Directory to store test results')
+  parser.add_argument('--board', type=str, help='Board (wolf, beaglebone...)')
+  parser.add_argument('image_dir', type='path',
+                      help='Image directory (or file) with mount_image.sh and '
+                           'umount_image.sh')
+  opts = parser.parse_args(args)
+  opts.Freeze()
+  return opts
+
+
+def FindImage(image_path):
+  """Return the path to the image file.
+
+  Args:
+    image_path: A path to the image file, or a directory containing the base
+      image.
+
+  Returns:
+    ImageFileAndMountScripts containing absolute paths to the image,
+      the mount and umount invocation commands
+  """
+
+  if os.path.isdir(image_path):
+    # Assume base image.
+    image_file = os.path.join(image_path, constants.BASE_IMAGE_NAME + '.bin')
+    if not os.path.exists(image_file):
+      raise ValueError('Cannot find base image %s' % image_file)
+  elif os.path.isfile(image_path):
+    image_file = image_path
+  else:
+    raise ValueError('%s is neither a directory nor a file' % image_path)
+
+  return image_file
+
+
+def main(args):
+  opts = ParseArgs(args)
+
+  # Build up test suites.
+  loader = unittest.TestLoader()
+  loader.suiteClass = image_test_lib.ImageTestSuite
+  # We use a different prefix here so that unittest does not pick up the
+  # image tests automatically because they depend on a proper environment.
+  loader.testMethodPrefix = 'Test'
+  all_tests = loader.loadTestsFromName('chromite.cros.test.image_test')
+  forgiving = image_test_lib.ImageTestSuite()
+  non_forgiving = image_test_lib.ImageTestSuite()
+  for suite in all_tests:
+    for test in suite.GetTests():
+      if test.IsForgiving():
+        forgiving.addTest(test)
+      else:
+        non_forgiving.addTest(test)
+
+  # Run them in the image directory.
+  runner = image_test_lib.ImageTestRunner()
+  runner.SetBoard(opts.board)
+  runner.SetResultDir(opts.test_results_root)
+  image_file = FindImage(opts.image_dir)
+  tmp_in_chroot = path_util.FromChrootPath('/tmp')
+  with osutils.TempDir(base_dir=tmp_in_chroot) as temp_dir:
+    with osutils.MountImageContext(image_file, temp_dir):
+      with osutils.ChdirContext(temp_dir):
+        # Run non-forgiving tests first so that exceptions in forgiving tests
+        # do not skip any required tests.
+        logging.info('Running NON-forgiving tests.')
+        result = runner.run(non_forgiving)
+        logging.info('Running forgiving tests.')
+        runner.run(forgiving)
+
+  if result and not result.wasSuccessful():
+    return 1
+  return 0
diff --git a/scripts/test_image_unittest b/scripts/test_image_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/test_image_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/test_image_unittest.py b/scripts/test_image_unittest.py
new file mode 100644
index 0000000..ed11825
--- /dev/null
+++ b/scripts/test_image_unittest.py
@@ -0,0 +1,165 @@
+# Copyright 2014 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the functions in test_image."""
+
+from __future__ import print_function
+
+import os
+import tempfile
+import unittest
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import image_test_lib
+from chromite.lib import osutils
+from chromite.scripts import test_image
+
+
+class TestImageTest(cros_test_lib.MockTempDirTestCase):
+  """Common base class for ImageTest tests.
+
+  This sets up proper directory with test image. The image file is zero-byte.
+  """
+
+  def setUp(self):
+    # create dummy image file
+    self.image_file = os.path.join(self.tempdir,
+                                   constants.BASE_IMAGE_NAME + '.bin')
+    osutils.WriteFile(self.image_file, '')
+    fake_partitions = {
+        1: cros_build_lib.PartitionInfo(1, 0, 0, 0, 'fs', 'STATE', 'flag'),
+        2: cros_build_lib.PartitionInfo(2, 0, 0, 0, 'fs', 'KERN-A', 'flag'),
+        3: cros_build_lib.PartitionInfo(3, 0, 0, 0, 'fs', 'ROOT-A', 'flag'),
+    }
+    self.PatchObject(cros_build_lib, 'GetImageDiskPartitionInfo',
+                     autospec=True, return_value=fake_partitions)
+    self.PatchObject(osutils.MountImageContext, '_Mount', autospec=True)
+    self.PatchObject(osutils.MountImageContext, '_Unmount', autospec=True)
+
+
+class FindImageTest(TestImageTest):
+  """Test FindImage() function."""
+
+  def _testFindOkay(self, image_path):
+    res = test_image.FindImage(image_path)
+    self.assertEqual(
+        res,
+        os.path.join(self.tempdir, constants.BASE_IMAGE_NAME + '.bin')
+    )
+
+  def testFindWithDirectory(self):
+    self._testFindOkay(self.tempdir)
+
+  def testFindWithFile(self):
+    self._testFindOkay(self.image_file)
+
+  def testFindWithInvalid(self):
+    self.assertRaises(ValueError, test_image.FindImage,
+                      os.path.join(self.tempdir, '404'))
+
+  def testFindWithInvalidDirectory(self):
+    os.unlink(self.image_file)
+    self.assertRaises(ValueError, test_image.FindImage,
+                      os.path.join(self.tempdir))
+
+
+class MainTest(TestImageTest):
+  """Test the main invocation of the script."""
+
+  def testChdir(self):
+    """Verify the CWD is in a temp directory."""
+
+    class CwdTest(image_test_lib.NonForgivingImageTestCase):
+      """A dummy test class to verify current working directory."""
+
+      _expected_dir = None
+
+      def SetCwd(self, cwd):
+        self._expected_dir = cwd
+
+      def testExpectedCwd(self):
+        self.assertEqual(self._expected_dir, os.getcwd())
+
+    self.assertNotEqual('/tmp', os.getcwd())
+    os.chdir('/tmp')
+
+    test = CwdTest('testExpectedCwd')
+    suite = image_test_lib.ImageTestSuite()
+    suite.addTest(test)
+    self.PatchObject(unittest.TestLoader, 'loadTestsFromName', autospec=True,
+                     return_value=[suite])
+
+    # Set up the expected directory.
+    expected_dir = os.path.join(self.tempdir, 'my-subdir')
+    os.mkdir(expected_dir)
+    test.SetCwd(expected_dir)
+    self.PatchObject(tempfile, 'mkdtemp', autospec=True,
+                     return_value=expected_dir)
+
+    argv = [self.tempdir]
+    self.assertEqual(0, test_image.main(argv))
+    self.assertEqual('/tmp', os.getcwd())
+
+  def _testForgiveness(self, forgiveness, expected_result):
+
+    class ForgivenessTest(image_test_lib.ImageTestCase):
+      """A dummy test that is sometimes forgiving, sometimes not.
+
+      Its only test (testFail) always fails.
+      """
+
+      _forgiving = True
+
+      def SetForgiving(self, value):
+        self._forgiving = value
+
+      def IsForgiving(self):
+        return self._forgiving
+
+      def testFail(self):
+        self.fail()
+
+    test = ForgivenessTest('testFail')
+    test.SetForgiving(forgiveness)
+    suite = image_test_lib.ImageTestSuite()
+    suite.addTest(test)
+    self.PatchObject(unittest.TestLoader, 'loadTestsFromName', autospec=True,
+                     return_value=[suite])
+    argv = [self.tempdir]
+    self.assertEqual(expected_result, test_image.main(argv))
+
+  def testForgiving(self):
+    self._testForgiveness(True, 0)
+
+  def testNonForgiving(self):
+    self._testForgiveness(False, 1)
+
+  def testBoardAndDirectory(self):
+    """Verify that "--board", "--test_results_root" are passed to the tests."""
+
+    class AttributeTest(image_test_lib.ForgivingImageTestCase):
+      """Dummy test class to hold board and directory."""
+
+      def testOkay(self):
+        pass
+
+    test = AttributeTest('testOkay')
+    suite = image_test_lib.ImageTestSuite()
+    suite.addTest(test)
+    self.PatchObject(unittest.TestLoader, 'loadTestsFromName', autospec=True,
+                     return_value=[suite])
+    argv = [
+        '--board',
+        'my-board',
+        '--test_results_root',
+        'your-root',
+        self.tempdir
+    ]
+    test_image.main(argv)
+    # pylint: disable=W0212
+    self.assertEqual('my-board', test._board)
+    # pylint: disable=W0212
+    self.assertEqual('your-root', os.path.basename(test._result_dir))
diff --git a/scripts/update_manifest_remotes.py b/scripts/update_manifest_remotes.py
new file mode 100644
index 0000000..aa2a5e3
--- /dev/null
+++ b/scripts/update_manifest_remotes.py
@@ -0,0 +1,176 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A tool that updates remotes in all historical manifests to point to GoB.
+
+It clones manifest-versions repository, scans through all manifests there and
+replaces known old gerrit/gerrit-int URLs with Gerrit on Borg ones.
+
+It doesn't commit or push any changes, just updates files in a working
+directory.
+"""
+
+from __future__ import print_function
+
+import collections
+import os
+
+from xml.etree import ElementTree
+
+from chromite.cbuildbot import config_lib
+from chromite.cbuildbot import manifest_version
+from chromite.lib import commandline
+from chromite.lib import cros_logging as logging
+from chromite.lib import osutils
+
+
+site_config = config_lib.GetConfig()
+
+
+GOB_EXTERNAL = 'https://chromium.googlesource.com'
+GOB_INTERNAL = 'https://chrome-internal.googlesource.com'
+
+
+GERRIT_EXTERNAL = 'https://chromium-review.googlesource.com'
+GERRIT_INTERNAL = 'https://chrome-internal-review.googlesource.com'
+
+
+# Old fetch URL -> new fetch URL.
+# Old fetch urls are found by grepping through manifest-versions repo.
+FETCH_URLS = {
+    'http://git.chromium.org': GOB_EXTERNAL,
+    'http://git.chromium.org/git': GOB_EXTERNAL,
+    'https://git.chromium.org/git': GOB_EXTERNAL,
+    'ssh://gerrit.chromium.org:29418': GOB_EXTERNAL,
+    'ssh://git@gitrw.chromium.org:9222': GOB_EXTERNAL,
+    'ssh://gerrit-int.chromium.org:29419': GOB_INTERNAL,
+}
+
+
+# Old review URL -> new review URL.
+REVIEW_URLS = {
+    'gerrit.chromium.org/gerrit': GERRIT_EXTERNAL,
+    'gerrit-int.chromium.org': GERRIT_INTERNAL,
+}
+
+
+# Single remote entry in a manifest.
+Remote = collections.namedtuple('Remote', ['name', 'fetch', 'review'])
+
+
+def EnumerateManifests(directory):
+  """Yields paths to manifest files inside a directory."""
+  for path, directories, files in os.walk(directory):
+    # Find regular (not a symlink) xml files.
+    for name in files:
+      if not name.endswith('.xml'):
+        continue
+      full_path = os.path.join(path, name)
+      if os.path.isfile(full_path) and not os.path.islink(full_path):
+        yield full_path
+    # Skip 'hidden' directories.
+    for hidden in [name for name in directories if name.startswith('.')]:
+      directories.remove(hidden)
+
+
+def UpdateRemotes(manifest):
+  """Updates remotes in manifest to use Gerrit on Borg URLs.
+
+  Args:
+    manifest: Path to manifest file to modify in place.
+
+  Returns:
+    True if file was modified.
+  """
+  # Read manifest file as str.
+  body = osutils.ReadFile(manifest)
+  original = body
+
+  # Update fetch="..." entries.
+  for old, new in FETCH_URLS.iteritems():
+    body = body.replace('fetch="%s"' % old, 'fetch="%s"' % new)
+
+  # Update review="..." entries.
+  for old, new in REVIEW_URLS.iteritems():
+    body = body.replace('review="%s"' % old, 'review="%s"' % new)
+
+  # Write back only if modified.
+  if original != body:
+    osutils.WriteFile(manifest, body)
+    return True
+
+  return False
+
+
+def GetRemotes(manifest):
+  """Returns list of remotes referenced in manifest.
+
+  Args:
+    manifest: Path to manifest file to scan for remotes.
+
+  Returns:
+    List of Remote tuples.
+  """
+  doc = ElementTree.parse(manifest)
+  root = doc.getroot()
+  return [Remote(
+      remote.attrib['name'], remote.attrib['fetch'],
+      remote.attrib.get('review'))
+          for remote in root.findall('remote')]
+
+
+def GetParser():
+  """Creates the argparse parser."""
+  parser = commandline.ArgumentParser(description=__doc__)
+  parser.add_argument(
+      '--skip-update', action='store_true', default=False,
+      help='Do not revert versions manifest checkout to original state')
+  parser.add_argument(
+      '--remotes-summary', action='store_true', default=False,
+      help='Scan all manifests and print all various remotes found in them')
+  parser.add_argument(
+      'manifest_versions_dir', type='path',
+      help='Directory to checkout manifest versions repository into')
+  return parser
+
+
+def main(argv):
+  parser = GetParser()
+  options = parser.parse_args(argv)
+
+  # Clone manifest-versions repository.
+  manifest_repo_url = site_config.params.MANIFEST_VERSIONS_INT_GOB_URL
+  if not options.skip_update:
+    manifest_version.RefreshManifestCheckout(
+        options.manifest_versions_dir, manifest_repo_url)
+
+  if options.remotes_summary:
+    # Find all unique remotes.
+    logging.info('Scanning manifests for remotes...')
+    remotes = set()
+    for manifest in EnumerateManifests(options.manifest_versions_dir):
+      remotes.update(GetRemotes(manifest))
+    # Pretty print a table.
+    print('Remotes found:')
+    row_formatter = lambda a, b, c: ''.join(
+        [a, ' ' * (16 - len(a)), b, ' ' * (45 - len(b)), c])
+    print(row_formatter('Name', 'Remote', 'Review'))
+    print('-' * 80)
+    for remote in sorted(remotes):
+      print(row_formatter(remote.name, remote.fetch, remote.review or ''))
+    return 0
+
+  logging.info('Updating manifests...')
+  up_to_date = True
+  for manifest in EnumerateManifests(options.manifest_versions_dir):
+    if UpdateRemotes(manifest):
+      up_to_date = False
+      logging.info('Updated manifest: %s', manifest)
+
+  if up_to_date:
+    logging.info('All manifests are up to date')
+  else:
+    logging.info('Done')
+
+  return 0
diff --git a/scripts/upload_command_stats.py b/scripts/upload_command_stats.py
new file mode 100644
index 0000000..50cac30
--- /dev/null
+++ b/scripts/upload_command_stats.py
@@ -0,0 +1,105 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Upload a single build command stats file to appengine."""
+
+from __future__ import print_function
+
+import re
+import sys
+
+from chromite.cbuildbot import constants
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import stats
+
+
+FILE_LOAD_ERROR = 'Error loading %s'
+UNCAUGHT_ERROR = 'Uncaught command stats exception.'
+
+
+class LoadError(RuntimeError):
+  """Error during loading of stats file."""
+
+
+class StatsLoader(object):
+  """Loads stats from a file."""
+
+  @classmethod
+  def LoadFile(cls, stat_file):
+    """Return a Stats object constructed from contents of |stat_file|."""
+
+    with open(stat_file, 'r') as f:
+      first_line = f.readline().rstrip()
+      match = re.match(r'Chromium OS .+ Version (\d+)$', first_line)
+      if not match:
+        raise LoadError('Stats file not in expected format')
+
+      version = int(match.group(1))
+      loader = cls._GetLinesLoader(version)
+      if not loader:
+        raise LoadError('Stats file version %s not supported.' % version)
+
+      return loader(f.readlines())
+
+  @classmethod
+  def _GetLinesLoader(cls, version):
+    LOADERS = (
+        None,
+        cls._LoadLinesV1,  # Version 1 loader (at index 1)
+    )
+
+    if version < len(LOADERS) and version >= 0:
+      return LOADERS[version]
+
+    return None
+
+  @classmethod
+  def _LoadLinesV1(cls, stat_lines):
+    """Load stat lines in Version 1 format."""
+    data = {}
+    for line in stat_lines:
+      # Each line has following format:
+      # attribute_name Rest of line is value for attribute_name
+      # Note that some attributes may have no value after their name.
+      attr, _sep, value = line.rstrip().partition(' ')
+      if not attr:
+        attr = line.rstrip()
+
+      data[attr] = value
+
+    return stats.Stats(**data)
+
+
+def main(argv):
+  """Main function."""
+  # This is not meant to be a user-friendly script.  It takes one and
+  # only one argument, which is a build stats file to be uploaded
+  epilog = (
+      'This script is not intended to be run manually.  It is used as'
+      ' part of the build command statistics project.'
+  )
+  in_golo = cros_build_lib.GetHostDomain().endswith(constants.GOLO_DOMAIN)
+  debug_level = commandline.ArgumentParser.DEFAULT_LOG_LEVEL
+  if in_golo:
+    debug_level = 'debug'
+  parser = commandline.ArgumentParser(
+      epilog=epilog, default_log_level=debug_level)
+  parser.add_argument(
+      'build_stats_file', nargs=1, default=None)
+  options = parser.parse_args(argv)
+
+  try:
+    cmd_stats = StatsLoader.LoadFile(options.build_stats_file[0])
+  except LoadError:
+    logging.error(FILE_LOAD_ERROR, options.build_stats_file[0],
+                  exc_info=True)
+    sys.exit(1)
+
+  try:
+    stats.StatsUploader.Upload(cmd_stats)
+  except Exception:
+    logging.error(UNCAUGHT_ERROR, exc_info=True)
+    sys.exit(1)
diff --git a/scripts/upload_command_stats_unittest b/scripts/upload_command_stats_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/upload_command_stats_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/upload_command_stats_unittest.py b/scripts/upload_command_stats_unittest.py
new file mode 100644
index 0000000..5a2818a
--- /dev/null
+++ b/scripts/upload_command_stats_unittest.py
@@ -0,0 +1,84 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for upload_command_stats.py."""
+
+from __future__ import print_function
+
+import os
+
+from chromite.cbuildbot import constants
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import stats
+from chromite.lib import stats_unittest
+from chromite.scripts import upload_command_stats
+
+TEST_FILE = """\
+Chromium OS Build Command Stats - Version 1
+cpu_count 32
+cmd_args --board=lumpy
+host typewriter.mtv.corp.google.com
+run_time 0
+cmd_line ./build_packages --board=lumpy
+username monkey@chromium.org
+cmd_base build_packages
+cpu_type Intel(R) Xeon(R) CPU E5-2690 0 @ 2.90GHz
+board lumpy
+"""
+
+class RunScriptTest(cros_test_lib.MockTempDirTestCase,
+                    cros_test_lib.LoggingTestCase):
+  """Test the main functionality."""
+  # pylint: disable=W0212
+
+  def setUp(self):
+    self.upload_file = os.path.join(self.tempdir, 'upload_File')
+    osutils.WriteFile(self.upload_file, TEST_FILE)
+    self.argv = [self.upload_file]
+    self.PatchObject(cros_build_lib, 'GetHostDomain', autospec=True,
+                     return_value='noname.com')
+    self.StartPatcher(stats_unittest.StatsUploaderMock())
+
+  def testNormalRun(self):
+    """Going for code coverage."""
+    upload_command_stats.main(self.argv)
+    self.assertEquals(stats.StatsUploader._Upload.call_count, 1)
+
+  def testStatsDebugMsg(self, golo=False):
+    """We hide debug messages from stats module when not in golo."""
+    stats.StatsUploader._Upload.side_effect = EnvironmentError()
+    with cros_test_lib.LoggingCapturer() as logs:
+      upload_command_stats.main(self.argv)
+      self.AssertLogsContain(
+          logs, stats.StatsUploader.ENVIRONMENT_ERROR, inverted=(not golo))
+
+  def testGoloRun(self):
+    """Test when running in the golo."""
+    cros_build_lib.GetHostDomain.return_value = constants.GOLO_DOMAIN
+    upload_command_stats.main(self.argv)
+    self.assertEquals(stats.StatsUploader._Upload.call_count, 1)
+    self.testStatsDebugMsg(golo=True)
+
+  def LogContainsOnError(self, msg):
+    """Verifies a logging.error() message is printed."""
+    with cros_test_lib.LoggingCapturer() as logs:
+      self.assertRaises2(SystemExit, upload_command_stats.main, self.argv,
+                         check_attrs={'code': 1})
+      self.AssertLogsContain(logs, msg)
+
+  def testLoadFileErrorIgnore(self):
+    """Load errors are logged and cause a clean exit, not a traceback."""
+    self.PatchObject(
+        upload_command_stats.StatsLoader, 'LoadFile',
+        side_effect=upload_command_stats.LoadError(), autospec=True)
+    self.LogContainsOnError(
+        upload_command_stats.FILE_LOAD_ERROR % self.upload_file)
+
+  def testUploadErrorIgnore(self):
+    """We don't propagate timeouts during upload."""
+    stats.StatsUploader._Upload.side_effect = Exception()
+    # Logging level for the error is logging.ERROR.
+    self.LogContainsOnError(upload_command_stats.UNCAUGHT_ERROR)
diff --git a/scripts/upload_package_status.py b/scripts/upload_package_status.py
new file mode 100644
index 0000000..bf09c1e
--- /dev/null
+++ b/scripts/upload_package_status.py
@@ -0,0 +1,299 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Support uploading a csv file to a Google Docs spreadsheet."""
+
+from __future__ import print_function
+
+import optparse
+import os
+
+from chromite.lib import gdata_lib
+from chromite.lib import table
+from chromite.lib import operation
+from chromite.lib import upgrade_table as utable
+from chromite.scripts import merge_package_status as mps
+
+REAL_SS_KEY = '0AsXDKtaHikmcdEp1dVN1SG1yRU1xZEw1Yjhka2dCSUE'
+TEST_SS_KEY = '0AsXDKtaHikmcdDlQMjI3ZDdPVGc4Rkl3Yk5OLWxjR1E'
+PKGS_WS_NAME = 'Packages'
+DEPS_WS_NAME = 'Dependencies'
+
+oper = operation.Operation('upload_package_status')
+
+
+class Uploader(object):
+  """Uploads portage package status data from csv file to Google spreadsheet."""
+
+  __slots__ = ('_creds',          # gdata_lib.Creds object
+               '_scomm',          # gdata_lib.SpreadsheetComm object
+               '_ss_row_cache',   # dict with key=pkg, val=SpreadsheetRow obj
+               '_csv_table',      # table.Table of csv rows
+              )
+
+  ID_COL = utable.UpgradeTable.COL_PACKAGE
+  SS_ID_COL = gdata_lib.PrepColNameForSS(ID_COL)
+  SOURCE = 'Uploaded from CSV'
+
+  def __init__(self, creds, table_obj):
+    self._creds = creds
+    self._csv_table = table_obj
+    self._scomm = None
+    self._ss_row_cache = None
+
+  def _GetSSRowForPackage(self, package):
+    """Return the SpreadsheetRow corresponding to Package=|package|."""
+    if package in self._ss_row_cache:
+      row = self._ss_row_cache[package]
+
+      if isinstance(row, list):
+        raise LookupError('More than one row in spreadsheet with Package=%s' %
+                          package)
+
+      return row
+
+    return None
+
+  def Upload(self, ss_key, ws_name):
+    """Upload |_csv_table| to the given Google Spreadsheet.
+
+    The spreadsheet is identified by the spreadsheet key |ss_key|.
+    The worksheet within that spreadsheet is identified by the
+    worksheet name |ws_name|.
+    """
+    if self._scomm:
+      self._scomm.SetCurrentWorksheet(ws_name)
+    else:
+      self._scomm = gdata_lib.SpreadsheetComm()
+      self._scomm.Connect(self._creds, ss_key, ws_name,
+                          source='Upload Package Status')
+
+    oper.Notice('Caching rows for worksheet %r.' % self._scomm.ws_name)
+    self._ss_row_cache = self._scomm.GetRowCacheByCol(self.SS_ID_COL)
+
+    oper.Notice('Uploading changes to worksheet "%s" of spreadsheet "%s" now.' %
+                (self._scomm.ws_name, self._scomm.ss_key))
+
+    oper.Info('Details by package: S=Same, C=Changed, A=Added, D=Deleted')
+    rows_unchanged, rows_updated, rows_inserted = self._UploadChangedRows()
+    rows_deleted, rows_with_owner_deleted = self._DeleteOldRows()
+
+    oper.Notice('Final row stats for worksheet "%s"'
+                ': %d changed, %d added, %d deleted, %d same.' %
+                (self._scomm.ws_name, rows_updated, rows_inserted,
+                 rows_deleted, rows_unchanged))
+    if rows_with_owner_deleted:
+      oper.Warning('%d rows with owner entry deleted, see above warnings.' %
+                   rows_with_owner_deleted)
+    else:
+      oper.Notice('No rows with owner entry were deleted.')
+
+  def _UploadChangedRows(self):
+    """Upload all rows in table that need to be changed in spreadsheet."""
+    rows_unchanged, rows_updated, rows_inserted = (0, 0, 0)
+
+    # Go over all rows in csv table.  Identify existing row by the 'Package'
+    # column.  Either update existing row or create new one.
+    for csv_row in self._csv_table:
+      # Seed new row values from csv_row values, with column translation.
+      new_row = dict((gdata_lib.PrepColNameForSS(key),
+                      csv_row[key]) for key in csv_row)
+
+      # Retrieve row values already in spreadsheet, along with row index.
+      csv_package = csv_row[self.ID_COL]
+      ss_row = self._GetSSRowForPackage(csv_package)
+
+      if ss_row:
+        changed = []  # Gather changes for log message.
+
+        # Check each key/value in new_row to see if it is different from what
+        # is already in spreadsheet (ss_row).  Keep only differences to get
+        # the row delta.
+        row_delta = {}
+        for col in new_row:
+          if col in ss_row:
+            ss_val = ss_row[col]
+            new_val = new_row[col]
+            if (ss_val or new_val) and ss_val != new_val:
+              changed.append('%s="%s"->"%s"' % (col, ss_val, new_val))
+              row_delta[col] = new_val
+
+        if row_delta:
+          self._scomm.UpdateRowCellByCell(ss_row.ss_row_num,
+                                          gdata_lib.PrepRowForSS(row_delta))
+          rows_updated += 1
+          oper.Info('C %-30s: %s' % (csv_package, ', '.join(changed)))
+        else:
+          rows_unchanged += 1
+          oper.Info('S %-30s:' % csv_package)
+      else:
+        self._scomm.InsertRow(gdata_lib.PrepRowForSS(new_row))
+        rows_inserted += 1
+        row_descr_list = []
+        for col in sorted(new_row.keys()):
+          if col != self.ID_COL:
+            row_descr_list.append('%s="%s"' % (col, new_row[col]))
+        oper.Info('A %-30s: %s' % (csv_package, ', '.join(row_descr_list)))
+
+    return (rows_unchanged, rows_updated, rows_inserted)
+
+  def _DeleteOldRows(self):
+    """Delete all rows from the spreadsheet that are not found in the table."""
+    oper.Notice('Checking for rows in worksheet that should be deleted now.')
+
+    rows_deleted, rows_with_owner_deleted = (0, 0)
+
+    # Also need to delete rows in spreadsheet that are not in csv table.
+    ss_rows = self._scomm.GetRows()
+    for ss_row in ss_rows:
+      ss_package = gdata_lib.ScrubValFromSS(ss_row[self.SS_ID_COL])
+
+      # See whether this row is in csv table.
+      csv_rows = self._csv_table.GetRowsByValue({self.ID_COL: ss_package})
+      if not csv_rows:
+        # Row needs to be deleted from spreadsheet.
+        owner_val = None
+        owner_notes_val = None
+        row_descr_list = []
+        for col in sorted(ss_row.keys()):
+          if col == 'owner':
+            owner_val = ss_row[col]
+          if col == 'ownernotes':
+            owner_notes_val = ss_row[col]
+
+          # Don't include ID_COL value in description, it is in prefix already.
+          if col != self.SS_ID_COL:
+            val = ss_row[col]
+            row_descr_list.append('%s="%s"' % (col, val))
+
+        oper.Info('D %-30s: %s' % (ss_package, ', '.join(row_descr_list)))
+        if owner_val or owner_notes_val:
+          rows_with_owner_deleted += 1
+          oper.Notice('WARNING: Deleting spreadsheet row with owner entry:\n' +
+                      '  %-30s: Owner=%s, Owner Notes=%s' %
+                      (ss_package, owner_val, owner_notes_val))
+
+        self._scomm.DeleteRow(ss_row.ss_row_obj)
+        rows_deleted += 1
+
+    return (rows_deleted, rows_with_owner_deleted)
+
+
+def LoadTable(table_file):
+  """Load csv |table_file| into a table.  Return table."""
+  oper.Notice('Loading csv table from "%s".' % (table_file))
+  csv_table = table.Table.LoadFromCSV(table_file)
+  return csv_table
+
+
+def PrepareCreds(cred_file, token_file, email, password):
+  """Return a Creds object from given credentials.
+
+  If |email| is given, the Creds object will contain that |email|
+  and either the given |password| or one entered at a prompt.
+
+  Otherwise, if |token_file| is given then the Creds object will have
+  the auth_token from that file.
+
+  Otherwise, if |cred_file| is given then the Creds object will have
+  the email/password from that file.
+  """
+
+  creds = gdata_lib.Creds()
+
+  if email:
+    creds.SetCreds(email, password)
+  elif token_file and os.path.exists(token_file):
+    creds.LoadAuthToken(token_file)
+  elif cred_file and os.path.exists(cred_file):
+    creds.LoadCreds(cred_file)
+
+  return creds
+
+
+def main(argv):
+  """Main function."""
+  usage = 'Usage: %prog [options] csv_file'
+  parser = optparse.OptionParser(usage=usage)
+  parser.add_option('--auth-token-file', dest='token_file', type='string',
+                    action='store', default=None,
+                    help='File for reading/writing Docs auth token.')
+  parser.add_option('--cred-file', dest='cred_file', type='string',
+                    action='store', default=None,
+                    help='File for reading/writing Docs login email/password.')
+  parser.add_option('--email', dest='email', type='string',
+                    action='store', default=None,
+                    help='Email for Google Doc user')
+  parser.add_option('--password', dest='password', type='string',
+                    action='store', default=None,
+                    help='Password for Google Doc user')
+  parser.add_option('--ss-key', dest='ss_key', type='string',
+                    action='store', default=None,
+                    help='Key of spreadsheet to upload to')
+  parser.add_option('--test-spreadsheet', dest='test_ss',
+                    action='store_true', default=False,
+                    help='Upload to the testing spreadsheet.')
+  parser.add_option('--verbose', dest='verbose',
+                    action='store_true', default=False,
+                    help='Show details about packages.')
+
+  (options, args) = parser.parse_args(argv)
+
+  oper.verbose = options.verbose
+
+  if len(args) < 1:
+    parser.print_help()
+    oper.Die('One csv_file is required.')
+
+  # If email or password provided, the other is required.  If neither is
+  # provided, then either token_file or cred_file must be provided and
+  # be a real file.
+  if options.email or options.password:
+    if not (options.email and options.password):
+      parser.print_help()
+      oper.Die('The email/password options must be used together.')
+  elif not ((options.cred_file and os.path.exists(options.cred_file)) or
+            (options.token_file and os.path.exists(options.token_file))):
+    parser.print_help()
+    oper.Die('Without email/password, cred-file or auth-token-file '
+             'must exist.')
+
+  # --ss-key and --test-spreadsheet are mutually exclusive.
+  if options.ss_key and options.test_ss:
+    parser.print_help()
+    oper.Die('Cannot specify --ss-key and --test-spreadsheet together.')
+
+  # Prepare credentials for spreadsheet access.
+  creds = PrepareCreds(options.cred_file, options.token_file,
+                       options.email, options.password)
+
+  # Load the given csv file.
+  csv_table = LoadTable(args[0])
+
+  # Prepare table for upload.
+  mps.FinalizeTable(csv_table)
+
+  # Prepare the Google Doc client for uploading.
+  uploader = Uploader(creds, csv_table)
+
+  ss_key = options.ss_key
+  ws_names = [PKGS_WS_NAME, DEPS_WS_NAME]
+  if not ss_key:
+    if options.test_ss:
+      ss_key = TEST_SS_KEY  # For testing with backup spreadsheet
+    else:
+      ss_key = REAL_SS_KEY
+
+  for ws_name in ws_names:
+    uploader.Upload(ss_key, ws_name=ws_name)
+
+  # If cred_file given and new credentials were used then write
+  # credentials out to that location.
+  if options.cred_file:
+    creds.StoreCredsIfNeeded(options.cred_file)
+
+  # If token_file path given and new auth token was used then
+  # write auth_token out to that location.
+  if options.token_file:
+    creds.StoreAuthTokenIfNeeded(options.token_file)
diff --git a/scripts/upload_package_status_unittest b/scripts/upload_package_status_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/upload_package_status_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/upload_package_status_unittest.py b/scripts/upload_package_status_unittest.py
new file mode 100644
index 0000000..d0dfd3b
--- /dev/null
+++ b/scripts/upload_package_status_unittest.py
@@ -0,0 +1,472 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for cros_portage_upgrade.py."""
+
+from __future__ import print_function
+
+import exceptions
+import mox
+
+from chromite.lib import cros_test_lib
+from chromite.lib import gdata_lib
+from chromite.lib import osutils
+from chromite.lib import table as tablelib
+from chromite.scripts import merge_package_status as mps
+from chromite.scripts import upload_package_status as ups
+
+
+# pylint: disable=W0212,R0904,E1120,E1101
+
+
class SSEntry(object):
  """Simulated spreadsheet cell holding a single text value."""

  def __init__(self, text):
    # Real gdata cell objects expose their contents via the .text attribute.
    self.text = text
+
+
class SSRow(object):
  """Simulated spreadsheet row keyed by normalized column name."""

  def __init__(self, row, cols=None):
    self.custom = {}

    # When no explicit column order is given, take whatever order the
    # source dict provides; order is irrelevant in that case.
    if not cols:
      cols = row.keys()

    for name in cols:
      norm_col = gdata_lib.PrepColNameForSS(name)
      norm_val = gdata_lib.PrepValForSS(row[name])
      self.custom[norm_col] = SSEntry(norm_val)
+
+
class SSFeed(object):
  """Simulated spreadsheet list feed wrapping a sequence of SSRow objects."""

  def __init__(self, rows, cols=None):
    # Mirror the gdata feed interface: entries live on .entry.
    self.entry = [SSRow(row, cols) for row in rows]
+
+
+class UploaderTest(cros_test_lib.MoxOutputTestCase):
+  """Test the functionality of upload_package_status.Uploader class."""
+
+  # Column names used in the CSV table being uploaded.
+  COL_PKG = 'Package'
+  COL_SLOT = 'Slot'
+  COL_OVERLAY = 'Overlay'
+  COL_STATUS = 'Status'
+  COL_VER = 'Current Version'
+  COL_STABLE_UP = 'Stable Upstream Version'
+  COL_LATEST_UP = 'Latest Upstream Version'
+  COL_TARGET = 'Chrome OS Root Target'
+
+  # The same column names after spreadsheet normalization.
+  SS_COL_PKG = gdata_lib.PrepColNameForSS(COL_PKG)
+  SS_COL_SLOT = gdata_lib.PrepColNameForSS(COL_SLOT)
+  SS_COL_OVERLAY = gdata_lib.PrepColNameForSS(COL_OVERLAY)
+  SS_COL_STATUS = gdata_lib.PrepColNameForSS(COL_STATUS)
+  SS_COL_VER = gdata_lib.PrepColNameForSS(COL_VER)
+  SS_COL_STABLE_UP = gdata_lib.PrepColNameForSS(COL_STABLE_UP)
+  SS_COL_LATEST_UP = gdata_lib.PrepColNameForSS(COL_LATEST_UP)
+  SS_COL_TARGET = gdata_lib.PrepColNameForSS(COL_TARGET)
+
+  COLS = [
+      COL_PKG,
+      COL_SLOT,
+      COL_OVERLAY,
+      COL_STATUS,
+      COL_VER,
+      COL_STABLE_UP,
+      COL_LATEST_UP,
+      COL_TARGET,
+  ]
+
+  ROW0 = {
+      COL_PKG: 'lib/foo',
+      COL_SLOT: '0',
+      COL_OVERLAY: 'portage',
+      COL_STATUS: 'needs upgrade',
+      COL_VER: '3.0.2',
+      COL_STABLE_UP: '3.0.9',
+      COL_LATEST_UP: '3.0.11',
+      COL_TARGET: 'virtual/target-os',
+  }
+  ROW1 = {
+      COL_PKG: 'sys-dev/bar',
+      COL_SLOT: '0',
+      COL_OVERLAY: 'chromiumos-overlay',
+      COL_STATUS: 'needs upgrade',
+      COL_VER: '1.2.3-r1',
+      COL_STABLE_UP: '1.2.3-r2',
+      COL_LATEST_UP: '1.2.4',
+      COL_TARGET: 'virtual/target-os-dev',
+  }
+  ROW2 = {
+      COL_PKG: 'sys-dev/raster',
+      COL_SLOT: '1',
+      COL_OVERLAY: 'chromiumos-overlay',
+      COL_STATUS: 'current',
+      COL_VER: '1.2.3',
+      COL_STABLE_UP: '1.2.3',
+      COL_LATEST_UP: '1.2.4',
+      COL_TARGET: 'virtual/target-os-test',
+  }
+
+  # The same rows keyed by normalized (spreadsheet) column names.
+  SS_ROW0 = dict([(gdata_lib.PrepColNameForSS(c), v) for c, v in ROW0.items()])
+  SS_ROW1 = dict([(gdata_lib.PrepColNameForSS(c), v) for c, v in ROW1.items()])
+  SS_ROW2 = dict([(gdata_lib.PrepColNameForSS(c), v) for c, v in ROW2.items()])
+
+  EMAIL = 'knights@ni.com'
+  PASSWORD = 'the'
+
+  def _MockUploader(self, table=None):
+    """Set up a mocked Uploader object."""
+    uploader = self.mox.CreateMock(ups.Uploader)
+
+    if not table:
+      # Use default table
+      table = self._CreateDefaultTable()
+
+    # Give the mock all attributes a real Uploader instance would have.
+    for slot in ups.Uploader.__slots__:
+      uploader.__setattr__(slot, None)
+
+    uploader._csv_table = table
+    uploader._scomm = self.mox.CreateMock(gdata_lib.SpreadsheetComm)
+    uploader._creds = cros_test_lib.EasyAttr(user=self.EMAIL,
+                                             password=self.PASSWORD)
+    uploader._ss_row_cache = self._CreateRowCache(table)
+
+    return uploader
+
+  def _CreateRowCache(self, table):
+    """Recreate the expected row cache (by pkg) from |table|."""
+    if not table:
+      return None
+
+    row_cache = {}
+    for rowIx, row in enumerate(table):
+      pkg = row[self.COL_PKG]
+
+      # Translate column names now.
+      ss_row_dict = {}
+      for col in row:
+        ss_row_dict[gdata_lib.PrepColNameForSS(col)] = row[col]
+
+      # Spreadsheet rows are 1-based with a header row, so data starts at 2.
+      ss_row = gdata_lib.SpreadsheetRow('OrigRow%d' % (rowIx + 2),
+                                        rowIx + 2, ss_row_dict)
+      # A package seen more than once maps to a list of rows.
+      entry = row_cache.get(pkg)
+      if not entry:
+        row_cache[pkg] = ss_row
+      elif type(entry) == list:
+        row_cache[pkg] = entry + [ss_row]
+      else:
+        row_cache[pkg] = [entry, ss_row]
+    return row_cache
+
+  def _CreateDefaultTable(self):
+    """Create a table with the default columns holding ROW0 and ROW1."""
+    return self._CreateTableWithRows(self.COLS,
+                                     [self.ROW0, self.ROW1])
+
+  def _CreateTableWithRows(self, cols, rows):
+    """Build a Table with |cols| columns and append each dict in |rows|."""
+    mytable = tablelib.Table(list(cols))
+    if rows:
+      for row in rows:
+        mytable.AppendRow(dict(row))
+    return mytable
+
+  def testLoadTable(self):
+    """Verify LoadTable delegates to Table.LoadFromCSV."""
+    # Note that this test is not actually for method of Uploader class.
+
+    self.mox.StubOutWithMock(tablelib.Table, 'LoadFromCSV')
+    csv = 'any.csv'
+
+    # Replay script
+    tablelib.Table.LoadFromCSV(csv).AndReturn('loaded_table')
+    self.mox.ReplayAll()
+
+    # Verification steps.
+    with self.OutputCapturer():
+      loaded_table = ups.LoadTable(csv)
+      self.assertEquals(loaded_table, 'loaded_table')
+
+  def testGetSSRowForPackage(self):
+    """Verify lookup of cached spreadsheet rows by package name."""
+    mocked_uploader = self._MockUploader()
+
+    # No replay script.
+    self.mox.ReplayAll()
+
+    # Verification steps.
+    result = ups.Uploader._GetSSRowForPackage(mocked_uploader,
+                                              self.ROW0[self.COL_PKG])
+    self.assertEquals(result, self.SS_ROW0)
+    self.assertEquals(2, result.ss_row_num)
+    result = ups.Uploader._GetSSRowForPackage(mocked_uploader,
+                                              self.ROW1[self.COL_PKG])
+    self.assertEquals(result, self.SS_ROW1)
+    self.assertEquals(3, result.ss_row_num)
+    result = ups.Uploader._GetSSRowForPackage(mocked_uploader,
+                                              self.ROW2[self.COL_PKG])
+    self.assertEquals(result, None)
+
+    self.mox.VerifyAll()
+
+  def testUploadFirstWorksheet(self):
+    """Verify Upload creates the spreadsheet connection on first use."""
+    mocked_uploader = self._MockUploader()
+
+    # Clear ._scomm attribute to simulate uploading first worksheet.
+    mocked_scomm = mocked_uploader._scomm
+    mocked_uploader._scomm = None
+
+    self.mox.StubOutWithMock(gdata_lib.SpreadsheetComm, '__new__')
+
+    ss_key = 'Some ss_key'
+    ws_name = 'Some ws_name'
+
+    # Replay script
+    gdata_lib.SpreadsheetComm.__new__(
+        gdata_lib.SpreadsheetComm).AndReturn(mocked_scomm)
+    mocked_scomm.Connect(mocked_uploader._creds, ss_key, ws_name,
+                         source='Upload Package Status')
+    mocked_scomm.GetRowCacheByCol(self.SS_COL_PKG).AndReturn('RowCache')
+    mocked_uploader._UploadChangedRows().AndReturn(tuple([0, 1, 2]))
+    mocked_uploader._DeleteOldRows().AndReturn(tuple([3, 4]))
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      ups.Uploader.Upload(mocked_uploader, ss_key, ws_name)
+      self.mox.VerifyAll()
+
+  def testUploadSecondWorksheet(self):
+    """Verify Upload reuses the existing connection for later worksheets."""
+    mocked_uploader = self._MockUploader()
+
+    ss_key = 'Some ss_key'
+    ws_name = 'Some ws_name'
+
+    # Replay script
+    mocked_uploader._scomm.SetCurrentWorksheet(ws_name)
+    mocked_uploader._scomm.GetRowCacheByCol(self.SS_COL_PKG).AndReturn('RCache')
+    mocked_uploader._UploadChangedRows().AndReturn(tuple([0, 1, 2]))
+    mocked_uploader._DeleteOldRows().AndReturn(tuple([3, 4]))
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      ups.Uploader.Upload(mocked_uploader, ss_key, ws_name)
+      self.mox.VerifyAll()
+
+  def testUploadChangedRows(self):
+    """Verify _UploadChangedRows inserts new rows and updates changed ones."""
+    table = self._CreateTableWithRows(self.COLS,
+                                      [self.ROW0, self.ROW1, self.ROW2])
+    mocked_uploader = self._MockUploader(table=table)
+
+    def RowVerifier(row_delta, golden_col_set, golden_row):
+      # A delta is correct iff it touches exactly the expected columns and
+      # every touched column carries the golden value.
+      if golden_col_set != set(row_delta.keys()):
+        return False
+
+      for col in row_delta:
+        val = row_delta[col]
+        if val != golden_row[col]:
+          return False
+
+      return True
+
+    # First Row.
+    # Pretend first row does not exist already in online spreadsheet
+    # by returning (None, None) from _GetSSRowForPackage.
+    #
+    row0_pkg = self.ROW0[self.COL_PKG]
+    mocked_uploader._GetSSRowForPackage(row0_pkg).AndReturn(None)
+    mocked_uploader._scomm.InsertRow(mox.IgnoreArg())
+
+    # Second Row.
+    # Pretend second row does already exist in online spreadsheet, and
+    # pretend that it has a different value that needs to be changed
+    # by an upload.
+    row1_pkg = self.ROW1[self.COL_PKG]
+    row1_reverse_delta = {self.SS_COL_VER: '1.2.3'}
+    ss_row1 = dict(self.SS_ROW1)
+    for col in row1_reverse_delta:
+      ss_row1[col] = row1_reverse_delta[col]
+    ss_row1 = gdata_lib.SpreadsheetRow('OrigRow1', 3, ss_row1)
+    mocked_uploader._GetSSRowForPackage(row1_pkg).AndReturn(ss_row1)
+    # Prepare verfication for row.
+    g_col_set1 = set(row1_reverse_delta.keys())
+    g_row1 = gdata_lib.PrepRowForSS(self.SS_ROW1)
+    row1_verifier = lambda rdelta: RowVerifier(rdelta, g_col_set1, g_row1)
+    mocked_uploader._scomm.UpdateRowCellByCell(3, mox.Func(row1_verifier))
+
+    # Third Row.
+    # Pretend third row does already exist in online spreadsheet, and
+    # pretend that several values need to be changed by an upload.
+    row2_pkg = self.ROW2[self.COL_PKG]
+    row2_reverse_delta = {
+        self.SS_COL_STATUS: 'needs upgrade',
+        self.SS_COL_VER: '0.5',
+        self.SS_COL_TARGET: 'chromeos-foo',
+    }
+    ss_row2 = dict(self.SS_ROW2)
+    for col in row2_reverse_delta:
+      ss_row2[col] = row2_reverse_delta[col]
+    ss_row2 = gdata_lib.SpreadsheetRow('OrigRow2', 4, ss_row2)
+    mocked_uploader._GetSSRowForPackage(row2_pkg).AndReturn(ss_row2)
+    # Prepare verification for row.
+    g_col_set2 = set(row2_reverse_delta.keys())
+    g_row2 = gdata_lib.PrepRowForSS(self.SS_ROW2)
+    row2_verifier = lambda rdelta: RowVerifier(rdelta, g_col_set2, g_row2)
+    mocked_uploader._scomm.UpdateRowCellByCell(4, mox.Func(row2_verifier))
+
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      ups.Uploader._UploadChangedRows(mocked_uploader)
+    self.mox.VerifyAll()
+
+  def testDeleteOldRows(self):
+    """Verify _DeleteOldRows removes spreadsheet rows absent from the table."""
+    mocked_uploader = self._MockUploader()
+
+    # Pretend spreadsheet has 2 rows, one in table and one not.
+    ss_row1 = gdata_lib.SpreadsheetRow('OrigRow1', 2, self.SS_ROW1)
+    ss_row2 = gdata_lib.SpreadsheetRow('OrigRow2', 3, self.SS_ROW2)
+    ss_rows = (ss_row1, ss_row2)
+
+    mocked_uploader._scomm.GetRows().AndReturn(ss_rows)
+    # We expect ROW2 in spreadsheet to be deleted.
+    mocked_uploader._scomm.DeleteRow('OrigRow2')
+    self.mox.ReplayAll()
+
+    # Verify
+    with self.OutputCapturer():
+      ups.Uploader._DeleteOldRows(mocked_uploader)
+    self.mox.VerifyAll()
+
+
+class MainTest(cros_test_lib.MoxOutputTestCase):
+  """Test argument handling at the main method level."""
+
+  def testHelp(self):
+    """Test that --help is functioning"""
+    with self.OutputCapturer() as output:
+      # Running with --help should exit with code==0
+      try:
+        ups.main(['--help'])
+      except exceptions.SystemExit as e:
+        self.assertEquals(e.args[0], 0)
+
+    # Verify that a message beginning with "Usage: " was printed
+    stdout = output.GetStdout()
+    self.assertTrue(stdout.startswith('Usage: '))
+
+  def testMissingCSV(self):
+    """Test that running without a csv file argument exits with an error."""
+    with self.OutputCapturer():
+      # Running without a package should exit with code!=0
+      try:
+        ups.main([])
+      except exceptions.SystemExit as e:
+        self.assertNotEquals(e.args[0], 0)
+
+    self.AssertOutputEndsInError(check_stdout=True)
+
+  def testPrepareCredsEmailPassword(self):
+    """Verify that creating creds w/an e-mail is used over other args."""
+    email = 'foo@g.com'
+    password = 'shh'
+    creds_file = 'bogus'
+    token_file = 'boguser'
+
+    # NOTE(review): the Creds object built here is immediately discarded by
+    # the PrepareCreds call below; this looks like leftover setup — confirm
+    # and remove if so.
+    creds = gdata_lib.Creds()
+    creds.SetCreds(email, password)
+
+    creds = ups.PrepareCreds(creds_file, token_file, email, password)
+    self.assertEqual(creds.user, email)
+    self.assertEqual(creds.password, password)
+
+  def testMainEmailPassword(self):
+    """Verify that running main with email/password follows flow."""
+    csv = 'any.csv'
+    email = 'foo@g.com'
+    password = '123'
+
+    creds = gdata_lib.Creds()
+    creds_file = 'non-existing-file'
+
+    self.mox.StubOutWithMock(ups, 'PrepareCreds')
+    self.mox.StubOutWithMock(ups, 'LoadTable')
+    self.mox.StubOutWithMock(mps, 'FinalizeTable')
+    self.mox.StubOutWithMock(ups.Uploader, 'Upload')
+
+    ups.PrepareCreds(creds_file, None, email, password).AndReturn(creds)
+    ups.LoadTable(csv).AndReturn('csv_table')
+    mps.FinalizeTable('csv_table')
+    ups.Uploader.Upload(mox.IgnoreArg(), ws_name='Packages')
+    ups.Uploader.Upload(mox.IgnoreArg(), ws_name='Dependencies')
+    self.mox.ReplayAll()
+
+    ups.main([
+        '--email=%s' % email,
+        '--password=%s' % password,
+        '--cred-file=%s' % creds_file,
+        csv,
+    ])
+
+    self.mox.VerifyAll()
+
+  @osutils.TempFileDecorator
+  def testMainCredsFile(self):
+    """Verify that running main with creds file follows flow."""
+    csv = 'any.csv'
+    creds_file = self.tempfile
+    token_file = 'non-existing-file'
+
+    creds = gdata_lib.Creds()
+
+    self.mox.StubOutWithMock(ups, 'PrepareCreds')
+    self.mox.StubOutWithMock(ups, 'LoadTable')
+    self.mox.StubOutWithMock(mps, 'FinalizeTable')
+    self.mox.StubOutWithMock(ups.Uploader, 'Upload')
+
+    ups.PrepareCreds(creds_file, token_file, None, None).AndReturn(creds)
+    ups.LoadTable(csv).AndReturn('csv_table')
+    mps.FinalizeTable('csv_table')
+    ups.Uploader.Upload(mox.IgnoreArg(), ws_name=ups.PKGS_WS_NAME)
+    ups.Uploader.Upload(mox.IgnoreArg(), ws_name=ups.DEPS_WS_NAME)
+    self.mox.ReplayAll()
+
+    ups.main([
+        '--cred-file=%s' % creds_file,
+        '--auth-token-file=%s' % token_file,
+        csv,
+    ])
+
+    self.mox.VerifyAll()
+
+  @osutils.TempFileDecorator
+  def testMainTokenFile(self):
+    """Verify that running main with token file follows flow."""
+    csv = 'any.csv'
+    token_file = self.tempfile
+    creds_file = 'non-existing-file'
+
+    creds = gdata_lib.Creds()
+
+    self.mox.StubOutWithMock(ups, 'PrepareCreds')
+    self.mox.StubOutWithMock(ups, 'LoadTable')
+    self.mox.StubOutWithMock(mps, 'FinalizeTable')
+    self.mox.StubOutWithMock(ups.Uploader, 'Upload')
+
+    ups.PrepareCreds(creds_file, token_file, None, None).AndReturn(creds)
+    ups.LoadTable(csv).AndReturn('csv_table')
+    mps.FinalizeTable('csv_table')
+    ups.Uploader.Upload(mox.IgnoreArg(), ws_name=ups.PKGS_WS_NAME)
+    ups.Uploader.Upload(mox.IgnoreArg(), ws_name=ups.DEPS_WS_NAME)
+    self.mox.ReplayAll()
+
+    ups.main([
+        '--cred-file=%s' % creds_file,
+        '--auth-token-file=%s' % token_file,
+        csv,
+    ])
+
+    self.mox.VerifyAll()
diff --git a/scripts/upload_prebuilts.py b/scripts/upload_prebuilts.py
new file mode 100644
index 0000000..ccb1dcc
--- /dev/null
+++ b/scripts/upload_prebuilts.py
@@ -0,0 +1,889 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script is used to upload host prebuilts as well as board BINHOSTS.
+
+Prebuilts are uploaded using gsutil to Google Storage. After these prebuilts
+are successfully uploaded, a file is updated with the proper BINHOST version.
+
+To read more about prebuilts/binhost binary packages please refer to:
+http://goto/chromeos-prebuilts
+
+Example of uploading prebuilt amd64 host files to Google Storage:
+upload_prebuilts -p /b/cbuild/build -s -u gs://chromeos-prebuilt
+
+Example of uploading x86-dogfood binhosts to Google Storage:
+upload_prebuilts -b x86-dogfood -p /b/cbuild/build/ -u gs://chromeos-prebuilt -g
+"""
+
+from __future__ import print_function
+
+import argparse
+import datetime
+import functools
+import glob
+import multiprocessing
+import os
+import sys
+import tempfile
+
+from chromite.cbuildbot import constants
+from chromite.cbuildbot import commands
+from chromite.lib import binpkg
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import git
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import portage_util
+from chromite.lib import toolchain
+
+# How many times to retry uploads.
+_RETRIES = 10
+
+# Multiplier for how long to sleep (in seconds) between retries; will delay
+# (1*sleep) the first time, then (2*sleep), continuing via attempt * sleep.
+_SLEEP_TIME = 60
+
+# The length of time (in seconds) that Portage should wait before refetching
+# binpkgs from the same binhost. We don't ever modify binhosts, so this should
+# be something big.
+_BINPKG_TTL = 60 * 60 * 24 * 365
+
+_HOST_PACKAGES_PATH = 'chroot/var/lib/portage/pkgs'
+_CATEGORIES_PATH = 'chroot/etc/portage/categories'
+_PYM_PATH = 'chroot/usr/lib/portage/pym'
+_HOST_ARCH = 'amd64'
+_BOARD_PATH = 'chroot/build/%(board)s'
+_REL_BOARD_PATH = 'board/%(target)s/%(version)s'
+_REL_HOST_PATH = 'host/%(host_arch)s/%(target)s/%(version)s'
+# Private overlays to look at for builds to filter
+# relative to build path
+_PRIVATE_OVERLAY_DIR = 'src/private-overlays'
+_GOOGLESTORAGE_GSUTIL_FILE = 'googlestorage_acl.txt'
+_BINHOST_BASE_URL = 'gs://chromeos-prebuilt'
+_PREBUILT_BASE_DIR = 'src/third_party/chromiumos-overlay/chromeos/config/'
+# Created in the event of new host targets becoming available
+_PREBUILT_MAKE_CONF = {'amd64': os.path.join(_PREBUILT_BASE_DIR,
+                                             'make.conf.amd64-host')}
+
+
class BuildTarget(object):
  """A board/variant/profile tuple.

  Attributes:
    board_variant: The full '<board>_<variant>' string as given.
    board: The board portion of |board_variant|.
    variant: The variant portion of |board_variant| ('' when absent).
    profile: Optional profile name, or None.
  """

  def __init__(self, board_variant, profile=None):
    self.board_variant = board_variant
    # Split on the first underscore only, so variants may themselves
    # contain underscores.
    self.board, _, self.variant = board_variant.partition('_')
    self.profile = profile

  def __str__(self):
    if self.profile:
      return '%s_%s' % (self.board_variant, self.profile)
    else:
      return self.board_variant

  def __eq__(self, other):
    return str(other) == str(self)

  def __ne__(self, other):
    # Python 2 does not derive != from ==; define it explicitly so the two
    # operators stay consistent (without this, != compared identities).
    return not self.__eq__(other)

  def __hash__(self):
    return hash(str(self))
+
+
def UpdateLocalFile(filename, value, key='PORTAGE_BINHOST'):
  """Update the key in file with the value passed.

  File format:
    key="value"
  Note quotes are added automatically

  Args:
    filename: Name of file to modify.
    value: Value to write with the key.
    key: The variable key to update. (Default: PORTAGE_BINHOST)

  Returns:
    True if changes were made to the file.
  """
  if os.path.exists(filename):
    file_fh = open(filename)
  else:
    # Create the file so the read loop below sees an empty file.
    file_fh = open(filename, 'w+')
  file_lines = []
  found = False
  made_changes = False
  keyval_str = '%(key)s=%(value)s'
  # Close the handle even if parsing raises.
  with file_fh:
    for line in file_fh:
      # Strip newlines from end of line. We already add newlines below.
      line = line.rstrip("\n")

      # Split on the first '=' only so values containing '=' (e.g. URLs
      # with query strings) still parse as key=value; previously such lines
      # were skipped and the key was appended again, duplicating it.
      parts = line.split('=', 1)
      if len(parts) != 2:
        # Skip any line that doesn't fit key=val.
        file_lines.append(line)
        continue

      file_var, file_val = parts
      if file_var == key:
        found = True
        print('Updating %s=%s to %s="%s"' % (file_var, file_val, key, value))
        value = '"%s"' % value
        made_changes |= (file_val != value)
        file_lines.append(keyval_str % {'key': key, 'value': value})
      else:
        file_lines.append(keyval_str % {'key': file_var, 'value': file_val})

  if not found:
    value = '"%s"' % value
    made_changes = True
    file_lines.append(keyval_str % {'key': key, 'value': value})

  # write out new file
  osutils.WriteFile(filename, '\n'.join(file_lines) + '\n')
  return made_changes
+
+
+def RevGitFile(filename, data, retries=5, dryrun=False):
+  """Update and push the git file.
+
+  Args:
+    filename: file to modify that is in a git repo already
+    data: A dict of key/values to update in |filename|
+    retries: The number of times to retry before giving up, default: 5
+    dryrun: If True, do not actually commit the change.
+  """
+  prebuilt_branch = 'prebuilt_branch'
+  cwd = os.path.abspath(os.path.dirname(filename))
+  # Remember the current commit so we can restore the checkout afterwards.
+  commit = git.RunGit(cwd, ['rev-parse', 'HEAD']).output.rstrip()
+  description = '%s: updating %s' % (os.path.basename(filename),
+                                     ', '.join(data.keys()))
+  # UpdateLocalFile will print out the keys/values for us.
+  print('Revving git file %s' % filename)
+
+  try:
+    git.CreatePushBranch(prebuilt_branch, cwd)
+    for key, value in data.iteritems():
+      UpdateLocalFile(filename, value, key)
+    git.RunGit(cwd, ['add', filename])
+    git.RunGit(cwd, ['commit', '-m', description])
+    git.PushWithRetry(prebuilt_branch, cwd, dryrun=dryrun, retries=retries)
+  finally:
+    # Always return to the original commit, even if the push failed.
+    git.RunGit(cwd, ['checkout', commit])
+
+
def GetVersion():
  """Get the version to put in LATEST and update the git version with."""
  # Timestamp-based version string: YYYY.MM.DD.HHMMSS (local time).
  now = datetime.datetime.now()
  return now.strftime('%Y.%m.%d.%H%M%S')
+
+
+def _GsUpload(gs_context, acl, local_file, remote_file):
+  """Upload to GS bucket.
+
+  Args:
+    gs_context: A lib.gs.GSContext instance.
+    acl: The ACL to use for uploading the file.
+    local_file: The local file to be uploaded.
+    remote_file: The remote location to upload to.
+  """
+  # ACL names accepted directly as canned ACLs; anything else is treated
+  # as a path to an ACL description file below.
+  CANNED_ACLS = ['public-read', 'private', 'bucket-owner-read',
+                 'authenticated-read', 'bucket-owner-full-control',
+                 'public-read-write']
+  if acl in CANNED_ACLS:
+    gs_context.Copy(local_file, remote_file, acl=acl)
+  else:
+    # For private uploads we assume that the overlay board is set up properly
+    # and a googlestore_acl.xml is present. Otherwise, this script errors.
+    # We set version=0 here to ensure that the ACL is set only once (see
+    # http://b/15883752#comment54).
+    try:
+      gs_context.Copy(local_file, remote_file, version=0)
+    except gs.GSContextPreconditionFailed as ex:
+      # If we received a GSContextPreconditionFailed error, we know that the
+      # file exists now, but we don't know whether our specific update
+      # succeeded. See http://b/15883752#comment62
+      logging.warning(
+          'Assuming upload succeeded despite PreconditionFailed errors: %s', ex)
+
+    if acl.endswith('.xml'):
+      # Apply the passed in ACL xml file to the uploaded object.
+      gs_context.SetACL(remote_file, acl=acl)
+    else:
+      # Otherwise treat |acl| as a file of ACL-change arguments.
+      gs_context.ChangeACL(remote_file, acl_args_file=acl)
+
+
+def RemoteUpload(gs_context, acl, files, pool=10):
+  """Upload to google storage.
+
+  Create a pool of processes and call _GsUpload with the proper arguments.
+  Does not return a value; a failed upload surfaces from the process pool.
+
+  Args:
+    gs_context: A lib.gs.GSContext instance.
+    acl: The canned acl used for uploading. acl can be one of: "public-read",
+         "public-read-write", "authenticated-read", "bucket-owner-read",
+         "bucket-owner-full-control", or "private".
+    files: dictionary with keys to local files and values to remote path.
+    pool: integer of maximum processes to have at the same time.
+  """
+  upload = functools.partial(_GsUpload, gs_context, acl)
+  tasks = [[key, value] for key, value in files.iteritems()]
+  parallel.RunTasksInProcessPool(upload, tasks, pool)
+
+
def GenerateUploadDict(base_local_path, base_remote_path, pkgs):
  """Build a dictionary of local remote file key pairs to upload.

  Args:
    base_local_path: The base path to the files on the local hard drive.
    base_remote_path: The base path to the remote paths.
    pkgs: The packages to upload.

  Returns:
    Returns a dictionary of local_path/remote_path pairs
  """
  upload_files = {}
  for pkg in pkgs:
    # Every package contributes its binary tarball; packages built with
    # debug symbols also contribute a .debug tarball.
    names = [pkg['CPV'] + '.tbz2']
    if pkg.get('DEBUG_SYMBOLS') == 'yes':
      names.append(pkg['CPV'] + '.debug.tbz2')

    for name in names:
      src = os.path.join(base_local_path, name)
      assert os.path.exists(src), '%s does not exist' % src
      upload_files[src] = os.path.join(base_remote_path, name)

  return upload_files
+
+
def GetBoardOverlay(build_path, target):
  """Get the path to the board variant.

  Args:
    build_path: The path to the root of the build directory
    target: The target board as a BuildTarget object.

  Returns:
    The last overlay configured for the given board as a string.
  """
  overlays = portage_util.FindOverlays(constants.BOTH_OVERLAYS,
                                       target.board_variant,
                                       buildroot=build_path)
  # We only care about the last entry.
  return overlays[-1]
+
+
def DeterminePrebuiltConfFile(build_path, target):
  """Determine the prebuilt.conf file that needs to be updated for prebuilts.

  Args:
    build_path: The path to the root of the build directory
    target: String representation of the board. This includes host and board
      targets

  Returns:
    A string path to a prebuilt.conf file to be updated.
  """
  if target == _HOST_ARCH:
    # We are host.
    # Without more examples of hosts this is a kludge for now.
    # TODO(Scottz): as new host targets come online expand this to
    # work more like boards.
    return _PREBUILT_MAKE_CONF[target]

  # We are a board; the config lives in the board's overlay.
  overlay = GetBoardOverlay(build_path, target)
  return os.path.join(overlay, 'prebuilt.conf')
+
+
+def UpdateBinhostConfFile(path, key, value):
+  """Update binhost config file with key=value.
+
+  The file (and its directory) is created if it does not yet exist, and the
+  change is committed to git on a push branch.
+
+  Args:
+    path: Filename to update.
+    key: Key to update.
+    value: New value for key.
+  """
+  cwd, filename = os.path.split(os.path.abspath(path))
+  osutils.SafeMakedirs(cwd)
+  if not git.GetCurrentBranch(cwd):
+    git.CreatePushBranch(constants.STABLE_EBUILD_BRANCH, cwd, sync=False)
+  # Touch the file so UpdateLocalFile always has something to read.
+  osutils.WriteFile(path, '', mode='a')
+  if UpdateLocalFile(path, value, key):
+    desc = '%s: %s %s' % (filename, 'updating' if value else 'clearing', key)
+    git.AddPath(path)
+    git.Commit(cwd, desc)
+
def GenerateHtmlIndex(files, index, board, version):
  """Given the list of |files|, generate an index.html at |index|.

  Args:
    files: The list of files to link to.
    index: The path to the html index.
    board: Name of the board this index is for.
    version: Build version this index is for.
  """
  head = ("""<html>
<head>
 <title>Package Prebuilt Index: %(board)s / %(version)s</title>
</head>
<body>
<h2>Package Prebuilt Index: %(board)s / %(version)s</h2>"""
          % {'board': board, 'version': version})

  # Add navigation entries back to the Google Storage index and the parent
  # directory ("name|text" format).
  entries = files + [
      '.|Google Storage Index',
      '..|',
  ]
  commands.GenerateHtmlIndex(index, entries, head=head)
+
+
def _GrabAllRemotePackageIndexes(binhost_urls):
  """Grab all of the packages files associated with a list of binhost_urls.

  Args:
    binhost_urls: The URLs for the directories containing the Packages files we
                  want to grab.

  Returns:
    A list of PackageIndex objects.
  """
  # Drop binhosts whose Packages file could not be fetched (falsy results).
  return [index for index in
          (binpkg.GrabRemotePackageIndex(url) for url in binhost_urls)
          if index]
+
+
+class PrebuiltUploader(object):
+  """Synchronize host and board prebuilts."""
+
+  def __init__(self, upload_location, acl, binhost_base_url, pkg_indexes,
+               build_path, packages, skip_upload, binhost_conf_dir, dryrun,
+               target, slave_targets, version):
+    """Constructor for prebuilt uploader object.
+
+    This object can upload host or prebuilt files to Google Storage.
+
+    Args:
+      upload_location: The upload location.
+      acl: The canned acl used for uploading to Google Storage. acl can be one
+           of: "public-read", "public-read-write", "authenticated-read",
+           "bucket-owner-read", "bucket-owner-full-control", "project-private",
+           or "private" (see "gsutil help acls"). If we are not uploading to
+           Google Storage, this parameter is unused.
+      binhost_base_url: The URL used for downloading the prebuilts.
+      pkg_indexes: Old uploaded prebuilts to compare against. Instead of
+          uploading duplicate files, we just link to the old files.
+      build_path: The path to the directory containing the chroot.
+      packages: Packages to upload.
+      skip_upload: Don't actually upload the tarballs.
+      binhost_conf_dir: Directory where to store binhost.conf files.
+      dryrun: Don't push or upload prebuilts.
+      target: BuildTarget managed by this builder.
+      slave_targets: List of BuildTargets managed by slave builders.
+      version: A unique string, intended to be included in the upload path,
+          which identifies the version number of the uploaded prebuilts.
+    """
+    self._upload_location = upload_location
+    self._acl = acl
+    self._binhost_base_url = binhost_base_url
+    self._pkg_indexes = pkg_indexes
+    self._build_path = build_path
+    self._packages = set(packages)
+    # Tracks category/package names seen by _ShouldFilterPackage so
+    # unmatched entries of |packages| can be reported later.
+    self._found_packages = set()
+    self._skip_upload = skip_upload
+    self._binhost_conf_dir = binhost_conf_dir
+    self._dryrun = dryrun
+    self._target = target
+    self._slave_targets = slave_targets
+    self._version = version
+    # One shared gs context with retry/backoff for all uploads.
+    self._gs_context = gs.GSContext(retries=_RETRIES, sleep=_SLEEP_TIME,
+                                    dry_run=self._dryrun)
+
+  def _Upload(self, local_file, remote_file):
+    """Wrapper around _GsUpload using this uploader's gs context and ACL.
+
+    Args:
+      local_file: The local file to be uploaded.
+      remote_file: The remote location to upload to.
+    """
+    _GsUpload(self._gs_context, self._acl, local_file, remote_file)
+
+  def _ShouldFilterPackage(self, pkg):
+    """Return True if |pkg| should be excluded from the upload.
+
+    When a package list was given, anything named in it neither by plain
+    package name nor by category/package is filtered out.  As a side effect,
+    each seen category/package is recorded in self._found_packages so
+    unmatched entries can be reported later.
+
+    Args:
+      pkg: Package dict with at least a 'CPV' key.
+
+    Returns:
+      True if the package should be filtered (skipped); False otherwise.
+    """
+    if not self._packages:
+      return False
+    # Use the chroot's own portage code to parse the CPV string.
+    pym_path = os.path.abspath(os.path.join(self._build_path, _PYM_PATH))
+    sys.path.insert(0, pym_path)
+    # pylint: disable=F0401
+    import portage.versions
+    cat, pkgname = portage.versions.catpkgsplit(pkg['CPV'])[0:2]
+    cp = '%s/%s' % (cat, pkgname)
+    self._found_packages.add(cp)
+    return pkgname not in self._packages and cp not in self._packages
+
+  def _UploadPrebuilt(self, package_path, url_suffix):
+    """Upload host or board prebuilt files to Google Storage space.
+
+    Args:
+      package_path: The path to the packages dir.
+      url_suffix: The remote subdirectory where we should upload the packages.
+    """
+    # Process Packages file, removing duplicates and filtered packages.
+    pkg_index = binpkg.GrabLocalPackageIndex(package_path)
+    pkg_index.SetUploadLocation(self._binhost_base_url, url_suffix)
+    pkg_index.RemoveFilteredPackages(self._ShouldFilterPackage)
+    uploads = pkg_index.ResolveDuplicateUploads(self._pkg_indexes)
+    # Report requested packages that never matched any local package.
+    unmatched_pkgs = self._packages - self._found_packages
+    if unmatched_pkgs:
+      logging.warning('unable to match packages: %r' % unmatched_pkgs)
+
+    # Write Packages file.
+    pkg_index.header['TTL'] = _BINPKG_TTL
+    tmp_packages_file = pkg_index.WriteToNamedTemporaryFile()
+
+    remote_location = '%s/%s' % (self._upload_location.rstrip('/'), url_suffix)
+    assert remote_location.startswith('gs://')
+
+    # Build list of files to upload. Manually include the dev-only files but
+    # skip them if not present.
+    # TODO(deymo): Upload dev-only-extras.tbz2 as dev-only-extras.tar.bz2
+    # outside packages/ directory. See crbug.com/448178 for details.
+    if os.path.exists(os.path.join(package_path, 'dev-only-extras.tbz2')):
+      uploads.append({'CPV': 'dev-only-extras'})
+    upload_files = GenerateUploadDict(package_path, remote_location, uploads)
+    remote_file = '%s/Packages' % remote_location.rstrip('/')
+    upload_files[tmp_packages_file.name] = remote_file
+
+    RemoteUpload(self._gs_context, self._acl, upload_files)
+
+    # Generate and upload a browsable index.html next to the packages.
+    with tempfile.NamedTemporaryFile(
+        prefix='chromite.upload_prebuilts.index.') as index:
+      GenerateHtmlIndex(
+          [x[len(remote_location) + 1:] for x in upload_files.values()],
+          index.name, self._target, self._version)
+      self._Upload(index.name, '%s/index.html' % remote_location.rstrip('/'))
+
+      link_name = 'Prebuilts[%s]: %s' % (self._target, self._version)
+      url = '%s%s/index.html' % (gs.PUBLIC_BASE_HTTPS_URL,
+                                 remote_location[len(gs.BASE_GS_URL):])
+      logging.PrintBuildbotLink(link_name, url)
+
+  def _UploadSdkTarball(self, board_path, url_suffix, prepackaged,
+                        toolchains_overlay_tarballs,
+                        toolchains_overlay_upload_path,
+                        toolchain_tarballs, toolchain_upload_path):
+    """Upload a tarball of the sdk at the specified path to Google Storage.
+
+    Args:
+      board_path: The path to the board dir.
+      url_suffix: The remote subdirectory where we should upload the packages.
+      prepackaged: If given, a tarball that has been packaged outside of this
+                   script and should be used.
+      toolchains_overlay_tarballs: List of toolchains overlay tarball
+          specifications to upload. Items take the form
+          "toolchains_spec:/path/to/tarball".
+      toolchains_overlay_upload_path: Path template under the bucket to place
+          toolchains overlay tarballs.
+      toolchain_tarballs: List of toolchain tarballs to upload.
+      toolchain_upload_path: Path under the bucket to place toolchain tarballs.
+    """
+    remote_location = '%s/%s' % (self._upload_location.rstrip('/'), url_suffix)
+    assert remote_location.startswith('gs://')
+    boardname = os.path.basename(board_path.rstrip('/'))
+    # We do not upload non SDK board tarballs,
+    assert boardname == constants.CHROOT_BUILDER_BOARD
+    assert prepackaged is not None
+
+    version_str = self._version[len('chroot-'):]
+    remote_tarfile = toolchain.GetSdkURL(
+        for_gsutil=True, suburl='cros-sdk-%s.tar.xz' % (version_str,))
+    # For SDK, also upload the manifest which is guaranteed to exist
+    # by the builderstage.
+    self._Upload(prepackaged + '.Manifest', remote_tarfile + '.Manifest')
+    self._Upload(prepackaged, remote_tarfile)
+
+    # Upload SDK toolchains overlays and toolchain tarballs, if given.
+    for tarball_list, upload_path, qualifier_name in (
+        (toolchains_overlay_tarballs, toolchains_overlay_upload_path,
+         'toolchains'),
+        (toolchain_tarballs, toolchain_upload_path, 'target')):
+      for tarball_spec in tarball_list:
+        qualifier_val, local_path = tarball_spec.split(':')
+        suburl = upload_path % {qualifier_name: qualifier_val}
+        remote_path = toolchain.GetSdkURL(for_gsutil=True, suburl=suburl)
+        self._Upload(local_path, remote_path)
+
+    # Finally, also update the pointer to the latest SDK on which polling
+    # scripts rely.
+    with osutils.TempDir() as tmpdir:
+      pointerfile = os.path.join(tmpdir, 'cros-sdk-latest.conf')
+      remote_pointerfile = toolchain.GetSdkURL(for_gsutil=True,
+                                               suburl='cros-sdk-latest.conf')
+      osutils.WriteFile(pointerfile, 'LATEST_SDK="%s"' % version_str)
+      self._Upload(pointerfile, remote_pointerfile)
+
+  def _GetTargets(self):
+    """Retuns the list of targets to use."""
+    targets = self._slave_targets[:]
+    if self._target:
+      targets.append(self._target)
+
+    return targets
+
+  def SyncHostPrebuilts(self, key, git_sync, sync_binhost_conf):
+    """Synchronize host prebuilt files.
+
+    This function will sync both the standard host packages, plus the host
+    packages associated with all targets that have been "setup" with the
+    current host's chroot. For instance, if this host has been used to build
+    x86-generic, it will sync the host packages associated with
+    'i686-pc-linux-gnu'. If this host has also been used to build arm-generic,
+    it will also sync the host packages associated with
+    'armv7a-cros-linux-gnueabi'.
+
+    Args:
+      key: The variable key to update in the git file.
+      git_sync: If set, update make.conf of target to reference the latest
+          prebuilt packages generated here.
+      sync_binhost_conf: If set, update binhost config file in
+          chromiumos-overlay for the host.
+    """
+    # Slave boards are listed before the master board so that the master board
+    # takes priority (i.e. x86-generic preflight host prebuilts takes priority
+    # over preflight host prebuilts from other builders.)
+    binhost_urls = []
+    for target in self._GetTargets():
+      url_suffix = _REL_HOST_PATH % {'version': self._version,
+                                     'host_arch': _HOST_ARCH,
+                                     'target': target}
+      packages_url_suffix = '%s/packages' % url_suffix.rstrip('/')
+
+      if self._target == target and not self._skip_upload:
+        # Upload prebuilts.
+        package_path = os.path.join(self._build_path, _HOST_PACKAGES_PATH)
+        self._UploadPrebuilt(package_path, packages_url_suffix)
+
+      # Record URL where prebuilts were uploaded.
+      binhost_urls.append('%s/%s/' % (self._binhost_base_url.rstrip('/'),
+                                      packages_url_suffix.rstrip('/')))
+
+    binhost = ' '.join(binhost_urls)
+    if git_sync:
+      git_file = os.path.join(self._build_path, _PREBUILT_MAKE_CONF[_HOST_ARCH])
+      RevGitFile(git_file, {key: binhost}, dryrun=self._dryrun)
+    if sync_binhost_conf:
+      binhost_conf = os.path.join(
+          self._binhost_conf_dir, 'host', '%s-%s.conf' % (_HOST_ARCH, key))
+      UpdateBinhostConfFile(binhost_conf, key, binhost)
+
  def SyncBoardPrebuilts(self, key, git_sync, sync_binhost_conf,
                         upload_board_tarball, prepackaged_board,
                         toolchains_overlay_tarballs,
                         toolchains_overlay_upload_path,
                         toolchain_tarballs, toolchain_upload_path):
    """Synchronize board prebuilt files.

    Args:
      key: The variable key to update in the git file.
      git_sync: If set, update make.conf of target to reference the latest
          prebuilt packages generated here.
      sync_binhost_conf: If set, update binhost config file in
          chromiumos-overlay for the current board.
      upload_board_tarball: Include a tarball of the board in our upload.
      prepackaged_board: A tarball of the board built outside of this script.
      toolchains_overlay_tarballs: List of toolchains overlay tarball
          specifications to upload. Items take the form
          "toolchains_spec:/path/to/tarball".
      toolchains_overlay_upload_path: Path template under the bucket to place
          toolchains overlay tarballs.
      toolchain_tarballs: A list of toolchain tarballs to upload.
      toolchain_upload_path: Path under the bucket to place toolchain tarballs.
    """
    # Track which binhost conf files are written so stale ones can be
    # emptied at the end.
    updated_binhosts = set()
    for target in self._GetTargets():
      board_path = os.path.join(self._build_path,
                                _BOARD_PATH % {'board': target.board_variant})
      package_path = os.path.join(board_path, 'packages')
      url_suffix = _REL_BOARD_PATH % {'target': target,
                                      'version': self._version}
      packages_url_suffix = '%s/packages' % url_suffix.rstrip('/')

      # Process the target board differently if it is the main --board.
      if self._target == target and not self._skip_upload:
        # This strips "chroot" prefix because that is sometimes added as the
        # --prepend-version argument (e.g. by chromiumos-sdk bot).
        # TODO(build): Clean it up to be less hard-coded.
        version_str = self._version[len('chroot-'):]

        # Upload board tarballs in the background.
        if upload_board_tarball:
          # NOTE: %= rebinds the parameter in place. Only the main target
          # reaches this branch, so the interpolation runs at most once.
          if toolchain_upload_path:
            toolchain_upload_path %= {'version': version_str}
          if toolchains_overlay_upload_path:
            toolchains_overlay_upload_path %= {'version': version_str}
          # Run the (slow) SDK tarball upload concurrently with the package
          # upload below.
          tar_process = multiprocessing.Process(
              target=self._UploadSdkTarball,
              args=(board_path, url_suffix, prepackaged_board,
                    toolchains_overlay_tarballs,
                    toolchains_overlay_upload_path, toolchain_tarballs,
                    toolchain_upload_path))
          tar_process.start()

        # Upload prebuilts.
        self._UploadPrebuilt(package_path, packages_url_suffix)

        # Make sure we finished uploading the board tarballs.
        if upload_board_tarball:
          tar_process.join()
          assert tar_process.exitcode == 0
          # TODO(zbehan): This should be done cleaner.
          if target.board == constants.CHROOT_BUILDER_BOARD:
            sdk_conf = os.path.join(self._binhost_conf_dir,
                                    'host/sdk_version.conf')
            sdk_settings = {
                'SDK_LATEST_VERSION': version_str,
                'TC_PATH': toolchain_upload_path,
            }
            RevGitFile(sdk_conf, sdk_settings, dryrun=self._dryrun)

      # Record URL where prebuilts were uploaded.
      url_value = '%s/%s/' % (self._binhost_base_url.rstrip('/'),
                              packages_url_suffix.rstrip('/'))

      if git_sync:
        git_file = DeterminePrebuiltConfFile(self._build_path, target)
        RevGitFile(git_file, {key: url_value}, dryrun=self._dryrun)

      if sync_binhost_conf:
        # Update the binhost configuration file in git.
        binhost_conf = os.path.join(
            self._binhost_conf_dir, 'target', '%s-%s.conf' % (target, key))
        updated_binhosts.add(binhost_conf)
        UpdateBinhostConfFile(binhost_conf, key, url_value)

    if sync_binhost_conf:
      # Clear all old binhosts. The files must be left empty in case anybody
      # is referring to them.
      all_binhosts = set(glob.glob(os.path.join(
          self._binhost_conf_dir, 'target', '*-%s.conf' % key)))
      for binhost_conf in all_binhosts - updated_binhosts:
        UpdateBinhostConfFile(binhost_conf, key, '')
+
+
class _AddSlaveBoardAction(argparse.Action):
  """argparse action: append a BuildTarget for each --slave-board value."""

  def __call__(self, parser, namespace, values, option_string=None):
    slave_list = getattr(namespace, self.dest)
    slave_list.append(BuildTarget(values))
+
+
class _AddSlaveProfileAction(argparse.Action):
  """argparse action: attach a profile to the most recent slave board."""

  def __call__(self, parser, namespace, values, option_string=None):
    targets = namespace.slave_targets
    if not targets:
      parser.error('Must specify --slave-board before --slave-profile')
    last_target = targets[-1]
    if last_target.profile is not None:
      parser.error('Cannot specify --slave-profile twice for same board')
    last_target.profile = values
+
+
def _BuildParser():
  """Create the argument parser for upload_prebuilts.

  Returns:
    A commandline.ArgumentParser with all options declared.
  """
  parser = commandline.ArgumentParser()
  parser.add_argument('-H', '--binhost-base-url', default=_BINHOST_BASE_URL,
                      help='Base URL to use for binhost in make.conf updates')
  parser.add_argument('--previous-binhost-url', action='append', default=[],
                      help='Previous binhost URL')
  parser.add_argument('-b', '--board',
                      help='Board type that was built on this machine')
  parser.add_argument('-B', '--prepackaged-tarball', type='path',
                      help='Board tarball prebuilt outside of this script.')
  parser.add_argument('--toolchains-overlay-tarball',
                      dest='toolchains_overlay_tarballs',
                      action='append', default=[],
                      help='Toolchains overlay tarball specification to '
                           'upload. Takes the form '
                           '"toolchains_spec:/path/to/tarball".')
  parser.add_argument('--toolchains-overlay-upload-path', default='',
                      help='Path template for uploading toolchains overlays.')
  parser.add_argument('--toolchain-tarball', dest='toolchain_tarballs',
                      action='append', default=[],
                      help='Redistributable toolchain tarball.')
  parser.add_argument('--toolchain-upload-path', default='',
                      help='Path to place toolchain tarballs in the sdk tree.')
  parser.add_argument('--profile',
                      help='Profile that was built on this machine')
  parser.add_argument('--slave-board', default=[], action=_AddSlaveBoardAction,
                      dest='slave_targets',
                      help='Board type that was built on a slave machine. To '
                           'add a profile to this board, use --slave-profile.')
  parser.add_argument('--slave-profile', action=_AddSlaveProfileAction,
                      help='Board profile that was built on a slave machine. '
                           'Applies to previous slave board.')
  parser.add_argument('-p', '--build-path', required=True,
                      help='Path to the directory containing the chroot')
  parser.add_argument('--packages', action='append', default=[],
                      help='Only include the specified packages. '
                           '(Default is to include all packages.)')
  parser.add_argument('-s', '--sync-host', default=False, action='store_true',
                      help='Sync host prebuilts')
  parser.add_argument('-g', '--git-sync', default=False, action='store_true',
                      help='Enable git version sync (This commits to a repo.) '
                           'This is used by full builders to commit directly '
                           'to board overlays.')
  parser.add_argument('-u', '--upload',
                      help='Upload location')
  parser.add_argument('-V', '--prepend-version',
                      help='Add an identifier to the front of the version')
  parser.add_argument('-f', '--filters', action='store_true', default=False,
                      help='Turn on filtering of private ebuild packages')
  parser.add_argument('-k', '--key', default='PORTAGE_BINHOST',
                      help='Key to update in make.conf / binhost.conf')
  parser.add_argument('--set-version',
                      help='Specify the version string')
  parser.add_argument('--sync-binhost-conf', default=False, action='store_true',
                      help='Update binhost.conf in chromiumos-overlay or '
                           'chromeos-overlay. Commit the changes, but don\'t '
                           'push them. This is used for preflight binhosts.')
  parser.add_argument('--binhost-conf-dir',
                      help='Directory to commit binhost config with '
                           '--sync-binhost-conf.')
  parser.add_argument('-P', '--private', action='store_true', default=False,
                      help='Mark gs:// uploads as private.')
  parser.add_argument('--skip-upload', action='store_true', default=False,
                      help='Skip upload step.')
  parser.add_argument('--upload-board-tarball', action='store_true',
                      default=False,
                      help='Upload board tarball to Google Storage.')
  parser.add_argument('-n', '--dry-run', dest='dryrun',
                      action='store_true', default=False,
                      help='Don\'t push or upload prebuilts.')
  return parser


def _ValidateOptions(parser, options, target):
  """Cross-check parsed options, exiting via parser.error on any conflict.

  Args:
    parser: The parser, used only to report errors.
    options: The parsed options object.
    target: The main BuildTarget, or None if no board was specified.
  """
  if not options.upload and not options.skip_upload:
    parser.error('you need to provide an upload location using -u')
  if not options.set_version and options.skip_upload:
    parser.error('If you are using --skip-upload, you must specify a '
                 'version number using --set-version.')

  if target in options.slave_targets:
    parser.error('--board/--profile must not also be a slave target.')

  if len(set(options.slave_targets)) != len(options.slave_targets):
    parser.error('--slave-boards must not have duplicates.')

  if options.slave_targets and options.git_sync:
    parser.error('--slave-boards is not compatible with --git-sync')

  if (options.upload_board_tarball and options.skip_upload and
      options.board == 'amd64-host'):
    parser.error('--skip-upload is not compatible with '
                 '--upload-board-tarball and --board=amd64-host')

  if (options.upload_board_tarball and not options.skip_upload and
      not options.upload.startswith('gs://')):
    parser.error('--upload-board-tarball only works with gs:// URLs.\n'
                 '--upload must be a gs:// URL.')

  if options.upload_board_tarball and options.prepackaged_tarball is None:
    parser.error('--upload-board-tarball requires --prepackaged-tarball')

  if options.private:
    # Private prebuilts only make sense for boards, on gs://, with the
    # binhost base URL derived automatically.
    if options.sync_host:
      parser.error('--private and --sync-host/-s cannot be specified '
                   'together; we do not support private host prebuilts')

    if not options.upload or not options.upload.startswith('gs://'):
      parser.error('--private is only valid for gs:// URLs; '
                   '--upload must be a gs:// URL.')

    if options.binhost_base_url != _BINHOST_BASE_URL:
      parser.error('when using --private the --binhost-base-url '
                   'is automatically derived.')

  if options.sync_binhost_conf and not options.binhost_conf_dir:
    parser.error('--sync-binhost-conf requires --binhost-conf-dir')

  if (options.toolchains_overlay_tarballs and
      not options.toolchains_overlay_upload_path):
    parser.error('--toolchains-overlay-tarball requires '
                 '--toolchains-overlay-upload-path')


def ParseOptions(argv):
  """Returns options given by the user and the target specified.

  Args:
    argv: The args to parse.

  Returns:
    A tuple containing a parsed options object and BuildTarget.
    The target instance is None if no board is specified.
  """
  parser = _BuildParser()
  options = parser.parse_args(argv)

  target = None
  if options.board:
    target = BuildTarget(options.board, options.profile)

  _ValidateOptions(parser, options, target)

  return options, target
+
+
def main(argv):
  """Entry point: parse arguments, then sync host and/or board prebuilts."""
  # Set umask to a sane value so that files created as root are readable.
  os.umask(0o22)

  opts, build_target = ParseOptions(argv)

  # Collect remote Packages indexes for deduplication: any package already
  # present in a previously uploaded index (board or host) is not re-uploaded.
  remote_indexes = _GrabAllRemotePackageIndexes(opts.previous_binhost_url)

  version = opts.set_version if opts.set_version else GetVersion()
  if opts.prepend_version:
    version = '%s-%s' % (opts.prepend_version, version)

  acl = 'public-read'
  base_url = opts.binhost_base_url
  if opts.private:
    # Private uploads read back from the upload bucket itself and use the
    # overlay's ACL file.
    base_url = opts.upload
    if build_target:
      acl = portage_util.FindOverlayFile(_GOOGLESTORAGE_GSUTIL_FILE,
                                         board=build_target.board_variant,
                                         buildroot=opts.build_path)
      if acl is None:
        cros_build_lib.Die('No Google Storage ACL file %s found in %s overlay.',
                           _GOOGLESTORAGE_GSUTIL_FILE,
                           build_target.board_variant)

  conf_dir = None
  if opts.binhost_conf_dir:
    conf_dir = os.path.join(opts.build_path, opts.binhost_conf_dir)

  uploader = PrebuiltUploader(opts.upload, acl, base_url, remote_indexes,
                              opts.build_path, opts.packages,
                              opts.skip_upload, conf_dir, opts.dryrun,
                              build_target, opts.slave_targets, version)

  if opts.sync_host:
    uploader.SyncHostPrebuilts(opts.key, opts.git_sync, opts.sync_binhost_conf)

  if opts.board or opts.slave_targets:
    uploader.SyncBoardPrebuilts(opts.key, opts.git_sync,
                                opts.sync_binhost_conf,
                                opts.upload_board_tarball,
                                opts.prepackaged_tarball,
                                opts.toolchains_overlay_tarballs,
                                opts.toolchains_overlay_upload_path,
                                opts.toolchain_tarballs,
                                opts.toolchain_upload_path)
diff --git a/scripts/upload_prebuilts_unittest b/scripts/upload_prebuilts_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/upload_prebuilts_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/upload_prebuilts_unittest.py b/scripts/upload_prebuilts_unittest.py
new file mode 100644
index 0000000..a2ac1be
--- /dev/null
+++ b/scripts/upload_prebuilts_unittest.py
@@ -0,0 +1,542 @@
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for upload_prebuilts.py."""
+
+from __future__ import print_function
+
+import copy
+import mock
+import os
+import multiprocessing
+import tempfile
+
+from chromite.scripts import upload_prebuilts as prebuilt
+from chromite.lib import cros_test_lib
+from chromite.lib import gs
+from chromite.lib import binpkg
+from chromite.lib import osutils
+from chromite.lib import parallel_unittest
+from chromite.lib import portage_util
+
+
# pylint: disable=E1120,W0212,R0904
# Sample public package index entries used as fixtures throughout these tests.
PUBLIC_PACKAGES = [{'CPV': 'gtk+/public1', 'SHA1': '1', 'MTIME': '1'},
                   {'CPV': 'gtk+/public2', 'SHA1': '2',
                    'PATH': 'gtk+/foo.tgz', 'MTIME': '2'}]
# Sample private package index entry used as a fixture throughout these tests.
PRIVATE_PACKAGES = [{'CPV': 'private', 'SHA1': '3', 'MTIME': '3'}]
+
+
def SimplePackageIndex(header=True, packages=True):
  """Build a binpkg.PackageIndex fixture for tests.

  Args:
    header: If True, populate the 'URI' header field.
    packages: If True, fill in a deep copy of the sample package lists.

  Returns:
    A binpkg.PackageIndex instance.
  """
  index = binpkg.PackageIndex()
  if header:
    index.header['URI'] = 'gs://example'
  if packages:
    index.packages = copy.deepcopy(PUBLIC_PACKAGES + PRIVATE_PACKAGES)
  return index
+
+
class TestUpdateFile(cros_test_lib.TempDirTestCase):
  """Tests for the UpdateLocalFile function."""

  def setUp(self):
    # A representative make.conf-style file: comments, assignments, and a
    # non-assignment line.
    self.contents_str = [
        '# comment that should be skipped',
        'PKGDIR="/var/lib/portage/pkgs"',
        'PORTAGE_BINHOST="http://no.thanks.com"',
        'portage portage-20100310.tar.bz2',
        'COMPILE_FLAGS="some_value=some_other"',
    ]
    self.version_file = os.path.join(self.tempdir, 'version')
    osutils.WriteFile(self.version_file, '\n'.join(self.contents_str))

  def _read_version_file(self, version_file=None):
    """Read the contents of self.version_file and return as a list."""
    if not version_file:
      version_file = self.version_file

    # Use a context manager instead of manual try/finally close.
    with open(version_file) as version_fh:
      return [line.strip() for line in version_fh.readlines()]

  def _verify_key_pair(self, key, val):
    """Assert that `key="val"` appears in the version file."""
    file_contents = self._read_version_file()
    # The file always stores values wrapped in quotes.
    if '"' not in val:
      val = '"%s"' % val
    for entry in file_contents:
      if '=' not in entry:
        continue
      file_key, file_val = entry.split('=')
      if file_key == key:
        if val == file_val:
          break
    else:
      self.fail('Could not find "%s=%s" in version file' % (key, val))

  def testAddVariableThatDoesNotExist(self):
    """Add in a new variable that was not present in the file."""
    key = 'PORTAGE_BINHOST'
    value = '1234567'
    prebuilt.UpdateLocalFile(self.version_file, value)
    # Removed leftover debug print() calls and a redundant read.
    self._verify_key_pair(key, value)

  def testUpdateVariable(self):
    """Test updating a variable that already exists."""
    key, val = self.contents_str[2].split('=')
    new_val = 'test_update'
    self._verify_key_pair(key, val)
    prebuilt.UpdateLocalFile(self.version_file, new_val)
    self._verify_key_pair(key, new_val)

  def testUpdateNonExistentFile(self):
    """Updating a file that does not exist yet should create it."""
    key = 'PORTAGE_BINHOST'
    value = '1234567'
    # Use a path under the managed tempdir instead of the insecure and
    # deprecated tempfile.mktemp(); cleanup is handled by TempDirTestCase.
    non_existent_file = os.path.join(self.tempdir, 'non_existent_file')
    prebuilt.UpdateLocalFile(non_existent_file, value)
    file_contents = self._read_version_file(non_existent_file)
    self.assertEqual(file_contents, ['%s="%s"' % (key, value)])
+
+
class TestPrebuilt(cros_test_lib.MockTestCase):
  """Tests for Prebuilt logic."""

  def setUp(self):
    self._base_local_path = '/b/cbuild/build/chroot/build/x86-dogfood/'
    self._gs_bucket_path = 'gs://chromeos-prebuilt/host/version'
    self._local_path = os.path.join(self._base_local_path, 'public1.tbz2')

  def testGenerateUploadDict(self):
    """A package without debug symbols yields exactly one upload entry."""
    # Fix: the mock keyword is return_value; the old return_true=True only
    # set an unused attribute (the stub still returned a truthy Mock).
    self.PatchObject(prebuilt.os.path, 'exists', return_value=True)
    pkgs = [{'CPV': 'public1'}]
    result = prebuilt.GenerateUploadDict(self._base_local_path,
                                         self._gs_bucket_path, pkgs)
    expected = {self._local_path: self._gs_bucket_path + '/public1.tbz2', }
    self.assertEqual(result, expected)

  def testGenerateUploadDictWithDebug(self):
    """A package with DEBUG_SYMBOLS also uploads its .debug.tbz2 file."""
    self.PatchObject(prebuilt.os.path, 'exists', return_value=True)
    pkgs = [{'CPV': 'public1', 'DEBUG_SYMBOLS': 'yes'}]
    result = prebuilt.GenerateUploadDict(self._base_local_path,
                                         self._gs_bucket_path, pkgs)
    expected = {self._local_path: self._gs_bucket_path + '/public1.tbz2',
                self._local_path.replace('.tbz2', '.debug.tbz2'):
                self._gs_bucket_path + '/public1.debug.tbz2'}
    self.assertEqual(result, expected)

  def testDeterminePrebuiltConfHost(self):
    """Test that the host prebuilt path comes back properly."""
    expected_path = os.path.join(prebuilt._PREBUILT_MAKE_CONF['amd64'])
    self.assertEqual(prebuilt.DeterminePrebuiltConfFile('fake_path', 'amd64'),
                     expected_path)
+
+
class TestPkgIndex(cros_test_lib.TestCase):
  """Shared fixtures for tests that update the Packages index file."""

  def setUp(self):
    self.db = {}
    self.pkgindex = SimplePackageIndex()
    self.empty = SimplePackageIndex(packages=False)

  def assertURIs(self, uris):
    """Verify that the duplicate DB has the specified URLs."""
    actual = [self.db[key].uri for key in sorted(self.db)]
    self.assertEqual(actual, uris)
+
+
class TestPackagesFileFiltering(TestPkgIndex):
  """Tests for Packages filtering behavior."""

  def testFilterPkgIndex(self):
    """Filtering out private packages leaves only the public ones."""
    is_private = lambda pkg: pkg in PRIVATE_PACKAGES
    self.pkgindex.RemoveFilteredPackages(is_private)
    self.assertEqual(self.pkgindex.packages, PUBLIC_PACKAGES)
    self.assertEqual(self.pkgindex.modified, True)
+
+
class TestPopulateDuplicateDB(TestPkgIndex):
  """Tests for the _PopulateDuplicateDB function."""

  def testEmptyIndex(self):
    """An empty index contributes nothing to the duplicate DB."""
    self.empty._PopulateDuplicateDB(self.db, 0)
    self.assertEqual({}, self.db)

  def testNormalIndex(self):
    """A fully populated index records one URI per package."""
    self.pkgindex._PopulateDuplicateDB(self.db, 0)
    expected_uris = ['gs://example/gtk+/public1.tbz2',
                     'gs://example/gtk+/foo.tgz',
                     'gs://example/private.tbz2']
    self.assertURIs(expected_uris)

  def testMissingSHA1(self):
    """Packages lacking a SHA1 are skipped during population."""
    del self.pkgindex.packages[0]['SHA1']
    self.pkgindex._PopulateDuplicateDB(self.db, 0)
    self.assertURIs(['gs://example/gtk+/foo.tgz',
                     'gs://example/private.tbz2'])

  def testFailedPopulate(self):
    """A missing URI header or CPV key raises KeyError."""
    no_header = SimplePackageIndex(header=False)
    self.assertRaises(KeyError, no_header._PopulateDuplicateDB, self.db, 0)
    del self.pkgindex.packages[0]['CPV']
    self.assertRaises(KeyError, self.pkgindex._PopulateDuplicateDB, self.db, 0)
+
+
class TestResolveDuplicateUploads(cros_test_lib.MockTestCase, TestPkgIndex):
  """Tests for the ResolveDuplicateUploads function."""

  def setUp(self):
    # Freeze time so MTIME values written by ResolveDuplicateUploads are
    # deterministic relative to the fixture MTIMEs.
    self.PatchObject(binpkg.time, 'time', return_value=binpkg.TWO_WEEKS)
    self.db = {}
    self.dup = SimplePackageIndex()
    self.expected_pkgindex = SimplePackageIndex()

  def assertNoDuplicates(self, candidates):
    """Verify no duplicates are found with the specified candidates."""
    uploads = self.pkgindex.ResolveDuplicateUploads(candidates)
    self.assertEqual(uploads, self.pkgindex.packages)
    self.assertEqual(len(self.pkgindex.packages),
                     len(self.expected_pkgindex.packages))
    # ResolveDuplicateUploads rewrites each package's MTIME, so strip that
    # field before comparing the remaining contents below.
    for pkg1, pkg2 in zip(self.pkgindex.packages,
                          self.expected_pkgindex.packages):
      self.assertNotEqual(pkg1['MTIME'], pkg2['MTIME'])
      del pkg1['MTIME']
      del pkg2['MTIME']
    self.assertEqual(self.pkgindex.modified, False)
    self.assertEqual(self.pkgindex.packages, self.expected_pkgindex.packages)

  def assertAllDuplicates(self, candidates):
    """Verify every package is a duplicate in the specified list."""
    # Duplicates get a PATH pointing at the previously uploaded file.
    for pkg in self.expected_pkgindex.packages:
      pkg.setdefault('PATH', pkg['CPV'] + '.tbz2')
    self.pkgindex.ResolveDuplicateUploads(candidates)
    self.assertEqual(self.pkgindex.packages, self.expected_pkgindex.packages)

  def testEmptyList(self):
    """If no candidates are supplied, no duplicates should be found."""
    self.assertNoDuplicates([])

  def testEmptyIndex(self):
    """If no packages are supplied, no duplicates should be found."""
    self.assertNoDuplicates([self.empty])

  def testDifferentURI(self):
    """If the URI differs, no duplicates should be found."""
    self.dup.header['URI'] = 'gs://example2'
    self.assertNoDuplicates([self.dup])

  def testUpdateModificationTime(self):
    """When duplicates are found, we should use the latest mtime."""
    for pkg in self.expected_pkgindex.packages:
      pkg['MTIME'] = '10'
    for pkg in self.dup.packages:
      pkg['MTIME'] = '4'
    self.assertAllDuplicates([self.expected_pkgindex, self.dup])

  def testCanonicalUrl(self):
    """If the URL is in a different format, we should still find duplicates."""
    self.dup.header['URI'] = gs.PUBLIC_BASE_HTTPS_URL + 'example'
    self.assertAllDuplicates([self.dup])

  def testMissingSHA1(self):
    """We should not find duplicates if there is no SHA1."""
    del self.pkgindex.packages[0]['SHA1']
    del self.expected_pkgindex.packages[0]['SHA1']
    # Only the remaining packages (which still have a SHA1) are duplicates
    # and therefore gain a PATH field.
    for pkg in self.expected_pkgindex.packages[1:]:
      pkg.setdefault('PATH', pkg['CPV'] + '.tbz2')
    self.pkgindex.ResolveDuplicateUploads([self.dup])
    self.assertNotEqual(self.pkgindex.packages[0]['MTIME'],
                        self.expected_pkgindex.packages[0]['MTIME'])
    del self.pkgindex.packages[0]['MTIME']
    del self.expected_pkgindex.packages[0]['MTIME']
    self.assertEqual(self.pkgindex.packages, self.expected_pkgindex.packages)

  def testSymbolsAvailable(self):
    """If symbols are available remotely, re-use them and set DEBUG_SYMBOLS."""
    self.dup.packages[0]['DEBUG_SYMBOLS'] = 'yes'

    uploads = self.pkgindex.ResolveDuplicateUploads([self.dup])
    self.assertEqual(uploads, [])
    self.assertEqual(self.pkgindex.packages[0].get('DEBUG_SYMBOLS'), 'yes')

  def testSymbolsAvailableLocallyOnly(self):
    """If the symbols are only available locally, reupload them."""
    self.pkgindex.packages[0]['DEBUG_SYMBOLS'] = 'yes'

    uploads = self.pkgindex.ResolveDuplicateUploads([self.dup])
    self.assertEqual(uploads, [self.pkgindex.packages[0]])
+
+
+class TestWritePackageIndex(cros_test_lib.MockTestCase, TestPkgIndex):
+  """Tests for the WriteToNamedTemporaryFile function."""
+
+  def testSimple(self):
+    """Test simple call of WriteToNamedTemporaryFile()"""
+    self.PatchObject(self.pkgindex, 'Write')
+    f = self.pkgindex.WriteToNamedTemporaryFile()
+    self.assertEqual(f.read(), '')
+
+
+class TestUploadPrebuilt(cros_test_lib.MockTempDirTestCase):
+  """Tests for the _UploadPrebuilt function."""
+
+  def setUp(self):
+    class MockTemporaryFile(object):
+      """Mock out the temporary file logic."""
+      def __init__(self, name):
+        self.name = name
+    self.pkgindex = SimplePackageIndex()
+    self.PatchObject(binpkg, 'GrabLocalPackageIndex',
+                     return_value=self.pkgindex)
+    self.PatchObject(self.pkgindex, 'ResolveDuplicateUploads',
+                     return_value=PRIVATE_PACKAGES)
+    self.PatchObject(self.pkgindex, 'WriteToNamedTemporaryFile',
+                     return_value=MockTemporaryFile('fake'))
+    self.remote_up_mock = self.PatchObject(prebuilt, 'RemoteUpload')
+    self.gs_up_mock = self.PatchObject(prebuilt, '_GsUpload')
+
+  def testSuccessfulGsUpload(self):
+    uploads = {
+        os.path.join(self.tempdir, 'private.tbz2'): 'gs://foo/private.tbz2'}
+    packages = list(PRIVATE_PACKAGES)
+    packages.append({'CPV': 'dev-only-extras'})
+    osutils.Touch(os.path.join(self.tempdir, 'dev-only-extras.tbz2'))
+    self.PatchObject(prebuilt, 'GenerateUploadDict',
+                     return_value=uploads)
+    uploads = uploads.copy()
+    uploads['fake'] = 'gs://foo/suffix/Packages'
+    acl = 'public-read'
+    uri = self.pkgindex.header['URI']
+    uploader = prebuilt.PrebuiltUploader('gs://foo', acl, uri, [], '/', [],
+                                         False, 'foo', False, 'x86-foo', [], '')
+    uploader._UploadPrebuilt(self.tempdir, 'suffix')
+    self.remote_up_mock.assert_called_once_with(mock.ANY, acl, uploads)
+    self.assertTrue(self.gs_up_mock.called)
+
+
+class TestSyncPrebuilts(cros_test_lib.MockTestCase):
+  """Tests for the SyncHostPrebuilts function."""
+
+  def setUp(self):
+    self.rev_mock = self.PatchObject(prebuilt, 'RevGitFile', return_value=None)
+    self.update_binhost_mock = self.PatchObject(
+        prebuilt, 'UpdateBinhostConfFile', return_value=None)
+    self.build_path = '/trunk'
+    self.upload_location = 'gs://upload/'
+    self.version = '1'
+    self.binhost = 'http://prebuilt/'
+    self.key = 'PORTAGE_BINHOST'
+    self.upload_mock = self.PatchObject(prebuilt.PrebuiltUploader,
+                                        '_UploadPrebuilt', return_value=True)
+
+  def testSyncHostPrebuilts(self):
+    board = 'x86-foo'
+    target = prebuilt.BuildTarget(board, 'aura')
+    slave_targets = [prebuilt.BuildTarget('x86-bar', 'aura')]
+    package_path = os.path.join(self.build_path,
+                                prebuilt._HOST_PACKAGES_PATH)
+    url_suffix = prebuilt._REL_HOST_PATH % {
+        'version': self.version,
+        'host_arch': prebuilt._HOST_ARCH,
+        'target': target,
+    }
+    packages_url_suffix = '%s/packages' % url_suffix.rstrip('/')
+    url_value = '%s/%s/' % (self.binhost.rstrip('/'),
+                            packages_url_suffix.rstrip('/'))
+    urls = [url_value.replace('foo', 'bar'), url_value]
+    binhost = ' '.join(urls)
+    uploader = prebuilt.PrebuiltUploader(
+        self.upload_location, 'public-read', self.binhost, [],
+        self.build_path, [], False, 'foo', False, target, slave_targets,
+        self.version)
+    uploader.SyncHostPrebuilts(self.key, True, True)
+    self.upload_mock.assert_called_once_with(package_path, packages_url_suffix)
+    self.rev_mock.assert_called_once_with(
+        mock.ANY, {self.key: binhost}, dryrun=False)
+    self.update_binhost_mock.assert_called_once_with(
+        mock.ANY, self.key, binhost)
+
+  def testSyncBoardPrebuilts(self):
+    board = 'x86-foo'
+    target = prebuilt.BuildTarget(board, 'aura')
+    slave_targets = [prebuilt.BuildTarget('x86-bar', 'aura')]
+    board_path = os.path.join(
+        self.build_path, prebuilt._BOARD_PATH % {'board': board})
+    package_path = os.path.join(board_path, 'packages')
+    url_suffix = prebuilt._REL_BOARD_PATH % {
+        'version': self.version,
+        'target': target,
+    }
+    packages_url_suffix = '%s/packages' % url_suffix.rstrip('/')
+    url_value = '%s/%s/' % (self.binhost.rstrip('/'),
+                            packages_url_suffix.rstrip('/'))
+    bar_binhost = url_value.replace('foo', 'bar')
+    determine_mock = self.PatchObject(prebuilt, 'DeterminePrebuiltConfFile',
+                                      side_effect=('bar', 'foo'))
+    self.PatchObject(prebuilt.PrebuiltUploader, '_UploadSdkTarball')
+    with parallel_unittest.ParallelMock():
+      multiprocessing.Process.exitcode = 0
+      uploader = prebuilt.PrebuiltUploader(
+          self.upload_location, 'public-read', self.binhost, [],
+          self.build_path, [], False, 'foo', False, target, slave_targets,
+          self.version)
+      uploader.SyncBoardPrebuilts(self.key, True, True, True, None, None, None,
+                                  None, None)
+    determine_mock.assert_has_calls([
+        mock.call(self.build_path, slave_targets[0]),
+        mock.call(self.build_path, target),
+    ])
+    self.upload_mock.assert_called_once_with(package_path, packages_url_suffix)
+    self.rev_mock.assert_has_calls([
+        mock.call('bar', {self.key: bar_binhost}, dryrun=False),
+        mock.call('foo', {self.key: url_value}, dryrun=False),
+    ])
+    self.update_binhost_mock.assert_has_calls([
+        mock.call(mock.ANY, self.key, bar_binhost),
+        mock.call(mock.ANY, self.key, url_value),
+    ])
+
+
+class TestMain(cros_test_lib.MockTestCase):
+  """Tests for the main() function."""
+
+  def testMain(self):
+    """Test that the main function works."""
+    options = mock.MagicMock()
+    old_binhost = 'http://prebuilt/1'
+    options.previous_binhost_url = [old_binhost]
+    options.board = 'x86-foo'
+    options.profile = None
+    target = prebuilt.BuildTarget(options.board, options.profile)
+    options.build_path = '/trunk'
+    options.dryrun = False
+    options.private = True
+    options.packages = []
+    options.sync_host = True
+    options.git_sync = True
+    options.upload_board_tarball = True
+    options.prepackaged_tarball = None
+    options.toolchains_overlay_tarballs = []
+    options.toolchains_overlay_upload_path = ''
+    options.toolchain_tarballs = []
+    options.toolchain_upload_path = ''
+    options.upload = 'gs://upload/'
+    options.binhost_base_url = options.upload
+    options.prepend_version = True
+    options.set_version = None
+    options.skip_upload = False
+    options.filters = True
+    options.key = 'PORTAGE_BINHOST'
+    options.binhost_conf_dir = None
+    options.sync_binhost_conf = True
+    options.slave_targets = [prebuilt.BuildTarget('x86-bar', 'aura')]
+    self.PatchObject(prebuilt, 'ParseOptions',
+                     return_value=tuple([options, target]))
+    self.PatchObject(binpkg, 'GrabRemotePackageIndex', return_value=True)
+    init_mock = self.PatchObject(prebuilt.PrebuiltUploader, '__init__',
+                                 return_value=None)
+    expected_gs_acl_path = os.path.join('/fake_path',
+                                        prebuilt._GOOGLESTORAGE_GSUTIL_FILE)
+    self.PatchObject(portage_util, 'FindOverlayFile',
+                     return_value=expected_gs_acl_path)
+    host_mock = self.PatchObject(
+        prebuilt.PrebuiltUploader, 'SyncHostPrebuilts', return_value=None)
+    board_mock = self.PatchObject(
+        prebuilt.PrebuiltUploader, 'SyncBoardPrebuilts', return_value=None)
+
+    prebuilt.main([])
+
+    init_mock.assert_called_once_with(options.upload, expected_gs_acl_path,
+                                      options.upload, mock.ANY,
+                                      options.build_path, options.packages,
+                                      False, None, False,
+                                      target, options.slave_targets,
+                                      mock.ANY)
+    board_mock.assert_called_once_with(
+        options.key, options.git_sync,
+        options.sync_binhost_conf, options.upload_board_tarball, None,
+        [], '', [], '')
+    host_mock.assert_called_once_with(
+        options.key, options.git_sync, options.sync_binhost_conf)
+
+
+class TestSdk(cros_test_lib.MockTestCase):
+  """Test logic related to uploading SDK binaries"""
+
+  def setUp(self):
+    self.PatchObject(prebuilt, '_GsUpload',
+                     side_effect=Exception('should not get called'))
+    self.PatchObject(prebuilt, 'UpdateBinhostConfFile',
+                     side_effect=Exception('should not get called'))
+    self.upload_mock = self.PatchObject(prebuilt.PrebuiltUploader, '_Upload')
+
+    self.acl = 'magic-acl'
+
+    # All these args pretty much get ignored.  Whee.
+    self.uploader = prebuilt.PrebuiltUploader(
+        'gs://foo', self.acl, 'prebuilt', [], '/', [],
+        False, 'foo', False, 'x86-foo', [], 'chroot-1234')
+
+  def testSdkUpload(self, to_tarballs=(), to_upload_path=None,
+                    tc_tarballs=(), tc_upload_path=None):
+    """Make sure we can upload just an SDK tarball"""
+    tar = 'sdk.tar.xz'
+    ver = '1234'
+    vtar = 'cros-sdk-%s.tar.xz' % ver
+
+    calls = [
+        mock.call('%s.Manifest' % tar,
+                  'gs://chromiumos-sdk/%s.Manifest' % vtar),
+        mock.call(tar, 'gs://chromiumos-sdk/%s' % vtar),
+    ]
+    for to in to_tarballs:
+      to = to.split(':')
+      calls.append(mock.call(
+          to[1],
+          ('gs://chromiumos-sdk/' + to_upload_path) % {'toolchains': to[0]}))
+    for tc in tc_tarballs:
+      tc = tc.split(':')
+      calls.append(mock.call(
+          tc[1], ('gs://chromiumos-sdk/' + tc_upload_path) % {'target': tc[0]}))
+    calls.append(mock.call(
+        mock.ANY, 'gs://chromiumos-sdk/cros-sdk-latest.conf'))
+
+    self.uploader._UploadSdkTarball('amd64-host', '',
+                                    tar, to_tarballs, to_upload_path,
+                                    tc_tarballs, tc_upload_path)
+    self.upload_mock.assert_has_calls(calls)
+
+  def testBoardOverlayTarballUpload(self):
+    """Make sure processing of board-specific overlay tarballs works."""
+    to_tarballs = (
+        ('i686-pc-linux-gnu:'
+         '/some/path/built-sdk-overlay-toolchains-i686-pc-linux-gnu.tar.xz'),
+        ('armv7a-cros-linux-gnueabi-arm-none-eabi:'
+         '/some/path/built-sdk-overlay-toolchains-armv7a-cros-linux-gnueabi-'
+         'arm-none-eabi'),
+    )
+    to_upload_path = (
+        '1994/04/cros-sdk-overlay-toolchains-%(toolchains)s-1994.04.02.tar.xz')
+    self.testSdkUpload(to_tarballs=to_tarballs, to_upload_path=to_upload_path)
+
+  def testToolchainTarballUpload(self):
+    """Make sure processing of toolchain tarballs works."""
+    tc_tarballs = (
+        'i686:/some/i686.tar.xz',
+        'arm-none:/some/arm.tar.xz',
+    )
+    tc_upload_path = '1994/04/%(target)s-1994.04.02.tar.xz'
+    self.testSdkUpload(tc_tarballs=tc_tarballs, tc_upload_path=tc_upload_path)
diff --git a/scripts/upload_symbols.py b/scripts/upload_symbols.py
new file mode 100644
index 0000000..2a912a2
--- /dev/null
+++ b/scripts/upload_symbols.py
@@ -0,0 +1,898 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Upload all debug symbols required for crash reporting purposes.
+
+This script need only be used to upload release builds symbols or to debug
+crashes on non-release builds (in which case try to only upload the symbols
+for those executables involved).
+"""
+
+from __future__ import print_function
+
+import collections
+import ctypes
+import datetime
+import errno
+import functools
+import hashlib
+import httplib
+import multiprocessing
+import os
+import poster
+try:
+  import Queue
+except ImportError:
+  # Python-3 renamed to "queue".  We still use Queue to avoid collisions
+  # with naming variables as "queue".  Maybe we'll transition at some point.
+  # pylint: disable=F0401
+  import queue as Queue
+import random
+import signal
+import socket
+import sys
+import textwrap
+import tempfile
+import time
+import urllib2
+import urlparse
+
+from chromite.cbuildbot import constants
+from chromite.lib import cache
+from chromite.lib import commandline
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import gs
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import path_util
+from chromite.lib import retry_util
+from chromite.lib import signals
+from chromite.lib import timeout_util
+from chromite.scripts import cros_generate_breakpad_symbols
+
+# Needs to be after chromite imports.
+# We don't want to import the general keyring module as that will implicitly
+# try to import & connect to a dbus server.  That's a waste of time.
+sys.modules['keyring'] = None
+import isolateserver
+
+
+# URLs used for uploading symbols.
+OFFICIAL_UPLOAD_URL = 'http://clients2.google.com/cr/symbol'
+STAGING_UPLOAD_URL = 'http://clients2.google.com/cr/staging_symbol'
+
+
+# The crash server rejects files that are this big.
+CRASH_SERVER_FILE_LIMIT = 350 * 1024 * 1024
+# Give ourselves a little breathing room from what the server expects.
+DEFAULT_FILE_LIMIT = CRASH_SERVER_FILE_LIMIT - (10 * 1024 * 1024)
+
+
+# The batch limit when talking to the dedup server.  We avoid sending one at a
+# time as the round trip overhead will dominate.  Conversely, we avoid sending
+# all at once so we can start uploading symbols asap -- the symbol server is a
+# bit slow and will take longer than anything else.
+# TODO: A better algorithm would be adaptive.  If we have more than one symbol
+# in the upload queue waiting, we could send more symbols to the dedupe server
+# at a time.
+DEDUPE_LIMIT = 100
+
+# How long to wait for the server to respond with the results.  Note that the
+# larger the limit above, the larger this will need to be.  So we give it ~1
+# second per item max.
+DEDUPE_TIMEOUT = DEDUPE_LIMIT
+
+# How long to wait for the notification to finish (in minutes).  If it takes
+# longer than this, we'll stop notifying, but that's not a big deal as we
+# will be able to recover in later runs.
+DEDUPE_NOTIFY_TIMEOUT = 20
+
+# The unique namespace in the dedupe server that only we use.  Helps avoid
+# collisions with all the hashed values and unrelated content.
+OFFICIAL_DEDUPE_NAMESPACE = 'chromium-os-upload-symbols'
+STAGING_DEDUPE_NAMESPACE = '%s-staging' % OFFICIAL_DEDUPE_NAMESPACE
+
+
+# The minimum average rate (in bytes per second) that we expect to maintain
+# when uploading symbols.  This has to allow for symbols that are up to
+# CRASH_SERVER_FILE_LIMIT in size.
+UPLOAD_MIN_RATE = CRASH_SERVER_FILE_LIMIT / (30 * 60)
+
+# The lowest timeout (in seconds) we'll allow.  If the server is overloaded,
+# then there might be a delay in setting up the connection, not just with the
+# transfer.  So even a small file might need a larger value.
+UPLOAD_MIN_TIMEOUT = 2 * 60
+
+
+# Sleep for 200ms in between uploads to avoid DoS'ing symbol server.
+DEFAULT_SLEEP_DELAY = 0.2
+
+
+# Number of seconds to wait before retrying an upload.  The delay will double
+# for each subsequent retry of the same symbol file.
+INITIAL_RETRY_DELAY = 1
+
+# Allow up to 7 attempts to upload a symbol file (total delay may be
+# 1+2+4+8+16+32=63 seconds).
+MAX_RETRIES = 6
+
+# Number of total errors, before uploads are no longer attempted.
+# This is used to avoid lots of errors causing unreasonable delays.
+# See the related, but independent, error values below.
+MAX_TOTAL_ERRORS_FOR_RETRY = 30
+
+# A watermark of transient errors which we allow recovery from.  If we hit
+# errors infrequently, overall we're probably doing fine.  For example, if
+# we have one failure every 100 passes, then we probably don't want to fail
+# right away.  But if we hit a string of failures in a row, we want to abort.
+#
+# The watermark starts at 0 (and can never go below that).  When this error
+# level is exceeded, we stop uploading.  When a failure happens, we add the
+# fail adjustment, and when an upload succeeds, we add the pass adjustment.
+# We want to penalize failures more so that we ramp up when there is a string
+# of them, but then slowly back off as things start working.
+#
+# A quick example:
+#  0.0: Starting point.
+#  0.0: Upload works, so add -0.5, and then clamp to 0.
+#  1.0: Upload fails, so add 1.0.
+#  2.0: Upload fails, so add 1.0.
+#  1.5: Upload works, so add -0.5.
+#  1.0: Upload works, so add -0.5.
+ERROR_WATERMARK = 3.0
+ERROR_ADJUST_FAIL = 1.0
+ERROR_ADJUST_PASS = -0.5
+
+
+# A named tuple which holds a SymbolItem object and
+# an isolateserver._IsolateServerPushState item.
+SymbolElement = collections.namedtuple(
+    'SymbolElement', ('symbol_item', 'opaque_push_state'))
+
+
+def GetUploadTimeout(path):
+  """How long to wait for a specific file to upload to the crash server.
+
+  This is a function largely to make unittesting easier.
+
+  Args:
+    path: The path to the file to calculate the timeout for
+
+  Returns:
+    Timeout length (in seconds)
+  """
+  # Scale the timeout based on the filesize.
+  return max(os.path.getsize(path) / UPLOAD_MIN_RATE, UPLOAD_MIN_TIMEOUT)
+
+
+def SymUpload(upload_url, sym_item):
+  """Upload a symbol file to an HTTP server
+
+  The upload is a multipart/form-data POST with the following parameters:
+    code_file: the basename of the module, e.g. "app"
+    code_identifier: the module file's identifier
+    debug_file: the basename of the debugging file, e.g. "app"
+    debug_identifier: the debug file's identifier, usually consisting of
+                      the guid and age embedded in the pdb, e.g.
+                      "11111111BBBB3333DDDD555555555555F"
+    version: the file version of the module, e.g. "1.2.3.4"
+    product: HTTP-friendly product name
+    os: the operating system that the module was built for
+    cpu: the CPU that the module was built for
+    symbol_file: the contents of the breakpad-format symbol file
+
+  Args:
+    upload_url: The crash URL to POST the |sym_file| to
+    sym_item: A SymbolItem containing the path to the breakpad symbol to upload
+  """
+  sym_header = sym_item.sym_header
+  sym_file = sym_item.sym_file
+
+  fields = (
+      ('code_file', sym_header.name),
+      ('debug_file', sym_header.name),
+      ('debug_identifier', sym_header.id.replace('-', '')),
+      # The product/version fields are used by the server only for statistic
+      # purposes.  They do not impact symbolization, so they're safe to set
+      # to any value all the time.
+      # In this case, we use it to help see the load our build system is
+      # placing on the server.
+      # Not sure what to set for the version.  Maybe the git sha1 of this file.
+      # Note: the server restricts this to 30 chars.
+      #('version', None),
+      ('product', 'ChromeOS'),
+      ('os', sym_header.os),
+      ('cpu', sym_header.cpu),
+      poster.encode.MultipartParam.from_file('symbol_file', sym_file),
+  )
+
+  data, headers = poster.encode.multipart_encode(fields)
+  request = urllib2.Request(upload_url, data, headers)
+  request.add_header('User-agent', 'chromite.upload_symbols')
+  urllib2.urlopen(request, timeout=GetUploadTimeout(sym_file))
+
+
+def TestingSymUpload(upload_url, sym_item):
+  """A stub version of SymUpload for --testing usage"""
+  cmd = ['sym_upload', sym_item.sym_file, upload_url]
+  # Randomly fail 80% of the time (the retry logic makes this 80%/3 per file).
+  returncode = random.randint(1, 100) <= 80
+  logging.debug('would run (and return %i): %s', returncode,
+                cros_build_lib.CmdToStr(cmd))
+  if returncode:
+    output = 'Failed to send the symbol file.'
+  else:
+    output = 'Successfully sent the symbol file.'
+  result = cros_build_lib.CommandResult(cmd=cmd, error=None, output=output,
+                                        returncode=returncode)
+  if returncode:
+    exceptions = (
+        socket.error('[socket.error] forced test fail'),
+        httplib.BadStatusLine('[BadStatusLine] forced test fail'),
+        urllib2.HTTPError(upload_url, 400, '[HTTPError] forced test fail',
+                          {}, None),
+        urllib2.URLError('[URLError] forced test fail'),
+    )
+    raise random.choice(exceptions)
+  else:
+    return result
+
+
+def ErrorLimitHit(num_errors, watermark_errors):
+  """See if our error limit has been hit
+
+  Args:
+    num_errors: A multiprocessing.Value of the raw number of failures.
+    watermark_errors: A multiprocessing.Value of the current rate of failures.
+
+  Returns:
+    True if our error limits have been exceeded.
+  """
+  return ((num_errors is not None and
+           num_errors.value > MAX_TOTAL_ERRORS_FOR_RETRY) or
+          (watermark_errors is not None and
+           watermark_errors.value > ERROR_WATERMARK))
+
+
+def _UpdateCounter(counter, adj):
+  """Update |counter| by |adj|
+
+  Handle atomic updates of |counter|.  Also make sure it does not
+  fall below 0.
+
+  Args:
+    counter: A multiprocessing.Value to update
+    adj: The value to add to |counter|
+  """
+  def _Update():
+    clamp = 0 if type(adj) is int else 0.0
+    counter.value = max(clamp, counter.value + adj)
+
+  if hasattr(counter, 'get_lock'):
+    with counter.get_lock():
+      _Update()
+  elif counter is not None:
+    _Update()
+
+
+def UploadSymbol(upload_url, symbol_element, file_limit=DEFAULT_FILE_LIMIT,
+                 sleep=0, num_errors=None, watermark_errors=None,
+                 failed_queue=None, passed_queue=None):
+  """Upload |sym_element.symbol_item| to |upload_url|
+
+  Args:
+    upload_url: The crash server to upload things to
+    symbol_element: A SymbolElement tuple. symbol_element.symbol_item is a
+                    SymbolItem object containing the path to the breakpad symbol
+                    to upload. symbol_element.opaque_push_state is an object of
+                    _IsolateServerPushState or None if the item doesn't have
+                    a push state.
+    file_limit: The max file size of a symbol file before we try to strip it
+    sleep: Number of seconds to sleep before running
+    num_errors: An object to update with the error count (needs a .value member)
+    watermark_errors: An object to track current error behavior (needs a .value)
+    failed_queue: When a symbol fails, add it to this queue
+    passed_queue: When a symbol passes, add it to this queue
+
+  Returns:
+    The number of errors that were encountered.
+  """
+  sym_file = symbol_element.symbol_item.sym_file
+  upload_item = symbol_element.symbol_item
+
+  if num_errors is None:
+    num_errors = ctypes.c_int()
+  if ErrorLimitHit(num_errors, watermark_errors):
+    # Abandon ship!  It's on fire!  NOoooooooooooOOOoooooo.
+    if failed_queue:
+      failed_queue.put(sym_file)
+    return 0
+
+  if sleep:
+    # Keeps us from DoS-ing the symbol server.
+    time.sleep(sleep)
+
+  logging.debug('uploading %s' % sym_file)
+
+  # Ideally there'd be a tempfile.SpooledNamedTemporaryFile that we could use.
+  with tempfile.NamedTemporaryFile(prefix='upload_symbols',
+                                   bufsize=0) as temp_sym_file:
+    if file_limit:
+      # If the symbols size is too big, strip out the call frame info.  The CFI
+      # is unnecessary for 32bit x86 targets where the frame pointer is used (as
+      # all of ours have) and it accounts for over half the size of the symbols
+      # uploaded.
+      file_size = os.path.getsize(sym_file)
+      if file_size > file_limit:
+        logging.warning('stripping CFI from %s due to size %s > %s', sym_file,
+                        file_size, file_limit)
+        temp_sym_file.writelines([x for x in open(sym_file, 'rb').readlines()
+                                  if not x.startswith('STACK CFI')])
+
+        upload_item = FakeItem(sym_file=temp_sym_file.name,
+                               sym_header=symbol_element.symbol_item.sym_header)
+
+    # Hopefully the crash server will let it through.  But it probably won't.
+    # Not sure what the best answer is in this case.
+    file_size = os.path.getsize(upload_item.sym_file)
+    if file_size > CRASH_SERVER_FILE_LIMIT:
+      logging.PrintBuildbotStepWarnings()
+      logging.warning('upload file %s is awfully large, risking rejection by '
+                      'the symbol server (%s > %s)', sym_file, file_size,
+                      CRASH_SERVER_FILE_LIMIT)
+
+    # Upload the symbol file.
+    success = False
+    try:
+      cros_build_lib.TimedCommand(
+          retry_util.RetryException,
+          (urllib2.HTTPError, urllib2.URLError), MAX_RETRIES, SymUpload,
+          upload_url, upload_item, sleep=INITIAL_RETRY_DELAY,
+          timed_log_msg=('upload of %10i bytes took %%(delta)s: %s' %
+                         (file_size, os.path.basename(sym_file))))
+      success = True
+
+      if passed_queue:
+        passed_queue.put(symbol_element)
+    except urllib2.HTTPError as e:
+      logging.warning('could not upload: %s: HTTP %s: %s',
+                      os.path.basename(sym_file), e.code, e.reason)
+    except (urllib2.URLError, httplib.HTTPException, socket.error) as e:
+      logging.warning('could not upload: %s: %s', os.path.basename(sym_file), e)
+    finally:
+      if success:
+        _UpdateCounter(watermark_errors, ERROR_ADJUST_PASS)
+      else:
+        _UpdateCounter(num_errors, 1)
+        _UpdateCounter(watermark_errors, ERROR_ADJUST_FAIL)
+        if failed_queue:
+          failed_queue.put(sym_file)
+
+  return num_errors.value
+
+
+# A dummy class that allows for stubbing in tests and SymUpload.
+FakeItem = cros_build_lib.Collection(
+    'FakeItem', sym_file=None, sym_header=None, content=lambda x: '')
+
+
+class SymbolItem(isolateserver.BufferItem):
+  """Turn a sym_file into an isolateserver.Item"""
+
+  ALGO = hashlib.sha1
+
+  def __init__(self, sym_file):
+    sym_header = cros_generate_breakpad_symbols.ReadSymsHeader(sym_file)
+    super(SymbolItem, self).__init__(str(sym_header), self.ALGO)
+    self.sym_header = sym_header
+    self.sym_file = sym_file
+
+
+def SymbolDeduplicatorNotify(dedupe_namespace, dedupe_queue):
+  """Send a symbol file to the swarming service
+
+  Notify the swarming service of a successful upload.  If the notification fails
+  for any reason, we ignore it.  We don't care as it just means we'll upload it
+  again later on, and the symbol server will handle that gracefully.
+
+  This func runs in a different process from the main one, so we cannot share
+  the storage object.  Instead, we create our own.  This func stays alive for
+  the life of the process, so we only create one here overall.
+
+  Args:
+    dedupe_namespace: The isolateserver namespace to dedupe uploaded symbols.
+    dedupe_queue: The queue to read SymbolElements from
+  """
+  if dedupe_queue is None:
+    return
+
+  sym_file = ''
+  try:
+    with timeout_util.Timeout(DEDUPE_TIMEOUT):
+      storage = isolateserver.get_storage_api(constants.ISOLATESERVER,
+                                              dedupe_namespace)
+    for symbol_element in iter(dedupe_queue.get, None):
+      if not symbol_element or not symbol_element.symbol_item:
+        continue
+      symbol_item = symbol_element.symbol_item
+      push_state = symbol_element.opaque_push_state
+      sym_file = symbol_item.sym_file if symbol_item.sym_file else ''
+      if push_state is not None:
+        with timeout_util.Timeout(DEDUPE_TIMEOUT):
+          logging.debug('sending %s to dedupe server', sym_file)
+          symbol_item.prepare(SymbolItem.ALGO)
+          storage.push(symbol_item, push_state, symbol_item.content())
+          logging.debug('sent %s', sym_file)
+    logging.info('dedupe notification finished; exiting')
+  except Exception:
+    logging.warning('posting %s to dedupe server failed',
+                    os.path.basename(sym_file), exc_info=True)
+
+    # Keep draining the queue though so it doesn't fill up.
+    while dedupe_queue.get() is not None:
+      continue
+
+
+def SymbolDeduplicator(storage, sym_paths):
+  """Filter out symbol files that we've already uploaded
+
+  Using the swarming service, ask it to tell us which symbol files we've already
+  uploaded in previous runs and/or by other bots.  If the query fails for any
+  reason, we'll just upload all symbols.  This is fine as the symbol server will
+  do the right thing and this phase is purely an optimization.
+
+  This code runs in the main thread which is why we can re-use the existing
+  storage object.  Saves us from having to recreate one all the time.
+
+  Args:
+    storage: An isolateserver.StorageApi object
+    sym_paths: List of symbol files to check against the dedupe server
+
+  Returns:
+    List of SymbolElement objects that have not been uploaded before
+  """
+  if not sym_paths:
+    return sym_paths
+
+  items = [SymbolItem(x) for x in sym_paths]
+  for item in items:
+    item.prepare(SymbolItem.ALGO)
+  if storage:
+    try:
+      with timeout_util.Timeout(DEDUPE_TIMEOUT):
+        items = storage.contains(items)
+      return [SymbolElement(symbol_item=item, opaque_push_state=push_state)
+              for (item, push_state) in items.iteritems()]
+    except Exception:
+      logging.warning('talking to dedupe server failed', exc_info=True)
+
+  return [SymbolElement(symbol_item=item, opaque_push_state=None)
+          for item in items]
+
+
+def IsTarball(path):
+  """Guess if this is a tarball based on the filename."""
+  parts = path.split('.')
+  if len(parts) <= 1:
+    return False
+
+  if parts[-1] == 'tar':
+    return True
+
+  if parts[-2] == 'tar':
+    return parts[-1] in ('bz2', 'gz', 'xz')
+
+  return parts[-1] in ('tbz2', 'tbz', 'tgz', 'txz')
+
+
+def SymbolFinder(tempdir, paths):
+  """Locate symbol files in |paths|
+
+  This is a generator that recursively expands its inputs: URLs are
+  fetched (through a shared tarball cache), tarballs are unpacked into
+  |tempdir|, and directories are walked for files ending in ".sym".
+
+  Args:
+    tempdir: Path to use for temporary files (caller will clean up).
+    paths: A list of input paths to walk. Files are returned w/out any checks.
+      Dirs are searched for files that end in ".sym". Urls are fetched and then
+      processed. Tarballs are unpacked and walked.
+
+  Returns:
+    Yield every viable sym file.
+  """
+  cache_dir = path_util.GetCacheDir()
+  common_path = os.path.join(cache_dir, constants.COMMON_CACHE)
+  tar_cache = cache.TarballCache(common_path)
+
+  for p in paths:
+    # Pylint is confused about members of ParseResult.
+
+    o = urlparse.urlparse(p)
+    if o.scheme:  # pylint: disable=E1101
+      # Support globs of filenames.
+      ctx = gs.GSContext()
+      # NOTE: |p| is deliberately rebound by the nested loops below; each
+      # level works on a progressively expanded form of the input.
+      for p in ctx.LS(p):
+        logging.info('processing files inside %s', p)
+        o = urlparse.urlparse(p)
+        # Cache key: the URL's netloc + path components.
+        key = ('%s%s' % (o.netloc, o.path)).split('/')  # pylint: disable=E1101
+        # The common cache will not be LRU, removing the need to hold a read
+        # lock on the cached gsutil.
+        ref = tar_cache.Lookup(key)
+        try:
+          # Fetch the remote file into the cache if it isn't there already.
+          ref.SetDefault(p)
+        except cros_build_lib.RunCommandError as e:
+          # Fetching is best effort: skip inputs we cannot download.
+          logging.warning('ignoring %s\n%s', p, e)
+          continue
+        # Recurse on the locally cached copy (it may be a tarball or a dir).
+        for p in SymbolFinder(tempdir, [ref.path]):
+          yield p
+
+    elif os.path.isdir(p):
+      # Yield every ".sym" file found anywhere under the directory.
+      for root, _, files in os.walk(p):
+        for f in files:
+          if f.endswith('.sym'):
+            yield os.path.join(root, f)
+
+    elif IsTarball(p):
+      logging.info('processing files inside %s', p)
+      tardir = tempfile.mkdtemp(dir=tempdir)
+      cache.Untar(os.path.realpath(p), tardir)
+      # Recurse on the extracted tree to pick up the .sym files inside.
+      for p in SymbolFinder(tardir, [tardir]):
+        yield p
+
+    else:
+      # A plain file: trust the caller and return it w/out any checks.
+      yield p
+
+
+def WriteQueueToFile(listing, queue, relpath=None):
+  """Write all the items in |queue| to the |listing|.
+
+  Note: The queue must have a sentinel None appended to the end.
+
+  Args:
+    listing: Where to write out the list of files.
+    queue: The queue of paths to drain.
+    relpath: If set, write out paths relative to this one.
+  """
+  if not listing:
+    # Still drain the queue so we make sure the producer has finished
+    # before we return.  Otherwise, the queue might get destroyed too
+    # quickly which will trigger a traceback in the producer.
+    while queue.get() is not None:
+      continue
+    return
+
+  with cros_build_lib.Open(listing, 'wb+') as f:
+    while True:
+      path = queue.get()
+      if path is None:
+        return
+      if relpath:
+        path = os.path.relpath(path, relpath)
+      f.write('%s\n' % path)
+
+
+def UploadSymbols(board=None, official=False, server=None, breakpad_dir=None,
+                  file_limit=DEFAULT_FILE_LIMIT, sleep=DEFAULT_SLEEP_DELAY,
+                  upload_limit=None, sym_paths=None, failed_list=None,
+                  root=None, retry=True, dedupe_namespace=None):
+  """Upload all the generated symbols for |board| to the crash server
+
+  You can use in a few ways:
+    * pass |board| to locate all of its symbols
+    * pass |breakpad_dir| to upload all the symbols in there
+    * pass |sym_paths| to upload specific symbols (or dirs of symbols)
+
+  Args:
+    board: The board whose symbols we wish to upload
+    official: Use the official symbol server rather than the staging one
+    server: Explicit server to post symbols to
+    breakpad_dir: The full path to the breakpad directory where symbols live
+    file_limit: The max file size of a symbol file before we try to strip it
+    sleep: How long to sleep in between uploads
+    upload_limit: If set, only upload this many symbols (meant for testing)
+    sym_paths: Specific symbol files (or dirs of sym files) to upload,
+      otherwise search |breakpad_dir|
+    failed_list: Write the names of all sym files we did not upload; can be a
+      filename or file-like object.
+    root: The tree to prefix to |breakpad_dir| (if |breakpad_dir| is not set)
+    retry: Whether we should retry failures.
+    dedupe_namespace: The isolateserver namespace to dedupe uploaded symbols.
+
+  Returns:
+    The number of errors that were encountered.
+  """
+  # Pick the upload URL: an explicit |server| wins, else official vs staging.
+  if server is None:
+    if official:
+      upload_url = OFFICIAL_UPLOAD_URL
+    else:
+      logging.warning('unofficial builds upload to the staging server')
+      upload_url = STAGING_UPLOAD_URL
+  else:
+    upload_url = server
+
+  if sym_paths:
+    logging.info('uploading specified symbols to %s', upload_url)
+  else:
+    # No specific symbols given; locate (or derive from |root| + |board|)
+    # the breakpad dir and upload everything found under it.
+    if breakpad_dir is None:
+      if root is None:
+        raise ValueError('breakpad_dir requires root to be set')
+      breakpad_dir = os.path.join(
+          root,
+          cros_generate_breakpad_symbols.FindBreakpadDir(board).lstrip('/'))
+    logging.info('uploading all symbols to %s from %s', upload_url,
+                 breakpad_dir)
+    sym_paths = [breakpad_dir]
+
+  # We use storage_query to ask the server about existing symbols.  The
+  # storage_notify_proc process is used to post updates to the server.  We
+  # cannot safely share the storage object between threads/processes, but
+  # we also want to minimize creating new ones as each object has to init
+  # new state (like server connections).
+  storage_query = None
+  if dedupe_namespace:
+    dedupe_limit = DEDUPE_LIMIT
+    dedupe_queue = multiprocessing.Queue()
+    try:
+      with timeout_util.Timeout(DEDUPE_TIMEOUT):
+        storage_query = isolateserver.get_storage_api(constants.ISOLATESERVER,
+                                                      dedupe_namespace)
+    except Exception:
+      # Deduping is best effort; on failure we just upload everything.
+      logging.warning('initializing dedupe server connection failed',
+                      exc_info=True)
+  else:
+    dedupe_limit = 1
+    dedupe_queue = None
+  # Can't use parallel.BackgroundTaskRunner because that'll create multiple
+  # processes and we want only one the whole time (see comment above).
+  storage_notify_proc = multiprocessing.Process(
+      target=SymbolDeduplicatorNotify, args=(dedupe_namespace, dedupe_queue))
+
+  # Shared values so the background uploader can report errors back to us.
+  bg_errors = multiprocessing.Value('i')
+  watermark_errors = multiprocessing.Value('f')
+  failed_queue = multiprocessing.Queue()
+  uploader = functools.partial(
+      UploadSymbol, upload_url, file_limit=file_limit, sleep=sleep,
+      num_errors=bg_errors, watermark_errors=watermark_errors,
+      failed_queue=failed_queue, passed_queue=dedupe_queue)
+
+  start_time = datetime.datetime.now()
+  Counters = cros_build_lib.Collection(
+      'Counters', upload_limit=upload_limit, uploaded_count=0, deduped_count=0)
+  counters = Counters()
+
+  def _Upload(queue, counters, files):
+    """Dedupe |files| and queue the not-yet-uploaded ones for upload."""
+    if not files:
+      return
+
+    missing_count = 0
+    for item in SymbolDeduplicator(storage_query, files):
+      missing_count += 1
+
+      # upload_limit == 0 means we've hit the cap; keep draining the
+      # dedupe results but don't queue any more uploads.
+      if counters.upload_limit == 0:
+        continue
+
+      queue.put((item,))
+      counters.uploaded_count += 1
+      if counters.upload_limit is not None:
+        counters.upload_limit -= 1
+
+    # Anything the dedupe server already had counts as deduped.
+    counters.deduped_count += (len(files) - missing_count)
+
+  try:
+    storage_notify_proc.start()
+
+    with osutils.TempDir(prefix='upload_symbols.') as tempdir:
+      # For the first run, we collect the symbols that failed.  If the
+      # overall failure rate was low, we'll retry them on the second run.
+      for retry in (retry, False):
+        # We need to limit ourselves to one upload at a time to avoid the server
+        # kicking in DoS protection.  See these bugs for more details:
+        # http://crbug.com/209442
+        # http://crbug.com/212496
+        with parallel.BackgroundTaskRunner(uploader, processes=1) as queue:
+          # Batch symbols so we query the dedupe server in chunks rather
+          # than once per symbol.
+          dedupe_list = []
+          for sym_file in SymbolFinder(tempdir, sym_paths):
+            dedupe_list.append(sym_file)
+            dedupe_len = len(dedupe_list)
+            if dedupe_len < dedupe_limit:
+              if (counters.upload_limit is None or
+                  dedupe_len < counters.upload_limit):
+                continue
+
+            # We check the counter before _Upload so that we don't keep talking
+            # to the dedupe server.  Otherwise, we end up sending one symbol at
+            # a time to it and that slows things down a lot.
+            if counters.upload_limit == 0:
+              break
+
+            _Upload(queue, counters, dedupe_list)
+            dedupe_list = []
+          # Flush any partial batch left over from the loop above.
+          _Upload(queue, counters, dedupe_list)
+
+        # See if we need to retry, and if we haven't failed too many times yet.
+        if not retry or ErrorLimitHit(bg_errors, watermark_errors):
+          break
+
+        # Gather the failed uploads (terminated by a None sentinel) and use
+        # them as the input set for the second pass.
+        sym_paths = []
+        failed_queue.put(None)
+        while True:
+          sym_path = failed_queue.get()
+          if sym_path is None:
+            break
+          sym_paths.append(sym_path)
+
+        if sym_paths:
+          logging.warning('retrying %i symbols', len(sym_paths))
+          if counters.upload_limit is not None:
+            counters.upload_limit += len(sym_paths)
+          # Decrement the error count in case we recover in the second pass.
+          assert bg_errors.value >= len(sym_paths), \
+                 'more failed files than errors?'
+          bg_errors.value -= len(sym_paths)
+        else:
+          # No failed symbols, so just return now.
+          break
+
+    # If the user has requested it, save all the symbol files that we failed to
+    # upload to a listing file.  This should help with recovery efforts later.
+    failed_queue.put(None)
+    WriteQueueToFile(failed_list, failed_queue, breakpad_dir)
+
+  finally:
+    logging.info('finished uploading; joining background process')
+    if dedupe_queue:
+      # The None sentinel tells the notify process to finish up.
+      dedupe_queue.put(None)
+
+    # The notification might be slow going, so give it some time to finish.
+    # We have to poll here as the process monitor is watching for output and
+    # will kill us if we go silent for too long.
+    wait_minutes = DEDUPE_NOTIFY_TIMEOUT
+    while storage_notify_proc.is_alive() and wait_minutes > 0:
+      if dedupe_queue:
+        qsize = str(dedupe_queue.qsize())
+      else:
+        qsize = '[None]'
+      logging.info('waiting up to %i minutes for ~%s notifications',
+                   wait_minutes, qsize)
+      storage_notify_proc.join(60)
+      wait_minutes -= 1
+
+    # The process is taking too long, so kill it and complain.
+    if storage_notify_proc.is_alive():
+      logging.warning('notification process took too long')
+      logging.PrintBuildbotStepWarnings()
+
+      # Kill it gracefully first (traceback) before taking it down harder.
+      pid = storage_notify_proc.pid
+      for sig in (signal.SIGINT, signal.SIGTERM, signal.SIGKILL):
+        logging.warning('sending %s to %i', signals.StrSignal(sig), pid)
+        # The process might have exited between the last check and the
+        # actual kill below, so ignore ESRCH errors.
+        try:
+          os.kill(pid, sig)
+        except OSError as e:
+          if e.errno == errno.ESRCH:
+            break
+          else:
+            raise
+        time.sleep(5)
+        if not storage_notify_proc.is_alive():
+          break
+
+      # Drain the queue so we don't hang when we finish.
+      try:
+        logging.warning('draining the notify queue manually')
+        with timeout_util.Timeout(60):
+          try:
+            while dedupe_queue.get_nowait():
+              pass
+          except Queue.Empty:
+            pass
+      except timeout_util.TimeoutError:
+        logging.warning('draining the notify queue failed; trashing it')
+        dedupe_queue.cancel_join_thread()
+
+  logging.info('uploaded %i symbols (%i were deduped) which took: %s',
+               counters.uploaded_count, counters.deduped_count,
+               datetime.datetime.now() - start_time)
+
+  return bg_errors.value
+
+
+def main(argv):
+  """Command line entry point: parse |argv|, then upload the symbols."""
+  parser = commandline.ArgumentParser(description=__doc__)
+
+  parser.add_argument('sym_paths', type='path_or_uri', nargs='*', default=None,
+                      help='symbol file or directory or URL or tarball')
+  parser.add_argument('--board', default=None,
+                      help='board to build packages for')
+  parser.add_argument('--breakpad_root', type='path', default=None,
+                      help='full path to the breakpad symbol directory')
+  parser.add_argument('--root', type='path', default=None,
+                      help='full path to the chroot dir')
+  parser.add_argument('--official_build', action='store_true', default=False,
+                      help='point to official symbol server')
+  parser.add_argument('--server', type=str, default=None,
+                      help='URI for custom symbol server')
+  parser.add_argument('--regenerate', action='store_true', default=False,
+                      help='regenerate all symbols')
+  parser.add_argument('--upload-limit', type=int, default=None,
+                      help='only upload # number of symbols')
+  parser.add_argument('--strip_cfi', type=int,
+                      default=CRASH_SERVER_FILE_LIMIT - (10 * 1024 * 1024),
+                      help='strip CFI data for files above this size')
+  parser.add_argument('--failed-list', type='path',
+                      help='where to save a list of failed symbols')
+  parser.add_argument('--dedupe', action='store_true', default=False,
+                      help='use the swarming service to avoid re-uploading')
+  parser.add_argument('--testing', action='store_true', default=False,
+                      help='run in testing mode')
+  parser.add_argument('--yes', action='store_true', default=False,
+                      help='answer yes to all prompts')
+
+  opts = parser.parse_args(argv)
+  opts.Freeze()
+
+  # Sanity check the option combinations before doing any real work.
+  if opts.sym_paths:
+    if opts.regenerate:
+      cros_build_lib.Die('--regenerate may not be used with specific files')
+  else:
+    if opts.board is None:
+      cros_build_lib.Die('--board is required')
+
+  if opts.breakpad_root and opts.regenerate:
+    cros_build_lib.Die('--regenerate may not be used with --breakpad_root')
+
+  if opts.testing:
+    # TODO(build): Kill off --testing mode once unittests are up-to-snuff.
+    logging.info('running in testing mode')
+    # pylint: disable=W0601,W0603
+    # Swap in no-delay settings and the fake uploader for testing.
+    global INITIAL_RETRY_DELAY, SymUpload, DEFAULT_SLEEP_DELAY
+    INITIAL_RETRY_DELAY = DEFAULT_SLEEP_DELAY = 0
+    SymUpload = TestingSymUpload
+
+  # Pick the dedupe namespace (if deduping at all); testing mode always
+  # uses the staging namespace to keep the official one clean.
+  dedupe_namespace = None
+  if opts.dedupe:
+    if opts.official_build and not opts.testing:
+      dedupe_namespace = OFFICIAL_DEDUPE_NAMESPACE
+    else:
+      dedupe_namespace = STAGING_DEDUPE_NAMESPACE
+
+  # Full-build uploads are slow, so confirm with the user first.
+  if not opts.yes:
+    prolog = '\n'.join(textwrap.wrap(textwrap.dedent("""
+        Uploading symbols for an entire Chromium OS build is really only
+        necessary for release builds and in a few cases for developers
+        to debug problems.  It will take considerable time to run.  For
+        developer debugging purposes, consider instead passing specific
+        files to upload.
+    """), 80)).strip()
+    if not cros_build_lib.BooleanPrompt(
+        prompt='Are you sure you want to upload all build symbols',
+        default=False, prolog=prolog):
+      cros_build_lib.Die('better safe than sorry')
+
+  ret = 0
+  if opts.regenerate:
+    ret += cros_generate_breakpad_symbols.GenerateBreakpadSymbols(
+        opts.board, breakpad_dir=opts.breakpad_root)
+
+  ret += UploadSymbols(opts.board, official=opts.official_build,
+                       server=opts.server, breakpad_dir=opts.breakpad_root,
+                       file_limit=opts.strip_cfi, sleep=DEFAULT_SLEEP_DELAY,
+                       upload_limit=opts.upload_limit, sym_paths=opts.sym_paths,
+                       failed_list=opts.failed_list, root=opts.root,
+                       dedupe_namespace=dedupe_namespace)
+  if ret:
+    logging.error('encountered %i problem(s)', ret)
+    # Since exit(status) gets masked, clamp it to 1 so we don't inadvertently
+    # return 0 in case we are a multiple of the mask.
+    ret = 1
+
+  return ret
+
+
+# We need this to run once per process.  Do it at module import time as that
+# will let us avoid doing it inline at function call time (see SymUpload) as
+# that func might be called by the multiprocessing module which means we'll
+# do the opener logic multiple times overall.  Plus, if you're importing this
+# module, it's a pretty good chance that you're going to need this.
+# NOTE: this registers poster's streaming HTTP handlers globally.
+poster.streaminghttp.register_openers()
diff --git a/scripts/upload_symbols_unittest b/scripts/upload_symbols_unittest
new file mode 120000
index 0000000..b7045c5
--- /dev/null
+++ b/scripts/upload_symbols_unittest
@@ -0,0 +1 @@
+wrapper.py
\ No newline at end of file
diff --git a/scripts/upload_symbols_unittest.py b/scripts/upload_symbols_unittest.py
new file mode 100644
index 0000000..c8f3eb4
--- /dev/null
+++ b/scripts/upload_symbols_unittest.py
@@ -0,0 +1,543 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for upload_symbols.py"""
+
+from __future__ import print_function
+
+import BaseHTTPServer
+import ctypes
+import errno
+import mock
+import multiprocessing
+import os
+import signal
+import socket
+import SocketServer
+import sys
+import time
+import urllib2
+
+# We specifically set up a local server to connect to, so make sure we
+# delete any proxy settings that might screw that up.  We also need to
+# do it here because modules that are imported below will implicitly
+# initialize with this proxy setting rather than dynamically pull it
+# on the fly :(.
+os.environ.pop('http_proxy', None)
+
+from chromite.lib import cros_build_lib
+from chromite.lib import cros_logging as logging
+from chromite.lib import cros_test_lib
+from chromite.lib import osutils
+from chromite.lib import parallel
+from chromite.lib import parallel_unittest
+from chromite.lib import remote_access
+from chromite.scripts import cros_generate_breakpad_symbols
+from chromite.scripts import upload_symbols
+
+import isolateserver
+
+
+class SymbolServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+  """HTTP handler for symbol POSTs"""
+
+  # Subclasses set these to control the canned status line sent back
+  # for every POST (e.g. 200 for success tests, 500 for failure tests).
+  RESP_CODE = None
+  RESP_MSG = None
+
+  def do_POST(self):
+    """Handle a POST request"""
+    # Drain the data from the client.  If we don't, we might write the response
+    # and close the socket before the client finishes, so they die with EPIPE.
+    clen = int(self.headers.get('Content-Length', '0'))
+    self.rfile.read(clen)
+
+    self.send_response(self.RESP_CODE, self.RESP_MSG)
+    self.end_headers()
+
+  def log_message(self, *args, **kwargs):
+    """Stub the logger as it writes to stderr"""
+    pass
+
+
+class SymbolServer(SocketServer.ThreadingTCPServer, BaseHTTPServer.HTTPServer):
+  """Simple HTTP server that handles each request in its own thread"""
+
+
+class UploadSymbolsServerTest(cros_test_lib.MockTempDirTestCase):
+  """Tests for UploadSymbols() and a local HTTP server"""
+
+  # Minimal breakpad symbol file contents used as the upload fixture.
+  SYM_CONTENTS = """MODULE Linux arm 123-456 blkid
+PUBLIC 1471 0 main"""
+
+  def SpawnServer(self, RequestHandler):
+    """Spawn a new http server"""
+    # Another process may grab the port between GetUnusedPort() and our
+    # bind, so retry until the bind sticks.
+    while True:
+      try:
+        port = remote_access.GetUnusedPort()
+        address = ('', port)
+        self.httpd = SymbolServer(address, RequestHandler)
+        break
+      except socket.error as e:
+        if e.errno == errno.EADDRINUSE:
+          continue
+        raise
+    self.server = 'http://localhost:%i' % port
+    # Serve from a forked child so the test process itself stays free to
+    # run the upload code under test.
+    self.httpd_pid = os.fork()
+    if self.httpd_pid == 0:
+      self.httpd.serve_forever(poll_interval=0.1)
+      sys.exit(0)
+
+  def setUp(self):
+    self.httpd_pid = None
+    self.httpd = None
+    self.server = None
+    self.sym_file = os.path.join(self.tempdir, 'test.sym')
+    osutils.WriteFile(self.sym_file, self.SYM_CONTENTS)
+
+  def tearDown(self):
+    # Only kill the server if we forked one.
+    if self.httpd_pid:
+      os.kill(self.httpd_pid, signal.SIGUSR1)
+
+  def testSuccess(self):
+    """The server returns success for all uploads"""
+    class Handler(SymbolServerRequestHandler):
+      """Always return 200"""
+      RESP_CODE = 200
+
+    self.SpawnServer(Handler)
+    ret = upload_symbols.UploadSymbols('', server=self.server, sleep=0,
+                                       sym_paths=[self.sym_file] * 10,
+                                       retry=False)
+    self.assertEqual(ret, 0)
+
+  def testError(self):
+    """The server returns errors for all uploads"""
+    class Handler(SymbolServerRequestHandler):
+      """Always return 500"""
+      RESP_CODE = 500
+      RESP_MSG = 'Internal Server Error'
+
+    self.SpawnServer(Handler)
+    ret = upload_symbols.UploadSymbols('', server=self.server, sleep=0,
+                                       sym_paths=[self.sym_file] * 10,
+                                       retry=False)
+    # NOTE(review): 4 is presumably tied to UploadSymbols' error-limit
+    # accounting (it stops queueing after enough failures) — confirm
+    # against the error watermark constants if this starts flaking.
+    self.assertEqual(ret, 4)
+
+  def testHungServer(self):
+    """The server chokes, but we recover"""
+    class Handler(SymbolServerRequestHandler):
+      """All connections choke forever"""
+      def do_POST(self):
+        while True:
+          time.sleep(1000)
+
+    self.SpawnServer(Handler)
+    # Shrink the upload timeout so the hung connections abort quickly.
+    with mock.patch.object(upload_symbols, 'GetUploadTimeout') as m:
+      m.return_value = 0.1
+      ret = upload_symbols.UploadSymbols('', server=self.server, sleep=0,
+                                         sym_paths=[self.sym_file] * 10,
+                                         retry=False)
+    self.assertEqual(ret, 4)
+
+
+class UploadSymbolsTest(cros_test_lib.MockTempDirTestCase):
+  """Tests for UploadSymbols()"""
+
+  def setUp(self):
+    # Lay out a tree holding both real symbol files and decoys that must
+    # be skipped (wrong suffix, or ".sym" not at the end of the name).
+    for d in ('foo', 'bar', 'some/dir/here'):
+      d = os.path.join(self.tempdir, d)
+      osutils.SafeMakedirs(d)
+      for f in ('ignored', 'real.sym', 'no.sym.here'):
+        f = os.path.join(d, f)
+        osutils.Touch(f)
+    # Sorted tempdir-relative paths of the files that should be uploaded.
+    self.sym_paths = [
+        'bar/real.sym',
+        'foo/real.sym',
+        'some/dir/here/real.sym',
+    ]
+
+    self.upload_mock = self.PatchObject(upload_symbols, 'UploadSymbol')
+    self.PatchObject(cros_generate_breakpad_symbols, 'ReadSymsHeader',
+                     return_value=cros_generate_breakpad_symbols.SymbolHeader(
+                         os='os', cpu='cpu', id='id', name='name'))
+
+  def _testUploadURL(self, official, expected_url):
+    """Helper for checking the url used"""
+    self.upload_mock.return_value = 0
+    with parallel_unittest.ParallelMock():
+      ret = upload_symbols.UploadSymbols('', official=official, retry=False,
+                                         breakpad_dir=self.tempdir, sleep=0)
+      self.assertEqual(ret, 0)
+      self.assertEqual(self.upload_mock.call_count, 3)
+      # Every upload call should target |expected_url| with a .sym file.
+      for call_args in self.upload_mock.call_args_list:
+        url, sym_element = call_args[0]
+        self.assertEqual(url, expected_url)
+        self.assertTrue(sym_element.symbol_item.sym_file.endswith('.sym'))
+
+  def testOfficialUploadURL(self):
+    """Verify we upload to the real crash server for official builds"""
+    self._testUploadURL(True, upload_symbols.OFFICIAL_UPLOAD_URL)
+
+  def testUnofficialUploadURL(self):
+    """Verify we upload to the staging crash server for unofficial builds"""
+    self._testUploadURL(False, upload_symbols.STAGING_UPLOAD_URL)
+
+  def testUploadSymbolFailureSimple(self):
+    """Verify that when UploadSymbol fails, the error count is passed up"""
+    def UploadSymbol(*_args, **kwargs):
+      # Simulate the uploader reporting 4 errors via the shared value.
+      kwargs['num_errors'].value = 4
+    self.upload_mock.side_effect = UploadSymbol
+    with parallel_unittest.ParallelMock():
+      ret = upload_symbols.UploadSymbols('', breakpad_dir=self.tempdir, sleep=0,
+                                         retry=False)
+      self.assertEquals(ret, 4)
+
+  def testUploadCount(self):
+    """Verify we can limit the number of uploaded symbols"""
+    self.upload_mock.return_value = 0
+    for c in xrange(3):
+      self.upload_mock.reset_mock()
+      with parallel_unittest.ParallelMock():
+        ret = upload_symbols.UploadSymbols('', breakpad_dir=self.tempdir,
+                                           sleep=0, upload_limit=c)
+        self.assertEquals(ret, 0)
+        self.assertEqual(self.upload_mock.call_count, c)
+
+  def testFailedFileList(self):
+    """Verify the failed file list is populated with the right content"""
+    def UploadSymbol(*args, **kwargs):
+      # Mark every symbol as failed and report errors.
+      kwargs['failed_queue'].put(args[1].symbol_item.sym_file)
+      kwargs['num_errors'].value = 4
+    self.upload_mock.side_effect = UploadSymbol
+    with parallel_unittest.ParallelMock():
+      failed_list = os.path.join(self.tempdir, 'list')
+      ret = upload_symbols.UploadSymbols('', breakpad_dir=self.tempdir, sleep=0,
+                                         retry=False, failed_list=failed_list)
+      self.assertEquals(ret, 4)
+
+      # Need to sort the output as parallel/fs discovery can be unordered.
+      got_list = sorted(osutils.ReadFile(failed_list).splitlines())
+      self.assertEquals(self.sym_paths, got_list)
+
+  def _testUpload(self, inputs, sym_paths=None):
+    """Helper for testing uploading of specific paths"""
+    if sym_paths is None:
+      sym_paths = inputs
+
+    self.upload_mock.return_value = 0
+    with parallel_unittest.ParallelMock():
+      ret = upload_symbols.UploadSymbols(sym_paths=inputs, sleep=0,
+                                         retry=False)
+      self.assertEquals(ret, 0)
+      self.assertEquals(self.upload_mock.call_count, len(sym_paths))
+
+      # Since upload order is arbitrary, we have to do a manual scan for each
+      # path ourselves against the uploaded file list.
+      found_syms = [x[0][1].symbol_item.sym_file
+                    for x in self.upload_mock.call_args_list]
+      for found_sym in found_syms:
+        for path in sym_paths:
+          if found_sym.endswith(path):
+            break
+        else:
+          # NOTE(review): |path| here is just the last candidate tried; the
+          # value that failed to match is |found_sym|.
+          raise AssertionError('Could not locate %s in %r' % (path, found_syms))
+
+  def testUploadFiles(self):
+    """Test uploading specific symbol files"""
+    sym_paths = (
+        os.path.join(self.tempdir, 'bar', 'real.sym'),
+        os.path.join(self.tempdir, 'foo', 'real.sym'),
+    )
+    self._testUpload(sym_paths)
+
+  def testUploadDirectory(self):
+    """Test uploading directory of symbol files"""
+    self._testUpload([self.tempdir], sym_paths=self.sym_paths)
+
+  def testUploadLocalTarball(self):
+    """Test uploading symbols contained in a local tarball"""
+    tarball = os.path.join(self.tempdir, 'syms.tar.gz')
+    cros_build_lib.CreateTarball(
+        'syms.tar.gz', self.tempdir, compression=cros_build_lib.COMP_GZIP,
+        inputs=('foo', 'bar', 'some'))
+    self._testUpload([tarball], sym_paths=self.sym_paths)
+
+  def testUploadRemoteTarball(self):
+    """Test uploading symbols contained in a remote tarball"""
+    # TODO: Need to figure out how to mock out lib.cache.TarballCache.
+
+  def testDedupeNotifyFailure(self):
+    """Test that a dedupe server failure midway doesn't wedge things"""
+    api_mock = mock.MagicMock()
+
+    def _Contains(items):
+      """Do not dedupe anything"""
+      return items
+    api_mock.contains.side_effect = _Contains
+
+    # Use a list so the closure below can modify the value.
+    item_count = [0]
+    # Pick a number big enough to trigger a hang normally, but not so
+    # big it adds a lot of overhead.
+    item_limit = 50
+    def _Push(*_args):
+      """Die in the middle of the push list"""
+      item_count[0] += 1
+      if item_count[0] > (item_limit / 10):
+        raise ValueError('time to die')
+    api_mock.push.side_effect = _Push
+
+    self.PatchObject(isolateserver, 'get_storage_api', return_value=api_mock)
+
+    def _Uploader(*args, **kwargs):
+      """Pass the uploaded symbol to the deduper"""
+      sym_item = args[1]
+      passed_queue = kwargs['passed_queue']
+      passed_queue.put(sym_item)
+    self.upload_mock.side_effect = _Uploader
+
+    self.upload_mock.return_value = 0
+    with parallel_unittest.ParallelMock():
+      ret = upload_symbols.UploadSymbols(
+          '', sym_paths=[self.tempdir] * item_limit, sleep=0,
+          dedupe_namespace='inva!id name$pace')
+      self.assertEqual(ret, 0)
+      # This test normally passes by not hanging.
+
+  def testSlowDedupeSystem(self):
+    """Verify a slow-to-join process doesn't break things when dedupe is off"""
+    # The sleep value here is inherently a little racy, but seems to be good
+    # enough to trigger the bug on a semi-regular basis on developer systems.
+    self.PatchObject(upload_symbols, 'SymbolDeduplicatorNotify',
+                     side_effect=lambda *args: time.sleep(1))
+    # Test passing means the code didn't throw an exception.
+    upload_symbols.UploadSymbols(sym_paths=[self.tempdir])
+
+
+class SymbolDeduplicatorNotifyTest(cros_test_lib.MockTestCase):
+  """Tests for SymbolDeduplicatorNotify()"""
+
+  def setUp(self):
+    self.storage_mock = self.PatchObject(isolateserver, 'get_storage_api')
+
+  def testSmoke(self):
+    """Basic run through the system."""
+    q = mock.MagicMock()
+    # One fake symbol followed by the None sentinel that ends the loop.
+    q.get.side_effect = (upload_symbols.FakeItem(), None,)
+    upload_symbols.SymbolDeduplicatorNotify('name', q)
+
+  def testStorageException(self):
+    """We want to just warn & move on when dedupe server fails"""
+    log_mock = self.PatchObject(logging, 'warning')
+    q = mock.MagicMock()
+    q.get.side_effect = (upload_symbols.FakeItem(), None,)
+    # Simulate the dedupe server connection blowing up.
+    self.storage_mock.side_effect = Exception
+    upload_symbols.SymbolDeduplicatorNotify('name', q)
+    self.assertEqual(log_mock.call_count, 1)
+
+
+class SymbolDeduplicatorTest(cros_test_lib.MockTestCase):
+  """Tests for SymbolDeduplicator()"""
+
+  def setUp(self):
+    self.storage_mock = mock.MagicMock()
+    self.header_mock = self.PatchObject(
+        cros_generate_breakpad_symbols, 'ReadSymsHeader',
+        return_value=cros_generate_breakpad_symbols.SymbolHeader(
+            os='os', cpu='cpu', id='id', name='name'))
+
+  def testNoStorageOrPaths(self):
+    """We don't want to talk to the server if there's no storage or files"""
+    upload_symbols.SymbolDeduplicator(None, [])
+    upload_symbols.SymbolDeduplicator(self.storage_mock, [])
+    self.assertEqual(self.storage_mock.call_count, 0)
+    self.assertEqual(self.header_mock.call_count, 0)
+
+  def testStorageException(self):
+    """We want to just warn & move on when dedupe server fails"""
+    log_mock = self.PatchObject(logging, 'warning')
+    self.storage_mock.contains.side_effect = Exception('storage error')
+    sym_paths = ['/a', '/bbbbbb', '/cc.c']
+    ret = upload_symbols.SymbolDeduplicator(self.storage_mock, sym_paths)
+    self.assertEqual(log_mock.call_count, 1)
+    self.assertEqual(self.storage_mock.contains.call_count, 1)
+    self.assertEqual(self.header_mock.call_count, len(sym_paths))
+    # On failure, every symbol should still come back (nothing deduped).
+    self.assertEqual(len(ret), len(sym_paths))
+
+
+class UploadSymbolTest(cros_test_lib.MockTempDirTestCase):
+  """Tests for UploadSymbol()"""
+
+  def setUp(self):
+    self.sym_file = os.path.join(self.tempdir, 'foo.sym')
+    self.sym_item = upload_symbols.FakeItem(sym_file=self.sym_file)
+    self.url = 'http://eatit'
+    self.upload_mock = self.PatchObject(upload_symbols, 'SymUpload')
+
+  def testUploadSymbolNormal(self):
+    """Verify we try to upload on a normal file"""
+    osutils.Touch(self.sym_file)
+    sym_element = upload_symbols.SymbolElement(self.sym_item, None)
+    ret = upload_symbols.UploadSymbol(self.url, sym_element)
+    self.assertEqual(ret, 0)
+    self.upload_mock.assert_called_with(self.url, self.sym_item)
+    self.assertEqual(self.upload_mock.call_count, 1)
+
+  def testUploadSymbolErrorCountExceeded(self):
+    """Verify that when the error count gets too high, we stop uploading"""
+    errors = ctypes.c_int(10000)
+    # Pass in garbage values so that we crash if num_errors isn't handled.
+    ret = upload_symbols.UploadSymbol(
+        None, upload_symbols.SymbolElement(self.sym_item, None), sleep=None,
+        num_errors=errors)
+    self.assertEqual(ret, 0)
+
+  def testUploadRetryErrors(self, side_effect=None):
+    """Verify that we retry errors (and eventually give up)"""
+    if not side_effect:
+      side_effect = urllib2.HTTPError('http://', 400, 'fail', {}, None)
+    self.upload_mock.side_effect = side_effect
+    errors = ctypes.c_int()
+    item = upload_symbols.FakeItem(sym_file='/dev/null')
+    element = upload_symbols.SymbolElement(item, None)
+    ret = upload_symbols.UploadSymbol(self.url, element, num_errors=errors)
+    self.assertEqual(ret, 1)
+    self.upload_mock.assert_called_with(self.url, item)
+    self.assertTrue(self.upload_mock.call_count >= upload_symbols.MAX_RETRIES)
+
+  def testConnectRetryErrors(self):
+    """Verify that we retry errors (and eventually give up) w/connect errors"""
+    # Reuse the HTTP-error test, but with a connection-level failure.
+    side_effect = urllib2.URLError('foo')
+    self.testUploadRetryErrors(side_effect=side_effect)
+
+  def testTruncateTooBigFiles(self):
+    """Verify we shrink big files"""
+    def SymUpload(_url, sym_item):
+      content = osutils.ReadFile(sym_item.sym_file)
+      self.assertEqual(content, 'some junk\n')
+    # NOTE(review): this assigns to an auto-created child mock attribute
+    # (upload_mock.upload_mock), so the SymUpload checker above is likely
+    # never invoked; presumably `self.upload_mock.side_effect` was meant.
+    self.upload_mock.upload_mock.side_effect = SymUpload
+    content = '\n'.join((
+        'STACK CFI 1234',
+        'some junk',
+        'STACK CFI 1234',
+    ))
+    osutils.WriteFile(self.sym_file, content)
+    ret = upload_symbols.UploadSymbol(
+        self.url, upload_symbols.SymbolElement(self.sym_item, None),
+        file_limit=1)
+    self.assertEqual(ret, 0)
+    # Make sure the item passed to the upload has a temp file and not the
+    # original -- only the temp one has been stripped down.
+    temp_item = self.upload_mock.call_args[0][1]
+    self.assertNotEqual(temp_item.sym_file, self.sym_item.sym_file)
+    self.assertEqual(self.upload_mock.call_count, 1)
+
+  def testTruncateReallyLargeFiles(self):
+    """Verify we try to shrink really big files"""
+    warn_mock = self.PatchObject(logging, 'PrintBuildbotStepWarnings')
+    # Create a sparse file just over the server's size limit.
+    with open(self.sym_file, 'w+b') as f:
+      f.truncate(upload_symbols.CRASH_SERVER_FILE_LIMIT + 100)
+      f.seek(0)
+      f.write('STACK CFI 1234\n\n')
+    ret = upload_symbols.UploadSymbol(
+        self.url,
+        upload_symbols.SymbolElement(self.sym_item, None))
+    self.assertEqual(ret, 0)
+    # Make sure the item passed to the upload has a temp file and not the
+    # original -- only the temp one has been truncated.
+    temp_item = self.upload_mock.call_args[0][1]
+    self.assertNotEqual(temp_item.sym_file, self.sym_item.sym_file)
+    self.assertEqual(self.upload_mock.call_count, 1)
+    self.assertEqual(warn_mock.call_count, 1)
+
+
+class SymUploadTest(cros_test_lib.MockTempDirTestCase):
+  """Tests for SymUpload()"""
+
+  SYM_URL = 'http://localhost/post/it/here'
+  SYM_CONTENTS = """MODULE Linux arm 123-456 blkid
+PUBLIC 1471 0 main"""
+
+  def setUp(self):
+    self.sym_file = os.path.join(self.tempdir, 'test.sym')
+    osutils.WriteFile(self.sym_file, self.SYM_CONTENTS)
+    self.sym_item = upload_symbols.SymbolItem(self.sym_file)
+
+  def testPostUpload(self):
+    """Verify HTTP POST has all the fields we need"""
+    m = self.PatchObject(urllib2, 'urlopen', autospec=True)
+    upload_symbols.SymUpload(self.SYM_URL, self.sym_item)
+    self.assertEquals(m.call_count, 1)
+    req = m.call_args[0][0]
+    self.assertEquals(req.get_full_url(), self.SYM_URL)
+    data = ''.join([x for x in req.get_data()])
+
+    fields = {
+        'code_file': 'blkid',
+        'debug_file': 'blkid',
+        'debug_identifier': '123456',
+        'os': 'Linux',
+        'cpu': 'arm',
+    }
+    for key, val in fields.iteritems():
+      line = 'Content-Disposition: form-data; name="%s"\r\n' % key
+      self.assertTrue(line in data)
+      line = '%s\r\n' % val
+      self.assertTrue(line in data)
+    line = ('Content-Disposition: form-data; name="symbol_file"; '
+            'filename="test.sym"\r\n')
+    self.assertTrue(line in data)
+    self.assertTrue(self.SYM_CONTENTS in data)
+
+  def testTimeout(self):
+    """Verify timeouts scale based on filesize"""
+    m = self.PatchObject(urllib2, 'urlopen', autospec=True)
+    size = self.PatchObject(os.path, 'getsize')
+
+    tests = (
+        # Small files should get rounded up to the minimum timeout.
+        (10, upload_symbols.UPLOAD_MIN_TIMEOUT),
+        # A 50MiB file should take like ~4 minutes.
+        (50 * 1024 * 1024, 257),
+    )
+    for size.return_value, timeout in tests:
+      upload_symbols.SymUpload(self.SYM_URL, self.sym_item)
+      self.assertEqual(m.call_args[1]['timeout'], timeout)
+
+
+class UtilTest(cros_test_lib.TempDirTestCase):
+  """Various tests for utility funcs."""
+
+  def testWriteQueueToFile(self):
+    """Basic test for WriteQueueToFile."""
+    listing = os.path.join(self.tempdir, 'list')
+    exp_list = [
+        'b/c.txt',
+        'foo.log',
+        'there/might/be/giants',
+    ]
+    relpath = '/a'
+
+    q = multiprocessing.Queue()
+    for f in exp_list:
+      q.put(os.path.join(relpath, f))
+    q.put(None)
+    upload_symbols.WriteQueueToFile(listing, q, '/a')
+
+    got_list = osutils.ReadFile(listing).splitlines()
+    self.assertEquals(exp_list, got_list)
+
+
+def main(_argv):
+  # pylint: disable=W0212
+  # Set timeouts small so that if the unit test hangs, it won't hang for long.
+  parallel._BackgroundTask.STARTUP_TIMEOUT = 5
+  parallel._BackgroundTask.EXIT_TIMEOUT = 5
+
+  # We want to test retry behavior, so make sure we don't sleep.
+  upload_symbols.INITIAL_RETRY_DELAY = 0
+
+  # Run the tests.
+  cros_test_lib.main(level='info', module=__name__)
diff --git a/scripts/wrapper.py b/scripts/wrapper.py
new file mode 100755
index 0000000..2715ad8
--- /dev/null
+++ b/scripts/wrapper.py
@@ -0,0 +1,142 @@
+#!/usr/bin/python2
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper around chromite executable scripts.
+
+This takes care of creating a consistent environment for chromite scripts
+(like setting up import paths) so we don't have to duplicate the logic in
+lots of places.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+
+CHROMITE_PATH = None
+
+
+class ChromiteImporter(object):
+  """Virtual chromite module
+
+  If the checkout is not named 'chromite', trying to do 'from chromite.xxx'
+  to import modules fails horribly.  Instead, manually locate the chromite
+  directory (whatever it is named), load & return it whenever someone tries
+  to import it.  This lets us use the stable name 'chromite' regardless of
+  how things are structured on disk.
+
+  This also lets us keep the sys.path search clean.  Otherwise we'd have to
+  worry about what other dirs chromite were checked out near to as doing an
+  import would also search those for .py modules.
+  """
+
+  # When trying to load the chromite dir from disk, we'll get called again,
+  # so make sure to disable our logic to avoid an infinite loop.
+  _loading = False
+
+  def find_module(self, fullname, _path=None):
+    """Handle the 'chromite' module"""
+    if fullname == 'chromite' and not self._loading:
+      return self
+    return None
+
+  def load_module(self, _fullname):
+    """Return our cache of the 'chromite' module"""
+    # Locate the top of the chromite dir by searching for the PRESUBMIT.cfg
+    # file.  This assumes that file isn't found elsewhere in the tree.
+    path = os.path.dirname(os.path.realpath(__file__))
+    while not os.path.exists(os.path.join(path, 'PRESUBMIT.cfg')):
+      path = os.path.dirname(path)
+
+    # pylint: disable=W0603
+    global CHROMITE_PATH
+    CHROMITE_PATH = path + '/'
+
+    # Finally load the chromite dir.
+    path, mod = os.path.split(path)
+    sys.path.insert(0, path)
+    self._loading = True
+    try:
+      # This violates PEP302 slightly because __import__ will return the
+      # cached module from sys.modules rather than reloading it from disk.
+      # But the imp module does not work cleanly with meta_path currently
+      # which makes it hard to use.  Until that is fixed, we won't bother
+      # trying to address the edge case since it doesn't matter to us.
+      return __import__(mod)
+    finally:
+      # We can't pop by index as the import might have changed sys.path.
+      sys.path.remove(path)
+      self._loading = False
+
+sys.meta_path.insert(0, ChromiteImporter())
+
+from chromite.lib import commandline
+from chromite.lib import cros_import
+
+
+def FindTarget(target):
+  """Turn the path into something we can import from the chromite tree.
+
+  This supports a variety of ways of running chromite programs:
+  # Loaded via depot_tools in $PATH.
+  $ cros_sdk --help
+  # Loaded via .../chromite/bin in $PATH.
+  $ cros --help
+  # No $PATH needed.
+  $ ./bin/cros --help
+  # Loaded via ~/bin in $PATH to chromite bin/ subdir.
+  $ ln -s $PWD/bin/cros ~/bin; cros --help
+  # No $PATH needed.
+  $ ./cbuildbot/cbuildbot --help
+  # No $PATH needed, but symlink inside of chromite dir.
+  $ ln -s ./cbuildbot/cbuildbot; ./cbuildbot --help
+  # Loaded via ~/bin in $PATH to non-chromite bin/ subdir.
+  $ ln -s $PWD/cbuildbot/cbuildbot ~/bin/; cbuildbot --help
+  # No $PATH needed, but a relative symlink to a symlink to the chromite dir.
+  $ cd ~; ln -s bin/cbuildbot ./; ./cbuildbot --help
+
+  Args:
+    target: Path to the script we're trying to run.
+
+  Returns:
+    The module main functor.
+  """
+  while True:
+    # Walk back one symlink at a time until we get into the chromite dir.
+    parent, base = os.path.split(target)
+    parent = os.path.realpath(parent)
+    if parent.startswith(CHROMITE_PATH):
+      target = base
+      break
+    target = os.path.join(os.path.dirname(target), os.readlink(target))
+  assert parent.startswith(CHROMITE_PATH), (
+      'could not figure out leading path\n'
+      '\tparent: %s\n'
+      '\tCHROMITE_PATH: %s' % (parent, CHROMITE_PATH))
+  parent = parent[len(CHROMITE_PATH):].split(os.sep)
+  target = ['chromite'] + parent + [target]
+
+  if target[-2] == 'bin':
+    # Convert <path>/bin/foo -> <path>/scripts/foo.
+    target[-2] = 'scripts'
+  elif target[1] == 'bootstrap' and len(target) == 3:
+    # Convert <git_repo>/bootstrap/foo -> <git_repo>/bootstrap/scripts/foo.
+    target.insert(2, 'scripts')
+
+  module = cros_import.ImportModule(target)
+
+  # Run the module's main func if it has one.
+  main = getattr(module, 'main', None)
+  if main:
+    return main
+
+  # Is this a unittest?
+  if target[-1].rsplit('_', 1)[-1] in ('test', 'unittest'):
+    from chromite.lib import cros_test_lib
+    return lambda _argv: cros_test_lib.main(module=module)
+
+
+if __name__ == '__main__':
+  commandline.ScriptWrapperMain(FindTarget)
diff --git a/signing/signer_instructions/DEFAULT.instructions b/signing/signer_instructions/DEFAULT.instructions
new file mode 100644
index 0000000..9df5ffe
--- /dev/null
+++ b/signing/signer_instructions/DEFAULT.instructions
@@ -0,0 +1,4 @@
+[insns]
+# Default channels for all boards.  The channel in the board-specific insn will
+# still be used if it specifies a value.
+channel = dev, canary
diff --git a/signing/signer_instructions/test.board.instructions b/signing/signer_instructions/test.board.instructions
new file mode 100644
index 0000000..73fe5ed
--- /dev/null
+++ b/signing/signer_instructions/test.board.instructions
@@ -0,0 +1,8 @@
+[insns]
+keyset = stumpy-mp-v3
+channel = dev canary
+chromeos_shell = false
+ensure_no_password = true
+firmware_update = true
+security_checks = true
+create_nplusone = true
diff --git a/signing/signer_instructions/x86-alex.instructions b/signing/signer_instructions/x86-alex.instructions
new file mode 120000
index 0000000..336321f
--- /dev/null
+++ b/signing/signer_instructions/x86-alex.instructions
@@ -0,0 +1 @@
+test.board.instructions
\ No newline at end of file
diff --git a/ssh_keys/testing_rsa b/ssh_keys/testing_rsa
new file mode 100644
index 0000000..d50a630
--- /dev/null
+++ b/ssh_keys/testing_rsa
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEoAIBAAKCAQEAvsNpFdK5lb0GfKx+FgsrsM/2+aZVFYXHMPdvGtTz63ciRhq0
+Jnw7nln1SOcHraSz3/imECBg8NHIKV6rA+B9zbf7pZXEv20x5Ul0vrcPqYWC44PT
+tgsgvi8s0KZUZN93YlcjZ+Q7BjQ/tuwGSaLWLqJ7hnHALMJ3dbEM9fKBHQBCrG5H
+OaWD2gtXj7jp04M/WUnDDdemq/KMg6E9jcrJOiQ39IuTpas4hLQzVkKAKSrpl6MY
+2etHyoNarlWhcOwitArEDwf3WgnctwKstI/MTKB5BTpO2WXUNUv4kXzA+g8/l1al
+jIG13vtd9A/IV3KFVx/sLkkjuZ7z2rQXyNKuJwIBIwKCAQA79EWZJPh/hI0CnJyn
+16AEXp4T8nKDG2p9GpCiCGnq6u2Dvz/u1pZk97N9T+x4Zva0GvJc1vnlST7objW/
+Y8/ET8QeGSCT7x5PYDqiVspoemr3DCyYTKPkADKn+cLAngDzBXGHDTcfNP4U6xfr
+Qc5JK8BsFR8kApqSs/zCU4eqBtp2FVvPbgUOv3uUrFnjEuGs9rb1QZ0K6o08L4Cq
+N+e2nTysjp78blakZfqlurqTY6iJb0ImU2W3T8sV6w5GP1NT7eicXLO3WdIRB15a
+evogPeqtMo8GcO62wU/D4UCvq4GNEjvYOvFmPzXHvhTxsiWv5KEACtleBIEYmWHA
+POwrAoGBAOKgNRgxHL7r4bOmpLQcYK7xgA49OpikmrebXCQnZ/kZ3QsLVv1QdNMH
+Rx/ex7721g8R0oWslM14otZSMITCDCMWTYVBNM1bqYnUeEu5HagFwxjQ2tLuSs8E
+SBzEr96JLfhwuBhDH10sQqn+OQG1yj5acs4Pt3L4wlYwMx0vs1BxAoGBANd9Owro
+5ONiJXfKNaNY/cJYuLR+bzGeyp8oxToxgmM4UuA4hhDU7peg4sdoKJ4XjB9cKMCz
+ZGU5KHKKxNf95/Z7aywiIJEUE/xPRGNP6tngRunevp2QyvZf4pgvACvk1tl9B3HH
+7J5tY/GRkT4sQuZYpx3YnbdP5Y6Kx33BF7QXAoGAVCzghVQR/cVT1QNhvz29gs66
+iPIrtQnwUtNOHA6i9h+MnbPBOYRIpidGTaqEtKTTKisw79JjJ78X6TR4a9ML0oSg
+c1K71z9NmZgPbJU25qMN80ZCph3+h2f9hwc6AjLz0U5wQ4alP909VRVIX7iM8paf
+q59wBiHhyD3J16QAxhsCgYBu0rCmhmcV2rQu+kd4lCq7uJmBZZhFZ5tny9MlPgiK
+zIJkr1rkFbyIfqCDzyrU9irOTKc+iCUA25Ek9ujkHC4m/aTU3lnkNjYp/OFXpXF3
+XWZMY+0Ak5uUpldG85mwLIvATu3ivpbyZCTFYM5afSm4StmaUiU5tA+oZKEcGily
+jwKBgBdFLg+kTm877lcybQ04G1kIRMf5vAXcConzBt8ry9J+2iX1ddlu2K2vMroD
+1cP/U/EmvoCXSOGuetaI4UNQwE/rGCtkpvNj5y4twVLh5QufSOl49V0Ut0mwjPXw
+HfN/2MoO07vQrjgsFylvrw9A79xItABaqKndlmqlwMZWc9Ne
+-----END RSA PRIVATE KEY-----
diff --git a/ssh_keys/testing_rsa.pub b/ssh_keys/testing_rsa.pub
new file mode 100644
index 0000000..7a4d033
--- /dev/null
+++ b/ssh_keys/testing_rsa.pub
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAvsNpFdK5lb0GfKx+FgsrsM/2+aZVFYXHMPdvGtTz63ciRhq0Jnw7nln1SOcHraSz3/imECBg8NHIKV6rA+B9zbf7pZXEv20x5Ul0vrcPqYWC44PTtgsgvi8s0KZUZN93YlcjZ+Q7BjQ/tuwGSaLWLqJ7hnHALMJ3dbEM9fKBHQBCrG5HOaWD2gtXj7jp04M/WUnDDdemq/KMg6E9jcrJOiQ39IuTpas4hLQzVkKAKSrpl6MY2etHyoNarlWhcOwitArEDwf3WgnctwKstI/MTKB5BTpO2WXUNUv4kXzA+g8/l1aljIG13vtd9A/IV3KFVx/sLkkjuZ7z2rQXyNKuJw== ChromeOS test key
diff --git a/third_party/.gitignore b/third_party/.gitignore
new file mode 100644
index 0000000..c80efa4
--- /dev/null
+++ b/third_party/.gitignore
@@ -0,0 +1,4 @@
+/dpkt
+/gdata
+/pyelftools
+/swarming.client
diff --git a/third_party/.testignore b/third_party/.testignore
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/third_party/.testignore
diff --git a/third_party/LICENSE_argparse b/third_party/LICENSE_argparse
new file mode 100644
index 0000000..5cdb01e
--- /dev/null
+++ b/third_party/LICENSE_argparse
@@ -0,0 +1,279 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC.  Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team.  In October of the same
+year, the PythonLabs team moved to Digital Creations (now Zope
+Corporation, see http://www.zope.com).  In 2001, the Python Software
+Foundation (PSF, see http://www.python.org/psf/) was formed, a
+non-profit organization created specifically to own Python-related
+Intellectual Property.  Zope Corporation is a sponsoring member of
+the PSF.
+
+All Python releases are Open Source (see http://www.opensource.org for
+the Open Source Definition).  Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+    Release         Derived     Year        Owner       GPL-
+                    from                                compatible? (1)
+
+    0.9.0 thru 1.2              1991-1995   CWI         yes
+    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
+    1.6             1.5.2       2000        CNRI        no
+    2.0             1.6         2000        BeOpen.com  no
+    1.6.1           1.6         2001        CNRI        yes (2)
+    2.1             2.0+1.6.1   2001        PSF         no
+    2.0.1           2.0+1.6.1   2001        PSF         yes
+    2.1.1           2.1+2.0.1   2001        PSF         yes
+    2.2             2.1.1       2001        PSF         yes
+    2.1.2           2.1.1       2002        PSF         yes
+    2.1.3           2.1.2       2002        PSF         yes
+    2.2.1           2.2         2002        PSF         yes
+    2.2.2           2.2.1       2002        PSF         yes
+    2.2.3           2.2.2       2003        PSF         yes
+    2.3             2.2.2       2002-2003   PSF         yes
+    2.3.1           2.3         2002-2003   PSF         yes
+    2.3.2           2.3.1       2002-2003   PSF         yes
+    2.3.3           2.3.2       2002-2003   PSF         yes
+    2.3.4           2.3.3       2004        PSF         yes
+    2.3.5           2.3.4       2005        PSF         yes
+    2.4             2.3         2004        PSF         yes
+    2.4.1           2.4         2005        PSF         yes
+    2.4.2           2.4.1       2005        PSF         yes
+    2.4.3           2.4.2       2006        PSF         yes
+    2.4.4           2.4.3       2006        PSF         yes
+    2.5             2.4         2006        PSF         yes
+    2.5.1           2.5         2007        PSF         yes
+    2.5.2           2.5.1       2008        PSF         yes
+    2.5.3           2.5.2       2008        PSF         yes
+    2.6             2.5         2008        PSF         yes
+    2.6.1           2.6         2008        PSF         yes
+    2.6.2           2.6.1       2009        PSF         yes
+    2.6.3           2.6.2       2009        PSF         yes
+    2.6.4           2.6.3       2009        PSF         yes
+    2.6.5           2.6.4       2010        PSF         yes
+    2.7             2.6         2010        PSF         yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+    the GPL.  All Python licenses, unlike the GPL, let you distribute
+    a modified version without making your changes open source.  The
+    GPL-compatible licenses make it possible to combine Python with
+    other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+    because its license has a choice of law clause.  According to
+    CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+    is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+Python Software Foundation; All Rights Reserved" are retained in Python alone or
+in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions.  Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee.  This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party.  As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee.  Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement.  This Agreement together with
+Python 1.6.1 may be located on the Internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013.  This
+Agreement may also be obtained from a proxy server on the Internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee.  This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+        ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands.  All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/third_party/README b/third_party/README
new file mode 100644
index 0000000..09c668f
--- /dev/null
+++ b/third_party/README
@@ -0,0 +1,2 @@
+Future Python modules that we want to use as part of Chromite but not
+in the default installations of Python of supported build environments.
diff --git a/third_party/argparse.py b/third_party/argparse.py
new file mode 100644
index 0000000..23be12e
--- /dev/null
+++ b/third_party/argparse.py
@@ -0,0 +1,2406 @@
+# Author: Steven J. Bethard <steven.bethard@gmail.com>.
+
+"""Command-line parsing library
+
+This module is an optparse-inspired command-line parsing library that:
+
+    - handles both optional and positional arguments
+    - produces highly informative usage messages
+    - supports parsers that dispatch to sub-parsers
+
+The following is a simple usage example that sums integers from the
+command-line and writes the result to a file::
+
+    parser = argparse.ArgumentParser(
+        description='sum the integers at the command line')
+    parser.add_argument(
+        'integers', metavar='int', nargs='+', type=int,
+        help='an integer to be summed')
+    parser.add_argument(
+        '--log', default=sys.stdout, type=argparse.FileType('w'),
+        help='the file where the sum should be written')
+    args = parser.parse_args()
+    args.log.write('%s' % sum(args.integers))
+    args.log.close()
+
+The module contains the following public classes:
+
+    - ArgumentParser -- The main entry point for command-line parsing. As the
+        example above shows, the add_argument() method is used to populate
+        the parser with actions for optional and positional arguments. Then
+        the parse_args() method is invoked to convert the args at the
+        command-line into an object with attributes.
+
+    - ArgumentError -- The exception raised by ArgumentParser objects when
+        there are errors with the parser's actions. Errors raised while
+        parsing the command-line are caught by ArgumentParser and emitted
+        as command-line messages.
+
+    - FileType -- A factory for defining types of files to be created. As the
+        example above shows, instances of FileType are typically passed as
+        the type= argument of add_argument() calls.
+
+    - Action -- The base class for parser actions. Typically actions are
+        selected by passing strings like 'store_true' or 'append_const' to
+        the action= argument of add_argument(). However, for greater
+        customization of ArgumentParser actions, subclasses of Action may
+        be defined and passed as the action= argument.
+
+    - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,
+        ArgumentDefaultsHelpFormatter -- Formatter classes which
+        may be passed as the formatter_class= argument to the
+        ArgumentParser constructor. HelpFormatter is the default,
+        RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser
+        not to change the formatting for help text, and
+        ArgumentDefaultsHelpFormatter adds information about argument defaults
+        to the help.
+
+All other classes in this module are considered implementation details.
+(Also note that HelpFormatter and RawDescriptionHelpFormatter are only
+considered public as object names -- the API of the formatter objects is
+still considered an implementation detail.)
+"""
+
# Upstream argparse release that this vendored copy tracks.
__version__ = '1.1'
# Public API: names exported by `from argparse import *`.
__all__ = [
    'ArgumentParser',
    'ArgumentError',
    'ArgumentTypeError',
    'FileType',
    'HelpFormatter',
    'ArgumentDefaultsHelpFormatter',
    'RawDescriptionHelpFormatter',
    'RawTextHelpFormatter',
    'Namespace',
    'Action',
    'ONE_OR_MORE',
    'OPTIONAL',
    'PARSER',
    'REMAINDER',
    'SUPPRESS',
    'ZERO_OR_MORE',
]
+
+
# -- Modification by ChromeOS build, adding a simple OrderedDict implementation.
try:
    from collections import OrderedDict as _OrderedDict
except ImportError:
    # TODO(build): fallback to snakeoil.mappings.OrderedDict
    # Minimal insertion-ordered dict: implements just enough of the
    # OrderedDict surface for this module (and any SubParser-derived usage).
    _SENTINEL = object()

    class _OrderedDict(dict):

        def __init__(self):
            dict.__init__(self)
            # Keys in first-insertion order; kept in sync with the dict.
            self._sequence = []

        def __setitem__(self, key, value):
            if key not in self._sequence:
                self._sequence.append(key)
            return dict.__setitem__(self, key, value)

        def __delitem__(self, key):
            dict.__delitem__(self, key)
            self._sequence.remove(key)

        def pop(self, key, default=_SENTINEL):
            try:
                value = dict.pop(self, key)
            except KeyError:
                # Missing key: propagate unless a fallback was supplied.
                if default is _SENTINEL:
                    raise
                return default
            self._sequence.remove(key)
            return value

        def __iter__(self):
            return self.iterkeys()

        def iterkeys(self):
            return iter(self._sequence)

        def keys(self):
            return [key for key in self.iterkeys()]

        def iteritems(self):
            return ((key, self[key]) for key in self)

        def items(self):
            return [(key, self[key]) for key in self]

        def itervalues(self):
            return (self[key] for key in self)

        def values(self):
            return [self[key] for key in self]

        def update(self, iterable):
            # Accept either a mapping (py2-style iteritems) or (k, v) pairs.
            pairs = iterable.iteritems() if isinstance(iterable, dict) else iterable
            for key, value in pairs:
                self[key] = value
+
+
+import copy as _copy
+import os as _os
+import re as _re
+import sys as _sys
+import textwrap as _textwrap
+
+from gettext import gettext as _
+
+
def _callable(obj):
    """Best-effort callable test: true callables or (old-style) classes."""
    return any(hasattr(obj, attr) for attr in ('__call__', '__bases__'))
+
+
# Sentinel: help entries or defaults carrying this value are omitted
# entirely from help output / the parsed namespace.
SUPPRESS = '==SUPPRESS=='

# nargs markers, mirroring regex repetition syntax.
OPTIONAL = '?'
ZERO_OR_MORE = '*'
ONE_OR_MORE = '+'
PARSER = 'A...'
REMAINDER = '...'
# Namespace attribute used to stash argv items no action recognized.
_UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args'
+
+# =============================
+# Utility functions and classes
+# =============================
+
class _AttributeHolder(object):
    """Abstract base class that provides __repr__.

    The __repr__ method returns a string in the format::
        ClassName(attr=name, attr=name, ...)
    The attributes are determined either by a class-level attribute,
    '_kwarg_names', or by inspecting the instance __dict__.
    """

    def __repr__(self):
        pieces = [repr(arg) for arg in self._get_args()]
        pieces.extend('%s=%r' % item for item in self._get_kwargs())
        return '%s(%s)' % (type(self).__name__, ', '.join(pieces))

    def _get_kwargs(self):
        # Stable, name-sorted view of the instance attributes.
        return sorted(self.__dict__.items())

    def _get_args(self):
        return []
+
+
def _ensure_value(namespace, name, value):
    """Return namespace.<name>, first initializing it to *value* if unset/None."""
    current = getattr(namespace, name, None)
    if current is None:
        setattr(namespace, name, value)
        current = getattr(namespace, name)
    return current
+
+
+# ===============
+# Formatting Help
+# ===============
+
class HelpFormatter(object):
    """Formatter for generating usage messages and argument help strings.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def __init__(self,
                 prog,
                 indent_increment=2,
                 max_help_position=24,
                 width=None):

        # default setting for width: terminal width per $COLUMNS with a
        # 2-column safety margin, or 78 when $COLUMNS is unset/invalid
        if width is None:
            try:
                width = int(_os.environ['COLUMNS'])
            except (KeyError, ValueError):
                width = 80
            width -= 2

        self._prog = prog
        self._indent_increment = indent_increment
        self._max_help_position = max_help_position
        self._width = width

        self._current_indent = 0
        self._level = 0
        self._action_max_length = 0

        self._root_section = self._Section(self, None)
        self._current_section = self._root_section

        self._whitespace_matcher = _re.compile(r'\s+')
        self._long_break_matcher = _re.compile(r'\n\n\n+')

    # ===============================
    # Section and indentation methods
    # ===============================
    def _indent(self):
        self._current_indent += self._indent_increment
        self._level += 1

    def _dedent(self):
        self._current_indent -= self._indent_increment
        assert self._current_indent >= 0, 'Indent decreased below 0.'
        self._level -= 1

    class _Section(object):

        def __init__(self, formatter, parent, heading=None):
            self.formatter = formatter
            self.parent = parent
            self.heading = heading
            self.items = []

        def format_help(self):
            # format the indented section
            if self.parent is not None:
                self.formatter._indent()
            join = self.formatter._join_parts
            # BUGFIX: each item callback is invoked exactly once.  A
            # redundant pre-pass that called every func and discarded the
            # result doubled all formatting work here, and for nested
            # sections (whose item IS a child format_help) the repeated
            # calls grew exponentially with nesting depth.
            item_help = join([func(*args) for func, args in self.items])
            if self.parent is not None:
                self.formatter._dedent()

            # return nothing if the section was empty
            if not item_help:
                return ''

            # add the heading if the section was non-empty
            if self.heading is not SUPPRESS and self.heading is not None:
                current_indent = self.formatter._current_indent
                heading = '%*s%s:\n' % (current_indent, '', self.heading)
            else:
                heading = ''

            # join the section-initial newline, the heading and the help
            return join(['\n', heading, item_help, '\n'])

    def _add_item(self, func, args):
        self._current_section.items.append((func, args))

    # ========================
    # Message building methods
    # ========================
    def start_section(self, heading):
        """Open a nested help section titled *heading*."""
        self._indent()
        section = self._Section(self, self._current_section, heading)
        self._add_item(section.format_help, [])
        self._current_section = section

    def end_section(self):
        """Close the most recently opened section."""
        self._current_section = self._current_section.parent
        self._dedent()

    def add_text(self, text):
        """Queue a free-form text paragraph (unless suppressed)."""
        if text is not SUPPRESS and text is not None:
            self._add_item(self._format_text, [text])

    def add_usage(self, usage, actions, groups, prefix=None):
        """Queue the usage line built from *usage*/*actions*/*groups*."""
        if usage is not SUPPRESS:
            args = usage, actions, groups, prefix
            self._add_item(self._format_usage, args)

    def add_argument(self, action):
        """Queue help for a single action and track the widest invocation."""
        if action.help is not SUPPRESS:

            # find all invocations
            get_invocation = self._format_action_invocation
            invocations = [get_invocation(action)]
            for subaction in self._iter_indented_subactions(action):
                invocations.append(get_invocation(subaction))

            # update the maximum item length
            invocation_length = max([len(s) for s in invocations])
            action_length = invocation_length + self._current_indent
            self._action_max_length = max(self._action_max_length,
                                          action_length)

            # add the item to the list
            self._add_item(self._format_action, [action])

    def add_arguments(self, actions):
        for action in actions:
            self.add_argument(action)

    # =======================
    # Help-formatting methods
    # =======================
    def format_help(self):
        """Render all queued sections/items into the final help string."""
        # local renamed from 'help' to avoid shadowing the builtin
        help_text = self._root_section.format_help()
        if help_text:
            help_text = self._long_break_matcher.sub('\n\n', help_text)
            help_text = help_text.strip('\n') + '\n'
        return help_text

    def _join_parts(self, part_strings):
        return ''.join([part
                        for part in part_strings
                        if part and part is not SUPPRESS])

    def _format_usage(self, usage, actions, groups, prefix):
        if prefix is None:
            prefix = _('usage: ')

        # if usage is specified, use that
        if usage is not None:
            usage = usage % dict(prog=self._prog)

        # if no optionals or positionals are available, usage is just prog
        elif usage is None and not actions:
            usage = '%(prog)s' % dict(prog=self._prog)

        # if optionals and positionals are available, calculate usage
        elif usage is None:
            prog = '%(prog)s' % dict(prog=self._prog)

            # split optionals from positionals
            optionals = []
            positionals = []
            for action in actions:
                if action.option_strings:
                    optionals.append(action)
                else:
                    positionals.append(action)

            # build full usage string ('fmt' avoids shadowing builtin format)
            fmt = self._format_actions_usage
            action_usage = fmt(optionals + positionals, groups)
            usage = ' '.join([s for s in [prog, action_usage] if s])

            # wrap the usage parts if it's too long
            text_width = self._width - self._current_indent
            if len(prefix) + len(usage) > text_width:

                # break usage into wrappable parts
                part_regexp = r'\(.*?\)+|\[.*?\]+|\S+'
                opt_usage = fmt(optionals, groups)
                pos_usage = fmt(positionals, groups)
                opt_parts = _re.findall(part_regexp, opt_usage)
                pos_parts = _re.findall(part_regexp, pos_usage)
                assert ' '.join(opt_parts) == opt_usage
                assert ' '.join(pos_parts) == pos_usage

                # helper for wrapping lines
                def get_lines(parts, indent, prefix=None):
                    lines = []
                    line = []
                    if prefix is not None:
                        line_len = len(prefix) - 1
                    else:
                        line_len = len(indent) - 1
                    for part in parts:
                        if line_len + 1 + len(part) > text_width:
                            lines.append(indent + ' '.join(line))
                            line = []
                            line_len = len(indent) - 1
                        line.append(part)
                        line_len += len(part) + 1
                    if line:
                        lines.append(indent + ' '.join(line))
                    if prefix is not None:
                        lines[0] = lines[0][len(indent):]
                    return lines

                # if prog is short, follow it with optionals or positionals
                if len(prefix) + len(prog) <= 0.75 * text_width:
                    indent = ' ' * (len(prefix) + len(prog) + 1)
                    if opt_parts:
                        lines = get_lines([prog] + opt_parts, indent, prefix)
                        lines.extend(get_lines(pos_parts, indent))
                    elif pos_parts:
                        lines = get_lines([prog] + pos_parts, indent, prefix)
                    else:
                        lines = [prog]

                # if prog is long, put it on its own line
                else:
                    indent = ' ' * len(prefix)
                    parts = opt_parts + pos_parts
                    lines = get_lines(parts, indent)
                    if len(lines) > 1:
                        lines = []
                        lines.extend(get_lines(opt_parts, indent))
                        lines.extend(get_lines(pos_parts, indent))
                    lines = [prog] + lines

                # join lines into usage
                usage = '\n'.join(lines)

        # prefix with 'usage:'
        return '%s%s\n\n' % (prefix, usage)

    def _format_actions_usage(self, actions, groups):
        # find group indices and identify actions in groups
        group_actions = set()
        inserts = {}
        for group in groups:
            try:
                start = actions.index(group._group_actions[0])
            except ValueError:
                continue
            else:
                end = start + len(group._group_actions)
                if actions[start:end] == group._group_actions:
                    for action in group._group_actions:
                        group_actions.add(action)
                    if not group.required:
                        if start in inserts:
                            inserts[start] += ' ['
                        else:
                            inserts[start] = '['
                        inserts[end] = ']'
                    else:
                        if start in inserts:
                            inserts[start] += ' ('
                        else:
                            inserts[start] = '('
                        inserts[end] = ')'
                    for i in range(start + 1, end):
                        inserts[i] = '|'

        # collect all actions format strings
        parts = []
        for i, action in enumerate(actions):

            # suppressed arguments are marked with None
            # remove | separators for suppressed arguments
            if action.help is SUPPRESS:
                parts.append(None)
                if inserts.get(i) == '|':
                    inserts.pop(i)
                elif inserts.get(i + 1) == '|':
                    inserts.pop(i + 1)

            # produce all arg strings
            elif not action.option_strings:
                part = self._format_args(action, action.dest)

                # if it's in a group, strip the outer []
                if action in group_actions:
                    if part[0] == '[' and part[-1] == ']':
                        part = part[1:-1]

                # add the action string to the list
                parts.append(part)

            # produce the first way to invoke the option in brackets
            else:
                option_string = action.option_strings[0]

                # if the Optional doesn't take a value, format is:
                #    -s or --long
                if action.nargs == 0:
                    part = '%s' % option_string

                # if the Optional takes a value, format is:
                #    -s ARGS or --long ARGS
                else:
                    default = action.dest.upper()
                    args_string = self._format_args(action, default)
                    part = '%s %s' % (option_string, args_string)

                # make it look optional if it's not required or in a group
                if not action.required and action not in group_actions:
                    part = '[%s]' % part

                # add the action string to the list
                parts.append(part)

        # insert things at the necessary indices
        for i in sorted(inserts, reverse=True):
            parts[i:i] = [inserts[i]]

        # join all the action items with spaces
        text = ' '.join([item for item in parts if item is not None])

        # clean up separators for mutually exclusive groups
        # (locals renamed to avoid shadowing the open/close builtins)
        open_pat = r'[\[(]'
        close_pat = r'[\])]'
        text = _re.sub(r'(%s) ' % open_pat, r'\1', text)
        text = _re.sub(r' (%s)' % close_pat, r'\1', text)
        text = _re.sub(r'%s *%s' % (open_pat, close_pat), r'', text)
        text = _re.sub(r'\(([^|]*)\)', r'\1', text)
        text = text.strip()

        # return the text
        return text

    def _format_text(self, text):
        if '%(prog)' in text:
            text = text % dict(prog=self._prog)
        text_width = self._width - self._current_indent
        indent = ' ' * self._current_indent
        return self._fill_text(text, text_width, indent) + '\n\n'

    def _format_action(self, action):
        # determine the required width and the entry label
        help_position = min(self._action_max_length + 2,
                            self._max_help_position)
        help_width = self._width - help_position
        action_width = help_position - self._current_indent - 2
        action_header = self._format_action_invocation(action)

        # no help; start on same line and add a final newline
        if not action.help:
            tup = self._current_indent, '', action_header
            action_header = '%*s%s\n' % tup

        # short action name; start on the same line and pad two spaces
        elif len(action_header) <= action_width:
            tup = self._current_indent, '', action_width, action_header
            action_header = '%*s%-*s  ' % tup
            indent_first = 0

        # long action name; start on the next line
        else:
            tup = self._current_indent, '', action_header
            action_header = '%*s%s\n' % tup
            indent_first = help_position

        # collect the pieces of the action help
        parts = [action_header]

        # if there was help for the action, add lines of help text
        if action.help:
            help_text = self._expand_help(action)
            help_lines = self._split_lines(help_text, help_width)
            parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
            for line in help_lines[1:]:
                parts.append('%*s%s\n' % (help_position, '', line))

        # or add a newline if the description doesn't end with one
        elif not action_header.endswith('\n'):
            parts.append('\n')

        # if there are any sub-actions, add their help as well
        for subaction in self._iter_indented_subactions(action):
            parts.append(self._format_action(subaction))

        # return a single string
        return self._join_parts(parts)

    def _format_action_invocation(self, action):
        if not action.option_strings:
            metavar, = self._metavar_formatter(action, action.dest)(1)
            return metavar

        else:
            parts = []

            # if the Optional doesn't take a value, format is:
            #    -s, --long
            if action.nargs == 0:
                parts.extend(action.option_strings)

            # if the Optional takes a value, format is:
            #    -s ARGS, --long ARGS
            else:
                default = action.dest.upper()
                args_string = self._format_args(action, default)
                for option_string in action.option_strings:
                    parts.append('%s %s' % (option_string, args_string))

            return ', '.join(parts)

    def _metavar_formatter(self, action, default_metavar):
        if action.metavar is not None:
            result = action.metavar
        elif action.choices is not None:
            choice_strs = [str(choice) for choice in action.choices]
            result = '{%s}' % ','.join(choice_strs)
        else:
            result = default_metavar

        # renamed from 'format' to avoid shadowing the builtin
        def metavar_format(tuple_size):
            if isinstance(result, tuple):
                return result
            else:
                return (result, ) * tuple_size
        return metavar_format

    def _format_args(self, action, default_metavar):
        get_metavar = self._metavar_formatter(action, default_metavar)
        if action.nargs is None:
            result = '%s' % get_metavar(1)
        elif action.nargs == OPTIONAL:
            result = '[%s]' % get_metavar(1)
        elif action.nargs == ZERO_OR_MORE:
            result = '[%s [%s ...]]' % get_metavar(2)
        elif action.nargs == ONE_OR_MORE:
            result = '%s [%s ...]' % get_metavar(2)
        elif action.nargs == REMAINDER:
            result = '...'
        elif action.nargs == PARSER:
            result = '%s ...' % get_metavar(1)
        else:
            # ['%s'] * n replaces a comprehension whose loop variable
            # shadowed the gettext '_' alias
            formats = ['%s'] * action.nargs
            result = ' '.join(formats) % get_metavar(action.nargs)
        return result

    def _expand_help(self, action):
        params = dict(vars(action), prog=self._prog)
        for name in list(params):
            if params[name] is SUPPRESS:
                del params[name]
        for name in list(params):
            if hasattr(params[name], '__name__'):
                params[name] = params[name].__name__
        if params.get('choices') is not None:
            choices_str = ', '.join([str(c) for c in params['choices']])
            params['choices'] = choices_str
        return self._get_help_string(action) % params

    def _iter_indented_subactions(self, action):
        try:
            get_subactions = action._get_subactions
        except AttributeError:
            pass
        else:
            self._indent()
            for subaction in get_subactions():
                yield subaction
            self._dedent()

    def _split_lines(self, text, width):
        text = self._whitespace_matcher.sub(' ', text).strip()
        return _textwrap.wrap(text, width)

    def _fill_text(self, text, width, indent):
        text = self._whitespace_matcher.sub(' ', text).strip()
        return _textwrap.fill(text, width, initial_indent=indent,
                                           subsequent_indent=indent)

    def _get_help_string(self, action):
        return action.help
+
+
class RawDescriptionHelpFormatter(HelpFormatter):
    """Help message formatter which retains any formatting in descriptions.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _fill_text(self, text, width, indent):
        # Keep the author's own line breaks; only prepend the indent.
        indented = [indent + line for line in text.splitlines(True)]
        return ''.join(indented)
+
+
class RawTextHelpFormatter(RawDescriptionHelpFormatter):
    """Help message formatter which retains formatting of all help text.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _split_lines(self, text, width):
        # Ignore the target width entirely; honor the author's own breaks.
        return text.splitlines()
+
+
class ArgumentDefaultsHelpFormatter(HelpFormatter):
    """Help message formatter which adds default values to argument help.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _get_help_string(self, action):
        help_text = action.help
        # Append the default only when the author didn't already mention it
        # and the action is one that can actually be defaulted.
        if '%(default)' not in action.help and action.default is not SUPPRESS:
            if action.option_strings or action.nargs in (OPTIONAL, ZERO_OR_MORE):
                help_text += ' (default: %(default)s)'
        return help_text
+
+
+# =====================
+# Options and Arguments
+# =====================
+
def _get_action_name(argument):
    """Return a human-readable name for *argument*, or None if unknown."""
    if argument is None:
        return None
    if argument.option_strings:
        return '/'.join(argument.option_strings)
    if argument.metavar not in (None, SUPPRESS):
        return argument.metavar
    if argument.dest not in (None, SUPPRESS):
        return argument.dest
    return None
+
+
class ArgumentError(Exception):
    """An error from creating or using an argument (optional or positional).

    The string value of this exception is the message, augmented with
    information about the argument that caused it.
    """

    def __init__(self, argument, message):
        self.argument_name = _get_action_name(argument)
        self.message = message

    def __str__(self):
        template = ('%(message)s' if self.argument_name is None
                    else 'argument %(argument_name)s: %(message)s')
        return template % {'message': self.message,
                           'argument_name': self.argument_name}
+
+
class ArgumentTypeError(Exception):
    """An error from trying to convert a command line string to a type."""
+
+
+# ==============
+# Action classes
+# ==============
+
class Action(_AttributeHolder):
    """Information about how to convert command line strings to Python objects.

    Action objects are used by an ArgumentParser to represent the information
    needed to parse a single argument from one or more strings from the
    command line.  Every keyword argument given to the constructor is also
    stored as an attribute of the instance:

        option_strings -- command-line option strings for this action
        dest -- name of the namespace attribute receiving the parsed value(s)
        nargs -- number of command-line arguments consumed; None means
            "exactly one, produced as a single value" (vs nargs=1, which
            produces a one-element list); other values: an integer N,
            '?' (zero or one), '*' (zero or more), '+' (one or more)
        const -- value produced when the option takes no argument values
        default -- value produced when the option is absent
        type -- callable (or type name) used to convert argument strings
        choices -- container of allowed values, checked after conversion
        required -- whether the option must appear on the command line
        help -- help text describing the argument
        metavar -- display name for the argument in help output
            (defaults to 'dest')
    """

    def __init__(self, option_strings, dest, nargs=None, const=None,
                 default=None, type=None, choices=None, required=False,
                 help=None, metavar=None):
        self.option_strings = option_strings
        self.dest = dest
        self.nargs = nargs
        self.const = const
        self.default = default
        self.type = type
        self.choices = choices
        self.required = required
        self.help = help
        self.metavar = metavar

    def _get_kwargs(self):
        # Fixed attribute order so repr() output is stable.
        return [(attr, getattr(self, attr)) for attr in (
            'option_strings', 'dest', 'nargs', 'const', 'default',
            'type', 'choices', 'help', 'metavar')]

    def __call__(self, parser, namespace, values, option_string=None):
        # Subclasses must override to actually process the values.
        raise NotImplementedError(_('.__call__() not defined'))
+
+
class _StoreAction(Action):
    """Default action: store the converted value(s) on the namespace."""

    def __init__(self, option_strings, dest, nargs=None, const=None,
                 default=None, type=None, choices=None, required=False,
                 help=None, metavar=None):
        # Storing zero arguments makes no sense; point users at the
        # flag/const actions instead.
        if nargs == 0:
            raise ValueError('nargs for store actions must be > 0; if you '
                             'have nothing to store, actions such as store '
                             'true or store const may be more appropriate')
        # const is only meaningful together with nargs='?'.
        if const is not None and nargs != OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
        super(_StoreAction, self).__init__(
            option_strings=option_strings, dest=dest, nargs=nargs,
            const=const, default=default, type=type, choices=choices,
            required=required, help=help, metavar=metavar)

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, values)
+
+
+class _StoreConstAction(Action):
+
+    def __init__(self,
+                 option_strings,
+                 dest,
+                 const,
+                 default=None,
+                 required=False,
+                 help=None,
+                 metavar=None):
+        super(_StoreConstAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            nargs=0,
+            const=const,
+            default=default,
+            required=required,
+            help=help)
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        setattr(namespace, self.dest, self.const)
+
+
+class _StoreTrueAction(_StoreConstAction):
+
+    def __init__(self,
+                 option_strings,
+                 dest,
+                 default=False,
+                 required=False,
+                 help=None):
+        super(_StoreTrueAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            const=True,
+            default=default,
+            required=required,
+            help=help)
+
+
+class _StoreFalseAction(_StoreConstAction):
+
+    def __init__(self,
+                 option_strings,
+                 dest,
+                 default=True,
+                 required=False,
+                 help=None):
+        super(_StoreFalseAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            const=False,
+            default=default,
+            required=required,
+            help=help)
+
+
+class _AppendAction(Action):
+
+    def __init__(self,
+                 option_strings,
+                 dest,
+                 nargs=None,
+                 const=None,
+                 default=None,
+                 type=None,
+                 choices=None,
+                 required=False,
+                 help=None,
+                 metavar=None):
+        if nargs == 0:
+            raise ValueError('nargs for append actions must be > 0; if arg '
+                             'strings are not supplying the value to append, '
+                             'the append const action may be more appropriate')
+        if const is not None and nargs != OPTIONAL:
+            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
+        super(_AppendAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            nargs=nargs,
+            const=const,
+            default=default,
+            type=type,
+            choices=choices,
+            required=required,
+            help=help,
+            metavar=metavar)
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        items = _copy.copy(_ensure_value(namespace, self.dest, []))
+        items.append(values)
+        setattr(namespace, self.dest, items)
+
+
+class _AppendConstAction(Action):
+
+    def __init__(self,
+                 option_strings,
+                 dest,
+                 const,
+                 default=None,
+                 required=False,
+                 help=None,
+                 metavar=None):
+        super(_AppendConstAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            nargs=0,
+            const=const,
+            default=default,
+            required=required,
+            help=help,
+            metavar=metavar)
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        items = _copy.copy(_ensure_value(namespace, self.dest, []))
+        items.append(self.const)
+        setattr(namespace, self.dest, items)
+
+
+class _CountAction(Action):
+
+    def __init__(self,
+                 option_strings,
+                 dest,
+                 default=None,
+                 required=False,
+                 help=None):
+        super(_CountAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            nargs=0,
+            default=default,
+            required=required,
+            help=help)
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        new_count = _ensure_value(namespace, self.dest, 0) + 1
+        setattr(namespace, self.dest, new_count)
+
+
+class _HelpAction(Action):
+
+    def __init__(self,
+                 option_strings,
+                 dest=SUPPRESS,
+                 default=SUPPRESS,
+                 help=None):
+        super(_HelpAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            default=default,
+            nargs=0,
+            help=help)
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        parser.print_help()
+        parser.exit()
+
+
+class _VersionAction(Action):
+
+    def __init__(self,
+                 option_strings,
+                 version=None,
+                 dest=SUPPRESS,
+                 default=SUPPRESS,
+                 help="show program's version number and exit"):
+        super(_VersionAction, self).__init__(
+            option_strings=option_strings,
+            dest=dest,
+            default=default,
+            nargs=0,
+            help=help)
+        self.version = version
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        version = self.version
+        if version is None:
+            version = parser.version
+        formatter = parser._get_formatter()
+        formatter.add_text(version)
+        parser.exit(message=formatter.format_help())
+
+
class _SubParsersAction(Action):
    """Action that dispatches remaining arguments to a named sub-parser.

    Registered under the 'parsers' action name and created by
    ArgumentParser.add_subparsers().
    """

    class _ChoicesPseudoAction(Action):
        # Dummy action carrying only a name/help pair so that the help
        # formatter can list the available sub-commands.

        def __init__(self, name, help):
            sup = super(_SubParsersAction._ChoicesPseudoAction, self)
            sup.__init__(option_strings=[], dest=name, help=help)

    def __init__(self,
                 option_strings,
                 prog,
                 parser_class,
                 dest=SUPPRESS,
                 help=None,
                 metavar=None):

        self._prog_prefix = prog
        self._parser_class = parser_class
        # Ordered so sub-commands are listed in registration order.
        self._name_parser_map = _OrderedDict()
        self._choices_actions = []

        # choices is the live name->parser map, so membership checks stay
        # correct as parsers are added later via add_parser().
        super(_SubParsersAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=PARSER,
            choices=self._name_parser_map,
            help=help,
            metavar=metavar)

    def add_parser(self, name, **kwargs):
        """Create, register and return a sub-parser called *name*."""
        # set prog from the existing prefix
        if kwargs.get('prog') is None:
            kwargs['prog'] = '%s %s' % (self._prog_prefix, name)

        # create a pseudo-action to hold the choice help
        if 'help' in kwargs:
            help = kwargs.pop('help')
            choice_action = self._ChoicesPseudoAction(name, help)
            self._choices_actions.append(choice_action)

        # create the parser and add it to the map
        parser = self._parser_class(**kwargs)
        self._name_parser_map[name] = parser
        return parser

    def _get_subactions(self):
        # Used by help formatting to enumerate the sub-command entries.
        return self._choices_actions

    def __call__(self, parser, namespace, values, option_string=None):
        """Dispatch: values[0] names the sub-parser, the rest go to it."""
        parser_name = values[0]
        arg_strings = values[1:]

        # set the parser name if requested
        if self.dest is not SUPPRESS:
            setattr(namespace, self.dest, parser_name)

        # select the parser
        try:
            parser = self._name_parser_map[parser_name]
        except KeyError:
            tup = parser_name, ', '.join(self._name_parser_map)
            msg = _('unknown parser %r (choices: %s)') % tup
            raise ArgumentError(self, msg)

        # parse all the remaining options into the namespace
        # store any unrecognized options on the object, so that the top
        # level parser can decide what to do with them
        namespace, arg_strings = parser.parse_known_args(arg_strings, namespace)
        if arg_strings:
            vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, [])
            getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)
+
+
+# ==============
+# Type classes
+# ==============
+
+class FileType(object):
+    """Factory for creating file object types
+
+    Instances of FileType are typically passed as type= arguments to the
+    ArgumentParser add_argument() method.
+
+    Keyword Arguments:
+        - mode -- A string indicating how the file is to be opened. Accepts the
+            same values as the builtin open() function.
+        - bufsize -- The file's desired buffer size. Accepts the same values as
+            the builtin open() function.
+    """
+
+    def __init__(self, mode='r', bufsize=-1):
+        self._mode = mode
+        self._bufsize = bufsize
+
+    def __call__(self, string):
+        # the special argument "-" means sys.std{in,out}
+        if string == '-':
+            if 'r' in self._mode:
+                return _sys.stdin
+            elif 'w' in self._mode:
+                return _sys.stdout
+            else:
+                msg = _('argument "-" with mode %r') % self._mode
+                raise ValueError(msg)
+
+        # all other arguments are used as file names
+        try:
+            return open(string, self._mode, self._bufsize)
+        except IOError as e:
+            message = _("can't open '%s': %s")
+            raise ArgumentTypeError(message % (string, e))
+
+    def __repr__(self):
+        args = self._mode, self._bufsize
+        args_str = ', '.join(repr(arg) for arg in args if arg != -1)
+        return '%s(%s)' % (type(self).__name__, args_str)
+
+# ===========================
+# Optional and Positional Parsing
+# ===========================
+
class Namespace(_AttributeHolder):
    """Simple object for storing attributes.

    Implements equality by attribute names and values, and provides a simple
    string representation (via _AttributeHolder).
    """

    def __init__(self, **kwargs):
        for name in kwargs:
            setattr(self, name, kwargs[name])

    # Mutable attribute bag, so explicitly unhashable.
    __hash__ = None

    def __eq__(self, other):
        # Return NotImplemented for foreign types instead of letting
        # vars() raise TypeError on objects without a __dict__
        # (e.g. Namespace() == 5 used to blow up); this matches the
        # upstream CPython argparse fix.
        if not isinstance(other, Namespace):
            return NotImplemented
        return vars(self) == vars(other)

    def __ne__(self, other):
        # Explicit __ne__ kept for Python 2 compatibility; mirror
        # __eq__'s NotImplemented handling.
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def __contains__(self, key):
        return key in self.__dict__
+
+
class _ActionsContainer(object):
    """Shared machinery for ArgumentParser and argument groups.

    Holds the action registry, the registered actions and their defaults,
    and the conflict-handling logic for clashing option strings.
    """

    def __init__(self,
                 description,
                 prefix_chars,
                 argument_default,
                 conflict_handler):
        super(_ActionsContainer, self).__init__()

        self.description = description
        self.argument_default = argument_default
        self.prefix_chars = prefix_chars
        self.conflict_handler = conflict_handler

        # set up registries
        self._registries = {}

        # register actions
        self.register('action', None, _StoreAction)
        self.register('action', 'store', _StoreAction)
        self.register('action', 'store_const', _StoreConstAction)
        self.register('action', 'store_true', _StoreTrueAction)
        self.register('action', 'store_false', _StoreFalseAction)
        self.register('action', 'append', _AppendAction)
        self.register('action', 'append_const', _AppendConstAction)
        self.register('action', 'count', _CountAction)
        self.register('action', 'help', _HelpAction)
        self.register('action', 'version', _VersionAction)
        self.register('action', 'parsers', _SubParsersAction)

        # raise an exception if the conflict handler is invalid
        self._get_handler()

        # action storage
        self._actions = []
        self._option_string_actions = {}

        # groups
        self._action_groups = []
        self._mutually_exclusive_groups = []

        # defaults storage
        self._defaults = {}

        # determines whether an "option" looks like a negative number
        self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')

        # whether or not there are any optionals that look like negative
        # numbers -- uses a list so it can be shared and edited
        self._has_negative_number_optionals = []

    # ====================
    # Registration methods
    # ====================
    def register(self, registry_name, value, object):
        """Map *value* to *object* in the named registry (e.g. 'action')."""
        registry = self._registries.setdefault(registry_name, {})
        registry[value] = object

    def _registry_get(self, registry_name, value, default=None):
        # Resolve a registry key, returning *value*/*default* unchanged
        # when it is not registered.
        return self._registries[registry_name].get(value, default)

    # ==================================
    # Namespace default accessor methods
    # ==================================
    def set_defaults(self, **kwargs):
        """Set parser-level defaults, updating already-added actions too."""
        self._defaults.update(kwargs)

        # if these defaults match any existing arguments, replace
        # the previous default on the object with the new one
        for action in self._actions:
            if action.dest in kwargs:
                action.default = kwargs[action.dest]

    def get_default(self, dest):
        """Return the default for *dest*: a non-None action default wins
        over a parser-level default; otherwise None."""
        for action in self._actions:
            if action.dest == dest and action.default is not None:
                return action.default
        return self._defaults.get(dest, None)


    # =======================
    # Adding argument actions
    # =======================
    def add_argument(self, *args, **kwargs):
        """
        add_argument(dest, ..., name=value, ...)
        add_argument(option_string, option_string, ..., name=value, ...)
        """

        # if no positional args are supplied or only one is supplied and
        # it doesn't look like an option string, parse a positional
        # argument
        chars = self.prefix_chars
        if not args or len(args) == 1 and args[0][0] not in chars:
            if args and 'dest' in kwargs:
                raise ValueError('dest supplied twice for positional argument')
            kwargs = self._get_positional_kwargs(*args, **kwargs)

        # otherwise, we're adding an optional argument
        else:
            kwargs = self._get_optional_kwargs(*args, **kwargs)

        # if no default was supplied, use the parser-level default
        if 'default' not in kwargs:
            dest = kwargs['dest']
            if dest in self._defaults:
                kwargs['default'] = self._defaults[dest]
            elif self.argument_default is not None:
                kwargs['default'] = self.argument_default

        # create the action object, and add it to the parser
        action_class = self._pop_action_class(kwargs)
        if not _callable(action_class):
            raise ValueError('unknown action "%s"' % (action_class,))
        action = action_class(**kwargs)

        # raise an error if the action type is not callable
        type_func = self._registry_get('type', action.type, action.type)
        if not _callable(type_func):
            raise ValueError('%r is not callable' % (type_func,))

        # raise an error if the metavar does not match the type
        # (only objects with a formatter -- parsers -- can run this check)
        if hasattr(self, "_get_formatter"):
            try:
                self._get_formatter()._format_args(action, None)
            except TypeError:
                raise ValueError("length of metavar tuple does not match nargs")

        return self._add_action(action)

    def add_argument_group(self, *args, **kwargs):
        """Create an _ArgumentGroup attached to this container."""
        group = _ArgumentGroup(self, *args, **kwargs)
        self._action_groups.append(group)
        return group

    def add_mutually_exclusive_group(self, **kwargs):
        """Create a _MutuallyExclusiveGroup attached to this container."""
        group = _MutuallyExclusiveGroup(self, **kwargs)
        self._mutually_exclusive_groups.append(group)
        return group

    def _add_action(self, action):
        """Register *action*: resolve conflicts, then index its options."""
        # resolve any conflicts
        self._check_conflict(action)

        # add to actions list
        self._actions.append(action)
        action.container = self

        # index the action by any option strings it has
        for option_string in action.option_strings:
            self._option_string_actions[option_string] = action

        # set the flag if any option strings look like negative numbers
        for option_string in action.option_strings:
            if self._negative_number_matcher.match(option_string):
                if not self._has_negative_number_optionals:
                    self._has_negative_number_optionals.append(True)

        # return the created action
        return action

    def _remove_action(self, action):
        self._actions.remove(action)

    def _add_container_actions(self, container):
        """Copy all groups and actions from *container* into this one
        (the mechanism behind the parents= parser argument)."""
        # collect groups by titles
        title_group_map = {}
        for group in self._action_groups:
            if group.title in title_group_map:
                msg = _('cannot merge actions - two groups are named %r')
                raise ValueError(msg % (group.title))
            title_group_map[group.title] = group

        # map each action to its group
        group_map = {}
        for group in container._action_groups:

            # if a group with the title exists, use that, otherwise
            # create a new group matching the container's group
            if group.title not in title_group_map:
                title_group_map[group.title] = self.add_argument_group(
                    title=group.title,
                    description=group.description,
                    conflict_handler=group.conflict_handler)

            # map the actions to their new group
            for action in group._group_actions:
                group_map[action] = title_group_map[group.title]

        # add container's mutually exclusive groups
        # NOTE: if add_mutually_exclusive_group ever gains title= and
        # description= then this code will need to be expanded as above
        for group in container._mutually_exclusive_groups:
            mutex_group = self.add_mutually_exclusive_group(
                required=group.required)

            # map the actions to their new mutex group
            for action in group._group_actions:
                group_map[action] = mutex_group

        # add all actions to this container or their group
        for action in container._actions:
            group_map.get(action, self)._add_action(action)

    def _get_positional_kwargs(self, dest, **kwargs):
        """Normalize keyword arguments for a positional declaration."""
        # make sure required is not specified
        if 'required' in kwargs:
            msg = _("'required' is an invalid argument for positionals")
            raise TypeError(msg)

        # mark positional arguments as required if at least one is
        # always required
        if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]:
            kwargs['required'] = True
        if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs:
            kwargs['required'] = True

        # return the keyword arguments with no option strings
        return dict(kwargs, dest=dest, option_strings=[])

    def _get_optional_kwargs(self, *args, **kwargs):
        """Normalize keyword arguments for an optional declaration."""
        # determine short and long option strings
        option_strings = []
        long_option_strings = []
        for option_string in args:
            # error on strings that don't start with an appropriate prefix
            if not option_string[0] in self.prefix_chars:
                msg = _('invalid option string %r: '
                        'must start with a character %r')
                tup = option_string, self.prefix_chars
                raise ValueError(msg % tup)

            # strings starting with two prefix characters are long options
            option_strings.append(option_string)
            if option_string[0] in self.prefix_chars:
                if len(option_string) > 1:
                    if option_string[1] in self.prefix_chars:
                        long_option_strings.append(option_string)

        # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
        dest = kwargs.pop('dest', None)
        if dest is None:
            if long_option_strings:
                dest_option_string = long_option_strings[0]
            else:
                dest_option_string = option_strings[0]
            dest = dest_option_string.lstrip(self.prefix_chars)
            if not dest:
                msg = _('dest= is required for options like %r')
                raise ValueError(msg % option_string)
            dest = dest.replace('-', '_')

        # return the updated keyword arguments
        return dict(kwargs, dest=dest, option_strings=option_strings)

    def _pop_action_class(self, kwargs, default=None):
        # Pop 'action' from kwargs and resolve it through the registry.
        action = kwargs.pop('action', default)
        return self._registry_get('action', action, action)

    def _get_handler(self):
        # determine function from conflict handler string
        handler_func_name = '_handle_conflict_%s' % self.conflict_handler
        try:
            return getattr(self, handler_func_name)
        except AttributeError:
            msg = _('invalid conflict_resolution value: %r')
            raise ValueError(msg % self.conflict_handler)

    def _check_conflict(self, action):
        """Invoke the conflict handler for any option-string clashes."""

        # find all options that conflict with this option
        confl_optionals = []
        for option_string in action.option_strings:
            if option_string in self._option_string_actions:
                confl_optional = self._option_string_actions[option_string]
                confl_optionals.append((option_string, confl_optional))

        # resolve any conflicts
        if confl_optionals:
            conflict_handler = self._get_handler()
            conflict_handler(action, confl_optionals)

    def _handle_conflict_error(self, action, conflicting_actions):
        # 'error' handler: refuse the clash outright.
        message = _('conflicting option string(s): %s')
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
        raise ArgumentError(action, message % conflict_string)

    def _handle_conflict_resolve(self, action, conflicting_actions):
        """'resolve' handler: older actions silently lose their clashing
        option strings (and are removed entirely if none remain)."""

        # remove all conflicting options
        for option_string, action in conflicting_actions:

            # remove the conflicting option
            action.option_strings.remove(option_string)
            self._option_string_actions.pop(option_string, None)

            # if the option now has no option string, remove it from the
            # container holding it
            if not action.option_strings:
                action.container._remove_action(action)
+
+
+class _ArgumentGroup(_ActionsContainer):
+
+    def __init__(self, container, title=None, description=None, **kwargs):
+        # add any missing keyword arguments by checking the container
+        update = kwargs.setdefault
+        update('conflict_handler', container.conflict_handler)
+        update('prefix_chars', container.prefix_chars)
+        update('argument_default', container.argument_default)
+        super_init = super(_ArgumentGroup, self).__init__
+        super_init(description=description, **kwargs)
+
+        # group attributes
+        self.title = title
+        self._group_actions = []
+
+        # share most attributes with the container
+        self._registries = container._registries
+        self._actions = container._actions
+        self._option_string_actions = container._option_string_actions
+        self._defaults = container._defaults
+        self._has_negative_number_optionals = \
+            container._has_negative_number_optionals
+        self._mutually_exclusive_groups = container._mutually_exclusive_groups
+
+    def _add_action(self, action):
+        action = super(_ArgumentGroup, self)._add_action(action)
+        self._group_actions.append(action)
+        return action
+
+    def _remove_action(self, action):
+        super(_ArgumentGroup, self)._remove_action(action)
+        self._group_actions.remove(action)
+
+
+class _MutuallyExclusiveGroup(_ArgumentGroup):
+
+    def __init__(self, container, required=False):
+        super(_MutuallyExclusiveGroup, self).__init__(container)
+        self.required = required
+        self._container = container
+
+    def _add_action(self, action):
+        if action.required:
+            msg = _('mutually exclusive arguments must be optional')
+            raise ValueError(msg)
+        action = self._container._add_action(action)
+        self._group_actions.append(action)
+        return action
+
+    def _remove_action(self, action):
+        self._container._remove_action(action)
+        self._group_actions.remove(action)
+
+
+class ArgumentParser(_AttributeHolder, _ActionsContainer):
+    """Object for parsing command line strings into Python objects.
+
+    Keyword Arguments:
+        - prog -- The name of the program (default: sys.argv[0])
+        - usage -- A usage message (default: auto-generated from arguments)
+        - description -- A description of what the program does
+        - epilog -- Text following the argument descriptions
+        - parents -- Parsers whose arguments should be copied into this one
+        - formatter_class -- HelpFormatter class for printing help messages
+        - prefix_chars -- Characters that prefix optional arguments
+        - fromfile_prefix_chars -- Characters that prefix files containing
+            additional arguments
+        - argument_default -- The default value for all arguments
+        - conflict_handler -- String indicating how to handle conflicts
+        - add_help -- Add a -h/-help option
+    """
+
    def __init__(self,
                 prog=None,
                 usage=None,
                 description=None,
                 epilog=None,
                 version=None,
                 parents=[],
                 formatter_class=HelpFormatter,
                 prefix_chars='-',
                 fromfile_prefix_chars=None,
                 argument_default=None,
                 conflict_handler='error',
                 add_help=True):
        """Build the parser; see the class docstring for argument meanings.

        NOTE(review): parents=[] is a mutable default argument; it is only
        iterated (never mutated) here, matching upstream argparse.
        """

        # version= at the parser level is deprecated in favor of an
        # explicit action='version' argument.
        if version is not None:
            import warnings
            warnings.warn(
                """The "version" argument to ArgumentParser is deprecated. """
                """Please use """
                """"add_argument(..., action='version', version="N", ...)" """
                """instead""", DeprecationWarning)

        superinit = super(ArgumentParser, self).__init__
        superinit(description=description,
                  prefix_chars=prefix_chars,
                  argument_default=argument_default,
                  conflict_handler=conflict_handler)

        # default setting for prog
        if prog is None:
            prog = _os.path.basename(_sys.argv[0])

        self.prog = prog
        self.usage = usage
        self.epilog = epilog
        self.version = version
        self.formatter_class = formatter_class
        self.fromfile_prefix_chars = fromfile_prefix_chars
        self.add_help = add_help

        # The two standard groups every parser starts with.
        add_group = self.add_argument_group
        self._positionals = add_group(_('positional arguments'))
        self._optionals = add_group(_('optional arguments'))
        self._subparsers = None

        # register types
        # (the default type converter leaves the argument string unchanged)
        def identity(string):
            return string
        self.register('type', None, identity)

        # add help and version arguments if necessary
        # (using explicit default to override global argument_default)
        default_prefix = '-' if '-' in prefix_chars else prefix_chars[0]
        if self.add_help:
            self.add_argument(
                default_prefix+'h', default_prefix*2+'help',
                action='help', default=SUPPRESS,
                help=_('show this help message and exit'))
        if self.version:
            self.add_argument(
                default_prefix+'v', default_prefix*2+'version',
                action='version', default=SUPPRESS,
                version=self.version,
                help=_("show program's version number and exit"))

        # add parent arguments and defaults
        for parent in parents:
            self._add_container_actions(parent)
            try:
                defaults = parent._defaults
            except AttributeError:
                pass
            else:
                self._defaults.update(defaults)
+
+    # =======================
+    # Pretty __repr__ methods
+    # =======================
    def _get_kwargs(self):
        """Return (name, value) pairs used by _AttributeHolder's __repr__."""
        names = [
            'prog',
            'usage',
            'description',
            'version',
            'formatter_class',
            'conflict_handler',
            'add_help',
        ]
        return [(name, getattr(self, name)) for name in names]
+
+    # ==================================
+    # Optional/Positional adding methods
+    # ==================================
    def add_subparsers(self, **kwargs):
        """Create, register and return the parser's sub-parsers action.

        Only one sub-parsers action is allowed per parser; a second call
        reports an error via self.error().
        """
        if self._subparsers is not None:
            self.error(_('cannot have multiple subparser arguments'))

        # add the parser class to the arguments if it's not present
        kwargs.setdefault('parser_class', type(self))

        # a title/description puts the sub-commands in their own group;
        # otherwise they are listed with the positionals
        if 'title' in kwargs or 'description' in kwargs:
            title = _(kwargs.pop('title', 'subcommands'))
            description = _(kwargs.pop('description', None))
            self._subparsers = self.add_argument_group(title, description)
        else:
            self._subparsers = self._positionals

        # prog defaults to the usage message of this parser, skipping
        # optional arguments and with no "usage:" prefix
        if kwargs.get('prog') is None:
            formatter = self._get_formatter()
            positionals = self._get_positional_actions()
            groups = self._mutually_exclusive_groups
            formatter.add_usage(self.usage, positionals, groups, '')
            kwargs['prog'] = formatter.format_help().strip()

        # create the parsers action and add it to the positionals list
        parsers_class = self._pop_action_class(kwargs, 'parsers')
        action = parsers_class(option_strings=[], **kwargs)
        self._subparsers._add_action(action)

        # return the created parsers action
        return action
+
+    def _add_action(self, action):
+        if action.option_strings:
+            self._optionals._add_action(action)
+        else:
+            self._positionals._add_action(action)
+        return action
+
+    def _get_optional_actions(self):
+        return [action
+                for action in self._actions
+                if action.option_strings]
+
+    def _get_positional_actions(self):
+        return [action
+                for action in self._actions
+                if not action.option_strings]
+
+    # =====================================
+    # Command line argument parsing methods
+    # =====================================
+    def parse_args(self, args=None, namespace=None):
+        """Parse |args| fully; error out on any unrecognized strings."""
+        args, argv = self.parse_known_args(args, namespace)
+        if argv:
+            msg = _('unrecognized arguments: %s')
+            self.error(msg % ' '.join(argv))
+        return args
+
+    def parse_known_args(self, args=None, namespace=None):
+        """Parse known args, returning (namespace, remaining_strings).
+
+        Unlike parse_args(), unrecognized argument strings are handed back
+        to the caller instead of triggering an error.
+        """
+        # args default to the system args
+        if args is None:
+            args = _sys.argv[1:]
+
+        # default Namespace built from parser defaults
+        if namespace is None:
+            namespace = Namespace()
+
+        # add any action defaults that aren't present
+        for action in self._actions:
+            if action.dest is not SUPPRESS:
+                if not hasattr(namespace, action.dest):
+                    if action.default is not SUPPRESS:
+                        default = action.default
+                        # string defaults are run through the type converter
+                        # (basestring: this module targets Python 2)
+                        if isinstance(action.default, basestring):
+                            default = self._get_value(action, default)
+                        setattr(namespace, action.dest, default)
+
+        # add any parser defaults that aren't present
+        for dest in self._defaults:
+            if not hasattr(namespace, dest):
+                setattr(namespace, dest, self._defaults[dest])
+
+        # parse the arguments and exit if there are any errors
+        try:
+            namespace, args = self._parse_known_args(args, namespace)
+            if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):
+                args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))
+                delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)
+            return namespace, args
+        except ArgumentError:
+            err = _sys.exc_info()[1]
+            self.error(str(err))
+
+    def _parse_known_args(self, arg_strings, namespace):
+        """Do the real parsing work for parse_known_args().
+
+        Returns a (namespace, extras) tuple, where extras is the list of
+        argument strings that could not be matched to any known action.
+        Raises ArgumentError for malformed uses of known options.
+        """
+        # replace arg strings that are file references
+        if self.fromfile_prefix_chars is not None:
+            arg_strings = self._read_args_from_files(arg_strings)
+
+        # map all mutually exclusive arguments to the other arguments
+        # they can't occur with
+        action_conflicts = {}
+        for mutex_group in self._mutually_exclusive_groups:
+            group_actions = mutex_group._group_actions
+            for i, mutex_action in enumerate(mutex_group._group_actions):
+                conflicts = action_conflicts.setdefault(mutex_action, [])
+                conflicts.extend(group_actions[:i])
+                conflicts.extend(group_actions[i + 1:])
+
+        # find all option indices, and determine the arg_string_pattern
+        # which has an 'O' if there is an option at an index,
+        # an 'A' if there is an argument, or a '-' if there is a '--'
+        option_string_indices = {}
+        arg_string_pattern_parts = []
+        arg_strings_iter = iter(arg_strings)
+        for i, arg_string in enumerate(arg_strings_iter):
+
+            # all args after -- are non-options
+            if arg_string == '--':
+                arg_string_pattern_parts.append('-')
+                for arg_string in arg_strings_iter:
+                    arg_string_pattern_parts.append('A')
+
+            # otherwise, add the arg to the arg strings
+            # and note the index if it was an option
+            else:
+                option_tuple = self._parse_optional(arg_string)
+                if option_tuple is None:
+                    pattern = 'A'
+                else:
+                    option_string_indices[i] = option_tuple
+                    pattern = 'O'
+                arg_string_pattern_parts.append(pattern)
+
+        # join the pieces together to form the pattern
+        arg_strings_pattern = ''.join(arg_string_pattern_parts)
+
+        # convert each arg string to the appropriate value and then
+        # invoke the action
+        seen_actions = set()
+        seen_non_default_actions = set()
+
+        def take_action(action, argument_strings, option_string=None):
+            seen_actions.add(action)
+            argument_values = self._get_values(action, argument_strings)
+
+            # error if this argument is not allowed with other previously
+            # seen arguments, assuming that actions that use the default
+            # value don't really count as "present"
+            if argument_values is not action.default:
+                seen_non_default_actions.add(action)
+                for conflict_action in action_conflicts.get(action, []):
+                    if conflict_action in seen_non_default_actions:
+                        msg = _('not allowed with argument %s')
+                        action_name = _get_action_name(conflict_action)
+                        raise ArgumentError(action, msg % action_name)
+
+            # take the action if we didn't receive a SUPPRESS value
+            # (e.g. from a default)
+            if argument_values is not SUPPRESS:
+                action(self, namespace, argument_values, option_string)
+
+        # function to convert arg_strings into an optional action
+        def consume_optional(start_index):
+
+            # get the optional identified at this index
+            option_tuple = option_string_indices[start_index]
+            action, option_string, explicit_arg = option_tuple
+
+            # identify additional optionals in the same arg string
+            # (e.g. -xyz is the same as -x -y -z if no args are required)
+            match_argument = self._match_argument
+            action_tuples = []
+            while True:
+
+                # if we found no optional action, skip it
+                if action is None:
+                    extras.append(arg_strings[start_index])
+                    return start_index + 1
+
+                # if there is an explicit argument, try to match the
+                # optional's string arguments to only this
+                if explicit_arg is not None:
+                    arg_count = match_argument(action, 'A')
+
+                    # if the action is a single-dash option and takes no
+                    # arguments, try to parse more single-dash options out
+                    # of the tail of the option string
+                    chars = self.prefix_chars
+                    if arg_count == 0 and option_string[1] not in chars:
+                        action_tuples.append((action, [], option_string))
+                        char = option_string[0]
+                        option_string = char + explicit_arg[0]
+                        new_explicit_arg = explicit_arg[1:] or None
+                        optionals_map = self._option_string_actions
+                        if option_string in optionals_map:
+                            action = optionals_map[option_string]
+                            explicit_arg = new_explicit_arg
+                        else:
+                            msg = _('ignored explicit argument %r')
+                            raise ArgumentError(action, msg % explicit_arg)
+
+                    # if the action expects exactly one argument, we've
+                    # successfully matched the option; exit the loop
+                    elif arg_count == 1:
+                        stop = start_index + 1
+                        args = [explicit_arg]
+                        action_tuples.append((action, args, option_string))
+                        break
+
+                    # error if a double-dash option did not use the
+                    # explicit argument
+                    else:
+                        msg = _('ignored explicit argument %r')
+                        raise ArgumentError(action, msg % explicit_arg)
+
+                # if there is no explicit argument, try to match the
+                # optional's string arguments with the following strings
+                # if successful, exit the loop
+                else:
+                    start = start_index + 1
+                    selected_patterns = arg_strings_pattern[start:]
+                    arg_count = match_argument(action, selected_patterns)
+                    stop = start + arg_count
+                    args = arg_strings[start:stop]
+                    action_tuples.append((action, args, option_string))
+                    break
+
+            # add the Optional to the list and return the index at which
+            # the Optional's string args stopped
+            assert action_tuples
+            for action, args, option_string in action_tuples:
+                take_action(action, args, option_string)
+            return stop
+
+        # the list of Positionals left to be parsed; this is modified
+        # by consume_positionals()
+        positionals = self._get_positional_actions()
+
+        # function to convert arg_strings into positional actions
+        def consume_positionals(start_index):
+            # match as many Positionals as possible
+            match_partial = self._match_arguments_partial
+            selected_pattern = arg_strings_pattern[start_index:]
+            arg_counts = match_partial(positionals, selected_pattern)
+
+            # slice off the appropriate arg strings for each Positional
+            # and add the Positional and its args to the list
+            for action, arg_count in zip(positionals, arg_counts):
+                args = arg_strings[start_index: start_index + arg_count]
+                start_index += arg_count
+                take_action(action, args)
+
+            # slice off the Positionals that we just parsed and return the
+            # index at which the Positionals' string args stopped
+            positionals[:] = positionals[len(arg_counts):]
+            return start_index
+
+        # consume Positionals and Optionals alternately, until we have
+        # passed the last option string
+        extras = []
+        start_index = 0
+        if option_string_indices:
+            max_option_string_index = max(option_string_indices)
+        else:
+            max_option_string_index = -1
+        while start_index <= max_option_string_index:
+
+            # consume any Positionals preceding the next option
+            next_option_string_index = min([
+                index
+                for index in option_string_indices
+                if index >= start_index])
+            if start_index != next_option_string_index:
+                positionals_end_index = consume_positionals(start_index)
+
+                # only try to parse the next optional if we didn't consume
+                # the option string during the positionals parsing
+                if positionals_end_index > start_index:
+                    start_index = positionals_end_index
+                    continue
+                else:
+                    start_index = positionals_end_index
+
+            # if we consumed all the positionals we could and we're not
+            # at the index of an option string, there were extra arguments
+            if start_index not in option_string_indices:
+                strings = arg_strings[start_index:next_option_string_index]
+                extras.extend(strings)
+                start_index = next_option_string_index
+
+            # consume the next optional and any arguments for it
+            start_index = consume_optional(start_index)
+
+        # consume any positionals following the last Optional
+        stop_index = consume_positionals(start_index)
+
+        # if we didn't consume all the argument strings, there were extras
+        extras.extend(arg_strings[stop_index:])
+
+        # if we didn't use all the Positional objects, there were too few
+        # arg strings supplied.
+        if positionals:
+            self.error(_('too few arguments'))
+
+        # make sure all required actions were present
+        for action in self._actions:
+            if action.required:
+                if action not in seen_actions:
+                    name = _get_action_name(action)
+                    self.error(_('argument %s is required') % name)
+
+        # make sure all required groups had one option present
+        for group in self._mutually_exclusive_groups:
+            if group.required:
+                for action in group._group_actions:
+                    if action in seen_non_default_actions:
+                        break
+
+                # if no actions were used, report the error
+                else:
+                    names = [_get_action_name(action)
+                             for action in group._group_actions
+                             if action.help is not SUPPRESS]
+                    msg = _('one of the arguments %s is required')
+                    self.error(msg % ' '.join(names))
+
+        # return the updated namespace and the extra arguments
+        return namespace, extras
+
+    def _read_args_from_files(self, arg_strings):
+        """Expand arguments that reference files, recursively.
+
+        Any arg string beginning with a fromfile_prefix_chars character is
+        replaced by the arguments read (one per line, via
+        convert_arg_line_to_args) from the file it names.
+        """
+        # expand arguments referencing files
+        new_arg_strings = []
+        for arg_string in arg_strings:
+
+            # for regular arguments, just add them back into the list
+            if arg_string[0] not in self.fromfile_prefix_chars:
+                new_arg_strings.append(arg_string)
+
+            # replace arguments referencing files with the file content
+            else:
+                try:
+                    args_file = open(arg_string[1:])
+                    try:
+                        arg_strings = []
+                        for arg_line in args_file.read().splitlines():
+                            for arg in self.convert_arg_line_to_args(arg_line):
+                                arg_strings.append(arg)
+                        # files may themselves reference further files
+                        arg_strings = self._read_args_from_files(arg_strings)
+                        new_arg_strings.extend(arg_strings)
+                    finally:
+                        args_file.close()
+                except IOError:
+                    err = _sys.exc_info()[1]
+                    self.error(str(err))
+
+        # return the modified argument list
+        return new_arg_strings
+
+    def convert_arg_line_to_args(self, arg_line):
+        """Turn one line of an args file into arg strings (override point).
+
+        The default treats each line as exactly one argument.
+        """
+        return [arg_line]
+
+    def _match_argument(self, action, arg_strings_pattern):
+        """Return how many arg strings |action| consumes from the pattern.
+
+        Raises ArgumentError with an nargs-appropriate message when the
+        action's nargs pattern cannot be matched at all.
+        """
+        # match the pattern for this action to the arg strings
+        nargs_pattern = self._get_nargs_pattern(action)
+        match = _re.match(nargs_pattern, arg_strings_pattern)
+
+        # raise an exception if we weren't able to find a match
+        if match is None:
+            nargs_errors = {
+                None: _('expected one argument'),
+                OPTIONAL: _('expected at most one argument'),
+                ONE_OR_MORE: _('expected at least one argument'),
+            }
+            default = _('expected %s argument(s)') % action.nargs
+            msg = nargs_errors.get(action.nargs, default)
+            raise ArgumentError(action, msg)
+
+        # return the number of arguments matched
+        return len(match.group(1))
+
+    def _match_arguments_partial(self, actions, arg_strings_pattern):
+        """Match as many leading |actions| as possible against the pattern.
+
+        Returns a list of consumed-argument counts, one per matched action;
+        the list may be shorter than |actions| (or empty) if only a prefix
+        of the actions could be matched.
+        """
+        # progressively shorten the actions list by slicing off the
+        # final actions until we find a match
+        result = []
+        for i in range(len(actions), 0, -1):
+            actions_slice = actions[:i]
+            pattern = ''.join([self._get_nargs_pattern(action)
+                               for action in actions_slice])
+            match = _re.match(pattern, arg_strings_pattern)
+            if match is not None:
+                result.extend([len(string) for string in match.groups()])
+                break
+
+        # return the list of arg string counts
+        return result
+
+    def _parse_optional(self, arg_string):
+        """Classify |arg_string| as an optional or a positional.
+
+        Returns None when the string should be treated as a positional;
+        otherwise an (action, option_string, explicit_arg) tuple, where
+        action may be None if the string looks like an option but is not
+        known to this parser (it might belong to a subparser).
+        """
+        # if it's an empty string, it was meant to be a positional
+        if not arg_string:
+            return None
+
+        # if it doesn't start with a prefix, it was meant to be positional
+        if not arg_string[0] in self.prefix_chars:
+            return None
+
+        # if the option string is present in the parser, return the action
+        if arg_string in self._option_string_actions:
+            action = self._option_string_actions[arg_string]
+            return action, arg_string, None
+
+        # if it's just a single character, it was meant to be positional
+        if len(arg_string) == 1:
+            return None
+
+        # if the option string before the "=" is present, return the action
+        if '=' in arg_string:
+            option_string, explicit_arg = arg_string.split('=', 1)
+            if option_string in self._option_string_actions:
+                action = self._option_string_actions[option_string]
+                return action, option_string, explicit_arg
+
+        # search through all possible prefixes of the option string
+        # and all actions in the parser for possible interpretations
+        option_tuples = self._get_option_tuples(arg_string)
+
+        # if multiple actions match, the option string was ambiguous
+        if len(option_tuples) > 1:
+            options = ', '.join([option_string
+                for action, option_string, explicit_arg in option_tuples])
+            tup = arg_string, options
+            self.error(_('ambiguous option: %s could match %s') % tup)
+
+        # if exactly one action matched, this segmentation is good,
+        # so return the parsed action
+        elif len(option_tuples) == 1:
+            option_tuple, = option_tuples
+            return option_tuple
+
+        # if it was not found as an option, but it looks like a negative
+        # number, it was meant to be positional
+        # unless there are negative-number-like options
+        if self._negative_number_matcher.match(arg_string):
+            if not self._has_negative_number_optionals:
+                return None
+
+        # if it contains a space, it was meant to be a positional
+        if ' ' in arg_string:
+            return None
+
+        # it was meant to be an optional but there is no such option
+        # in this parser (though it might be a valid option in a subparser)
+        return None, arg_string, None
+
+    def _get_option_tuples(self, option_string):
+        """Return candidate (action, option_string, explicit_arg) tuples.
+
+        Supports unambiguous abbreviations: collects every registered
+        option string that |option_string| is a prefix of (and, for
+        single-dash strings, the concatenated '-xVALUE' form).
+        """
+        result = []
+
+        # option strings starting with two prefix characters are only
+        # split at the '='
+        chars = self.prefix_chars
+        if option_string[0] in chars and option_string[1] in chars:
+            if '=' in option_string:
+                option_prefix, explicit_arg = option_string.split('=', 1)
+            else:
+                option_prefix = option_string
+                explicit_arg = None
+            for option_string in self._option_string_actions:
+                if option_string.startswith(option_prefix):
+                    action = self._option_string_actions[option_string]
+                    tup = action, option_string, explicit_arg
+                    result.append(tup)
+
+        # single character options can be concatenated with their arguments
+        # but multiple character options always have to have their argument
+        # separate
+        elif option_string[0] in chars and option_string[1] not in chars:
+            option_prefix = option_string
+            explicit_arg = None
+            short_option_prefix = option_string[:2]
+            short_explicit_arg = option_string[2:]
+
+            for option_string in self._option_string_actions:
+                if option_string == short_option_prefix:
+                    action = self._option_string_actions[option_string]
+                    tup = action, option_string, short_explicit_arg
+                    result.append(tup)
+                elif option_string.startswith(option_prefix):
+                    action = self._option_string_actions[option_string]
+                    tup = action, option_string, explicit_arg
+                    result.append(tup)
+
+        # shouldn't ever get here
+        else:
+            self.error(_('unexpected option string: %s') % option_string)
+
+        # return the collected option tuples
+        return result
+
+    def _get_nargs_pattern(self, action):
+        """Build the regex fragment matching this action's consumable args.
+
+        The pattern operates on the 'A'/'O'/'-' arg-string pattern built in
+        _parse_known_args(); group 1 captures the characters consumed.
+        """
+        # in all examples below, we have to allow for '--' args
+        # which are represented as '-' in the pattern
+        nargs = action.nargs
+
+        # the default (None) is assumed to be a single argument
+        if nargs is None:
+            nargs_pattern = '(-*A-*)'
+
+        # allow zero or one arguments
+        elif nargs == OPTIONAL:
+            nargs_pattern = '(-*A?-*)'
+
+        # allow zero or more arguments
+        elif nargs == ZERO_OR_MORE:
+            nargs_pattern = '(-*[A-]*)'
+
+        # allow one or more arguments
+        elif nargs == ONE_OR_MORE:
+            nargs_pattern = '(-*A[A-]*)'
+
+        # allow any number of options or arguments
+        elif nargs == REMAINDER:
+            nargs_pattern = '([-AO]*)'
+
+        # allow one argument followed by any number of options or arguments
+        elif nargs == PARSER:
+            nargs_pattern = '(-*A[-AO]*)'
+
+        # all others should be integers
+        else:
+            nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)
+
+        # if this is an optional action, -- is not allowed
+        if action.option_strings:
+            nargs_pattern = nargs_pattern.replace('-*', '')
+            nargs_pattern = nargs_pattern.replace('-', '')
+
+        # return the pattern
+        return nargs_pattern
+
+    # ========================
+    # Value conversion methods
+    # ========================
+    def _get_values(self, action, arg_strings):
+        """Convert matched arg strings into the value(s) for |action|.
+
+        Applies the registered type conversion and choices checking
+        according to the action's nargs setting; returns either a single
+        value or a list, as appropriate for nargs.
+        """
+        # for everything but PARSER args, strip out '--'
+        if action.nargs not in [PARSER, REMAINDER]:
+            arg_strings = [s for s in arg_strings if s != '--']
+
+        # optional argument produces a default when not present
+        if not arg_strings and action.nargs == OPTIONAL:
+            if action.option_strings:
+                value = action.const
+            else:
+                value = action.default
+            # (Python 2) only string defaults are converted and checked here
+            if isinstance(value, basestring):
+                value = self._get_value(action, value)
+                self._check_value(action, value)
+
+        # when nargs='*' on a positional, if there were no command-line
+        # args, use the default if it is anything other than None
+        elif (not arg_strings and action.nargs == ZERO_OR_MORE and
+              not action.option_strings):
+            if action.default is not None:
+                value = action.default
+            else:
+                value = arg_strings
+            self._check_value(action, value)
+
+        # single argument or optional argument produces a single value
+        elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]:
+            arg_string, = arg_strings
+            value = self._get_value(action, arg_string)
+            self._check_value(action, value)
+
+        # REMAINDER arguments convert all values, checking none
+        elif action.nargs == REMAINDER:
+            value = [self._get_value(action, v) for v in arg_strings]
+
+        # PARSER arguments convert all values, but check only the first
+        # (the first is the subcommand name; the rest belong to it)
+        elif action.nargs == PARSER:
+            value = [self._get_value(action, v) for v in arg_strings]
+            self._check_value(action, value[0])
+
+        # all other types of nargs produce a list
+        else:
+            value = [self._get_value(action, v) for v in arg_strings]
+            for v in value:
+                self._check_value(action, v)
+
+        # return the converted value
+        return value
+
+    def _get_value(self, action, arg_string):
+        """Apply the action's type conversion to a single arg string.
+
+        Raises ArgumentError if the type callable is not callable or the
+        conversion fails.
+        """
+        type_func = self._registry_get('type', action.type, action.type)
+        if not _callable(type_func):
+            msg = _('%r is not callable')
+            raise ArgumentError(action, msg % type_func)
+
+        # convert the value to the appropriate type
+        try:
+            result = type_func(arg_string)
+
+        # ArgumentTypeErrors indicate errors
+        except ArgumentTypeError:
+            # NOTE: 'name' is unused in this branch (the exception message
+            # is used verbatim); kept as-is to match upstream argparse.
+            name = getattr(action.type, '__name__', repr(action.type))
+            msg = str(_sys.exc_info()[1])
+            raise ArgumentError(action, msg)
+
+        # TypeErrors or ValueErrors also indicate errors
+        except (TypeError, ValueError):
+            name = getattr(action.type, '__name__', repr(action.type))
+            msg = _('invalid %s value: %r')
+            raise ArgumentError(action, msg % (name, arg_string))
+
+        # return the converted value
+        return result
+
+    def _check_value(self, action, value):
+        """Raise ArgumentError if |value| is not among action.choices."""
+        # converted value must be one of the choices (if specified)
+        if action.choices is not None and value not in action.choices:
+            tup = value, ', '.join(map(repr, action.choices))
+            msg = _('invalid choice: %r (choose from %s)') % tup
+            raise ArgumentError(action, msg)
+
+    # =======================
+    # Help-formatting methods
+    # =======================
+    def format_usage(self):
+        """Return just the usage text for this parser."""
+        formatter = self._get_formatter()
+        formatter.add_usage(self.usage, self._actions,
+                            self._mutually_exclusive_groups)
+        return formatter.format_help()
+
+    def format_help(self):
+        """Return the full help text: usage, description, groups, epilog."""
+        formatter = self._get_formatter()
+
+        # usage
+        formatter.add_usage(self.usage, self._actions,
+                            self._mutually_exclusive_groups)
+
+        # description
+        formatter.add_text(self.description)
+
+        # positionals, optionals and user-defined groups
+        for action_group in self._action_groups:
+            formatter.start_section(action_group.title)
+            formatter.add_text(action_group.description)
+            formatter.add_arguments(action_group._group_actions)
+            formatter.end_section()
+
+        # epilog
+        formatter.add_text(self.epilog)
+
+        # determine help from format above
+        return formatter.format_help()
+
+    def format_version(self):
+        """Deprecated: return the formatted 'version' text for the parser."""
+        import warnings
+        warnings.warn(
+            'The format_version method is deprecated -- the "version" '
+            'argument to ArgumentParser is no longer supported.',
+            DeprecationWarning)
+        formatter = self._get_formatter()
+        formatter.add_text(self.version)
+        return formatter.format_help()
+
+    def _get_formatter(self):
+        # Instantiate the configured formatter class for this parser's prog.
+        return self.formatter_class(prog=self.prog)
+
+    # =====================
+    # Help-printing methods
+    # =====================
+    def print_usage(self, file=None):
+        """Write the usage text to |file| (default: stdout)."""
+        if file is None:
+            file = _sys.stdout
+        self._print_message(self.format_usage(), file)
+
+    def print_help(self, file=None):
+        """Write the full help text to |file| (default: stdout)."""
+        if file is None:
+            file = _sys.stdout
+        self._print_message(self.format_help(), file)
+
+    def print_version(self, file=None):
+        """Deprecated: write the version text to |file|."""
+        import warnings
+        warnings.warn(
+            'The print_version method is deprecated -- the "version" '
+            'argument to ArgumentParser is no longer supported.',
+            DeprecationWarning)
+        self._print_message(self.format_version(), file)
+
+    def _print_message(self, message, file=None):
+        # Write |message| (if non-empty) to |file|, defaulting to stderr.
+        if message:
+            if file is None:
+                file = _sys.stderr
+            file.write(message)
+
+    # ===============
+    # Exiting methods
+    # ===============
+    def exit(self, status=0, message=None):
+        """Print |message| to stderr (if given) and exit with |status|."""
+        if message:
+            self._print_message(message, _sys.stderr)
+        _sys.exit(status)
+
+    def error(self, message):
+        """error(message: string)
+
+        Prints a usage message incorporating the message to stderr and
+        exits with status 2.
+
+        If you override this in a subclass, it should not return -- it
+        should either exit or raise an exception.
+        """
+        self.print_usage(_sys.stderr)
+        self.exit(2, _('%s: error: %s\n') % (self.prog, message))
diff --git a/third_party/digraph.py b/third_party/digraph.py
new file mode 100644
index 0000000..31b51c9
--- /dev/null
+++ b/third_party/digraph.py
@@ -0,0 +1,74 @@
+# Copyright (c) 2013 Mark Dickinson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+def StronglyConnectedComponents(vertices, edges):
+  """Find the strongly connected components of a directed graph.
+
+  Uses a non-recursive version of Gabow's linear-time algorithm [1] to find
+  all strongly connected components of a directed graph.
+
+  A "strongly connected component" of a directed graph is a maximal subgraph
+  such that any vertex in the subgraph is reachable from any other; any
+  directed graph can be decomposed into its strongly connected components.
+
+  Written by Mark Dickinson and licensed under the MIT license [2].
+
+  [1] Harold N. Gabow, "Path-based depth-first search for strong and
+      biconnected components," Inf. Process. Lett. 74 (2000) 107--114.
+  [2] From code.activestate.com: http://goo.gl/X0z4C
+
+  Args:
+    vertices: A list of vertices. Each vertex should be hashable.
+    edges: Dictionary that maps each vertex v to a set of the vertices w
+           that are linked to v by a directed edge (v, w).
+
+  Yields:
+    Sets of vertices, one per strongly connected component.  (Note: this
+    is a generator, not a function returning a list.)
+  """
+  identified = set()  # vertices already assigned to a yielded component
+  stack = []          # DFS path of not-yet-identified vertices
+  index = {}          # vertex -> its position on |stack|
+  boundaries = []     # stack indices where candidate components begin
+
+  for v in vertices:
+    if v not in index:
+      # Explicit work stack of (operation, vertex) pairs replaces
+      # recursion, avoiding Python's recursion limit on deep graphs.
+      to_do = [('VISIT', v)]
+      while to_do:
+        operation_type, v = to_do.pop()
+        if operation_type == 'VISIT':
+          index[v] = len(stack)
+          stack.append(v)
+          boundaries.append(index[v])
+          to_do.append(('POSTVISIT', v))
+          to_do.extend([('VISITEDGE', w) for w in edges[v]])
+        elif operation_type == 'VISITEDGE':
+          if v not in index:
+            to_do.append(('VISIT', v))
+          elif v not in identified:
+            # Back edge into the current path: merge candidate components.
+            while index[v] < boundaries[-1]:
+              boundaries.pop()
+        else:
+          # operation_type == 'POSTVISIT'
+          if boundaries[-1] == index[v]:
+            boundaries.pop()
+            scc = set(stack[index[v]:])
+            del stack[index[v]:]
+            identified.update(scc)
+            yield scc
diff --git a/third_party/lddtree.py b/third_party/lddtree.py
new file mode 100644
index 0000000..9fffcba
--- /dev/null
+++ b/third_party/lddtree.py
@@ -0,0 +1,773 @@
+#!/usr/bin/python
+# Copyright 2012-2014 Gentoo Foundation
+# Copyright 2012-2014 Mike Frysinger <vapier@gentoo.org>
+# Copyright 2012-2014 The Chromium OS Authors
+# Use of this source code is governed by a BSD-style license (BSD-3)
+# pylint: disable=C0301
+# $Header: /var/cvsroot/gentoo-projects/pax-utils/lddtree.py,v 1.53 2014/08/01 02:20:20 vapier Exp $
+
+"""Read the ELF dependency tree and show it
+
+This does not work like `ldd` in that we do not execute/load code (only read
+files on disk), and we show the ELFs as a tree rather than a flat list.
+"""
+
+from __future__ import print_function
+
+import glob
+import errno
+import optparse
+import os
+import shutil
+import sys
+
+from elftools.elf.elffile import ELFFile
+from elftools.common import exceptions
+
+
+def warn(msg, prefix='warning'):
+  """Write |msg| to stderr with a |prefix| before it"""
+  print('%s: %s: %s' % (os.path.basename(sys.argv[0]), prefix, msg), file=sys.stderr)
+
+
+def err(msg, status=1):
+  """Write |msg| to stderr and exit with |status|"""
+  warn(msg, prefix='error')
+  sys.exit(status)
+
+
+def dbg(debug, *args, **kwargs):
+  """Pass |args| and |kwargs| to print() when |debug| is True"""
+  if debug:
+    print(*args, **kwargs)
+
+
+def bstr(buf):
+  """Decode the byte string into a string"""
+  return buf.decode('utf-8')
+
+
+def normpath(path):
+  """Normalize a path
+
+  Python's os.path.normpath() doesn't handle some cases:
+    // -> //
+    //..// -> //
+    //..//..// -> ///
+  """
+  return os.path.normpath(path).replace('//', '/')
+
+
+def readlink(path, root, prefixed=False):
+  """Like os.readlink(), but relative to a |root|
+
+  This does not currently handle the pathological case:
+    /lib/foo.so -> ../../../../../../../foo.so
+  This relies on the .. entries in / to point to itself.
+
+  Args:
+    path: The symlink to read
+    root: The path to use for resolving absolute symlinks
+    prefixed: When False, the |path| must not have |root| prefixed to it, nor
+              will the return value have |root| prefixed.  When True, |path|
+              must have |root| prefixed, and the return value will have |root|
+              added.
+
+  Returns:
+    A fully resolved symlink path
+  """
+  root = root.rstrip('/')
+  if prefixed:
+    path = path[len(root):]
+
+  while os.path.islink(root + path):
+    path = os.path.join(os.path.dirname(path), os.readlink(root + path))
+
+  return normpath((root + path) if prefixed else path)
+
+
+def makedirs(path):
+  """Like os.makedirs(), but ignore EEXIST errors"""
+  try:
+    os.makedirs(path)
+  except OSError as e:
+    if e.errno != os.errno.EEXIST:
+      raise
+
+
+def dedupe(items):
+  """Remove all duplicates from |items| (keeping order)"""
+  seen = {}
+  return [seen.setdefault(x, x) for x in items if x not in seen]
+
+
+def GenerateLdsoWrapper(root, path, interp, libpaths=(), elfsubdir=None):
+  """Generate a shell script wrapper which uses local ldso to run the ELF
+
+  Since we cannot rely on the host glibc (or other libraries), we need to
+  execute the local packaged ldso directly and tell it where to find our
+  copies of libraries.
+
+  Args:
+    root: The root tree to generate scripts inside of
+    path: The full path (inside |root|) to the program to wrap
+    interp: The ldso interpreter that we need to execute
+    libpaths: Extra lib paths to search for libraries
+    elfsubdir: The sub-directory where the original ELF file lives. If not
+               provided, a '.elf' suffix will be added to the original file
+               instead.
+  """
+  basedir = os.path.dirname(path)
+  interp_dir, interp_name = os.path.split(interp)
+  libpaths = dedupe([interp_dir] + list(libpaths))
+  replacements = {
+    'interp': os.path.join(os.path.relpath(interp_dir, basedir),
+                           interp_name),
+    'libpaths': ':'.join(['${basedir}/' + os.path.relpath(p, basedir)
+                          for p in libpaths]),
+  }
+
+  wrappath = root + path
+  if elfsubdir:
+    elf_wrap_dir = os.path.join(os.path.dirname(wrappath), elfsubdir)
+    makedirs(elf_wrap_dir)
+    elf_wrappath = os.path.join(elf_wrap_dir, os.path.basename(wrappath))
+    replacements['elf_path'] = '${basedir}/%s/%s' % (elfsubdir,
+                                                     os.path.basename(wrappath))
+  else:
+    elf_wrappath = wrappath + '.elf'
+    replacements['elf_path'] = '${base}.elf'
+
+  wrapper = """#!/bin/sh
+if ! base=$(realpath "$0" 2>/dev/null); then
+  case $0 in
+  /*) base=$0;;
+  *)  base=${PWD:-`pwd`}/$0;;
+  esac
+fi
+basedir=${base%%/*}
+exec \
+  "${basedir}/%(interp)s" \
+  --library-path "%(libpaths)s" \
+  --inhibit-rpath '' \
+  "%(elf_path)s" \
+  "$@"
+"""
+  os.rename(wrappath, elf_wrappath)
+  with open(wrappath, 'w') as f:
+    f.write(wrapper % replacements)
+  os.chmod(wrappath, 0o0755)
+
+
+def ParseLdPaths(str_ldpaths, root='', path=None):
+  """Parse the colon-delimited list of paths and apply ldso rules to each
+
+  Note the special handling as dictated by the ldso:
+   - Empty paths are equivalent to $PWD
+   - $ORIGIN is expanded to the path of the given file
+   - (TODO) $LIB and friends
+
+  Args:
+    str_ldpaths: A colon-delimited string of paths
+    root: The path to prepend to all paths found
+    path: The object actively being parsed (used for $ORIGIN)
+
+  Returns:
+    list of processed paths
+  """
+  ldpaths = []
+  for ldpath in str_ldpaths.split(':'):
+    if ldpath == '':
+      # The ldso treats "" paths as $PWD.
+      ldpath = os.getcwd()
+    elif '$ORIGIN' in ldpath:
+      ldpath = ldpath.replace('$ORIGIN', os.path.dirname(path))
+    else:
+      ldpath = root + ldpath
+    ldpaths.append(normpath(ldpath))
+  return dedupe(ldpaths)
+
+
+def ParseLdSoConf(ldso_conf, root='/', _first=True):
+  """Load all the paths from a given ldso config file
+
+  This should handle comments, whitespace, and "include" statements.
+
+  Args:
+    ldso_conf: The file to scan
+    root: The path to prepend to all paths found
+    _first: Recursive use only; is this the first ELF ?
+
+  Returns:
+    list of paths found
+  """
+  paths = []
+
+  try:
+    with open(ldso_conf) as f:
+      for line in f.readlines():
+        line = line.split('#', 1)[0].strip()
+        if not line:
+          continue
+        if line.startswith('include '):
+          line = line[8:]
+          if line[0] == '/':
+            line = root + line.lstrip('/')
+          else:
+            line = os.path.dirname(ldso_conf) + '/' + line
+          for path in glob.glob(line):
+            paths += ParseLdSoConf(path, root=root, _first=False)
+        else:
+          paths += [normpath(root + line)]
+  except IOError as e:
+    if e.errno != errno.ENOENT:
+      warn(e)
+
+  if _first:
+    # XXX: Load paths from ldso itself.
+    # Remove duplicate entries to speed things up.
+    paths = dedupe(paths)
+
+  return paths
+
+
+def LoadLdpaths(root='/', prefix=''):
+  """Load linker paths from common locations
+
+  This parses the ld.so.conf and LD_LIBRARY_PATH env var.
+
+  Args:
+    root: The root tree to prepend to paths
+    prefix: The path under |root| to search
+
+  Returns:
+    dict containing library paths to search
+  """
+  ldpaths = {
+    'conf': [],
+    'env': [],
+    'interp': [],
+  }
+
+  # Load up $LD_LIBRARY_PATH.
+  ldpaths['env'] = []
+  env_ldpath = os.environ.get('LD_LIBRARY_PATH')
+  if not env_ldpath is None:
+    if root != '/':
+      warn('ignoring LD_LIBRARY_PATH due to ROOT usage')
+    else:
+      # XXX: If this contains $ORIGIN, we probably have to parse this
+      # on a per-ELF basis so it can get turned into the right thing.
+      ldpaths['env'] = ParseLdPaths(env_ldpath, path='')
+
+  # Load up /etc/ld.so.conf.
+  ldpaths['conf'] = ParseLdSoConf(root + prefix + '/etc/ld.so.conf', root=root)
+
+  return ldpaths
+
+
+def CompatibleELFs(elf1, elf2):
+  """See if two ELFs are compatible
+
+  This compares the aspects of the ELF to see if they're compatible:
+  bit size, endianness, machine type, and operating system.
+
+  Args:
+    elf1: an ELFFile object
+    elf2: an ELFFile object
+
+  Returns:
+    True if compatible, False otherwise
+  """
+  osabis = frozenset([e.header['e_ident']['EI_OSABI'] for e in (elf1, elf2)])
+  compat_sets = (
+    frozenset('ELFOSABI_%s' % x for x in ('NONE', 'SYSV', 'GNU', 'LINUX',)),
+  )
+  return ((len(osabis) == 1 or any(osabis.issubset(x) for x in compat_sets)) and
+    elf1.elfclass == elf2.elfclass and
+    elf1.little_endian == elf2.little_endian and
+    elf1.header['e_machine'] == elf2.header['e_machine'])
+
+
+def FindLib(elf, lib, ldpaths, root='/', debug=False):
+  """Try to locate a |lib| that is compatible to |elf| in the given |ldpaths|
+
+  Args:
+    elf: The elf which the library should be compatible with (ELF wise)
+    lib: The library (basename) to search for
+    ldpaths: A list of paths to search
+    root: The root path to resolve symlinks
+    debug: Enable debug output
+
+  Returns:
+    Tuple of the full path to the desired library and the real path to it
+  """
+  dbg(debug, '  FindLib(%s)' % lib)
+
+  for ldpath in ldpaths:
+    path = os.path.join(ldpath, lib)
+    target = readlink(path, root, prefixed=True)
+    if path != target:
+      dbg(debug, '    checking: %s -> %s' % (path, target))
+    else:
+      dbg(debug, '    checking:', path)
+
+    if os.path.exists(target):
+      with open(target, 'rb') as f:
+        libelf = ELFFile(f)
+        if CompatibleELFs(elf, libelf):
+          return (target, path)
+
+  return (None, None)
+
+
+def ParseELF(path, root='/', prefix='', ldpaths={'conf':[], 'env':[], 'interp':[]},
+             display=None, debug=False, _first=True, _all_libs={}):
+  """Parse the ELF dependency tree of the specified file
+
+  Args:
+    path: The ELF to scan
+    root: The root tree to prepend to paths; this applies to interp and rpaths
+          only as |path| and |ldpaths| are expected to be prefixed already
+    prefix: The path under |root| to search
+    ldpaths: dict containing library paths to search; should have the keys:
+             conf, env, interp
+    display: The path to show rather than |path|
+    debug: Enable debug output
+    _first: Recursive use only; is this the first ELF ?
+    _all_libs: Recursive use only; dict of all libs we've seen
+
+  Returns:
+    a dict containing information about all the ELFs; e.g.
+    {
+      'interp': '/lib64/ld-linux.so.2',
+      'needed': ['libc.so.6', 'libcurl.so.4',],
+      'libs': {
+        'libc.so.6': {
+          'path': '/lib64/libc.so.6',
+          'needed': [],
+        },
+        'libcurl.so.4': {
+          'path': '/usr/lib64/libcurl.so.4',
+          'needed': ['libc.so.6', 'librt.so.1',],
+        },
+      },
+    }
+  """
+  if _first:
+    _all_libs = {}
+    ldpaths = ldpaths.copy()
+  ret = {
+    'interp': None,
+    'path': path if display is None else display,
+    'realpath': path,
+    'needed': [],
+    'rpath': [],
+    'runpath': [],
+    'libs': _all_libs,
+  }
+
+  dbg(debug, 'ParseELF(%s)' % path)
+
+  with open(path, 'rb') as f:
+    elf = ELFFile(f)
+
+    # If this is the first ELF, extract the interpreter.
+    if _first:
+      for segment in elf.iter_segments():
+        if segment.header.p_type != 'PT_INTERP':
+          continue
+
+        interp = bstr(segment.get_interp_name())
+        dbg(debug, '  interp           =', interp)
+        ret['interp'] = normpath(root + interp)
+        ret['libs'][os.path.basename(interp)] = {
+          'path': ret['interp'],
+          'realpath': readlink(ret['interp'], root, prefixed=True),
+          'needed': [],
+        }
+        # XXX: Should read it and scan for /lib paths.
+        ldpaths['interp'] = [
+          normpath(root + os.path.dirname(interp)),
+          normpath(root + prefix + '/usr' + os.path.dirname(interp).lstrip(prefix)),
+        ]
+        dbg(debug, '  ldpaths[interp]  =', ldpaths['interp'])
+        break
+
+    # Parse the ELF's dynamic tags.
+    libs = []
+    rpaths = []
+    runpaths = []
+    for segment in elf.iter_segments():
+      if segment.header.p_type != 'PT_DYNAMIC':
+        continue
+
+      for t in segment.iter_tags():
+        if t.entry.d_tag == 'DT_RPATH':
+          rpaths = ParseLdPaths(bstr(t.rpath), root=root, path=path)
+        elif t.entry.d_tag == 'DT_RUNPATH':
+          runpaths = ParseLdPaths(bstr(t.runpath), root=root, path=path)
+        elif t.entry.d_tag == 'DT_NEEDED':
+          libs.append(bstr(t.needed))
+      if runpaths:
+        # If both RPATH and RUNPATH are set, only the latter is used.
+        rpaths = []
+
+      # XXX: We assume there is only one PT_DYNAMIC.  This is
+      # probably fine since the runtime ldso does the same.
+      break
+    if _first:
+      # Propagate the rpaths used by the main ELF since those will be
+      # used at runtime to locate things.
+      ldpaths['rpath'] = rpaths
+      ldpaths['runpath'] = runpaths
+      dbg(debug, '  ldpaths[rpath]   =', rpaths)
+      dbg(debug, '  ldpaths[runpath] =', runpaths)
+    ret['rpath'] = rpaths
+    ret['runpath'] = runpaths
+    ret['needed'] = libs
+
+    # Search for the libs this ELF uses.
+    all_ldpaths = None
+    for lib in libs:
+      if lib in _all_libs:
+        continue
+      if all_ldpaths is None:
+        all_ldpaths = rpaths + ldpaths['rpath'] + ldpaths['env'] + runpaths + ldpaths['runpath'] + ldpaths['conf'] + ldpaths['interp']
+      realpath, fullpath = FindLib(elf, lib, all_ldpaths, root, debug=debug)
+      _all_libs[lib] = {
+        'realpath': realpath,
+        'path': fullpath,
+        'needed': [],
+      }
+      if fullpath:
+        lret = ParseELF(realpath, root, prefix, ldpaths, display=fullpath,
+                        debug=debug, _first=False, _all_libs=_all_libs)
+        _all_libs[lib]['needed'] = lret['needed']
+
+    del elf
+
+  return ret
+
+
+def _NormalizePath(option, _opt, value, parser):
+  setattr(parser.values, option.dest, normpath(value))
+
+
+def _ShowVersion(_option, _opt, _value, _parser):
+  d = '$Id: lddtree.py,v 1.53 2014/08/01 02:20:20 vapier Exp $'.split()
+  print('%s-%s %s %s' % (d[1].split('.')[0], d[2], d[3], d[4]))
+  sys.exit(0)
+
+
+def _ActionShow(options, elf):
+  """Show the dependency tree for this ELF"""
+  def _show(lib, depth):
+    chain_libs.append(lib)
+    fullpath = elf['libs'][lib]['path']
+    if options.list:
+      print(fullpath or lib)
+    else:
+      print('%s%s => %s' % ('    ' * depth, lib, fullpath))
+
+    new_libs = []
+    for lib in elf['libs'][lib]['needed']:
+      if lib in chain_libs:
+        if not options.list:
+          print('%s%s => !!! circular loop !!!' % ('    ' * depth, lib))
+        continue
+      if options.all or not lib in shown_libs:
+        shown_libs.add(lib)
+        new_libs.append(lib)
+
+    for lib in new_libs:
+      _show(lib, depth + 1)
+    chain_libs.pop()
+
+  shown_libs = set(elf['needed'])
+  chain_libs = []
+  interp = elf['interp']
+  if interp:
+    shown_libs.add(os.path.basename(interp))
+  if options.list:
+    print(elf['path'])
+    if not interp is None:
+      print(interp)
+  else:
+    print('%s (interpreter => %s)' % (elf['path'], interp))
+  for lib in elf['needed']:
+    _show(lib, 1)
+
+
+def _ActionCopy(options, elf):
+  """Copy the ELF and its dependencies to a destination tree"""
+  def _StripRoot(path):
+    return path[len(options.root) - 1:]
+
+  def _copy(realsrc, src, striproot=True, wrapit=False, libpaths=(),
+            outdir=None):
+    if realsrc is None:
+      return
+
+    if wrapit:
+      # Static ELFs don't need to be wrapped.
+      if not elf['interp']:
+        wrapit = False
+
+    striproot = _StripRoot if striproot else lambda x: x
+
+    if outdir:
+      subdst = os.path.join(outdir, os.path.basename(src))
+    else:
+      subdst = striproot(src)
+    dst = options.dest + subdst
+
+    try:
+      # See if they're the same file.
+      nstat = os.stat(dst + ('.elf' if wrapit else ''))
+      ostat = os.stat(realsrc)
+      for field in ('mode', 'mtime', 'size'):
+        if getattr(ostat, 'st_' + field) != \
+           getattr(nstat, 'st_' + field):
+          break
+      else:
+        return
+    except OSError as e:
+      if e.errno != errno.ENOENT:
+        raise
+
+    if options.verbose:
+      print('%s -> %s' % (src, dst))
+
+    makedirs(os.path.dirname(dst))
+    try:
+      shutil.copy2(realsrc, dst)
+    except IOError:
+      os.unlink(dst)
+      shutil.copy2(realsrc, dst)
+
+    if wrapit:
+      if options.verbose:
+        print('generate wrapper %s' % (dst,))
+
+      if options.libdir:
+        interp = os.path.join(options.libdir, os.path.basename(elf['interp']))
+      else:
+        interp = _StripRoot(elf['interp'])
+      GenerateLdsoWrapper(options.dest, subdst, interp, libpaths,
+                          options.elf_subdir)
+
+  # XXX: We should automatically import libgcc_s.so whenever libpthread.so
+  # is copied over (since we know it can be dlopen-ed by NPTL at runtime).
+  # Similarly, we should provide an option for automatically copying over
+  # the libnsl.so and libnss_*.so libraries, as well as an open ended list
+  # for known libs that get loaded (e.g. curl will dlopen(libresolv)).
+  libpaths = set()
+  for lib in elf['libs']:
+    libdata = elf['libs'][lib]
+    path = libdata['realpath']
+    if not options.libdir:
+      libpaths.add(_StripRoot(os.path.dirname(path)))
+    _copy(path, libdata['path'], outdir=options.libdir)
+
+  if not options.libdir:
+    libpaths = list(libpaths)
+    if elf['runpath']:
+      libpaths = elf['runpath'] + libpaths
+    else:
+      libpaths = elf['rpath'] + libpaths
+  else:
+    libpaths.add(options.libdir)
+
+  # We don't bother to copy this as ParseELF adds the interp to the 'libs',
+  # so it was already copied in the libs loop above.
+  #_copy(elf['interp'], outdir=options.libdir)
+  _copy(elf['realpath'], elf['path'], striproot=options.auto_root,
+        wrapit=options.generate_wrappers, libpaths=libpaths,
+        outdir=options.bindir)
+
+
+def main(argv):
+  parser = optparse.OptionParser("""%prog [options] <ELFs>
+
+Display ELF dependencies as a tree
+
+<ELFs> can be globs that lddtree will take care of expanding.
+Useful when you want to glob a path under the ROOT path.
+
+When using the --root option, all paths are implicitly prefixed by that.
+  e.g. lddtree -R /my/magic/root /bin/bash
+This will load up the ELF found at /my/magic/root/bin/bash and then resolve
+all libraries via that path.  If you wish to actually read /bin/bash (and
+so use the ROOT path as an alternative library tree), you can specify the
+--no-auto-root option.
+
+When pairing --root with --copy-to-tree, the ROOT path will be stripped.
+  e.g. lddtree -R /my/magic/root --copy-to-tree /foo /bin/bash
+You will see /foo/bin/bash and /foo/lib/libc.so.6 and not paths like
+/foo/my/magic/root/bin/bash.  If you want that, you'll have to manually
+add the ROOT path to the output path.
+
+The --bindir and --libdir flags are used to normalize the output subdirs
+when used with --copy-to-tree.
+  e.g. lddtree --copy-to-tree /foo /bin/bash /usr/sbin/lspci /usr/bin/lsof
+This will mirror the input paths in the output.  So you will end up with
+/foo/bin/bash and /foo/usr/sbin/lspci and /foo/usr/bin/lsof.  Similarly,
+the libraries needed will be scattered among /foo/lib/ and /foo/usr/lib/
+and perhaps other paths (like /foo/lib64/ and /usr/lib/gcc/...).  You can
+collapse all that down into nice directory structure.
+  e.g. lddtree --copy-to-tree /foo /bin/bash /usr/sbin/lspci /usr/bin/lsof \\
+               --bindir /bin --libdir /lib
+This will place bash, lspci, and lsof into /foo/bin/.  All the libraries
+they need will be placed into /foo/lib/ only.""")
+  parser.add_option('-a', '--all',
+    action='store_true', default=False,
+    help='Show all duplicated dependencies')
+  parser.add_option('-R', '--root',
+    default=os.environ.get('ROOT', ''), type='string',
+    action='callback', callback=_NormalizePath,
+    help='Search for all files/dependencies in ROOT')
+  parser.add_option('-P', '--prefix',
+    default=os.environ.get('EPREFIX', '@GENTOO_PORTAGE_EPREFIX@'), type='string',
+    action='callback', callback=_NormalizePath,
+    help='Specify EPREFIX for binaries (for Gentoo Prefix)')
+  parser.add_option('--no-auto-root',
+    dest='auto_root', action='store_false', default=True,
+    help='Do not automatically prefix input ELFs with ROOT')
+  parser.add_option('-l', '--list',
+    action='store_true', default=False,
+    help='Display output in a simple list (easy for copying)')
+  parser.add_option('-x', '--debug',
+    action='store_true', default=False,
+    help='Run with debugging')
+  parser.add_option('-v', '--verbose',
+    action='store_true', default=False,
+    help='Be verbose')
+  parser.add_option('--skip-non-elfs',
+    action='store_true', default=False,
+    help='Skip plain (non-ELF) files instead of warning')
+  parser.add_option('-V', '--version',
+    action='callback', callback=_ShowVersion,
+    help='Show version information')
+
+  group = optparse.OptionGroup(parser, 'Copying options')
+  group.add_option('--copy-to-tree',
+    dest='dest', default=None, type='string',
+    action='callback', callback=_NormalizePath,
+    help='Copy all files to the specified tree')
+  group.add_option('--bindir',
+    default=None, type='string',
+    action='callback', callback=_NormalizePath,
+    help='Dir to store all ELFs specified on the command line')
+  group.add_option('--libdir',
+    default=None, type='string',
+    action='callback', callback=_NormalizePath,
+    help='Dir to store all ELF libs')
+  group.add_option('--generate-wrappers',
+    action='store_true', default=False,
+    help='Wrap executable ELFs with scripts for local ldso')
+  group.add_option('--elf-subdir',
+    default=None, type='string',
+    help='When wrapping executable ELFs, place the original file in this '
+         'sub-directory. By default, it appends a .elf suffix instead.')
+  group.add_option('--copy-non-elfs',
+    action='store_true', default=False,
+    help='Copy over plain (non-ELF) files instead of warn+ignore')
+  parser.add_option_group(group)
+
+  (options, paths) = parser.parse_args(argv)
+
+  if options.root != '/':
+    options.root += '/'
+  if options.prefix == '@''GENTOO_PORTAGE_EPREFIX''@':
+    options.prefix = ''
+
+  if options.bindir and options.bindir[0] != '/':
+    parser.error('--bindir accepts absolute paths only')
+  if options.libdir and options.libdir[0] != '/':
+    parser.error('--libdir accepts absolute paths only')
+
+  if options.skip_non_elfs and options.copy_non_elfs:
+    parser.error('pick one handler for non-ELFs: skip or copy')
+
+  dbg(options.debug, 'root =', options.root)
+  if options.dest:
+    dbg(options.debug, 'dest =', options.dest)
+  if not paths:
+    err('missing ELF files to scan')
+
+  ldpaths = LoadLdpaths(options.root, options.prefix)
+  dbg(options.debug, 'ldpaths[conf] =', ldpaths['conf'])
+  dbg(options.debug, 'ldpaths[env]  =', ldpaths['env'])
+
+  # Process all the files specified.
+  ret = 0
+  for path in paths:
+    dbg(options.debug, 'argv[x]       =', path)
+    # Only auto-prefix the path if the ELF is absolute.
+    # If it's a relative path, the user most likely wants
+    # the local path.
+    if options.auto_root and path.startswith('/'):
+      path = options.root + path.lstrip('/')
+      dbg(options.debug, '  +auto-root  =', path)
+
+    matched = False
+    for p in glob.iglob(path):
+      # Once we've processed the globs, resolve the symlink.  This way you can
+      # operate on a path that is an absolute symlink itself.  e.g.:
+      #   $ ln -sf /bin/bash $PWD/root/bin/sh
+      #   $ lddtree --root $PWD/root /bin/sh
+      # First we'd turn /bin/sh into $PWD/root/bin/sh, then we want to resolve
+      # the symlink to $PWD/root/bin/bash rather than a plain /bin/bash.
+      dbg(options.debug, '  globbed     =', p)
+      if not path.startswith('/'):
+        realpath = os.path.realpath(path)
+      elif options.auto_root:
+        realpath = readlink(p, options.root, prefixed=True)
+      else:
+        realpath = path
+      if path != realpath:
+        dbg(options.debug, '  resolved    =', realpath)
+
+      matched = True
+      try:
+        elf = ParseELF(realpath, options.root, options.prefix, ldpaths,
+                       display=p, debug=options.debug)
+      except exceptions.ELFError as e:
+        if options.skip_non_elfs:
+          continue
+        # XXX: Ugly.  Should unify with _Action* somehow.
+        if options.dest is not None and options.copy_non_elfs:
+          if os.path.exists(p):
+            elf = {
+              'interp': None,
+              'libs': [],
+              'runpath': [],
+              'rpath': [],
+              'path': p,
+              'realpath': realpath,
+            }
+            _ActionCopy(options, elf)
+            continue
+        ret = 1
+        warn('%s: %s' % (p, e))
+        continue
+      except IOError as e:
+        ret = 1
+        warn('%s: %s' % (p, e))
+        continue
+
+      if options.dest is None:
+        _ActionShow(options, elf)
+      else:
+        _ActionCopy(options, elf)
+
+    if not matched:
+      ret = 1
+      warn('%s: did not match any paths' % (path,))
+
+  return ret
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/third_party/mock.py b/third_party/mock.py
new file mode 100644
index 0000000..512fc49
--- /dev/null
+++ b/third_party/mock.py
@@ -0,0 +1,2367 @@
+# mock.py
+# Test tools for mocking and patching.
+# Copyright (C) 2007-2012 Michael Foord & the mock team
+# E-mail: fuzzyman AT voidspace DOT org DOT uk
+
+# mock 1.0
+# http://www.voidspace.org.uk/python/mock/
+
+# Released subject to the BSD License
+# Please see http://www.voidspace.org.uk/python/license.shtml
+
+# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
+# Comments, suggestions and bug reports welcome.
+
+
+__all__ = (
+    'Mock',
+    'MagicMock',
+    'patch',
+    'sentinel',
+    'DEFAULT',
+    'ANY',
+    'call',
+    'create_autospec',
+    'FILTER_DIR',
+    'NonCallableMock',
+    'NonCallableMagicMock',
+    'mock_open',
+    'PropertyMock',
+)
+
+
+__version__ = '1.0.1'
+
+
+import pprint
+import sys
+
+try:
+    import inspect
+except ImportError:
+    # for alternative platforms that
+    # may not have inspect
+    inspect = None
+
+try:
+    from functools import wraps as original_wraps
+except ImportError:
+    # Python 2.4 compatibility
+    def wraps(original):
+        def inner(f):
+            f.__name__ = original.__name__
+            f.__doc__ = original.__doc__
+            f.__module__ = original.__module__
+            f.__wrapped__ = original
+            return f
+        return inner
+else:
+    if sys.version_info[:2] >= (3, 3):
+        wraps = original_wraps
+    else:
+        def wraps(func):
+            def inner(f):
+                f = original_wraps(func)(f)
+                f.__wrapped__ = func
+                return f
+            return inner
+
+try:
+    unicode
+except NameError:
+    # Python 3
+    basestring = unicode = str
+
+try:
+    long
+except NameError:
+    # Python 3
+    long = int
+
+try:
+    BaseException
+except NameError:
+    # Python 2.4 compatibility
+    BaseException = Exception
+
+try:
+    next
+except NameError:
+    def next(obj):
+        return obj.next()
+
+
+BaseExceptions = (BaseException,)
+if 'java' in sys.platform:
+    # jython
+    import java
+    BaseExceptions = (BaseException, java.lang.Throwable)
+
+try:
+    _isidentifier = str.isidentifier
+except AttributeError:
+    # Python 2.X
+    import keyword
+    import re
+    regex = re.compile(r'^[a-z_][a-z0-9_]*$', re.I)
+    def _isidentifier(string):
+        if string in keyword.kwlist:
+            return False
+        return regex.match(string)
+
+
+inPy3k = sys.version_info[0] == 3
+
+# Needed to work around Python 3 bug where use of "super" interferes with
+# defining __class__ as a descriptor
+_super = super
+
+self = 'im_self'
+builtin = '__builtin__'
+if inPy3k:
+    self = '__self__'
+    builtin = 'builtins'
+
+FILTER_DIR = True
+
+
+def _is_instance_mock(obj):
+    # can't use isinstance on Mock objects because they override __class__
+    # The base class for all mocks is NonCallableMock
+    return issubclass(type(obj), NonCallableMock)
+
+
+def _is_exception(obj):
+    return (
+        isinstance(obj, BaseExceptions) or
+        isinstance(obj, ClassTypes) and issubclass(obj, BaseExceptions)
+    )
+
+
+class _slotted(object):
+    __slots__ = ['a']
+
+
+DescriptorTypes = (
+    type(_slotted.a),
+    property,
+)
+
+
+def _getsignature(func, skipfirst, instance=False):
+    if inspect is None:
+        raise ImportError('inspect module not available')
+
+    if isinstance(func, ClassTypes) and not instance:
+        try:
+            func = func.__init__
+        except AttributeError:
+            return
+        skipfirst = True
+    elif not isinstance(func, FunctionTypes):
+        # for classes where instance is True we end up here too
+        try:
+            func = func.__call__
+        except AttributeError:
+            return
+
+    if inPy3k:
+        try:
+            argspec = inspect.getfullargspec(func)
+        except TypeError:
+            # C function / method, possibly inherited object().__init__
+            return
+        regargs, varargs, varkw, defaults, kwonly, kwonlydef, ann = argspec
+    else:
+        try:
+            regargs, varargs, varkwargs, defaults = inspect.getargspec(func)
+        except TypeError:
+            # C function / method, possibly inherited object().__init__
+            return
+
+    # instance methods and classmethods need to lose the self argument
+    if getattr(func, self, None) is not None:
+        regargs = regargs[1:]
+    if skipfirst:
+        # this condition and the above one are never both True - why?
+        regargs = regargs[1:]
+
+    if inPy3k:
+        signature = inspect.formatargspec(
+            regargs, varargs, varkw, defaults,
+            kwonly, kwonlydef, ann, formatvalue=lambda value: "")
+    else:
+        signature = inspect.formatargspec(
+            regargs, varargs, varkwargs, defaults,
+            formatvalue=lambda value: "")
+    return signature[1:-1], func
+
+
+def _check_signature(func, mock, skipfirst, instance=False):
+    if not _callable(func):
+        return
+
+    result = _getsignature(func, skipfirst, instance)
+    if result is None:
+        return
+    signature, func = result
+
+    # can't use self because "self" is common as an argument name
+    # unfortunately even not in the first place
+    src = "lambda _mock_self, %s: None" % signature
+    checksig = eval(src, {})
+    _copy_func_details(func, checksig)
+    type(mock)._mock_check_sig = checksig
+
+
+def _copy_func_details(func, funcopy):
+    funcopy.__name__ = func.__name__
+    funcopy.__doc__ = func.__doc__
+    #funcopy.__dict__.update(func.__dict__)
+    funcopy.__module__ = func.__module__
+    if not inPy3k:
+        funcopy.func_defaults = func.func_defaults
+        return
+    funcopy.__defaults__ = func.__defaults__
+    funcopy.__kwdefaults__ = func.__kwdefaults__
+
+
+def _callable(obj):
+    if isinstance(obj, (ClassTypes, staticmethod, classmethod)):
+        return True
+    if getattr(obj, '__call__', None) is not None:
+        return True
+    return False
+
+
+def _is_list(obj):
+    # checks for list or tuples
+    # XXXX badly named!
+    return type(obj) in (list, tuple)
+
+
def _instance_callable(obj):
    """Given an object, return True if the object is callable.
    For classes, return True if instances would be callable."""
    if not isinstance(obj, ClassTypes):
        # already an instance: just look for __call__
        return getattr(obj, '__call__', None) is not None

    # walk the class and its bases by hand -- __bases__ rather than
    # __mro__ so that old-style classes work too
    if obj.__dict__.get('__call__') is not None:
        return True
    return any(_instance_callable(base) for base in obj.__bases__)
+
+
def _set_signature(mock, original, instance=False):
    """Return a function with `original`'s name and signature that
    forwards every call to `mock`.

    The generated function takes (*args, **kwargs) but first calls a
    lambda with the original's exact signature, so mismatched arguments
    raise TypeError before the mock records the call. Returns None when
    `original` is not callable or its signature cannot be inspected.
    """
    # creates a function with signature (*args, **kwargs) that delegates to a
    # mock. It still does signature checking by calling a lambda with the same
    # signature as the original.
    if not _callable(original):
        return

    # classes lose the implicit first (self) argument of __init__
    skipfirst = isinstance(original, ClassTypes)
    result = _getsignature(original, skipfirst, instance)
    if result is None:
        # was a C function (e.g. object().__init__ ) that can't be mocked
        return

    signature, func = result

    src = "lambda %s: None" % signature
    checksig = eval(src, {})
    _copy_func_details(func, checksig)

    # build the forwarding function via exec so it can carry the
    # original's name; fall back to 'funcopy' for non-identifier names
    name = original.__name__
    if not _isidentifier(name):
        name = 'funcopy'
    context = {'_checksig_': checksig, 'mock': mock}
    src = """def %s(*args, **kwargs):
    _checksig_(*args, **kwargs)
    return mock(*args, **kwargs)""" % name
    exec (src, context)
    funcopy = context[name]
    _setup_func(funcopy, mock)
    return funcopy
+
+
def _setup_func(funcopy, mock):
    """Wire a generated forwarding function up to its backing mock.

    Copies the mock's call-recording attributes and assert_* helpers
    onto `funcopy`, and registers `funcopy` as the mock's delegate so
    state stays in sync.
    """
    funcopy.mock = mock

    # can't use isinstance with mocks
    if not _is_instance_mock(mock):
        return

    # closures forwarding the assert API to the real mock
    def assert_called_with(*args, **kwargs):
        return mock.assert_called_with(*args, **kwargs)
    def assert_called_once_with(*args, **kwargs):
        return mock.assert_called_once_with(*args, **kwargs)
    def assert_has_calls(*args, **kwargs):
        return mock.assert_has_calls(*args, **kwargs)
    def assert_any_call(*args, **kwargs):
        return mock.assert_any_call(*args, **kwargs)
    def reset_mock():
        # reset both the copy's own lists and the backing mock
        funcopy.method_calls = _CallList()
        funcopy.mock_calls = _CallList()
        mock.reset_mock()
        ret = funcopy.return_value
        if _is_instance_mock(ret) and not ret is mock:
            ret.reset_mock()

    funcopy.called = False
    funcopy.call_count = 0
    funcopy.call_args = None
    funcopy.call_args_list = _CallList()
    funcopy.method_calls = _CallList()
    funcopy.mock_calls = _CallList()

    funcopy.return_value = mock.return_value
    funcopy.side_effect = mock.side_effect
    funcopy._mock_children = mock._mock_children

    funcopy.assert_called_with = assert_called_with
    funcopy.assert_called_once_with = assert_called_once_with
    funcopy.assert_has_calls = assert_has_calls
    funcopy.assert_any_call = assert_any_call
    funcopy.reset_mock = reset_mock

    # the mock's delegating properties now read/write through funcopy
    mock._mock_delegate = funcopy
+
+
+def _is_magic(name):
+    return '__%s__' % name[2:-2] == name
+
+
+class _SentinelObject(object):
+    "A unique, named, sentinel object."
+    def __init__(self, name):
+        self.name = name
+
+    def __repr__(self):
+        return 'sentinel.%s' % self.name
+
+
class _Sentinel(object):
    """Access attributes to return a named object, usable as a sentinel.

    The same name always yields the same _SentinelObject instance."""

    def __init__(self):
        self._sentinels = {}

    def __getattr__(self, name):
        if name == '__bases__':
            # Without this help(mock) raises an exception
            raise AttributeError
        try:
            return self._sentinels[name]
        except KeyError:
            return self._sentinels.setdefault(name, _SentinelObject(name))
+
+
# The single module-level sentinel factory: sentinel.WHATEVER
sentinel = _Sentinel()

# DEFAULT is public API (the "create a return value for me" marker);
# _missing and _deleted are internal markers used by the child-mock
# bookkeeping in __getattr__/__delattr__.
DEFAULT = sentinel.DEFAULT
_missing = sentinel.MISSING
_deleted = sentinel.DELETED
+
+
# Capture the type of old-style classes: `classobj` on Python 2; on
# Python 3 (where old-style classes are gone) this is simply `type`.
class OldStyleClass:
    pass
ClassType = type(OldStyleClass)
+
+
+def _copy(value):
+    if type(value) in (dict, list, tuple, set):
+        return type(value)(value)
+    return value
+
+
# Types that count as "a class" for isinstance checks throughout this
# module: just `type` on Python 3, plus old-style classes on Python 2.
ClassTypes = (type,)
if not inPy3k:
    ClassTypes = (type, ClassType)
+
# Attribute names that NonCallableMock.__setattr__ passes straight to
# object.__setattr__ (property setters and internal mock state).
# _delegating_property() adds each delegated name to this set as well.
_allowed_names = set(
    [
        'return_value', '_mock_return_value', 'side_effect',
        '_mock_side_effect', '_mock_parent', '_mock_new_parent',
        '_mock_name', '_mock_new_name'
    ]
)
+
+
def _delegating_property(name):
    """Build a property that stores `_mock_<name>` on the instance, but
    reads and writes through `self._mock_delegate` whenever a delegate
    has been installed (see _setup_func).

    Also registers `name` in _allowed_names so that __setattr__ lets
    the property machinery through.
    """
    _allowed_names.add(name)
    local_attr = '_mock_' + name

    def getter(self, name=name, _the_name=local_attr):
        delegate = self._mock_delegate
        if delegate is None:
            return getattr(self, _the_name)
        return getattr(delegate, name)

    def setter(self, value, name=name, _the_name=local_attr):
        delegate = self._mock_delegate
        if delegate is None:
            # bypass __setattr__ by writing to __dict__ directly
            self.__dict__[_the_name] = value
        else:
            setattr(delegate, name, value)

    return property(getter, setter)
+
+
+
+class _CallList(list):
+
+    def __contains__(self, value):
+        if not isinstance(value, list):
+            return list.__contains__(self, value)
+        len_value = len(value)
+        len_self = len(self)
+        if len_value > len_self:
+            return False
+
+        for i in range(0, len_self - len_value + 1):
+            sub_list = self[i:i+len_value]
+            if sub_list == value:
+                return True
+        return False
+
+    def __repr__(self):
+        return pprint.pformat(list(self))
+
+
def _check_and_set_parent(parent, value, name, new_name):
    """Adopt the mock `value` as a child of `parent`.

    Only adopts mocks that do not already have a name or a parent of
    their own. Returns True if adoption happened, so callers know to
    record `value` in parent's `_mock_children`.
    """
    if not _is_instance_mock(value):
        return False
    # already named or parented elsewhere: leave it alone
    if ((value._mock_name or value._mock_new_name) or
        (value._mock_parent is not None) or
        (value._mock_new_parent is not None)):
        return False

    _parent = parent
    while _parent is not None:
        # setting a mock (value) as a child or return value of itself
        # should not modify the mock
        if _parent is value:
            return False
        _parent = _parent._mock_new_parent

    # new_name drives mock_calls recording; name drives method_calls
    if new_name:
        value._mock_new_parent = parent
        value._mock_new_name = new_name
    if name:
        value._mock_parent = parent
        value._mock_name = name
    return True
+
+
+
class Base(object):
    # Class-level defaults consulted by the return_value / side_effect
    # properties before any instance-level value has been set.
    _mock_return_value = DEFAULT
    _mock_side_effect = None
    def __init__(self, *args, **kwargs):
        # terminates the cooperative super().__init__ chain, absorbing
        # whatever arguments the mixins pass along
        pass
+
+
+
class NonCallableMock(Base):
    """A non-callable version of `Mock`"""

    def __new__(cls, *args, **kw):
        # every instance has its own class
        # so we can create magic methods on the
        # class without stomping on other mocks
        new = type(cls.__name__, (cls,), {'__doc__': cls.__doc__})
        instance = object.__new__(new)
        return instance


    def __init__(
            self, spec=None, wraps=None, name=None, spec_set=None,
            parent=None, _spec_state=None, _new_name='', _new_parent=None,
            **kwargs
        ):
        if _new_parent is None:
            _new_parent = parent

        # write through __dict__ directly to avoid triggering our own
        # __setattr__ machinery during construction
        __dict__ = self.__dict__
        __dict__['_mock_parent'] = parent
        __dict__['_mock_name'] = name
        __dict__['_mock_new_name'] = _new_name
        __dict__['_mock_new_parent'] = _new_parent

        if spec_set is not None:
            # spec_set doubles as the spec; remember the strict form
            spec = spec_set
            spec_set = True

        self._mock_add_spec(spec, spec_set)

        __dict__['_mock_children'] = {}
        __dict__['_mock_wraps'] = wraps
        __dict__['_mock_delegate'] = None

        # call-recording state
        __dict__['_mock_called'] = False
        __dict__['_mock_call_args'] = None
        __dict__['_mock_call_count'] = 0
        __dict__['_mock_call_args_list'] = _CallList()
        __dict__['_mock_mock_calls'] = _CallList()

        __dict__['method_calls'] = _CallList()

        # arbitrary keyword arguments become attributes on the mock
        if kwargs:
            self.configure_mock(**kwargs)

        _super(NonCallableMock, self).__init__(
            spec, wraps, name, spec_set, parent,
            _spec_state
        )


    def attach_mock(self, mock, attribute):
        """
        Attach a mock as an attribute of this one, replacing its name and
        parent. Calls to the attached mock will be recorded in the
        `method_calls` and `mock_calls` attributes of this one."""
        # clear any existing name/parent so __setattr__'s
        # _check_and_set_parent call adopts the mock as our child
        mock._mock_parent = None
        mock._mock_new_parent = None
        mock._mock_name = ''
        mock._mock_new_name = None

        setattr(self, attribute, mock)


    def mock_add_spec(self, spec, spec_set=False):
        """Add a spec to a mock. `spec` can either be an object or a
        list of strings. Only attributes on the `spec` can be fetched as
        attributes from the mock.

        If `spec_set` is True then only attributes on the spec can be set."""
        self._mock_add_spec(spec, spec_set)


    def _mock_add_spec(self, spec, spec_set):
        # Normalise the spec into a list of allowed attribute names
        # (via dir()), recording the spec's class for __class__ faking.
        _spec_class = None

        if spec is not None and not _is_list(spec):
            if isinstance(spec, ClassTypes):
                _spec_class = spec
            else:
                _spec_class = _get_class(spec)

            spec = dir(spec)

        __dict__ = self.__dict__
        __dict__['_spec_class'] = _spec_class
        __dict__['_spec_set'] = spec_set
        __dict__['_mock_methods'] = spec


    def __get_return_value(self):
        # the delegate (if any) owns return_value; otherwise create a
        # child mock lazily on first access
        ret = self._mock_return_value
        if self._mock_delegate is not None:
            ret = self._mock_delegate.return_value

        if ret is DEFAULT:
            ret = self._get_child_mock(
                _new_parent=self, _new_name='()'
            )
            self.return_value = ret
        return ret


    def __set_return_value(self, value):
        if self._mock_delegate is not None:
            self._mock_delegate.return_value = value
        else:
            self._mock_return_value = value
            _check_and_set_parent(self, value, None, '()')

    __return_value_doc = "The value to be returned when the mock is called."
    return_value = property(__get_return_value, __set_return_value,
                            __return_value_doc)


    @property
    def __class__(self):
        # report the spec's class so isinstance checks pass
        if self._spec_class is None:
            return type(self)
        return self._spec_class

    # these all defer to the delegate once one is installed
    called = _delegating_property('called')
    call_count = _delegating_property('call_count')
    call_args = _delegating_property('call_args')
    call_args_list = _delegating_property('call_args_list')
    mock_calls = _delegating_property('mock_calls')


    def __get_side_effect(self):
        sig = self._mock_delegate
        if sig is None:
            return self._mock_side_effect
        return sig.side_effect

    def __set_side_effect(self, value):
        # iterables become iterators so calls consume successive values
        value = _try_iter(value)
        sig = self._mock_delegate
        if sig is None:
            self._mock_side_effect = value
        else:
            sig.side_effect = value

    side_effect = property(__get_side_effect, __set_side_effect)


    def reset_mock(self):
        "Restore the mock object to its initial state."
        self.called = False
        self.call_args = None
        self.call_count = 0
        self.mock_calls = _CallList()
        self.call_args_list = _CallList()
        self.method_calls = _CallList()

        # recurse into children; _SpecState placeholders aren't mocks
        for child in self._mock_children.values():
            if isinstance(child, _SpecState):
                continue
            child.reset_mock()

        ret = self._mock_return_value
        if _is_instance_mock(ret) and ret is not self:
            ret.reset_mock()


    def configure_mock(self, **kwargs):
        """Set attributes on the mock through keyword arguments.

        Attributes plus return values and side effects can be set on child
        mocks using standard dot notation and unpacking a dictionary in the
        method call:

        >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
        >>> mock.configure_mock(**attrs)"""
        for arg, val in sorted(kwargs.items(),
                               # we sort on the number of dots so that
                               # attributes are set before we set attributes on
                               # attributes
                               key=lambda entry: entry[0].count('.')):
            args = arg.split('.')
            final = args.pop()
            obj = self
            for entry in args:
                obj = getattr(obj, entry)
            setattr(obj, final, val)


    def __getattr__(self, name):
        # enforce the spec (if any) and refuse magic-method lookups,
        # then return the cached child mock or create one on demand
        if name == '_mock_methods':
            raise AttributeError(name)
        elif self._mock_methods is not None:
            if name not in self._mock_methods or name in _all_magics:
                raise AttributeError("Mock object has no attribute %r" % name)
        elif _is_magic(name):
            raise AttributeError(name)

        result = self._mock_children.get(name)
        if result is _deleted:
            # attribute was explicitly deleted via __delattr__
            raise AttributeError(name)
        elif result is None:
            wraps = None
            if self._mock_wraps is not None:
                # XXXX should we get the attribute without triggering code
                # execution?
                wraps = getattr(self._mock_wraps, name)

            result = self._get_child_mock(
                parent=self, name=name, wraps=wraps, _new_name=name,
                _new_parent=self
            )
            self._mock_children[name]  = result

        elif isinstance(result, _SpecState):
            # lazily autospec placeholders left by create_autospec
            result = create_autospec(
                result.spec, result.spec_set, result.instance,
                result.parent, result.name
            )
            self._mock_children[name]  = result

        return result


    def __repr__(self):
        # reconstruct the dotted access path by walking up the
        # _mock_new_parent chain, e.g. "mock.foo()" or "mock.bar.baz"
        _name_list = [self._mock_new_name]
        _parent = self._mock_new_parent
        last = self

        dot = '.'
        if _name_list == ['()']:
            dot = ''
        seen = set()
        while _parent is not None:
            last = _parent

            _name_list.append(_parent._mock_new_name + dot)
            dot = '.'
            if _parent._mock_new_name == '()':
                dot = ''

            _parent = _parent._mock_new_parent

            # use ids here so as not to call __hash__ on the mocks
            if id(_parent) in seen:
                break
            seen.add(id(_parent))

        _name_list = list(reversed(_name_list))
        _first = last._mock_name or 'mock'
        if len(_name_list) > 1:
            if _name_list[1] not in ('()', '().'):
                _first += '.'
        _name_list[0] = _first
        name = ''.join(_name_list)

        name_string = ''
        if name not in ('mock', 'mock.'):
            name_string = ' name=%r' % name

        spec_string = ''
        if self._spec_class is not None:
            spec_string = ' spec=%r'
            if self._spec_set:
                spec_string = ' spec_set=%r'
            spec_string = spec_string % self._spec_class.__name__
        return "<%s%s%s id='%s'>" % (
            type(self).__name__,
            name_string,
            spec_string,
            id(self)
        )


    def __dir__(self):
        """Filter the output of `dir(mock)` to only useful members.
        XXXX
        """
        extras = self._mock_methods or []
        from_type = dir(type(self))
        from_dict = list(self.__dict__)

        # FILTER_DIR is a module-level switch; when set, hide private
        # names (but keep magic methods set on the instance)
        if FILTER_DIR:
            from_type = [e for e in from_type if not e.startswith('_')]
            from_dict = [e for e in from_dict if not e.startswith('_') or
                         _is_magic(e)]
        return sorted(set(extras + from_type + from_dict +
                          list(self._mock_children)))


    def __setattr__(self, name, value):
        if name in _allowed_names:
            # property setters go through here
            return object.__setattr__(self, name, value)
        elif (self._spec_set and self._mock_methods is not None and
            name not in self._mock_methods and
            name not in self.__dict__):
            raise AttributeError("Mock object has no attribute '%s'" % name)
        elif name in _unsupported_magics:
            msg = 'Attempting to set unsupported magic method %r.' % name
            raise AttributeError(msg)
        elif name in _all_magics:
            if self._mock_methods is not None and name not in self._mock_methods:
                raise AttributeError("Mock object has no attribute '%s'" % name)

            if not _is_instance_mock(value):
                # plain callables are installed on the per-instance class
                # and wrapped so they receive `self`
                setattr(type(self), name, _get_method(name, value))
                original = value
                value = lambda *args, **kw: original(self, *args, **kw)
            else:
                # only set _new_name and not name so that mock_calls is tracked
                # but not method calls
                _check_and_set_parent(self, value, None, name)
                setattr(type(self), name, value)
                self._mock_children[name] = value
        elif name == '__class__':
            self._spec_class = value
            return
        else:
            if _check_and_set_parent(self, value, name, name):
                self._mock_children[name] = value
        return object.__setattr__(self, name, value)


    def __delattr__(self, name):
        if name in _all_magics and name in type(self).__dict__:
            delattr(type(self), name)
            if name not in self.__dict__:
                # for magic methods that are still MagicProxy objects and
                # not set on the instance itself
                return

        if name in self.__dict__:
            object.__delattr__(self, name)

        obj = self._mock_children.get(name, _missing)
        if obj is _deleted:
            raise AttributeError(name)
        if obj is not _missing:
            del self._mock_children[name]
        # leave a tombstone so __getattr__ keeps raising AttributeError
        self._mock_children[name] = _deleted



    def _format_mock_call_signature(self, args, kwargs):
        name = self._mock_name or 'mock'
        return _format_call_signature(name, args, kwargs)


    def _format_mock_failure_message(self, args, kwargs):
        message = 'Expected call: %s\nActual call: %s'
        expected_string = self._format_mock_call_signature(args, kwargs)
        call_args = self.call_args
        # a three-tuple call includes a name; drop it to get (args, kwargs)
        if len(call_args) == 3:
            call_args = call_args[1:]
        actual_string = self._format_mock_call_signature(*call_args)
        return message % (expected_string, actual_string)


    def assert_called_with(_mock_self, *args, **kwargs):
        """assert that the mock was called with the specified arguments.

        Raises an AssertionError if the args and keyword args passed in are
        different to the last call to the mock."""
        self = _mock_self
        if self.call_args is None:
            expected = self._format_mock_call_signature(args, kwargs)
            raise AssertionError('Expected call: %s\nNot called' % (expected,))

        if self.call_args != (args, kwargs):
            msg = self._format_mock_failure_message(args, kwargs)
            raise AssertionError(msg)


    def assert_called_once_with(_mock_self, *args, **kwargs):
        """assert that the mock was called exactly once and with the specified
        arguments."""
        self = _mock_self
        if not self.call_count == 1:
            msg = ("Expected to be called once. Called %s times." %
                   self.call_count)
            raise AssertionError(msg)
        return self.assert_called_with(*args, **kwargs)


    def assert_has_calls(self, calls, any_order=False):
        """assert the mock has been called with the specified calls.
        The `mock_calls` list is checked for the calls.

        If `any_order` is False (the default) then the calls must be
        sequential. There can be extra calls before or after the
        specified calls.

        If `any_order` is True then the calls can be in any order, but
        they must all appear in `mock_calls`."""
        if not any_order:
            # _CallList.__contains__ does sub-sequence matching here
            if calls not in self.mock_calls:
                raise AssertionError(
                    'Calls not found.\nExpected: %r\n'
                    'Actual: %r' % (calls, self.mock_calls)
                )
            return

        all_calls = list(self.mock_calls)

        not_found = []
        for kall in calls:
            try:
                all_calls.remove(kall)
            except ValueError:
                not_found.append(kall)
        if not_found:
            raise AssertionError(
                '%r not all found in call list' % (tuple(not_found),)
            )


    def assert_any_call(self, *args, **kwargs):
        """assert the mock has been called with the specified arguments.

        The assert passes if the mock has *ever* been called, unlike
        `assert_called_with` and `assert_called_once_with` that only pass if
        the call is the most recent one."""
        kall = call(*args, **kwargs)
        if kall not in self.call_args_list:
            expected_string = self._format_mock_call_signature(args, kwargs)
            raise AssertionError(
                '%s call not found' % expected_string
            )


    def _get_child_mock(self, **kw):
        """Create the child mocks for attributes and return value.
        By default child mocks will be the same type as the parent.
        Subclasses of Mock may want to override this to customize the way
        child mocks are made.

        For non-callable mocks the callable variant will be used (rather than
        any custom subclass)."""
        _type = type(self)
        if not issubclass(_type, CallableMixin):
            if issubclass(_type, NonCallableMagicMock):
                klass = MagicMock
            elif issubclass(_type, NonCallableMock) :
                klass = Mock
        else:
            # each mock has its own one-off subclass (see __new__), so
            # __mro__[1] is the original mock class it was created from
            klass = _type.__mro__[1]
        return klass(**kw)
+
+
+
def _try_iter(obj):
    """Normalise a side_effect value: None, exceptions and callables
    pass through unchanged; any other iterable becomes an iterator so
    successive calls consume successive values."""
    if obj is None or _is_exception(obj) or _callable(obj):
        return obj
    try:
        return iter(obj)
    except TypeError:
        # XXXX backwards compatibility
        # but this will blow up on first call - so maybe we should fail early?
        return obj
+
+
+
class CallableMixin(Base):
    """Mixin adding __call__ plus call recording to NonCallableMock;
    `Mock` is NonCallableMock + CallableMixin."""

    def __init__(self, spec=None, side_effect=None, return_value=DEFAULT,
                 wraps=None, name=None, spec_set=None, parent=None,
                 _spec_state=None, _new_name='', _new_parent=None, **kwargs):
        # store through __dict__ to avoid the return_value property
        self.__dict__['_mock_return_value'] = return_value

        _super(CallableMixin, self).__init__(
            spec, wraps, name, spec_set, parent,
            _spec_state, _new_name, _new_parent, **kwargs
        )

        # goes through the side_effect property so iterables are wrapped
        self.side_effect = side_effect


    def _mock_check_sig(self, *args, **kwargs):
        # stub method that can be replaced with one with a specific signature
        pass


    def __call__(_mock_self, *args, **kwargs):
        # can't use self in-case a function / method we are mocking uses self
        # in the signature
        _mock_self._mock_check_sig(*args, **kwargs)
        return _mock_self._mock_call(*args, **kwargs)


    def _mock_call(_mock_self, *args, **kwargs):
        """Record the call on this mock and every ancestor, then produce
        the return value (side_effect first, then wraps, then
        return_value)."""
        self = _mock_self
        self.called = True
        self.call_count += 1
        self.call_args = _Call((args, kwargs), two=True)
        self.call_args_list.append(_Call((args, kwargs), two=True))

        _new_name = self._mock_new_name
        _new_parent = self._mock_new_parent
        self.mock_calls.append(_Call(('', args, kwargs)))

        # walk up the parent chain, extending the dotted call name at
        # each level so ancestors record e.g. 'child.grandchild(...)'
        seen = set()
        skip_next_dot = _new_name == '()'
        do_method_calls = self._mock_parent is not None
        name = self._mock_name
        while _new_parent is not None:
            this_mock_call = _Call((_new_name, args, kwargs))
            if _new_parent._mock_new_name:
                dot = '.'
                if skip_next_dot:
                    # no dot after a '()' component: 'm()' not 'm.()'
                    dot = ''

                skip_next_dot = False
                if _new_parent._mock_new_name == '()':
                    skip_next_dot = True

                _new_name = _new_parent._mock_new_name + dot + _new_name

            if do_method_calls:
                if _new_name == name:
                    this_method_call = this_mock_call
                else:
                    this_method_call = _Call((name, args, kwargs))
                _new_parent.method_calls.append(this_method_call)

                do_method_calls = _new_parent._mock_parent is not None
                if do_method_calls:
                    name = _new_parent._mock_name + '.' + name

            _new_parent.mock_calls.append(this_mock_call)
            _new_parent = _new_parent._mock_new_parent

            # use ids here so as not to call __hash__ on the mocks
            _new_parent_id = id(_new_parent)
            if _new_parent_id in seen:
                break
            seen.add(_new_parent_id)

        ret_val = DEFAULT
        effect = self.side_effect
        if effect is not None:
            if _is_exception(effect):
                raise effect

            if not _callable(effect):
                # an iterator of results (see _try_iter); members that
                # are exceptions get raised rather than returned
                result = next(effect)
                if _is_exception(result):
                    raise result
                return result

            # a callable side_effect returning DEFAULT falls through to
            # the normal return_value
            ret_val = effect(*args, **kwargs)
            if ret_val is DEFAULT:
                ret_val = self.return_value

        if (self._mock_wraps is not None and
             self._mock_return_value is DEFAULT):
            return self._mock_wraps(*args, **kwargs)
        if ret_val is DEFAULT:
            ret_val = self.return_value
        return ret_val
+
+
+
class Mock(CallableMixin, NonCallableMock):
    """
    Create a new `Mock` object. `Mock` takes several optional arguments
    that specify the behaviour of the Mock object:

    * `spec`: This can be either a list of strings or an existing object (a
      class or instance) that acts as the specification for the mock object. If
      you pass in an object then a list of strings is formed by calling dir on
      the object (excluding unsupported magic attributes and methods). Accessing
      any attribute not in this list will raise an `AttributeError`.

      If `spec` is an object (rather than a list of strings) then
      `mock.__class__` returns the class of the spec object. This allows mocks
      to pass `isinstance` tests.

    * `spec_set`: A stricter variant of `spec`. If used, attempting to *set*
      or get an attribute on the mock that isn't on the object passed as
      `spec_set` will raise an `AttributeError`.

    * `side_effect`: A function to be called whenever the Mock is called. See
      the `side_effect` attribute. Useful for raising exceptions or
      dynamically changing return values. The function is called with the same
      arguments as the mock, and unless it returns `DEFAULT`, the return
      value of this function is used as the return value.

      Alternatively `side_effect` can be an exception class or instance. In
      this case the exception will be raised when the mock is called.

      If `side_effect` is an iterable then each call to the mock will return
      the next value from the iterable. If any of the members of the iterable
      are exceptions they will be raised instead of returned.

    * `return_value`: The value returned when the mock is called. By default
      this is a new Mock (created on first access). See the
      `return_value` attribute.

    * `wraps`: Item for the mock object to wrap. If `wraps` is not None then
      calling the Mock will pass the call through to the wrapped object
      (returning the real result). Attribute access on the mock will return a
      Mock object that wraps the corresponding attribute of the wrapped object
      (so attempting to access an attribute that doesn't exist will raise an
      `AttributeError`).

      If the mock has an explicit `return_value` set then calls are not passed
      to the wrapped object and the `return_value` is returned instead.

    * `name`: If the mock has a name then it will be used in the repr of the
      mock. This can be useful for debugging. The name is propagated to child
      mocks.

    Mocks can also be called with arbitrary keyword arguments. These will be
    used to set attributes on the mock after it is created.
    """

    # All behaviour lives in CallableMixin (calling, call recording) and
    # NonCallableMock (attributes, spec, asserts); this class only
    # combines them and documents the public constructor arguments.
+
+
+
+def _dot_lookup(thing, comp, import_path):
+    try:
+        return getattr(thing, comp)
+    except AttributeError:
+        __import__(import_path)
+        return getattr(thing, comp)
+
+
+def _importer(target):
+    components = target.split('.')
+    import_path = components.pop(0)
+    thing = __import__(import_path)
+
+    for comp in components:
+        import_path += ".%s" % comp
+        thing = _dot_lookup(thing, comp, import_path)
+    return thing
+
+
+def _is_started(patcher):
+    # XXXX horrible
+    return hasattr(patcher, 'is_local')
+
+
+class _patch(object):
+
+    attribute_name = None
+    _active_patches = set()
+
+    def __init__(
+            self, getter, attribute, new, spec, create,
+            spec_set, autospec, new_callable, kwargs
+        ):
+        if new_callable is not None:
+            if new is not DEFAULT:
+                raise ValueError(
+                    "Cannot use 'new' and 'new_callable' together"
+                )
+            if autospec is not None:
+                raise ValueError(
+                    "Cannot use 'autospec' and 'new_callable' together"
+                )
+
+        self.getter = getter
+        self.attribute = attribute
+        self.new = new
+        self.new_callable = new_callable
+        self.spec = spec
+        self.create = create
+        self.has_local = False
+        self.spec_set = spec_set
+        self.autospec = autospec
+        self.kwargs = kwargs
+        self.additional_patchers = []
+
+
+    def copy(self):
+        patcher = _patch(
+            self.getter, self.attribute, self.new, self.spec,
+            self.create, self.spec_set,
+            self.autospec, self.new_callable, self.kwargs
+        )
+        patcher.attribute_name = self.attribute_name
+        patcher.additional_patchers = [
+            p.copy() for p in self.additional_patchers
+        ]
+        return patcher
+
+
+    def __call__(self, func):
+        if isinstance(func, ClassTypes):
+            return self.decorate_class(func)
+        return self.decorate_callable(func)
+
+
+    def decorate_class(self, klass):
+        for attr in dir(klass):
+            if not attr.startswith(patch.TEST_PREFIX):
+                continue
+
+            attr_value = getattr(klass, attr)
+            if not hasattr(attr_value, "__call__"):
+                continue
+
+            patcher = self.copy()
+            setattr(klass, attr, patcher(attr_value))
+        return klass
+
+
    def decorate_callable(self, func):
        """Wrap `func` so that every patcher in `patched.patchings` is
        entered while it runs and exited afterwards.

        If `func` is already a wrapper produced by another patcher, this
        patcher is appended to its existing `patchings` list so a single
        wrapper services all of them.
        """
        if hasattr(func, 'patchings'):
            func.patchings.append(self)
            return func

        @wraps(func)
        def patched(*args, **keywargs):
            # don't use a with here (backwards compatibility with Python 2.4)
            extra_args = []
            entered_patchers = []

            # can't use try...except...finally because of Python 2.4
            # compatibility
            exc_info = tuple()
            try:
                try:
                    for patching in patched.patchings:
                        arg = patching.__enter__()
                        entered_patchers.append(patching)
                        if patching.attribute_name is not None:
                            # patch.multiple: created mocks are passed by keyword
                            keywargs.update(arg)
                        elif patching.new is DEFAULT:
                            # created mocks are appended as positional args
                            extra_args.append(arg)

                    args += tuple(extra_args)
                    return func(*args, **keywargs)
                except:
                    if (patching not in entered_patchers and
                        _is_started(patching)):
                        # the patcher may have been started, but an exception
                        # raised whilst entering one of its additional_patchers
                        entered_patchers.append(patching)
                    # Pass the exception to __exit__
                    exc_info = sys.exc_info()
                    # re-raise the exception
                    raise
            finally:
                # undo the patches in reverse order of application
                for patching in reversed(entered_patchers):
                    patching.__exit__(*exc_info)

        patched.patchings = [self]
        if hasattr(func, 'func_code'):
            # not in Python 3
            patched.compat_co_firstlineno = getattr(
                func, "compat_co_firstlineno",
                func.func_code.co_firstlineno
            )
        return patched
+
+
+    def get_original(self):
+        target = self.getter()
+        name = self.attribute
+
+        original = DEFAULT
+        local = False
+
+        try:
+            original = target.__dict__[name]
+        except (AttributeError, KeyError):
+            original = getattr(target, name, DEFAULT)
+        else:
+            local = True
+
+        if not self.create and original is DEFAULT:
+            raise AttributeError(
+                "%s does not have the attribute %r" % (target, name)
+            )
+        return original, local
+
+
    def __enter__(self):
        """Perform the patch.

        Resolves the target, works out the replacement object (the
        explicit `new`, an autospec'd mock, or a freshly created mock
        honouring spec/spec_set/new_callable), installs it with setattr
        and returns it. For patch.multiple (attribute_name set), a dict
        of created mocks is returned instead and any additional patchers
        are entered too.
        """
        new, spec, spec_set = self.new, self.spec, self.spec_set
        autospec, kwargs = self.autospec, self.kwargs
        new_callable = self.new_callable
        self.target = self.getter()

        # normalise False to None
        if spec is False:
            spec = None
        if spec_set is False:
            spec_set = None
        if autospec is False:
            autospec = None

        # spec and autospec are mutually exclusive ways of shaping the mock
        if spec is not None and autospec is not None:
            raise TypeError("Can't specify spec and autospec")
        if ((spec is not None or autospec is not None) and
            spec_set not in (True, None)):
            raise TypeError("Can't provide explicit spec_set *and* spec or autospec")

        original, local = self.get_original()

        if new is DEFAULT and autospec is None:
            # no replacement supplied: create a mock ourselves
            inherit = False
            if spec is True:
                # set spec to the object we are replacing
                spec = original
                if spec_set is True:
                    spec_set = original
                    spec = None
            elif spec is not None:
                if spec_set is True:
                    spec_set = spec
                    spec = None
            elif spec_set is True:
                spec_set = original

            if spec is not None or spec_set is not None:
                if original is DEFAULT:
                    raise TypeError("Can't use 'spec' with create=True")
                if isinstance(original, ClassTypes):
                    # If we're patching out a class and there is a spec
                    inherit = True

            Klass = MagicMock
            _kwargs = {}
            if new_callable is not None:
                Klass = new_callable
            elif spec is not None or spec_set is not None:
                this_spec = spec
                if spec_set is not None:
                    this_spec = spec_set
                if _is_list(this_spec):
                    not_callable = '__call__' not in this_spec
                else:
                    not_callable = not _callable(this_spec)
                if not_callable:
                    # a non-callable spec gets a non-callable mock
                    Klass = NonCallableMagicMock

            if spec is not None:
                _kwargs['spec'] = spec
            if spec_set is not None:
                _kwargs['spec_set'] = spec_set

            # add a name to mocks
            if (isinstance(Klass, type) and
                issubclass(Klass, NonCallableMock) and self.attribute):
                _kwargs['name'] = self.attribute

            _kwargs.update(kwargs)
            new = Klass(**_kwargs)

            if inherit and _is_instance_mock(new):
                # we can only tell if the instance should be callable if the
                # spec is not a list
                this_spec = spec
                if spec_set is not None:
                    this_spec = spec_set
                if (not _is_list(this_spec) and not
                    _instance_callable(this_spec)):
                    Klass = NonCallableMagicMock

                # the mock replaces a class, so its return value (the
                # "instance") gets the same spec
                _kwargs.pop('name')
                new.return_value = Klass(_new_parent=new, _new_name='()',
                                         **_kwargs)
        elif autospec is not None:
            # spec is ignored, new *must* be default, spec_set is treated
            # as a boolean. Should we check spec is not None and that spec_set
            # is a bool?
            if new is not DEFAULT:
                raise TypeError(
                    "autospec creates the mock for you. Can't specify "
                    "autospec and new."
                )
            if original is DEFAULT:
                raise TypeError("Can't use 'autospec' with create=True")
            spec_set = bool(spec_set)
            if autospec is True:
                autospec = original

            new = create_autospec(autospec, spec_set=spec_set,
                                  _name=self.attribute, **kwargs)
        elif kwargs:
            # can't set keyword args when we aren't creating the mock
            # XXXX If new is a Mock we could call new.configure_mock(**kwargs)
            raise TypeError("Can't pass kwargs to a mock we aren't creating")

        new_attr = new

        # remember enough state for __exit__ to undo the patch
        self.temp_original = original
        self.is_local = local
        setattr(self.target, self.attribute, new_attr)
        if self.attribute_name is not None:
            # patch.multiple: collect created mocks into a dict keyed by
            # attribute name, and enter any additional patchers as well
            extra_args = {}
            if self.new is DEFAULT:
                extra_args[self.attribute_name] =  new
            for patching in self.additional_patchers:
                arg = patching.__enter__()
                if patching.new is DEFAULT:
                    extra_args.update(arg)
            return extra_args

        return new
+
+
    def __exit__(self, *exc_info):
        """Undo the patch, restoring the original attribute.

        Raises RuntimeError if the patcher was never entered. Additional
        patchers (from patch.multiple) are exited afterwards, in reverse
        order of entry.
        """
        if not _is_started(self):
            raise RuntimeError('stop called on unstarted patcher')

        if self.is_local and self.temp_original is not DEFAULT:
            # attribute lived in the target's own __dict__: put it back
            setattr(self.target, self.attribute, self.temp_original)
        else:
            # attribute was inherited or created by us: remove our patch
            delattr(self.target, self.attribute)
            if not self.create and not hasattr(self.target, self.attribute):
                # needed for proxy objects like django settings
                setattr(self.target, self.attribute, self.temp_original)

        # drop saved state so the patcher can be reused / garbage collected
        del self.temp_original
        del self.is_local
        del self.target
        for patcher in reversed(self.additional_patchers):
            if _is_started(patcher):
                patcher.__exit__(*exc_info)
+
+
+    def start(self):
+        """Activate a patch, returning any created mock."""
+        result = self.__enter__()
+        self._active_patches.add(self)
+        return result
+
+
+    def stop(self):
+        """Stop an active patch."""
+        self._active_patches.discard(self)
+        return self.__exit__()
+
+
+
+def _get_target(target):
+    try:
+        target, attribute = target.rsplit('.', 1)
+    except (TypeError, ValueError):
+        raise TypeError("Need a valid target to patch. You supplied: %r" %
+                        (target,))
+    getter = lambda: _importer(target)
+    return getter, attribute
+
+
def _patch_object(
        target, attribute, new=DEFAULT, spec=None,
        create=False, spec_set=None, autospec=None,
        new_callable=None, **kwargs
    ):
    """
    patch.object(target, attribute, new=DEFAULT, spec=None, create=False,
                 spec_set=None, autospec=None, new_callable=None, **kwargs)

    Patch the named member (`attribute`) on an object (`target`) with a
    mock object.

    Usable as a decorator, class decorator or context manager. The
    arguments `new`, `spec`, `create`, `spec_set`, `autospec` and
    `new_callable` mean the same as for `patch`, and arbitrary keyword
    arguments configure the mock that gets created.

    When used as a class decorator `patch.object` honours
    `patch.TEST_PREFIX` for choosing which methods to wrap.
    """
    # target is already an object, so the getter simply closes over it
    return _patch(
        (lambda: target), attribute, new, spec, create,
        spec_set, autospec, new_callable, kwargs
    )
+
+
def _patch_multiple(target, spec=None, create=False, spec_set=None,
                    autospec=None, new_callable=None, **kwargs):
    """Perform several patches on one target in a single call.

    `target` is the object to patch, or a dotted string naming it (it is
    then imported lazily). Each keyword argument names an attribute to
    patch and supplies its replacement::

        with patch.multiple(settings, FIRST_PATCH='one', SECOND_PATCH='two'):
            ...

    Pass `DEFAULT` as a value to have a mock created for you; created
    mocks are passed to a decorated function by keyword, or returned as
    a dict when used as a context manager.

    Usable as a decorator, class decorator or context manager. `spec`,
    `spec_set`, `create`, `autospec` and `new_callable` mean the same as
    for `patch` and apply to *all* patches made here. As a class
    decorator it honours `patch.TEST_PREFIX` when choosing which methods
    to wrap.
    """
    if type(target) in (unicode, str):
        getter = lambda: _importer(target)
    else:
        getter = lambda: target

    if not kwargs:
        raise ValueError(
            'Must supply at least one keyword argument with patch.multiple'
        )
    # need to wrap in a list for python 3, where items is a view
    items = list(kwargs.items())

    # build one patcher per attribute; the first acts as the primary and
    # carries the rest in its additional_patchers list
    patchers = []
    for attribute, new in items:
        this_patcher = _patch(
            getter, attribute, new, spec, create, spec_set,
            autospec, new_callable, {}
        )
        this_patcher.attribute_name = attribute
        patchers.append(this_patcher)

    primary = patchers[0]
    primary.additional_patchers.extend(patchers[1:])
    return primary
+
+
def patch(
        target, new=DEFAULT, spec=None, create=False,
        spec_set=None, autospec=None, new_callable=None, **kwargs
    ):
    """
    `patch` acts as a function decorator, class decorator or a context
    manager. Inside the body of the function or with statement, the `target`
    is patched with a `new` object. When the function/with statement exits
    the patch is undone.

    If `new` is omitted, then the target is replaced with a
    `MagicMock`. If `patch` is used as a decorator and `new` is
    omitted, the created mock is passed in as an extra argument to the
    decorated function. If `patch` is used as a context manager the created
    mock is returned by the context manager.

    `target` should be a string in the form `'package.module.ClassName'`. The
    `target` is imported and the specified object replaced with the `new`
    object, so the `target` must be importable from the environment you are
    calling `patch` from. The target is imported when the decorated function
    is executed, not at decoration time.

    The `spec` and `spec_set` keyword arguments are passed to the `MagicMock`
    if patch is creating one for you.

    In addition you can pass `spec=True` or `spec_set=True`, which causes
    patch to pass in the object being mocked as the spec/spec_set object.

    `new_callable` allows you to specify a different class, or callable object,
    that will be called to create the `new` object. By default `MagicMock` is
    used.

    A more powerful form of `spec` is `autospec`. If you set `autospec=True`
    then the mock will be created with a spec from the object being replaced.
    All attributes of the mock will also have the spec of the corresponding
    attribute of the object being replaced. Methods and functions being
    mocked will have their arguments checked and will raise a `TypeError` if
    they are called with the wrong signature. For mocks replacing a class,
    their return value (the 'instance') will have the same spec as the class.

    Instead of `autospec=True` you can pass `autospec=some_object` to use an
    arbitrary object as the spec instead of the one being replaced.

    By default `patch` will fail to replace attributes that don't exist. If
    you pass in `create=True`, and the attribute doesn't exist, patch will
    create the attribute for you when the patched function is called, and
    delete it again afterwards. This is useful for writing tests against
    attributes that your production code creates at runtime. It is off by
    default because it can be dangerous. With it switched on you can write
    passing tests against APIs that don't actually exist!

    Patch can be used as a `TestCase` class decorator. It works by
    decorating each test method in the class. This reduces the boilerplate
    code when your test methods share a common patchings set. `patch` finds
    tests by looking for method names that start with `patch.TEST_PREFIX`.
    By default this is `test`, which matches the way `unittest` finds tests.
    You can specify an alternative prefix by setting `patch.TEST_PREFIX`.

    Patch can be used as a context manager, with the with statement. Here the
    patching applies to the indented block after the with statement. If you
    use "as" then the patched object will be bound to the name after the
    "as"; very useful if `patch` is creating a mock object for you.

    `patch` takes arbitrary keyword arguments. These will be passed to
    the `Mock` (or `new_callable`) on construction.

    `patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are
    available for alternate use-cases.
    """
    # split the dotted path, then hand everything to the patcher class
    getter, attribute = _get_target(target)
    return _patch(
        getter, attribute, new, spec, create,
        spec_set, autospec, new_callable, kwargs
    )
+
+
+class _patch_dict(object):
+    """
+    Patch a dictionary, or dictionary like object, and restore the dictionary
+    to its original state after the test.
+
+    `in_dict` can be a dictionary or a mapping like container. If it is a
+    mapping then it must at least support getting, setting and deleting items
+    plus iterating over keys.
+
+    `in_dict` can also be a string specifying the name of the dictionary, which
+    will then be fetched by importing it.
+
+    `values` can be a dictionary of values to set in the dictionary. `values`
+    can also be an iterable of `(key, value)` pairs.
+
+    If `clear` is True then the dictionary will be cleared before the new
+    values are set.
+
+    `patch.dict` can also be called with arbitrary keyword arguments to set
+    values in the dictionary::
+
+        with patch.dict('sys.modules', mymodule=Mock(), other_module=Mock()):
+            ...
+
+    `patch.dict` can be used as a context manager, decorator or class
+    decorator. When used as a class decorator `patch.dict` honours
+    `patch.TEST_PREFIX` for choosing which methods to wrap.
+    """
+
+    def __init__(self, in_dict, values=(), clear=False, **kwargs):
+        if isinstance(in_dict, basestring):
+            in_dict = _importer(in_dict)
+        self.in_dict = in_dict
+        # support any argument supported by dict(...) constructor
+        self.values = dict(values)
+        self.values.update(kwargs)
+        self.clear = clear
+        self._original = None
+
+
+    def __call__(self, f):
+        if isinstance(f, ClassTypes):
+            return self.decorate_class(f)
+        @wraps(f)
+        def _inner(*args, **kw):
+            self._patch_dict()
+            try:
+                return f(*args, **kw)
+            finally:
+                self._unpatch_dict()
+
+        return _inner
+
+
+    def decorate_class(self, klass):
+        for attr in dir(klass):
+            attr_value = getattr(klass, attr)
+            if (attr.startswith(patch.TEST_PREFIX) and
+                 hasattr(attr_value, "__call__")):
+                decorator = _patch_dict(self.in_dict, self.values, self.clear)
+                decorated = decorator(attr_value)
+                setattr(klass, attr, decorated)
+        return klass
+
+
+    def __enter__(self):
+        """Patch the dict."""
+        self._patch_dict()
+
+
+    def _patch_dict(self):
+        values = self.values
+        in_dict = self.in_dict
+        clear = self.clear
+
+        try:
+            original = in_dict.copy()
+        except AttributeError:
+            # dict like object with no copy method
+            # must support iteration over keys
+            original = {}
+            for key in in_dict:
+                original[key] = in_dict[key]
+        self._original = original
+
+        if clear:
+            _clear_dict(in_dict)
+
+        try:
+            in_dict.update(values)
+        except AttributeError:
+            # dict like object with no update method
+            for key in values:
+                in_dict[key] = values[key]
+
+
+    def _unpatch_dict(self):
+        in_dict = self.in_dict
+        original = self._original
+
+        _clear_dict(in_dict)
+
+        try:
+            in_dict.update(original)
+        except AttributeError:
+            for key in original:
+                in_dict[key] = original[key]
+
+
+    def __exit__(self, *args):
+        """Unpatch the dict."""
+        self._unpatch_dict()
+        return False
+
+    start = __enter__
+    stop = __exit__
+
+
+def _clear_dict(in_dict):
+    try:
+        in_dict.clear()
+    except AttributeError:
+        keys = list(in_dict)
+        for key in keys:
+            del in_dict[key]
+
+
def _patch_stopall():
    """Stop all active patches."""
    # iterate a snapshot: stop() mutates _active_patches while we loop
    # (also avoids shadowing the module-level `patch` name)
    for active in list(_patch._active_patches):
        active.stop()
+
+
# Expose the alternative patcher forms and the stopall helper as
# attributes of `patch` itself, forming the public patch.object /
# patch.dict / patch.multiple / patch.stopall API. TEST_PREFIX controls
# which method names class decoration wraps.
patch.object = _patch_object
patch.dict = _patch_dict
patch.multiple = _patch_multiple
patch.stopall = _patch_stopall
patch.TEST_PREFIX = 'test'
+
# Space-separated names (without the __dunder__ decoration) of the magic
# methods MagicMock supports by default.
magic_methods = (
    "lt le gt ge eq ne "
    "getitem setitem delitem "
    "len contains iter "
    "hash str sizeof "
    "enter exit "
    "divmod neg pos abs invert "
    "complex int float index "
    "trunc floor ceil "
)

# binary numeric operators; the in-place (i*) and reflected (r*) variants
# are derived from the same list below
numerics = "add sub mul div floordiv mod lshift rshift and xor or pow "
inplace = ' '.join('i%s' % n for n in numerics.split())
right = ' '.join('r%s' % n for n in numerics.split())
# version-specific extras (e.g. Python 2 has __unicode__/__long__, Python 3
# has __bool__/__next__)
extra = ''
if inPy3k:
    extra = 'bool next '
else:
    extra = 'unicode long nonzero oct hex truediv rtruediv '

# not including __prepare__, __instancecheck__, __subclasscheck__
# (as they are metaclass methods)
# __del__ is not supported at all as it causes problems if it exists

# Magic methods mock knows about but does not configure with a default
# implementation or return value.
_non_defaults = set('__%s__' % method for method in [
    'cmp', 'getslice', 'setslice', 'coerce', 'subclasses',
    'format', 'get', 'set', 'delete', 'reversed',
    'missing', 'reduce', 'reduce_ex', 'getinitargs',
    'getnewargs', 'getstate', 'setstate', 'getformat',
    'setformat', 'repr', 'dir'
])
+
+
+def _get_method(name, func):
+    "Turns a callable object (like a mock) into a real function"
+    def method(self, *args, **kw):
+        return func(self, *args, **kw)
+    method.__name__ = name
+    return method
+
+
# The full set of dunder names MagicMock preconfigures by default.
_magics = set(
    '__%s__' % method for method in
    ' '.join([magic_methods, numerics, inplace, right, extra]).split()
)

# Every magic name mock knows about, whether defaulted or not.
_all_magics = _magics | _non_defaults
+
+_unsupported_magics = set([
+    '__getattr__', '__setattr__',
+    '__init__', '__new__', '__prepare__'
+    '__instancecheck__', '__subclasscheck__',
+    '__del__'
+])
+
# Default return values that must be computed per-mock: they delegate to
# the plain `object` implementations so hashing/printing a mock behaves
# like an ordinary object. (`unicode` is the Python 2 builtin.)
_calculate_return_value = {
    '__hash__': lambda self: object.__hash__(self),
    '__str__': lambda self: object.__str__(self),
    '__sizeof__': lambda self: object.__sizeof__(self),
    '__unicode__': lambda self: unicode(object.__str__(self)),
}
+
# Fixed default return values for magic methods, chosen to satisfy the
# protocols Python expects (e.g. __len__ -> int, __exit__ -> falsey so
# exceptions propagate). NotImplemented for the rich comparisons lets
# Python fall back to the other operand. (`long` is the Python 2 builtin.)
_return_values = {
    '__lt__': NotImplemented,
    '__gt__': NotImplemented,
    '__le__': NotImplemented,
    '__ge__': NotImplemented,
    '__int__': 1,
    '__contains__': False,
    '__len__': 0,
    '__exit__': False,
    '__complex__': 1j,
    '__float__': 1.0,
    '__bool__': True,
    '__nonzero__': True,
    '__oct__': '1',
    '__hex__': '0x1',
    '__long__': long(1),
    '__index__': 1,
}
+
+
def _get_eq(self):
    """Build the default __eq__ side effect for mock `self`.

    An explicitly configured return value wins; otherwise equality is
    identity comparison.
    """
    def __eq__(other):
        configured = self.__eq__._mock_return_value
        if configured is not DEFAULT:
            return configured
        return self is other
    return __eq__
+
def _get_ne(self):
    """Build the default __ne__ side effect for mock `self`.

    When a return value has been configured this yields DEFAULT —
    presumably a signal to the calling machinery to substitute the
    configured value (TODO confirm against the mock call path);
    otherwise inequality is identity comparison.
    """
    def __ne__(other):
        configured = self.__ne__._mock_return_value
        if configured is not DEFAULT:
            return DEFAULT
        return self is not other
    return __ne__
+
def _get_iter(self):
    """Build the default __iter__ side effect for mock `self`.

    With no configured return value an empty iterator is produced.
    """
    def __iter__():
        configured = self.__iter__._mock_return_value
        if configured is DEFAULT:
            return iter([])
        # if the configured value was already an iterator, calling iter
        # on it returns the iterator unchanged
        return iter(configured)
    return __iter__
+
# Magics whose defaults are installed as side effects rather than return
# values — they need access to the mock itself at call time.
_side_effect_methods = {
    '__eq__': _get_eq,
    '__ne__': _get_ne,
    '__iter__': _get_iter,
}
+
+
+
def _set_return_value(mock, method, name):
    """Give `method` (a magic-method mock attached to `mock`) its default.

    Precedence: a fixed value from _return_values, then a per-mock
    computed value from _calculate_return_value, then a side effect from
    _side_effect_methods. Names in none of the tables are left alone.
    """
    fixed = _return_values.get(name, DEFAULT)
    if fixed is not DEFAULT:
        method.return_value = fixed
        return

    calculator = _calculate_return_value.get(name)
    if calculator is not None:
        try:
            value = calculator(mock)
        except AttributeError:
            # XXXX why do we return AttributeError here?
            #      set it as a side_effect instead?
            value = AttributeError(name)
        method.return_value = value
        return

    side_effector = _side_effect_methods.get(name)
    if side_effector is not None:
        method.side_effect = side_effector(mock)
+
+
+
class MagicMixin(object):
    """Mixin that installs magic-method support on a mock class."""

    def __init__(self, *args, **kw):
        _super(MagicMixin, self).__init__(*args, **kw)
        self._mock_set_magics()


    def _mock_set_magics(self):
        """(Re)install MagicProxy descriptors for the supported magics.

        When the mock has a spec, only magics present on the spec are
        kept; proxies for the rest are removed from this mock's class.
        """
        wanted = _magics
        if self._mock_methods is not None:
            wanted = _magics.intersection(self._mock_methods)
            for unwanted in _magics - wanted:
                if unwanted in type(self).__dict__:
                    # remove unneeded magic methods
                    delattr(self, unwanted)

        # don't overwrite existing attributes if called a second time
        klass = type(self)
        for entry in wanted - set(klass.__dict__):
            setattr(klass, entry, MagicProxy(entry, self))
+
+
+
class NonCallableMagicMock(MagicMixin, NonCallableMock):
    """A version of `MagicMock` that isn't callable."""

    def mock_add_spec(self, spec, spec_set=False):
        """Attach `spec` (an object or a list of strings) to this mock;
        only attributes present on the spec may then be fetched.

        If `spec_set` is True then only attributes on the spec can be set.
        """
        self._mock_add_spec(spec, spec_set)
        # rebuild the magic-method proxies to match the new spec
        self._mock_set_magics()
+
+
+
class MagicMock(MagicMixin, Mock):
    """
    MagicMock is a subclass of Mock with default implementations
    of most of the magic methods. You can use MagicMock without having to
    configure the magic methods yourself.

    If you use the `spec` or `spec_set` arguments then *only* magic
    methods that exist in the spec will be created.

    Attributes and the return value of a `MagicMock` will also be `MagicMocks`.
    """

    def mock_add_spec(self, spec, spec_set=False):
        """Attach `spec` (an object or a list of strings) to this mock;
        only attributes present on the spec may then be fetched.

        If `spec_set` is True then only attributes on the spec can be set.
        """
        self._mock_add_spec(spec, spec_set)
        # rebuild the magic-method proxies to match the new spec
        self._mock_set_magics()
+
+
+
class MagicProxy(object):
    """Descriptor that lazily materialises a magic-method mock on access."""

    def __init__(self, name, parent):
        self.name = name
        self.parent = parent

    def create_mock(self):
        """Build the real child mock, attach it to the parent in place of
        this proxy, and give it its default return value / side effect."""
        entry = self.name
        parent = self.parent
        child = parent._get_child_mock(name=entry, _new_name=entry,
                                       _new_parent=parent)
        setattr(parent, entry, child)
        _set_return_value(parent, child, entry)
        return child

    def __call__(self, *args, **kwargs):
        return self.create_mock()(*args, **kwargs)

    def __get__(self, obj, _type=None):
        return self.create_mock()
+
+
+
+class _ANY(object):
+    "A helper object that compares equal to everything."
+
+    def __eq__(self, other):
+        return True
+
+    def __ne__(self, other):
+        return False
+
+    def __repr__(self):
+        return '<ANY>'
+
+ANY = _ANY()
+
+
+
+def _format_call_signature(name, args, kwargs):
+    message = '%s(%%s)' % name
+    formatted_args = ''
+    args_string = ', '.join([repr(arg) for arg in args])
+    kwargs_string = ', '.join([
+        '%s=%r' % (key, value) for key, value in kwargs.items()
+    ])
+    if args_string:
+        formatted_args = args_string
+    if kwargs_string:
+        if formatted_args:
+            formatted_args += ', '
+        formatted_args += kwargs_string
+
+    return message % formatted_args
+
+
+
class _Call(tuple):
    """
    A tuple for holding the results of a call to a mock, either in the form
    `(args, kwargs)` or `(name, args, kwargs)`.

    If args or kwargs are empty then a call tuple will compare equal to
    a tuple without those values. This makes comparisons less verbose::

        _Call(('name', (), {})) == ('name',)
        _Call(('name', (1,), {})) == ('name', (1,))
        _Call(((), {'a': 'b'})) == ({'a': 'b'},)

    The `_Call` object provides a useful shortcut for comparing with call::

        _Call(((1, 2), {'a': 3})) == call(1, 2, a=3)
        _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3)

    If the _Call has no name then it will match any name.
    """
    def __new__(cls, value=(), name=None, parent=None, two=False,
                from_kall=True):
        # normalise `value` into (name, args, kwargs); any subset may be
        # supplied: (), (name,), (args,), (kwargs,), (name, args),
        # (name, kwargs), (args, kwargs) or the full triple
        name = ''
        args = ()
        kwargs = {}
        _len = len(value)
        if _len == 3:
            name, args, kwargs = value
        elif _len == 2:
            first, second = value
            if isinstance(first, basestring):
                # (name, args) or (name, kwargs)
                name = first
                if isinstance(second, tuple):
                    args = second
                else:
                    kwargs = second
            else:
                args, kwargs = first, second
        elif _len == 1:
            value, = value
            if isinstance(value, basestring):
                name = value
            elif isinstance(value, tuple):
                args = value
            else:
                kwargs = value

        if two:
            # two-tuple form: just (args, kwargs), no name element
            return tuple.__new__(cls, (args, kwargs))

        return tuple.__new__(cls, (name, args, kwargs))


    def __init__(self, value=(), name=None, parent=None, two=False,
                 from_kall=True):
        # `name`/`parent` link chained attribute access (call.foo.bar);
        # from_kall is False for intermediate links built by __getattr__
        self.name = name
        self.parent = parent
        self.from_kall = from_kall


    def __eq__(self, other):
        """Compare with `other`, treating missing name/args/kwargs in
        `other` as empty (see class docstring)."""
        if other is ANY:
            return True
        try:
            len_other = len(other)
        except TypeError:
            # not a sized object, cannot possibly match
            return False

        self_name = ''
        if len(self) == 2:
            self_args, self_kwargs = self
        else:
            self_name, self_args, self_kwargs = self

        # decompose `other` into (name, args, kwargs) based on its length
        other_name = ''
        if len_other == 0:
            other_args, other_kwargs = (), {}
        elif len_other == 3:
            other_name, other_args, other_kwargs = other
        elif len_other == 1:
            value, = other
            if isinstance(value, tuple):
                other_args = value
                other_kwargs = {}
            elif isinstance(value, basestring):
                other_name = value
                other_args, other_kwargs = (), {}
            else:
                other_args = ()
                other_kwargs = value
        else:
            # len 2
            # could be (name, args) or (name, kwargs) or (args, kwargs)
            first, second = other
            if isinstance(first, basestring):
                other_name = first
                if isinstance(second, tuple):
                    other_args, other_kwargs = second, {}
                else:
                    other_args, other_kwargs = (), second
            else:
                other_args, other_kwargs = first, second

        # an empty self_name matches any name on the other side
        if self_name and other_name != self_name:
            return False

        # this order is important for ANY to work!
        return (other_args, other_kwargs) == (self_args, self_kwargs)


    def __ne__(self, other):
        return not self.__eq__(other)


    def __call__(self, *args, **kwargs):
        # calling a call object produces the child call, e.g. call.foo(1)
        if self.name is None:
            return _Call(('', args, kwargs), name='()')

        name = self.name + '()'
        return _Call((self.name, args, kwargs), name=name, parent=self)


    def __getattr__(self, attr):
        # attribute access builds up the dotted name: call.foo.bar
        if self.name is None:
            return _Call(name=attr, from_kall=False)
        name = '%s.%s' % (self.name, attr)
        return _Call(name=name, parent=self, from_kall=False)


    def __repr__(self):
        if not self.from_kall:
            # intermediate attribute-access link: show the dotted name only
            name = self.name or 'call'
            if name.startswith('()'):
                name = 'call%s' % name
            return name

        if len(self) == 2:
            name = 'call'
            args, kwargs = self
        else:
            name, args, kwargs = self
            if not name:
                name = 'call'
            elif not name.startswith('()'):
                name = 'call.%s' % name
            else:
                name = 'call%s' % name
        return _format_call_signature(name, args, kwargs)


    def call_list(self):
        """For a call object that represents multiple calls, `call_list`
        returns a list of all the intermediate calls as well as the
        final call."""
        vals = []
        thing = self
        while thing is not None:
            if thing.from_kall:
                vals.append(thing)
            thing = thing.parent
        return _CallList(reversed(vals))
+
+
+call = _Call(from_kall=False)
+
+
+
+def create_autospec(spec, spec_set=False, instance=False, _parent=None,
+                    _name=None, **kwargs):
+    """Create a mock object using another object as a spec. Attributes on the
+    mock will use the corresponding attribute on the `spec` object as their
+    spec.
+
+    Functions or methods being mocked will have their arguments checked
+    to check that they are called with the correct signature.
+
+    If `spec_set` is True then attempting to set attributes that don't exist
+    on the spec object will raise an `AttributeError`.
+
+    If a class is used as a spec then the return value of the mock (the
+    instance of the class) will have the same spec. You can use a class as the
+    spec for an instance object by passing `instance=True`. The returned mock
+    will only be callable if instances of the mock are callable.
+
+    `create_autospec` also takes arbitrary keyword arguments that are passed to
+    the constructor of the created mock."""
+    if _is_list(spec):
+        # can't pass a list instance to the mock constructor as it will be
+        # interpreted as a list of strings
+        spec = type(spec)
+
+    is_type = isinstance(spec, ClassTypes)
+
+    # spec_set takes precedence over spec; a None spec means a plain mock.
+    _kwargs = {'spec': spec}
+    if spec_set:
+        _kwargs = {'spec_set': spec}
+    elif spec is None:
+        # None we mock with a normal mock without a spec
+        _kwargs = {}
+
+    _kwargs.update(kwargs)
+
+    Klass = MagicMock
+    if type(spec) in DescriptorTypes:
+        # descriptors don't have a spec
+        # because we don't know what type they return
+        _kwargs = {}
+    elif not _callable(spec):
+        Klass = NonCallableMagicMock
+    elif is_type and instance and not _instance_callable(spec):
+        Klass = NonCallableMagicMock
+
+    _new_name = _name
+    if _parent is None:
+        # for a top level object no _new_name should be set
+        _new_name = ''
+
+    mock = Klass(parent=_parent, _new_parent=_parent, _new_name=_new_name,
+                 name=_name, **_kwargs)
+
+    if isinstance(spec, FunctionTypes):
+        # should only happen at the top level because we don't
+        # recurse for functions
+        mock = _set_signature(mock, spec)
+    else:
+        _check_signature(spec, mock, is_type, instance)
+
+    if _parent is not None and not instance:
+        _parent._mock_children[_name] = mock
+
+    # Mocking a class: give the return value (the "instance") the same spec.
+    if is_type and not instance and 'return_value' not in kwargs:
+        mock.return_value = create_autospec(spec, spec_set, instance=True,
+                                            _name='()', _parent=mock)
+
+    for entry in dir(spec):
+        if _is_magic(entry):
+            # MagicMock already does the useful magic methods for us
+            continue
+
+        if isinstance(spec, FunctionTypes) and entry in FunctionAttributes:
+            # allow a mock to actually be a function
+            continue
+
+        # XXXX do we need a better way of getting attributes without
+        # triggering code execution (?) Probably not - we need the actual
+        # object to mock it so we would rather trigger a property than mock
+        # the property descriptor. Likewise we want to mock out dynamically
+        # provided attributes.
+        # XXXX what about attributes that raise exceptions other than
+        # AttributeError on being fetched?
+        # we could be resilient against it, or catch and propagate the
+        # exception when the attribute is fetched from the mock
+        try:
+            original = getattr(spec, entry)
+        except AttributeError:
+            continue
+
+        kwargs = {'spec': original}
+        if spec_set:
+            kwargs = {'spec_set': original}
+
+        if not isinstance(original, FunctionTypes):
+            # Non-function attributes are mocked lazily via _SpecState.
+            # NOTE(review): `instance` here lands in _SpecState's 5th
+            # positional slot (`ids`), not `instance` — confirm vs upstream.
+            new = _SpecState(original, spec_set, mock, entry, instance)
+            mock._mock_children[entry] = new
+        else:
+            parent = mock
+            if isinstance(spec, FunctionTypes):
+                parent = mock.mock
+
+            new = MagicMock(parent=parent, name=entry, _new_name=entry,
+                            _new_parent=parent, **kwargs)
+            mock._mock_children[entry] = new
+            skipfirst = _must_skip(spec, entry, is_type)
+            _check_signature(original, new, skipfirst=skipfirst)
+
+        # so functions created with _set_signature become instance attributes,
+        # *plus* their underlying mock exists in _mock_children of the parent
+        # mock. Adding to _mock_children may be unnecessary where we are also
+        # setting as an instance attribute?
+        if isinstance(new, FunctionTypes):
+            setattr(mock, entry, new)
+
+    return mock
+
+
+def _must_skip(spec, entry, is_type):
+    # Decide whether the first argument (self/cls) should be skipped when
+    # checking the signature of `entry` on `spec` against mock calls.
+    if not isinstance(spec, ClassTypes):
+        if entry in getattr(spec, '__dict__', {}):
+            # instance attribute - shouldn't skip
+            return False
+        spec = spec.__class__
+    if not hasattr(spec, '__mro__'):
+        # old style class: can't have descriptors anyway
+        return is_type
+
+    # Search the MRO for the raw attribute so decorators are visible.
+    for klass in spec.__mro__:
+        result = klass.__dict__.get(entry, DEFAULT)
+        if result is DEFAULT:
+            continue
+        if isinstance(result, (staticmethod, classmethod)):
+            return False
+        return is_type
+
+    # shouldn't get here unless function is a dynamically provided attribute
+    # XXXX untested behaviour
+    return is_type
+
+
+def _get_class(obj):
+    # Return the class of `obj`, tolerating objects without __class__.
+    try:
+        return obj.__class__
+    except AttributeError:
+        # in Python 2, _sre.SRE_Pattern objects have no __class__
+        return type(obj)
+
+
+class _SpecState(object):
+    """Record for lazily creating a specced child mock: the original
+    attribute, spec_set flag, parent mock, attribute name, and instance
+    flag. (`ids` is stored but not read here — presumably used by the
+    caller; verify.)"""
+
+    def __init__(self, spec, spec_set=False, parent=None,
+                 name=None, ids=None, instance=False):
+        self.spec = spec
+        self.ids = ids
+        self.spec_set = spec_set
+        self.parent = parent
+        self.instance = instance
+        self.name = name
+
+
+# Types treated as "functions" when autospeccing (Python 2: plain function,
+# bound method, unbound method).
+FunctionTypes = (
+    # python function
+    type(create_autospec),
+    # instance method
+    type(ANY.__eq__),
+    # unbound method
+    type(_ANY.__eq__),
+)
+
+# Python 2 function attributes kept so a mock can stand in for a function.
+FunctionAttributes = set([
+    'func_closure',
+    'func_code',
+    'func_defaults',
+    'func_dict',
+    'func_doc',
+    'func_globals',
+    'func_name',
+])
+
+
+# Cached spec for file handles used by mock_open; populated on first use.
+file_spec = None
+
+
+def mock_open(mock=None, read_data=''):
+    """
+    A helper function to create a mock to replace the use of `open`. It works
+    for `open` called directly or used as a context manager.
+
+    The `mock` argument is the mock object to configure. If `None` (the
+    default) then a `MagicMock` will be created for you, with the API limited
+    to methods or attributes available on standard file handles.
+
+    `read_data` is a string for the `read` method of the file handle to return.
+    This is an empty string by default.
+    """
+    global file_spec
+    if file_spec is None:
+        # set on first use
+        if inPy3k:
+            import _io
+            file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))
+        else:
+            file_spec = file
+
+    if mock is None:
+        mock = MagicMock(name='open', spec=open)
+
+    # The handle supports context-manager use by returning itself.
+    handle = MagicMock(spec=file_spec)
+    handle.write.return_value = None
+    handle.__enter__.return_value = handle
+    handle.read.return_value = read_data
+
+    mock.return_value = handle
+    return mock
+
+
+class PropertyMock(Mock):
+    """
+    A mock intended to be used as a property, or other descriptor, on a class.
+    `PropertyMock` provides `__get__` and `__set__` methods so you can specify
+    a return value when it is fetched.
+
+    Fetching a `PropertyMock` instance from an object calls the mock, with
+    no args. Setting it calls the mock with the value being set.
+    """
+    def _get_child_mock(self, **kwargs):
+        # Children are plain MagicMocks, not further PropertyMocks.
+        return MagicMock(**kwargs)
+
+    def __get__(self, obj, obj_type):
+        # Attribute read -> call the mock with no arguments.
+        return self()
+    def __set__(self, obj, val):
+        # Attribute write -> call the mock with the assigned value.
+        self(val)
diff --git a/third_party/poster/README.chromium b/third_party/poster/README.chromium
new file mode 100644
index 0000000..c5173ee
--- /dev/null
+++ b/third_party/poster/README.chromium
@@ -0,0 +1,10 @@
+Name: poster
+Short Name: poster
+URL: http://atlee.ca/software/poster/
+Version: 0.8.1
+License: MIT
+Security Critical: no
+
+Description:
+The poster python module for encoding HTTP POST requests and
+streaming of files.
diff --git a/third_party/poster/__init__.py b/third_party/poster/__init__.py
new file mode 100644
index 0000000..2907314
--- /dev/null
+++ b/third_party/poster/__init__.py
@@ -0,0 +1,32 @@
+# Copyright (c) 2011 Chris AtLee
+# 
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+# 
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+# 
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+"""poster module
+
+Support for streaming HTTP uploads, and multipart/form-data encoding
+
+```poster.version``` is a 3-tuple of integers representing the version number.
+New releases of poster will always have a version number that compares greater
+than an older version of poster.
+New in version 0.6."""
+
+import poster.streaminghttp
+import poster.encode
+
+# 3-tuple version number; newer releases always compare greater.
+version = (0, 8, 1) # Thanks JP!
diff --git a/third_party/poster/encode.py b/third_party/poster/encode.py
new file mode 100644
index 0000000..cf2298d
--- /dev/null
+++ b/third_party/poster/encode.py
@@ -0,0 +1,414 @@
+"""multipart/form-data encoding module
+
+This module provides functions that faciliate encoding name/value pairs
+as multipart/form-data suitable for a HTTP POST or PUT request.
+
+multipart/form-data is the standard way to upload files over HTTP"""
+
+__all__ = ['gen_boundary', 'encode_and_quote', 'MultipartParam',
+        'encode_string', 'encode_file_header', 'get_body_size', 'get_headers',
+        'multipart_encode']
+
+try:
+    import uuid
+    def gen_boundary():
+        """Returns a random string to use as the boundary for a message"""
+        return uuid.uuid4().hex
+except ImportError:
+    # Fallback for old Pythons without the uuid module (pre-2.5): hash
+    # 160 random bits with SHA-1 instead.
+    import random, sha
+    def gen_boundary():
+        """Returns a random string to use as the boundary for a message"""
+        bits = random.getrandbits(160)
+        return sha.new(str(bits)).hexdigest()
+
+import urllib, re, os, mimetypes
+try:
+    from email.header import Header
+except ImportError:
+    # Python 2.4
+    from email.Header import Header
+
+def encode_and_quote(data):
+    """If ``data`` is unicode, return urllib.quote_plus(data.encode("utf-8"))
+    otherwise return urllib.quote_plus(data)"""
+    if data is None:
+        return None
+
+    # Python 2 only: `unicode` is the py2 text type.
+    if isinstance(data, unicode):
+        data = data.encode("utf-8")
+    return urllib.quote_plus(data)
+
+def _strify(s):
+    """If s is a unicode string, encode it to UTF-8 and return the result,
+    otherwise return str(s), or None if s is None"""
+    if s is None:
+        return None
+    if isinstance(s, unicode):
+        return s.encode("utf-8")
+    return str(s)
+
+class MultipartParam(object):
+    """Represents a single parameter in a multipart/form-data request
+
+    ``name`` is the name of this parameter.
+
+    If ``value`` is set, it must be a string or unicode object to use as the
+    data for this parameter.
+
+    If ``filename`` is set, it is what to say that this parameter's filename
+    is.  Note that this does not have to be the actual filename of any local
+    file.
+
+    If ``filetype`` is set, it is used as the Content-Type for this parameter.
+    If unset it defaults to "text/plain; charset=utf8"
+
+    If ``filesize`` is set, it specifies the length of the file ``fileobj``
+
+    If ``fileobj`` is set, it must be a file-like object that supports
+    .read().
+
+    ``value`` and ``fileobj`` must not both be set; doing so will
+    raise a ValueError.
+
+    If ``fileobj`` is set, and ``filesize`` is not specified, then
+    the file's size will be determined first by stat'ing ``fileobj``'s
+    file descriptor, and if that fails, by seeking to the end of the file,
+    recording the current position as the size, and then by seeking back to the
+    beginning of the file.
+
+    ``cb`` is a callable which will be called from iter_encode with (self,
+    current, total), representing the current parameter, current amount
+    transferred, and the total size.
+    """
+    def __init__(self, name, value=None, filename=None, filetype=None,
+                        filesize=None, fileobj=None, cb=None):
+        self.name = Header(name).encode()
+        self.value = _strify(value)
+        if filename is None:
+            self.filename = None
+        else:
+            if isinstance(filename, unicode):
+                # Encode with XML entities
+                self.filename = filename.encode("ascii", "xmlcharrefreplace")
+            else:
+                self.filename = str(filename)
+            # Escape backslashes/control chars, then quote embedded quotes so
+            # the filename is safe inside a quoted header value.
+            self.filename = self.filename.encode("string_escape").\
+                    replace('"', '\\"')
+        self.filetype = _strify(filetype)
+
+        self.filesize = filesize
+        self.fileobj = fileobj
+        self.cb = cb
+
+        if self.value is not None and self.fileobj is not None:
+            raise ValueError("Only one of value or fileobj may be specified")
+
+        if fileobj is not None and filesize is None:
+            # Try and determine the file size
+            try:
+                self.filesize = os.fstat(fileobj.fileno()).st_size
+            except (OSError, AttributeError):
+                # Fall back to seek-to-end; NOTE(review): the bare except
+                # turns any seek/tell failure into ValueError.
+                try:
+                    fileobj.seek(0, 2)
+                    self.filesize = fileobj.tell()
+                    fileobj.seek(0)
+                except:
+                    raise ValueError("Could not determine filesize")
+
+    def __cmp__(self, other):
+        # Python 2 rich-comparison shim: compare attribute tuples.
+        attrs = ['name', 'value', 'filename', 'filetype', 'filesize', 'fileobj']
+        myattrs = [getattr(self, a) for a in attrs]
+        oattrs = [getattr(other, a) for a in attrs]
+        return cmp(myattrs, oattrs)
+
+    def reset(self):
+        # Rewind the file object so the parameter can be re-encoded.
+        if self.fileobj is not None:
+            self.fileobj.seek(0)
+        elif self.value is None:
+            raise ValueError("Don't know how to reset this parameter")
+
+    @classmethod
+    def from_file(cls, paramname, filename):
+        """Returns a new MultipartParam object constructed from the local
+        file at ``filename``.
+
+        ``filesize`` is determined by os.path.getsize(``filename``)
+
+        ``filetype`` is determined by mimetypes.guess_type(``filename``)[0]
+
+        ``filename`` is set to os.path.basename(``filename``)
+        """
+
+        return cls(paramname, filename=os.path.basename(filename),
+                filetype=mimetypes.guess_type(filename)[0],
+                filesize=os.path.getsize(filename),
+                fileobj=open(filename, "rb"))
+
+    @classmethod
+    def from_params(cls, params):
+        """Returns a list of MultipartParam objects from a sequence of
+        name, value pairs, MultipartParam instances,
+        or from a mapping of names to values
+
+        The values may be strings or file objects, or MultipartParam objects.
+        MultipartParam object names must match the given names in the
+        name,value pairs or mapping, if applicable."""
+        if hasattr(params, 'items'):
+            params = params.items()
+
+        retval = []
+        for item in params:
+            if isinstance(item, cls):
+                retval.append(item)
+                continue
+            name, value = item
+            if isinstance(value, cls):
+                assert value.name == name
+                retval.append(value)
+                continue
+            if hasattr(value, 'read'):
+                # Looks like a file object
+                filename = getattr(value, 'name', None)
+                if filename is not None:
+                    filetype = mimetypes.guess_type(filename)[0]
+                else:
+                    filetype = None
+
+                retval.append(cls(name=name, filename=filename,
+                    filetype=filetype, fileobj=value))
+            else:
+                retval.append(cls(name, value))
+        return retval
+
+    def encode_hdr(self, boundary):
+        """Returns the header of the encoding of this parameter"""
+        boundary = encode_and_quote(boundary)
+
+        headers = ["--%s" % boundary]
+
+        if self.filename:
+            disposition = 'form-data; name="%s"; filename="%s"' % (self.name,
+                    self.filename)
+        else:
+            disposition = 'form-data; name="%s"' % self.name
+
+        headers.append("Content-Disposition: %s" % disposition)
+
+        if self.filetype:
+            filetype = self.filetype
+        else:
+            filetype = "text/plain; charset=utf-8"
+
+        headers.append("Content-Type: %s" % filetype)
+
+        # Two empty entries -> header block ends with a blank line (CRLF CRLF).
+        headers.append("")
+        headers.append("")
+
+        return "\r\n".join(headers)
+
+    def encode(self, boundary):
+        """Returns the string encoding of this parameter"""
+        if self.value is None:
+            value = self.fileobj.read()
+        else:
+            value = self.value
+
+        # Refuse to encode data that would be misparsed as a boundary line.
+        if re.search("^--%s$" % re.escape(boundary), value, re.M):
+            raise ValueError("boundary found in encoded string")
+
+        return "%s%s\r\n" % (self.encode_hdr(boundary), value)
+
+    def iter_encode(self, boundary, blocksize=4096):
+        """Yields the encoding of this parameter
+        If self.fileobj is set, then blocks of ``blocksize`` bytes are read and
+        yielded."""
+        total = self.get_size(boundary)
+        current = 0
+        if self.value is not None:
+            block = self.encode(boundary)
+            current += len(block)
+            yield block
+            if self.cb:
+                self.cb(self, current, total)
+        else:
+            block = self.encode_hdr(boundary)
+            current += len(block)
+            yield block
+            if self.cb:
+                self.cb(self, current, total)
+            last_block = ""
+            encoded_boundary = "--%s" % encode_and_quote(boundary)
+            boundary_exp = re.compile("^%s$" % re.escape(encoded_boundary),
+                    re.M)
+            while True:
+                block = self.fileobj.read(blocksize)
+                if not block:
+                    # End of file: emit the trailing CRLF after the data.
+                    current += 2
+                    yield "\r\n"
+                    if self.cb:
+                        self.cb(self, current, total)
+                    break
+                # Keep a sliding window across block edges so a boundary
+                # split between two reads is still detected.
+                last_block += block
+                if boundary_exp.search(last_block):
+                    raise ValueError("boundary found in file data")
+                last_block = last_block[-len(encoded_boundary)-2:]
+                current += len(block)
+                yield block
+                if self.cb:
+                    self.cb(self, current, total)
+
+    def get_size(self, boundary):
+        """Returns the size in bytes that this param will be when encoded
+        with the given boundary."""
+        if self.filesize is not None:
+            valuesize = self.filesize
+        else:
+            valuesize = len(self.value)
+
+        # +2 accounts for the trailing CRLF after the value.
+        return len(self.encode_hdr(boundary)) + 2 + valuesize
+
+def encode_string(boundary, name, value):
+    """Returns ``name`` and ``value`` encoded as a multipart/form-data
+    variable.  ``boundary`` is the boundary string used throughout
+    a single request to separate variables."""
+
+    # Thin convenience wrapper around MultipartParam.encode.
+    return MultipartParam(name, value).encode(boundary)
+
+def encode_file_header(boundary, paramname, filesize, filename=None,
+        filetype=None):
+    """Returns the leading data for a multipart/form-data field that contains
+    file data.
+
+    ``boundary`` is the boundary string used throughout a single request to
+    separate variables.
+
+    ``paramname`` is the name of the variable in this request.
+
+    ``filesize`` is the size of the file data.
+
+    ``filename`` if specified is the filename to give to this field.  This
+    field is only useful to the server for determining the original filename.
+
+    ``filetype`` if specified is the MIME type of this file.
+
+    The actual file data should be sent after this header has been sent.
+    """
+
+    # Header only; no fileobj is attached, the caller streams the data.
+    return MultipartParam(paramname, filesize=filesize, filename=filename,
+            filetype=filetype).encode_hdr(boundary)
+
+def get_body_size(params, boundary):
+    """Returns the number of bytes that the multipart/form-data encoding
+    of ``params`` will be."""
+    size = sum(p.get_size(boundary) for p in MultipartParam.from_params(params))
+    # +len(boundary)+6 is the closing "--boundary--\r\n" line.
+    return size + len(boundary) + 6
+
+def get_headers(params, boundary):
+    """Returns a dictionary with Content-Type and Content-Length headers
+    for the multipart/form-data encoding of ``params``."""
+    headers = {}
+    boundary = urllib.quote_plus(boundary)
+    headers['Content-Type'] = "multipart/form-data; boundary=%s" % boundary
+    headers['Content-Length'] = str(get_body_size(params, boundary))
+    return headers
+
+class multipart_yielder:
+    # Iterator yielding the full multipart/form-data body one block at a
+    # time, invoking `cb` with progress after each block.
+    def __init__(self, params, boundary, cb):
+        self.params = params
+        self.boundary = boundary
+        self.cb = cb
+
+        self.i = 0              # index of the next param; None when finished
+        self.p = None           # param currently being encoded
+        self.param_iter = None  # iterator from p.iter_encode()
+        self.current = 0        # bytes yielded so far
+        self.total = get_body_size(params, boundary)
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        """generator function to yield multipart/form-data representation
+        of parameters"""
+        # Python 2 iterator protocol (next, not __next__).
+        if self.param_iter is not None:
+            try:
+                block = self.param_iter.next()
+                self.current += len(block)
+                if self.cb:
+                    self.cb(self.p, self.current, self.total)
+                return block
+            except StopIteration:
+                self.p = None
+                self.param_iter = None
+
+        if self.i is None:
+            raise StopIteration
+        elif self.i >= len(self.params):
+            # All params done: emit the closing boundary exactly once.
+            self.param_iter = None
+            self.p = None
+            self.i = None
+            block = "--%s--\r\n" % self.boundary
+            self.current += len(block)
+            if self.cb:
+                self.cb(self.p, self.current, self.total)
+            return block
+
+        # Advance to the next param and recurse to pull its first block.
+        self.p = self.params[self.i]
+        self.param_iter = self.p.iter_encode(self.boundary)
+        self.i += 1
+        return self.next()
+
+    def reset(self):
+        # Rewind so the whole body can be re-sent (e.g. on redirect).
+        self.i = 0
+        self.current = 0
+        for param in self.params:
+            param.reset()
+
+def multipart_encode(params, boundary=None, cb=None):
+    """Encode ``params`` as multipart/form-data.
+
+    ``params`` should be a sequence of (name, value) pairs or MultipartParam
+    objects, or a mapping of names to values.
+    Values are either strings parameter values, or file-like objects to use as
+    the parameter value.  The file-like objects must support .read() and either
+    .fileno() or both .seek() and .tell().
+
+    If ``boundary`` is set, then it is used as the MIME boundary.  Otherwise
+    a randomly generated boundary will be used.  In either case, if the
+    boundary string appears in the parameter values a ValueError will be
+    raised.
+
+    If ``cb`` is set, it should be a callback which will get called as blocks
+    of data are encoded.  It will be called with (param, current, total),
+    indicating the current parameter being encoded, the current amount encoded,
+    and the total amount to encode.
+
+    Returns a tuple of `datagen`, `headers`, where `datagen` is a
+    generator that will yield blocks of data that make up the encoded
+    parameters, and `headers` is a dictionary with the associated
+    Content-Type and Content-Length headers.
+
+    Examples:
+
+    >>> datagen, headers = multipart_encode( [("key", "value1"), ("key", "value2")] )
+    >>> s = "".join(datagen)
+    >>> assert "value2" in s and "value1" in s
+
+    >>> p = MultipartParam("key", "value2")
+    >>> datagen, headers = multipart_encode( [("key", "value1"), p] )
+    >>> s = "".join(datagen)
+    >>> assert "value2" in s and "value1" in s
+
+    >>> datagen, headers = multipart_encode( {"key": "value1"} )
+    >>> s = "".join(datagen)
+    >>> assert "value2" not in s and "value1" in s
+
+    """
+    if boundary is None:
+        boundary = gen_boundary()
+    else:
+        boundary = urllib.quote_plus(boundary)
+
+    # Compute headers before normalizing params to MultipartParam objects.
+    headers = get_headers(params, boundary)
+    params = MultipartParam.from_params(params)
+
+    return multipart_yielder(params, boundary, cb), headers
diff --git a/third_party/poster/streaminghttp.py b/third_party/poster/streaminghttp.py
new file mode 100644
index 0000000..1b591d4
--- /dev/null
+++ b/third_party/poster/streaminghttp.py
@@ -0,0 +1,199 @@
+"""Streaming HTTP uploads module.
+
+This module extends the standard httplib and urllib2 objects so that
+iterable objects can be used in the body of HTTP requests.
+
+In most cases all one should have to do is call :func:`register_openers()`
+to register the new streaming http handlers which will take priority over
+the default handlers, and then you can use iterable objects in the body
+of HTTP requests.
+
+**N.B.** You must specify a Content-Length header if using an iterable object
+since there is no way to determine in advance the total size that will be
+yielded, and there is no way to reset an iterator.
+
+Example usage:
+
+>>> from StringIO import StringIO
+>>> import urllib2, poster.streaminghttp
+
+>>> opener = poster.streaminghttp.register_openers()
+
+>>> s = "Test file data"
+>>> f = StringIO(s)
+
+>>> req = urllib2.Request("http://localhost:5000", f,
+...                       {'Content-Length': str(len(s))})
+"""
+
+import httplib, urllib2, socket
+from httplib import NotConnected
+
+# Public API; HTTPS variants are appended below only when httplib was
+# built with SSL support.
+__all__ = ['StreamingHTTPConnection', 'StreamingHTTPRedirectHandler',
+        'StreamingHTTPHandler', 'register_openers']
+
+if hasattr(httplib, 'HTTPS'):
+    __all__.extend(['StreamingHTTPSHandler', 'StreamingHTTPSConnection'])
+
+class _StreamingHTTPMixin:
+    """Mixin class for HTTP and HTTPS connections that implements a streaming
+    send method."""
+    def send(self, value):
+        """Send ``value`` to the server.
+
+        ``value`` can be a string object, a file-like object that supports
+        a .read() method, or an iterable object that supports a .next()
+        method.
+        """
+        # Based on python 2.6's httplib.HTTPConnection.send()
+        if self.sock is None:
+            if self.auto_open:
+                self.connect()
+            else:
+                raise NotConnected()
+
+        # send the data to the server. if we get a broken pipe, then close
+        # the socket. we want to reconnect when somebody tries to send again.
+        #
+        # NOTE: we DO propagate the error, though, because we cannot simply
+        #       ignore the error... the caller will know if they can retry.
+        if self.debuglevel > 0:
+            print "send:", repr(value)
+        try:
+            blocksize = 8192
+            if hasattr(value, 'read') :
+                # File-like object: rewind if possible, then stream chunks.
+                if hasattr(value, 'seek'):
+                    value.seek(0)
+                if self.debuglevel > 0:
+                    print "sendIng a read()able"
+                data = value.read(blocksize)
+                while data:
+                    self.sock.sendall(data)
+                    data = value.read(blocksize)
+            elif hasattr(value, 'next'):
+                # Iterable body: reset if supported, then send each block.
+                if hasattr(value, 'reset'):
+                    value.reset()
+                if self.debuglevel > 0:
+                    print "sendIng an iterable"
+                for data in value:
+                    self.sock.sendall(data)
+            else:
+                self.sock.sendall(value)
+        except socket.error, v:
+            # errno 32 == EPIPE (broken pipe): close so a later send reconnects.
+            if v[0] == 32:      # Broken pipe
+                self.close()
+            raise
+
+# Behavior comes entirely from the mixin's send(); no body needed.
+class StreamingHTTPConnection(_StreamingHTTPMixin, httplib.HTTPConnection):
+    """Subclass of `httplib.HTTPConnection` that overrides the `send()` method
+    to support iterable body objects"""
+
+class StreamingHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
+    """Subclass of `urllib2.HTTPRedirectHandler` that overrides the
+    `redirect_request` method to properly handle redirected POST requests
+
+    This class is required because python 2.5's HTTPRedirectHandler does
+    not remove the Content-Type or Content-Length headers when requesting
+    the new resource, but the body of the original request is not preserved.
+    """
+
+    # Run before the stock HTTPRedirectHandler.
+    handler_order = urllib2.HTTPRedirectHandler.handler_order - 1
+
+    # From python2.6 urllib2's HTTPRedirectHandler
+    def redirect_request(self, req, fp, code, msg, headers, newurl):
+        """Return a Request or None in response to a redirect.
+
+        This is called by the http_error_30x methods when a
+        redirection response is received.  If a redirection should
+        take place, return a new Request to allow http_error_30x to
+        perform the redirect.  Otherwise, raise HTTPError if no-one
+        else should try to handle this url.  Return None if you can't
+        but another Handler might.
+        """
+        m = req.get_method()
+        if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
+            or code in (301, 302, 303) and m == "POST"):
+            # Strictly (according to RFC 2616), 301 or 302 in response
+            # to a POST MUST NOT cause a redirection without confirmation
+            # from the user (of urllib2, in this case).  In practice,
+            # essentially all clients do redirect in this case, so we
+            # do the same.
+            # be conciliant with URIs containing a space
+            newurl = newurl.replace(' ', '%20')
+            # Drop body-describing headers: the redirected request has no body.
+            newheaders = dict((k, v) for k, v in req.headers.items()
+                              if k.lower() not in (
+                                  "content-length", "content-type")
+                             )
+            return urllib2.Request(newurl,
+                           headers=newheaders,
+                           origin_req_host=req.get_origin_req_host(),
+                           unverifiable=True)
+        else:
+            raise urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp)
+
+class StreamingHTTPHandler(urllib2.HTTPHandler):
+    """Subclass of `urllib2.HTTPHandler` that uses
+    StreamingHTTPConnection as its http connection class."""
+
+    # Run before the stock HTTPHandler.
+    handler_order = urllib2.HTTPHandler.handler_order - 1
+
+    def http_open(self, req):
+        """Open a StreamingHTTPConnection for the given request"""
+        return self.do_open(StreamingHTTPConnection, req)
+
+    def http_request(self, req):
+        """Handle a HTTP request.  Make sure that Content-Length is specified
+        if we're using an iterable value"""
+        # Make sure that if we're using an iterable object as the request
+        # body, that we've also specified Content-Length
+        if req.has_data():
+            data = req.get_data()
+            if hasattr(data, 'read') or hasattr(data, 'next'):
+                if not req.has_header('Content-length'):
+                    raise ValueError(
+                            "No Content-Length specified for iterable body")
+        return urllib2.HTTPHandler.do_request_(self, req)
+
+# HTTPS variants exist only when httplib was built with SSL support.
+if hasattr(httplib, 'HTTPS'):
+    class StreamingHTTPSConnection(_StreamingHTTPMixin,
+            httplib.HTTPSConnection):
+        """Subclass of `httplib.HTTPSConnection` that overrides the `send()`
+        method to support iterable body objects"""
+
+    class StreamingHTTPSHandler(urllib2.HTTPSHandler):
+        """Subclass of `urllib2.HTTPSHandler` that uses
+        StreamingHTTPSConnection as its http connection class."""
+
+        handler_order = urllib2.HTTPSHandler.handler_order - 1
+
+        def https_open(self, req):
+            return self.do_open(StreamingHTTPSConnection, req)
+
+        def https_request(self, req):
+            # Make sure that if we're using an iterable object as the request
+            # body, that we've also specified Content-Length
+            if req.has_data():
+                data = req.get_data()
+                if hasattr(data, 'read') or hasattr(data, 'next'):
+                    if not req.has_header('Content-length'):
+                        raise ValueError(
+                                "No Content-Length specified for iterable body")
+            return urllib2.HTTPSHandler.do_request_(self, req)
+
+
+def get_handlers():
+    """Return the streaming handler classes to install, including the
+    HTTPS handler when SSL support is available."""
+    handlers = [StreamingHTTPHandler, StreamingHTTPRedirectHandler]
+    if hasattr(httplib, "HTTPS"):
+        handlers.append(StreamingHTTPSHandler)
+    return handlers
+    
+def register_openers():
+    """Register the streaming http handlers in the global urllib2 default
+    opener object.
+
+    Returns the created OpenerDirector object."""
+    opener = urllib2.build_opener(*get_handlers())
+
+    # Installs globally: affects all subsequent urllib2.urlopen calls.
+    urllib2.install_opener(opener)
+
+    return opener
diff --git a/third_party/pylintrc b/third_party/pylintrc
new file mode 100644
index 0000000..e257965
--- /dev/null
+++ b/third_party/pylintrc
@@ -0,0 +1,286 @@
+# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+[MASTER]
+
+# Specify a configuration file.
+rcfile=../pylintrc
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Profiled execution.
+#profile=no
+
+# Add <file or directory> to the black list. It should be a base name, not a
+# path. You may set this option multiple times.
+#ignore=CVS
+
+# Pickle collected data for later comparisons.
+#persistent=yes
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+#load-plugins=
+
+
+[MESSAGES CONTROL]
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple times.
+#enable=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifier separated by comma (,) or put this option
+# multiple times (only on the command line, not in the configuration file where
+# it should appear only once).
+# C0103: Invalid name ""
+# C0111: Missing docstring
+# C0301: Line too long.
+# C0302: Too many lines in module (N)
+# C0321: More than one statement on a single line
+# E1103: Instance has no '*' member (but some types could not be inferred)
+# I0011: Locally disabling warning.
+# I0012: Locally enabling warning.
+# R0201: Method could be a function
+# R0902: Too many instance attributes (N/7)
+# R0903: Too few public methods (N/2)
+# R0911: Too many return statements (N/6)
+# R0912: Too many branches (N/12)
+# R0913: Too many arguments (N/5)
+# R0914: Too many local variables (N/15)
+# R0915: Too many statements (N/50)
+# W0122: Use of the exec statement
+# W0102: Dangerous default value
+# W0141: Used builtin function ''
+# W0142: Used * or ** magic
+# W0212: Access to protected member
+# W0311: Bad indentation.
+# W0312: Found indentation with tabs instead of spaces
+# W0403: Relative import 'constants', should be 'chromite.cbuildbot.constants'
+# W0511: Used when a warning note as FIXME or XXX is detected.
+# W0622: Redefining built-in
+# R0904: Too many public methods
+# R0921: Abstract class not referenced.
+disable=C0103,C0111,C0301,C0302,C0321,E1103,I0011,I0012,R0201,R0902,R0903,R0911,R0912,R0913,R0914,R0915,W0102,W0122,W0141,W0142,W0212,W0311,W0312,W0403,W0511,W0622,R0904,R0921
+
+
+[REPORTS]
+
+# Set the output format. Available formats are text, parseable, colorized, msvs
+# (visual studio) and html
+#output-format=text
+
+# Put messages in a separate file for each module / package specified on the
+# command line instead of printing them on stdout. Reports (if any) will be
+# written in a file name "pylint_global.[txt|html]".
+#files-output=no
+
+# Tells whether to display a full report or only the messages
+# CHANGE: No report.
+reports=no
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables errors, warning, statement which
+# respectively contain the number of errors / warnings messages and the total
+# number of statements analyzed. This is used by the global evaluation report
+# (RP0004).
+#evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Add a comment according to your evaluation note. This is used by the global
+# evaluation report (RP0004).
+#comment=no
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+#notes=FIXME,XXX,TODO
+
+
+[FORMAT]
+
+# Maximum number of characters on a single line.
+#max-line-length=80
+
+# Maximum number of lines in a module
+#max-module-lines=1000
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+# CHANGE: Use "  " instead.
+indent-string='  '
+
+
+[TYPECHECK]
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+#ignore-mixin-members=yes
+
+# List of classes names for which member attributes should not be checked
+# (useful for classes with attributes dynamically set).
+#ignored-classes=SQLObject
+
+# When zope mode is activated, add a predefined set of Zope acquired attributes
+# to generated-members.
+#zope=no
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E0201 when accessed.
+# CHANGE: Added 'AndReturn', 'InAnyOrder' and 'MultipleTimes' for pymox.
+# CHANGE: Added tempdir for @osutils.TempDirDecorator.
+generated-members=REQUEST,acl_users,aq_parent,AndReturn,InAnyOrder,MultipleTimes,tempdir
+
+
+[BASIC]
+
+# Required attributes for module, separated by a comma
+#required-attributes=
+
+# List of builtins function names that should not be used, separated by a comma
+#bad-functions=map,filter,apply,input
+
+# Regular expression which should only match correct module names
+#module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Regular expression which should only match correct module level names
+#const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Regular expression which should only match correct class names
+#class-rgx=[A-Z_][a-zA-Z0-9]+$
+
+# Regular expression which should only match correct function names
+#
+# CHANGE: The ChromiumOS standard is different than PEP-8, so we need to
+# redefine this.
+#
+# Common exceptions to ChromiumOS standard:
+# - main: Standard for main function
+function-rgx=([A-Z_][a-zA-Z0-9]{2,30}|main)$
+
+# Regular expression which should only match correct method names
+#
+# CHANGE: The ChromiumOS standard is different than PEP-8, so we need to
+# redefine this. Here's what we allow:
+# - CamelCaps, starting with a capital letter.  No underscores in function
+#   names.  Can also have a "_" prefix (private method) or a "test" prefix
+#   (unit test).
+# - Methods that look like __xyz__, which are used to do things like
+#   __init__, __del__, etc.
+# - setUp, tearDown: For unit tests.
+method-rgx=((_|test)?[A-Z][a-zA-Z0-9]{2,30}|__[a-z]+__|setUp|tearDown)$
+
+# Regular expression which should only match correct instance attribute names
+#attr-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match correct argument names
+#argument-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match correct variable names
+#variable-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match correct list comprehension /
+# generator expression variable names
+#inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
+
+# Good variable names which should always be accepted, separated by a comma
+#good-names=i,j,k,ex,Run,_
+
+# Bad variable names which should always be refused, separated by a comma
+#bad-names=foo,bar,baz,toto,tutu,tata
+
+# Regular expression which should only match functions or classes name which do
+# not require a docstring
+#no-docstring-rgx=__.*__
+
+
+[SIMILARITIES]
+
+# Minimum lines number of a similarity.
+min-similarity-lines=8
+
+# Ignore comments when computing similarities.
+#ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+#ignore-docstrings=yes
+
+
+[VARIABLES]
+
+# Tells whether we should check for unused import in __init__ files.
+#init-import=no
+
+# A regular expression matching the beginning of the name of dummy variables
+# (i.e. not used).
+#dummy-variables-rgx=_|dummy
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid to define new builtins when possible.
+#additional-builtins=
+
+
+[CLASSES]
+
+# List of interface methods to ignore, separated by a comma. This is used for
+# instance to not check methods defines in Zope's Interface base class.
+#ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
+
+# List of method names used to declare (i.e. assign) instance attributes.
+#defining-attr-methods=__init__,__new__,setUp
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method
+#max-args=5
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore
+#ignored-argument-names=_.*
+
+# Maximum number of locals for function / method body
+#max-locals=15
+
+# Maximum number of return / yield for function / method body
+#max-returns=6
+
+# Maximum number of branch for function / method body
+#max-branchs=12
+
+# Maximum number of statements in function / method body
+#max-statements=50
+
+# Maximum number of parents for a class (see R0901).
+#max-parents=7
+
+# Maximum number of attributes for a class (see R0902).
+#max-attributes=7
+
+# Minimum number of public methods for a class (see R0903).
+#min-public-methods=2
+
+# Maximum number of public methods for a class (see R0904).
+#max-public-methods=20
+
+
+[IMPORTS]
+
+# Deprecated modules which should not be used, separated by a comma
+#deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+#import-graph=
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+#ext-import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+#int-import-graph=